# Be sure to have git-lfs installed (https://git-lfs.com)
git lfs install
git clone https://huggingface.co/bert-base-uncased

# To clone the repo without large files – just their pointers –
# prepend git clone with the following env var:
# GIT_LFS_SKIP_SMUDGE=1
Bert-base-uncased model
Hugging Face
Name
bert-base-uncased
Task
Library
Dataset
Language
Paper
License
Related to
The bert-base-uncased model is a Natural Language Processing model used for the fill-mask task.
Model Description
Clone Model Repository
# Be sure to have git-lfs installed (https://git-lfs.com)
git lfs install
git clone [email protected]:bert-base-uncased

# To clone the repo without large files – just their pointers –
# prepend git clone with the following env var:
# GIT_LFS_SKIP_SMUDGE=1
Hugging Face Transformers Library
# Load the bert-base-uncased checkpoint with the Transformers auto classes:
# the tokenizer for text preprocessing and the masked-LM head for fill-mask.
from transformers import AutoTokenizer, AutoModelForMaskedLM

tokenizer = AutoTokenizer.from_pretrained("bert-base-uncased")
model = AutoModelForMaskedLM.from_pretrained("bert-base-uncased")
Deployment
Inference API
import requests

# Serverless Inference API endpoint for this model.
API_URL = "https://api-inference.huggingface.co/models/bert-base-uncased"
# NOTE(review): API_TOKEN must be defined beforehand (a Hugging Face access
# token) — it is a placeholder in this snippet.
headers = {"Authorization": f"Bearer {API_TOKEN}"}


def query(payload):
    """POST *payload* to the Inference API and return the decoded JSON."""
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()


output = query({
    "inputs": "The answer to the universe is [MASK].",
})
// POST `data` to the Inference API for bert-base-uncased and return the
// parsed JSON response. "{API_TOKEN}" is a placeholder for a real token.
async function query(data) {
  const response = await fetch(
    "https://api-inference.huggingface.co/models/bert-base-uncased",
    {
      headers: { Authorization: "Bearer {API_TOKEN}" },
      method: "POST",
      body: JSON.stringify(data),
    }
  );
  const result = await response.json();
  return result;
}

// Example call: ask the model to fill in the [MASK] token.
query({"inputs": "The answer to the universe is [MASK]."}).then((response) => {
  console.log(JSON.stringify(response));
});
# Query the Inference API with curl. Replace {API_TOKEN} with a real
# Hugging Face access token.
curl https://api-inference.huggingface.co/models/bert-base-uncased \
    -X POST \
    -d '{"inputs": "The answer to the universe is [MASK]."}' \
    -H "Authorization: Bearer {API_TOKEN}"
Amazon SageMaker
import sagemaker
import boto3
from sagemaker.huggingface import HuggingFaceModel

# Resolve the execution role: inside a SageMaker notebook this succeeds
# directly; elsewhere fall back to looking the role up by name via IAM.
try:
    role = sagemaker.get_execution_role()
except ValueError:
    iam = boto3.client('iam')
    role = iam.get_role(RoleName='sagemaker_execution_role')['Role']['Arn']

# Hub Model configuration. https://huggingface.co/models
hub = {
    'HF_MODEL_ID': 'bert-base-uncased',
    'HF_TASK': 'fill-mask',
}

# create Hugging Face Model Class
huggingface_model = HuggingFaceModel(
    transformers_version='4.26.0',
    pytorch_version='1.13.1',
    py_version='py39',
    env=hub,
    role=role,
)

# deploy model to SageMaker Inference
predictor = huggingface_model.deploy(
    initial_instance_count=1,      # number of instances
    instance_type='ml.m5.xlarge',  # ec2 instance type
)

# Example request against the deployed endpoint.
predictor.predict({
    "inputs": "The answer to the universe is [MASK].",
})
Spaces
import gradio as gr

# Load the hosted bert-base-uncased model as a Gradio interface and start
# the demo server.
gr.Interface.load("models/bert-base-uncased").launch()
Training
Amazon SageMaker
Model Card
Comments
Loading comments...