Facebook/bart-large-cnn model
Hugging Face
Name: bart-large-cnn
User / Organization: facebook
Library: PyTorch, TensorFlow, JAX, Rust, Transformers
Dataset: cnn_dailymail
Language: English
Paper: arxiv:1910.13461
License: mit
Related to: bart
Facebook/bart-large-cnn is a Natural Language Processing model used for Summarization and Text2Text Generation.
Model Description
Clone Model Repository
# Be sure to have git-lfs installed (https://git-lfs.com)
git lfs install
git clone https://huggingface.co/facebook/bart-large-cnn

# To clone the repo without large files – just their pointers –
# prepend git clone with the following env var:
# GIT_LFS_SKIP_SMUDGE=1
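As an alternative to git, the repository files can also be fetched programmatically with the huggingface_hub client. This is a minimal sketch, not part of the official clone instructions:

# Download a snapshot of the repository with huggingface_hub (pip install huggingface_hub)
from huggingface_hub import snapshot_download

local_path = snapshot_download(repo_id="facebook/bart-large-cnn")
print(local_path)  # local directory containing config, tokenizer and weight files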
Hugging Face Transformers Library
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("facebook/bart-large-cnn")
model = AutoModelForSeq2SeqLM.from_pretrained("facebook/bart-large-cnn")
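Once the tokenizer and model are loaded, a summary can be produced with the usual generate/decode pattern. The following is a minimal sketch; the generation settings (beam count, length limits) are illustrative only:

# Summarize a piece of text with the loaded model (illustrative generation settings)
text = "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris."

inputs = tokenizer(text, return_tensors="pt", truncation=True)
summary_ids = model.generate(**inputs, num_beams=4, min_length=10, max_length=60)
summary = tokenizer.batch_decode(summary_ids, skip_special_tokens=True)[0]
print(summary)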
Deployment
Inference API
import requests

API_URL = "https://api-inference.huggingface.co/models/facebook/bart-large-cnn"
headers = {"Authorization": f"Bearer {API_TOKEN}"}  # set API_TOKEN to your Hugging Face API token

def query(payload):
    response = requests.post(API_URL, headers=headers, json=payload)
    return response.json()

output = query({
    "inputs": "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct.",
})
async function query(data) {
    const response = await fetch(
        "https://api-inference.huggingface.co/models/facebook/bart-large-cnn",
        {
            headers: { Authorization: "Bearer {API_TOKEN}" },
            method: "POST",
            body: JSON.stringify(data),
        }
    );
    const result = await response.json();
    return result;
}

query({"inputs": "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct."}).then((response) => {
    console.log(JSON.stringify(response));
});
curl https://api-inference.huggingface.co/models/facebook/bart-large-cnn \
    -X POST \
    -d '{"inputs": "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct."}' \
    -H "Authorization: Bearer {API_TOKEN}"
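The summarization endpoint also accepts optional generation parameters alongside the inputs. Reusing the Python query function above, a hedged sketch (the length limits are illustrative, not recommendations):

# Send optional generation parameters with the request (illustrative values)
output = query({
    "inputs": "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris.",
    "parameters": {"min_length": 10, "max_length": 60},
})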
Amazon SageMaker
The Hub generates this deployment snippet for every pipeline task (automatic-speech-recognition, conversational, feature-extraction, fill-mask, image-classification, question-answering, summarization, table-question-answering, text-classification, text-generation, text2text-generation, token-classification, translation, zero-shot-classification); for facebook/bart-large-cnn the appropriate value is HF_TASK='summarization'. Inside a SageMaker notebook the execution role can be used directly:

from sagemaker.huggingface import HuggingFaceModel
import sagemaker

role = sagemaker.get_execution_role()

# Hub Model configuration. https://huggingface.co/models
hub = {
    'HF_MODEL_ID': 'facebook/bart-large-cnn',
    'HF_TASK': 'summarization'
}

# create Hugging Face Model Class
huggingface_model = HuggingFaceModel(
    transformers_version='4.17.0',
    pytorch_version='1.10.2',
    py_version='py38',
    env=hub,
    role=role,
)

# deploy model to SageMaker Inference
predictor = huggingface_model.deploy(
    initial_instance_count=1,  # number of instances
    instance_type='ml.m5.xlarge'  # ec2 instance type
)

predictor.predict({
    'inputs': "The tower is 324 metres (1,063 ft) tall, about the same height as an 81-storey building, and the tallest structure in Paris. Its base is square, measuring 125 metres (410 ft) on each side. During its construction, the Eiffel Tower surpassed the Washington Monument to become the tallest man-made structure in the world, a title it held for 41 years until the Chrysler Building in New York City was finished in 1930. It was the first structure to reach a height of 300 metres. Due to the addition of a broadcasting aerial at the top of the tower in 1957, it is now taller than the Chrysler Building by 5.2 metres (17 ft). Excluding transmitters, the Eiffel Tower is the second tallest free-standing structure in France after the Millau Viaduct."
})

Outside of a SageMaker notebook, look the role ARN up through IAM instead; the rest of the snippet is unchanged:

from sagemaker.huggingface import HuggingFaceModel
import boto3

iam_client = boto3.client('iam')
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn']
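Once you are done experimenting, the endpoint should be torn down to avoid idle charges. A minimal sketch with the SageMaker Python SDK, assuming the predictor object created above:

# Delete the SageMaker model and endpoint created by the deployment above
predictor.delete_model()
predictor.delete_endpoint()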
Spaces
import gradio as gr

gr.Interface.load("models/facebook/bart-large-cnn").launch()
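Instead of proxying the hosted inference widget, a Space can also run the model locally behind a small custom interface. The sketch below assumes gradio and transformers are installed in the Space; the length limits are illustrative:

import gradio as gr
from transformers import pipeline

# Run the model inside the Space rather than calling the hosted widget
summarizer = pipeline("summarization", model="facebook/bart-large-cnn")

def summarize(text):
    # illustrative length limits; tune them for your inputs
    return summarizer(text, min_length=30, max_length=130, do_sample=False)[0]["summary_text"]

gr.Interface(fn=summarize, inputs="text", outputs="text").launch()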
Training
Amazon SageMaker
The Hub generates the same training snippet for each of the Transformers example scripts (run_clm.py and run_mlm.py for language modeling, run_qa.py for question answering, run_summarization.py for summarization, run_glue.py for text classification, run_ner.py for token classification and run_translation.py for translation); for facebook/bart-large-cnn the natural starting point is the summarization script, which lives under examples/pytorch/summarization in Transformers v4.17.0. Inside a SageMaker notebook:

import sagemaker
from sagemaker.huggingface import HuggingFace

# gets role for executing training job
role = sagemaker.get_execution_role()

hyperparameters = {
    'model_name_or_path': 'facebook/bart-large-cnn',
    'output_dir': '/opt/ml/model'
    # add your remaining hyperparameters
    # more info here https://github.com/huggingface/transformers/tree/v4.17.0/examples/pytorch/summarization
}

# git configuration to download our fine-tuning script
git_config = {'repo': 'https://github.com/huggingface/transformers.git', 'branch': 'v4.17.0'}

# creates Hugging Face estimator
huggingface_estimator = HuggingFace(
    entry_point='run_summarization.py',
    source_dir='./examples/pytorch/summarization',
    instance_type='ml.p3.2xlarge',
    instance_count=1,
    role=role,
    git_config=git_config,
    transformers_version='4.17.0',
    pytorch_version='1.10.2',
    py_version='py38',
    hyperparameters=hyperparameters
)

# starting the train job
huggingface_estimator.fit()

Outside of a SageMaker notebook, look the role ARN up through IAM instead; the rest of the snippet is unchanged:

import sagemaker
import boto3
from sagemaker.huggingface import HuggingFace

# gets role for executing training job
iam_client = boto3.client('iam')
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn']
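For an actual fine-tuning run, run_summarization.py needs a dataset and training arguments on top of the placeholders above. The values below are only an illustrative sketch; the cnn_dailymail config, batch size and epoch count are assumptions to adapt to your data and budget:

# Illustrative hyperparameters for run_summarization.py (adjust to your needs)
hyperparameters = {
    'model_name_or_path': 'facebook/bart-large-cnn',
    'dataset_name': 'cnn_dailymail',        # assumed dataset for this sketch
    'dataset_config_name': '3.0.0',
    'do_train': True,
    'do_eval': True,
    'num_train_epochs': 1,                  # illustrative value
    'per_device_train_batch_size': 4,       # illustrative value
    'predict_with_generate': True,
    'output_dir': '/opt/ml/model',
}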