2,535
edits
(Created page with "{{Model infobox | hugging-face-uri = openai/clip-vit-base-patch16 | creator = | type = Computer Vision | task = Zero-Shot Image Classification | library = PyTorch, JAX, Transformers | dataset = | language = | paper = | license = arxiv:2103.00020, arxiv:1908.04913 | related-to = clip, vision | all-tags = Zero-Shot Image Classification, PyTorch, JAX, Transformers, clip, vision, arxiv:2103.00020, arxiv:1908.04913 | all-lang-tags = }} ==Model Description== ==Clone Mod...") |
No edit summary |
||
Line 1: | Line 1: | ||
{{Model infobox | {{Model infobox | ||
| hugging-face-uri = openai/clip-vit-base-patch16 | | hugging-face-uri = openai/clip-vit-large-patch14 | ||
| creator = | | creator = | ||
| type = Computer Vision | | type = Computer Vision | ||
| task = Zero-Shot Image Classification | | task = Zero-Shot Image Classification | ||
| library = PyTorch, JAX, Transformers | | library = PyTorch, TensorFlow, JAX, Transformers | ||
| dataset = | | dataset = | ||
| language = | | language = | ||
Line 10: | Line 10: | ||
| license = arxiv:2103.00020, arxiv:1908.04913 | | license = arxiv:2103.00020, arxiv:1908.04913 | ||
| related-to = clip, vision | | related-to = clip, vision | ||
| all-tags = Zero-Shot Image Classification, PyTorch, JAX, Transformers, clip, vision, arxiv:2103.00020, arxiv:1908.04913 | | all-tags = Zero-Shot Image Classification, PyTorch, TensorFlow, JAX, Transformers, clip, vision, arxiv:2103.00020, arxiv:1908.04913 | ||
| all-lang-tags = | | all-lang-tags = | ||
}} | }} | ||
==Model Description== | ==Model Description== | ||
==Comments== | |||
<comments /> | |||
==Clone Model Repository== | ==Clone Model Repository== | ||
Line 22: | Line 25: | ||
#Be sure to have git-lfs installed (https://git-lfs.com) | #Be sure to have git-lfs installed (https://git-lfs.com) | ||
git lfs install | git lfs install | ||
git clone https://huggingface.co/openai/clip-vit-base-patch16 | git clone https://huggingface.co/openai/clip-vit-large-patch14 | ||
#To clone the repo without large files – just their pointers | #To clone the repo without large files – just their pointers | ||
Line 33: | Line 36: | ||
#Be sure to have git-lfs installed (https://git-lfs.com) | #Be sure to have git-lfs installed (https://git-lfs.com) | ||
git lfs install | git lfs install | ||
git clone [email protected]:openai/clip-vit-large-patch14 | |||
#To clone the repo without large files – just their pointers | #To clone the repo without large files – just their pointers | ||
Line 45: | Line 48: | ||
from transformers import AutoProcessor, AutoModelForZeroShotImageClassification | from transformers import AutoProcessor, AutoModelForZeroShotImageClassification | ||
processor = AutoProcessor.from_pretrained("openai/clip-vit-base-patch16") | processor = AutoProcessor.from_pretrained("openai/clip-vit-large-patch14") | ||
model = AutoModelForZeroShotImageClassification.from_pretrained("openai/clip-vit-base-patch16") | model = AutoModelForZeroShotImageClassification.from_pretrained("openai/clip-vit-large-patch14") | ||
</pre> | </pre> | ||
==Deployment== | ==Deployment== | ||
===Inference API=== | ===Inference API=== | ||
===Amazon SageMaker=== | ===Amazon SageMaker=== | ||
Line 67: | Line 69: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'automatic-speech-recognition' | 'HF_TASK':'automatic-speech-recognition' | ||
} | } | ||
Line 100: | Line 102: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'automatic-speech-recognition' | 'HF_TASK':'automatic-speech-recognition' | ||
} | } | ||
Line 134: | Line 136: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'conversational' | 'HF_TASK':'conversational' | ||
} | } | ||
Line 167: | Line 169: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'conversational' | 'HF_TASK':'conversational' | ||
} | } | ||
Line 201: | Line 203: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'feature-extraction' | 'HF_TASK':'feature-extraction' | ||
} | } | ||
Line 234: | Line 236: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'feature-extraction' | 'HF_TASK':'feature-extraction' | ||
} | } | ||
Line 268: | Line 270: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'fill-mask' | 'HF_TASK':'fill-mask' | ||
} | } | ||
Line 301: | Line 303: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'fill-mask' | 'HF_TASK':'fill-mask' | ||
} | } | ||
Line 335: | Line 337: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'image-classification' | 'HF_TASK':'image-classification' | ||
} | } | ||
Line 368: | Line 370: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'image-classification' | 'HF_TASK':'image-classification' | ||
} | } | ||
Line 402: | Line 404: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'question-answering' | 'HF_TASK':'question-answering' | ||
} | } | ||
Line 435: | Line 437: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'question-answering' | 'HF_TASK':'question-answering' | ||
} | } | ||
Line 469: | Line 471: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'summarization' | 'HF_TASK':'summarization' | ||
} | } | ||
Line 502: | Line 504: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'summarization' | 'HF_TASK':'summarization' | ||
} | } | ||
Line 536: | Line 538: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'table-question-answering' | 'HF_TASK':'table-question-answering' | ||
} | } | ||
Line 569: | Line 571: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'table-question-answering' | 'HF_TASK':'table-question-answering' | ||
} | } | ||
Line 603: | Line 605: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'text-classification' | 'HF_TASK':'text-classification' | ||
} | } | ||
Line 636: | Line 638: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'text-classification' | 'HF_TASK':'text-classification' | ||
} | } | ||
Line 670: | Line 672: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'text-generation' | 'HF_TASK':'text-generation' | ||
} | } | ||
Line 703: | Line 705: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'text-generation' | 'HF_TASK':'text-generation' | ||
} | } | ||
Line 737: | Line 739: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'text2text-generation' | 'HF_TASK':'text2text-generation' | ||
} | } | ||
Line 770: | Line 772: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'text2text-generation' | 'HF_TASK':'text2text-generation' | ||
} | } | ||
Line 804: | Line 806: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'token-classification' | 'HF_TASK':'token-classification' | ||
} | } | ||
Line 837: | Line 839: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'token-classification' | 'HF_TASK':'token-classification' | ||
} | } | ||
Line 871: | Line 873: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'translation' | 'HF_TASK':'translation' | ||
} | } | ||
Line 904: | Line 906: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'translation' | 'HF_TASK':'translation' | ||
} | } | ||
Line 938: | Line 940: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'zero-shot-classification' | 'HF_TASK':'zero-shot-classification' | ||
} | } | ||
Line 971: | Line 973: | ||
# Hub Model configuration. https://huggingface.co/models | # Hub Model configuration. https://huggingface.co/models | ||
hub = { | hub = { | ||
'HF_MODEL_ID':'openai/clip-vit-base-patch16', | 'HF_MODEL_ID':'openai/clip-vit-large-patch14', | ||
'HF_TASK':'zero-shot-classification' | 'HF_TASK':'zero-shot-classification' | ||
} | } | ||
Line 1,001: | Line 1,003: | ||
import gradio as gr | import gradio as gr | ||
gr.Interface.load("models/openai/clip-vit-base-patch16").launch() | gr.Interface.load("models/openai/clip-vit-large-patch14").launch() | ||
</pre> | </pre> | ||
Line 1,017: | Line 1,019: | ||
role = sagemaker.get_execution_role() | role = sagemaker.get_execution_role() | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,054: | Line 1,056: | ||
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,091: | Line 1,093: | ||
role = sagemaker.get_execution_role() | role = sagemaker.get_execution_role() | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,128: | Line 1,130: | ||
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,165: | Line 1,167: | ||
role = sagemaker.get_execution_role() | role = sagemaker.get_execution_role() | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,202: | Line 1,204: | ||
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,239: | Line 1,241: | ||
role = sagemaker.get_execution_role() | role = sagemaker.get_execution_role() | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,276: | Line 1,278: | ||
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,313: | Line 1,315: | ||
role = sagemaker.get_execution_role() | role = sagemaker.get_execution_role() | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,350: | Line 1,352: | ||
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,387: | Line 1,389: | ||
role = sagemaker.get_execution_role() | role = sagemaker.get_execution_role() | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,424: | Line 1,426: | ||
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,461: | Line 1,463: | ||
role = sagemaker.get_execution_role() | role = sagemaker.get_execution_role() | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters | ||
Line 1,498: | Line 1,500: | ||
role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | role = iam_client.get_role(RoleName='{IAM_ROLE_WITH_SAGEMAKER_PERMISSIONS}')['Role']['Arn'] | ||
hyperparameters = { | hyperparameters = { | ||
'model_name_or_path':'openai/clip-vit-base-patch16', | 'model_name_or_path':'openai/clip-vit-large-patch14', | ||
'output_dir':'/opt/ml/model' | 'output_dir':'/opt/ml/model' | ||
# add your remaining hyperparameters | # add your remaining hyperparameters |