Instructions to use laion/CLIP-ViT-B-32-CommonPool.S.image-s13M-b4K with libraries, inference providers, notebooks, and local apps. Follow these links to get started.
- Libraries
- OpenCLIP
How to use laion/CLIP-ViT-B-32-CommonPool.S.image-s13M-b4K with OpenCLIP:
import open_clip

model, preprocess_train, preprocess_val = open_clip.create_model_and_transforms('hf-hub:laion/CLIP-ViT-B-32-CommonPool.S.image-s13M-b4K')
tokenizer = open_clip.get_tokenizer('hf-hub:laion/CLIP-ViT-B-32-CommonPool.S.image-s13M-b4K')
- Notebooks
- Google Colab
- Kaggle
{
  "add_prefix_space": false,
  "bos_token": {
    "__type": "AddedToken",
    "content": "<|startoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "do_lower_case": true,
  "eos_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "errors": "replace",
  "model_max_length": 77,
  "name_or_path": "openai/clip-vit-large-patch14",
  "pad_token": "<|endoftext|>",
  "special_tokens_map_file": "./special_tokens_map.json",
  "tokenizer_class": "CLIPTokenizer",
  "unk_token": {
    "__type": "AddedToken",
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}