Config file not found
#1 opened by thesofakillers
Hi. I am trying to load the model as usual:

import transformers

MODEL_VARIANT = "laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup"
model = transformers.CLIPModel.from_pretrained(MODEL_VARIANT)
processor = transformers.CLIPProcessor.from_pretrained(MODEL_VARIANT)

But I get the following error:
HTTPError Traceback (most recent call last)
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/huggingface_hub/utils/_errors.py:259, in hf_raise_for_status(response, endpoint_name)
258 try:
--> 259 response.raise_for_status()
260 except HTTPError as e:
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/requests/models.py:1021, in Response.raise_for_status(self)
1020 if http_error_msg:
-> 1021 raise HTTPError(http_error_msg, response=self)
HTTPError: 404 Client Error: Not Found for url: https://huggingface.co/laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup/resolve/main/config.json
The above exception was the direct cause of the following exception:
EntryNotFoundError Traceback (most recent call last)
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/transformers/utils/hub.py:409, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, use_auth_token, revision, local_files_only, subfolder, user_agent, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash)
407 try:
408 # Load from URL or cache if already cached
--> 409 resolved_file = hf_hub_download(
410 path_or_repo_id,
411 filename,
412 subfolder=None if len(subfolder) == 0 else subfolder,
413 revision=revision,
414 cache_dir=cache_dir,
415 user_agent=user_agent,
416 force_download=force_download,
417 proxies=proxies,
418 resume_download=resume_download,
419 use_auth_token=use_auth_token,
420 local_files_only=local_files_only,
421 )
423 except RepositoryNotFoundError:
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/huggingface_hub/utils/_validators.py:120, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
118 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 120 return fn(*args, **kwargs)
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/huggingface_hub/file_download.py:1166, in hf_hub_download(repo_id, filename, subfolder, repo_type, revision, library_name, library_version, cache_dir, local_dir, local_dir_use_symlinks, user_agent, force_download, force_filename, proxies, etag_timeout, resume_download, token, local_files_only, legacy_cache_layout)
1165 try:
-> 1166 metadata = get_hf_file_metadata(
1167 url=url,
1168 token=token,
1169 proxies=proxies,
1170 timeout=etag_timeout,
1171 )
1172 except EntryNotFoundError as http_error:
1173 # Cache the non-existence of the file and raise
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/huggingface_hub/utils/_validators.py:120, in validate_hf_hub_args.<locals>._inner_fn(*args, **kwargs)
118 kwargs = smoothly_deprecate_use_auth_token(fn_name=fn.__name__, has_token=has_token, kwargs=kwargs)
--> 120 return fn(*args, **kwargs)
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/huggingface_hub/file_download.py:1507, in get_hf_file_metadata(url, token, proxies, timeout)
1498 r = _request_wrapper(
1499 method="HEAD",
1500 url=url,
(...)
1505 timeout=timeout,
1506 )
-> 1507 hf_raise_for_status(r)
1509 # Return
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/huggingface_hub/utils/_errors.py:269, in hf_raise_for_status(response, endpoint_name)
268 message = f"{response.status_code} Client Error." + "\n\n" + f"Entry Not Found for url: {response.url}."
--> 269 raise EntryNotFoundError(message, response) from e
271 elif error_code == "GatedRepo":
EntryNotFoundError: 404 Client Error. (Request ID: Root=1-64396785-5de042e44737906e415c76d1)
Entry Not Found for url: https://huggingface.co/laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup/resolve/main/config.json.
During handling of the above exception, another exception occurred:
OSError Traceback (most recent call last)
Cell In[33], line 5
1 # First setup the model
3 MODEL_VARIANT = "laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup"
----> 5 model = transformers.CLIPModel.from_pretrained(MODEL_VARIANT)
6 processor = transformers.CLIPProcessor.from_pretrained(MODEL_VARIANT)
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/transformers/modeling_utils.py:2269, in PreTrainedModel.from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs)
2267 if not isinstance(config, PretrainedConfig):
2268 config_path = config if config is not None else pretrained_model_name_or_path
-> 2269 config, model_kwargs = cls.config_class.from_pretrained(
2270 config_path,
2271 cache_dir=cache_dir,
2272 return_unused_kwargs=True,
2273 force_download=force_download,
2274 resume_download=resume_download,
2275 proxies=proxies,
2276 local_files_only=local_files_only,
2277 use_auth_token=use_auth_token,
2278 revision=revision,
2279 subfolder=subfolder,
2280 _from_auto=from_auto_class,
2281 _from_pipeline=from_pipeline,
2282 **kwargs,
2283 )
2284 else:
2285 model_kwargs = kwargs
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/transformers/configuration_utils.py:546, in PretrainedConfig.from_pretrained(cls, pretrained_model_name_or_path, **kwargs)
468 @classmethod
469 def from_pretrained(cls, pretrained_model_name_or_path: Union[str, os.PathLike], **kwargs) -> "PretrainedConfig":
470 r"""
471 Instantiate a [`PretrainedConfig`] (or a derived class) from a pretrained model configuration.
472
(...)
544 assert unused_kwargs == {"foo": False}
545 ```"""
--> 546 config_dict, kwargs = cls.get_config_dict(pretrained_model_name_or_path, **kwargs)
547 if "model_type" in config_dict and hasattr(cls, "model_type") and config_dict["model_type"] != cls.model_type:
548 logger.warning(
549 f"You are using a model of type {config_dict['model_type']} to instantiate a model of type "
550 f"{cls.model_type}. This is not supported for all configurations of models and can yield errors."
551 )
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/transformers/configuration_utils.py:573, in PretrainedConfig.get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
571 original_kwargs = copy.deepcopy(kwargs)
572 # Get config dict associated with the base config file
--> 573 config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
574 if "_commit_hash" in config_dict:
575 original_kwargs["_commit_hash"] = config_dict["_commit_hash"]
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/transformers/configuration_utils.py:628, in PretrainedConfig._get_config_dict(cls, pretrained_model_name_or_path, **kwargs)
624 configuration_file = kwargs.pop("_configuration_file", CONFIG_NAME)
626 try:
627 # Load from local folder or from cache or download from model Hub and cache
--> 628 resolved_config_file = cached_file(
629 pretrained_model_name_or_path,
630 configuration_file,
631 cache_dir=cache_dir,
632 force_download=force_download,
633 proxies=proxies,
634 resume_download=resume_download,
635 local_files_only=local_files_only,
636 use_auth_token=use_auth_token,
637 user_agent=user_agent,
638 revision=revision,
639 subfolder=subfolder,
640 _commit_hash=commit_hash,
641 )
642 commit_hash = extract_commit_hash(resolved_config_file, commit_hash)
643 except EnvironmentError:
644 # Raise any environment error raise by `cached_file`. It will have a helpful error message adapted to
645 # the original exception.
File ~/miniconda3/envs/thesis/lib/python3.8/site-packages/transformers/utils/hub.py:454, in cached_file(path_or_repo_id, filename, cache_dir, force_download, resume_download, proxies, use_auth_token, revision, local_files_only, subfolder, user_agent, _raise_exceptions_for_missing_entries, _raise_exceptions_for_connection_errors, _commit_hash)
452 if revision is None:
453 revision = "main"
--> 454 raise EnvironmentError(
455 f"{path_or_repo_id} does not appear to have a file named {full_filename}. Checkout "
456 f"'https://huggingface.co/{path_or_repo_id}/{revision}' for available files."
457 )
458 except HTTPError as err:
459 # First we try to see if we have a cached version (not up to date):
460 resolved_file = try_to_load_from_cache(path_or_repo_id, full_filename, cache_dir=cache_dir, revision=revision)
OSError: laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup does not appear to have a file named config.json. Checkout 'https://huggingface.co/laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup/main' for available files.
@thesofakillers this model is an OpenCLIP-only model right now (the repo has an open_clip_config.json but no config.json), so there is no supporting transformers CLIP model with a ConvNeXt tower. Someone would have to add support to transformers. See https://github.com/mlfoundations/open_clip. I have a TODO to add a usage example for this to the model cards.
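One way to confirm this is to list the files actually present on the Hub with huggingface_hub (a quick sketch using its list_repo_files helper):

from huggingface_hub import list_repo_files

# The listing shows an open_clip_config.json but no config.json, which is why
# transformers' from_pretrained cannot find a config for this repo.
print(list_repo_files("laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup"))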
If you have open_clip installed (pip install open_clip_torch), you can do the following to create the model and tokenizer for OpenCLIP models from the Hub; usage is very similar to the OpenAI CLIP code for getting embeddings, zero-shot classification, etc.

import open_clip

model, preprocess_train, preprocess_val = open_clip.create_model_and_transforms('hf-hub:laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup')
tokenizer = open_clip.get_tokenizer('hf-hub:laion/CLIP-convnext_large_d_320.laion2B-s29B-b131K-ft-soup')
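Picking up from that snippet, here is a minimal zero-shot sketch in the style of the OpenAI CLIP examples (the image file and candidate labels below are just placeholders):

import torch
from PIL import Image

# Encode one image and a few candidate labels, then compare them in the shared
# embedding space; "example.jpg" and the label strings are hypothetical.
image = preprocess_val(Image.open("example.jpg")).unsqueeze(0)
text = tokenizer(["a diagram", "a dog", "a cat"])

with torch.no_grad():
    image_features = model.encode_image(image)
    text_features = model.encode_text(text)
    image_features /= image_features.norm(dim=-1, keepdim=True)
    text_features /= text_features.norm(dim=-1, keepdim=True)
    probs = (100.0 * image_features @ text_features.T).softmax(dim=-1)

print(probs)  # probability of each candidate label for the image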
Ah, understood. Thanks!
thesofakillers changed discussion status to closed