runtime error
force_download, local_files_only)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/file_download.py", line 1826, in _raise_on_head_call_error
    raise LocalEntryNotFoundError(
huggingface_hub.utils._errors.LocalEntryNotFoundError: An error happened while trying to locate the file on the Hub and we cannot find the requested files in the local cache. Please check your connection and try again or make sure your Internet connection is on.

The above exception was the direct cause of the following exception:

Traceback (most recent call last):
  File "/home/user/app/app.py", line 9, in <module>
    from backend.query_llm import generate
  File "/home/user/app/backend/query_llm.py", line 9, in <module>
    tokenizer = AutoTokenizer.from_pretrained(MODEL)
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/tokenization_auto.py", line 794, in from_pretrained
    config = AutoConfig.from_pretrained(
  File "/usr/local/lib/python3.10/site-packages/transformers/models/auto/configuration_auto.py", line 1138, in from_pretrained
    config_dict, unused_kwargs = PretrainedConfig.get_config_dict(pretrained_model_name_or_path, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/configuration_utils.py", line 631, in get_config_dict
    config_dict, kwargs = cls._get_config_dict(pretrained_model_name_or_path, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/transformers/configuration_utils.py", line 686, in _get_config_dict
    resolved_config_file = cached_file(
  File "/usr/local/lib/python3.10/site-packages/transformers/utils/hub.py", line 441, in cached_file
    raise EnvironmentError(
OSError: We couldn't connect to 'https://huggingface.co' to load this file, couldn't find it in the cached files and it looks like CohereForAI/c4ai-command-r-plus is not the path to a directory containing a file named config.json.
Checkout your internet connection or see how to run the library in offline mode at 'https://huggingface.co/docs/transformers/installation#offline-mode'.
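In short: at startup the app could not reach https://huggingface.co, and CohereForAI/c4ai-command-r-plus was not in the local cache, so AutoTokenizer.from_pretrained(MODEL) in backend/query_llm.py failed at import time. Below is a minimal sketch of a more tolerant load for that line. It assumes MODEL holds the repo id shown in the OSError and that an HF_TOKEN secret is set if the repo is gated; both are assumptions inferred from the traceback, not confirmed by the log.

import os

from transformers import AutoTokenizer

MODEL = "CohereForAI/c4ai-command-r-plus"  # assumed from the OSError message above

def load_tokenizer(model_id):
    try:
        # Normal path: resolve the files from the Hub (or a fresh local cache).
        # token is None if no HF_TOKEN secret is set, which matches the default.
        return AutoTokenizer.from_pretrained(model_id, token=os.getenv("HF_TOKEN"))
    except OSError:
        # The Hub was unreachable (the failure in the traceback): fall back to
        # whatever snapshot is already in the local cache, if any.
        return AutoTokenizer.from_pretrained(model_id, local_files_only=True)

tokenizer = load_tokenizer(MODEL)

Note that the local_files_only fallback only helps if the files were cached on an earlier run. A more robust fix is to pre-download the repo during the build step (for example with huggingface_hub.snapshot_download) so startup never depends on the network, as described in the offline-mode documentation linked in the error message.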