Runtime error

Exit code: 1. Reason: ( File "/usr/local/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 703, in generate_prompt return self.generate(prompt_strings, stop=stop, callbacks=callbacks, **kwargs) File "/usr/local/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 882, in generate output = self._generate_helper( File "/usr/local/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 740, in _generate_helper raise e File "/usr/local/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 727, in _generate_helper self._generate( File "/usr/local/lib/python3.10/site-packages/langchain_core/language_models/llms.py", line 1431, in _generate self._call(prompt, stop=stop, run_manager=run_manager, **kwargs) File "/usr/local/lib/python3.10/site-packages/langchain_huggingface/llms/huggingface_endpoint.py", line 251, in _call for chunk in self._stream(prompt, stop, run_manager, **invocation_params): File "/usr/local/lib/python3.10/site-packages/langchain_huggingface/llms/huggingface_endpoint.py", line 312, in _stream for response in self.client.text_generation( File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_client.py", line 2173, in text_generation raise_text_generation_error(e) File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_common.py", line 422, in raise_text_generation_error raise http_error File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_client.py", line 2143, in text_generation bytes_output = self.post(json=payload, model=model, task="text-generation", stream=stream) # type: ignore File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_client.py", line 314, in post raise InferenceTimeoutError( huggingface_hub.errors.InferenceTimeoutError: Model not loaded on the server: https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.2. 
Please retry with a higher timeout (current: 120).

Container logs:

Fetching error logs...