runtime error

Exit code: 1. Reason: ponse = await f(*args, **kwargs)
  File "/usr/local/lib/python3.10/site-packages/gradio/helpers.py", line 458, in get_final_item
    async for x in self.fn(*args):  # noqa: B007 # type: ignore
  File "/usr/local/lib/python3.10/site-packages/gradio/chat_interface.py", line 633, in _examples_stream_fn
    async for response in generator:
  File "/usr/local/lib/python3.10/site-packages/gradio/utils.py", line 649, in __anext__
    return await anyio.to_thread.run_sync(
  File "/usr/local/lib/python3.10/site-packages/anyio/to_thread.py", line 56, in run_sync
    return await get_async_backend().run_sync_in_worker_thread(
  File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 2177, in run_sync_in_worker_thread
    return await future
  File "/usr/local/lib/python3.10/site-packages/anyio/_backends/_asyncio.py", line 859, in run
    result = context.run(func, *args)
  File "/usr/local/lib/python3.10/site-packages/gradio/utils.py", line 632, in run_sync_iterator_async
    return next(iterator)
  File "/home/user/app/app.py", line 78, in respond
    for message in client.chat_completion(
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_common.py", line 356, in _stream_chat_completion_response_from_bytes
    output = _format_chat_completion_stream_output_from_text_generation_from_bytes(item)
  File "/usr/local/lib/python3.10/site-packages/huggingface_hub/inference/_common.py", line 379, in _format_chat_completion_stream_output_from_text_generation_from_bytes
    json_payload = json.loads(payload.lstrip("data:").rstrip("/n"))
  File "/usr/local/lib/python3.10/json/__init__.py", line 346, in loads
    return _default_decoder.decode(s)
  File "/usr/local/lib/python3.10/json/decoder.py", line 337, in decode
    obj, end = self.raw_decode(s, idx=_w(s, 0).end())
  File "/usr/local/lib/python3.10/json/decoder.py", line 355, in raw_decode
    raise JSONDecodeError("Expecting value", s, err.value) from None
json.decoder.JSONDecodeError: Expecting value: line 1 column 3 (char 2)
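The crash originates in the app's own streaming handler: the traceback shows respond() in app.py iterating over client.chat_completion(..., stream=True), and huggingface_hub failing while calling json.loads() on a streamed server-sent event whose payload is not valid JSON (typically a plain-text error returned by the inference backend). Below is a minimal sketch of that kind of handler, assuming the standard Gradio ChatInterface + InferenceClient streaming template; the model id, parameter names, and tuple-style history format are placeholders, not taken from the failing app, and the except clause only illustrates one way to surface the decode failure instead of crashing the Space.

# Hypothetical reconstruction of the handler the traceback points at (app.py, respond()).
# Model id and parameters are placeholders; only the chat_completion(stream=True) call
# is confirmed by the traceback.
import json

from huggingface_hub import InferenceClient

client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")  # placeholder model id

def respond(message, history, system_message, max_tokens, temperature, top_p):
    # Build the message list from the system prompt and prior turns
    # (tuple-style history is assumed here).
    messages = [{"role": "system", "content": system_message}]
    for user_msg, assistant_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if assistant_msg:
            messages.append({"role": "assistant", "content": assistant_msg})
    messages.append({"role": "user", "content": message})

    response = ""
    try:
        # This is the call that fails: huggingface_hub parses each streamed
        # server-sent event with json.loads(), so a non-JSON payload from the
        # backend raises JSONDecodeError mid-stream.
        for chunk in client.chat_completion(
            messages,
            max_tokens=max_tokens,
            stream=True,
            temperature=temperature,
            top_p=top_p,
        ):
            token = chunk.choices[0].delta.content or ""
            response += token
            yield response
    except json.JSONDecodeError:
        # Illustrative guard: report the bad payload instead of crashing the Space.
        yield response + "\n[stream ended: backend returned a non-JSON payload]"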
