barunsaha committed
Commit: 8953d62
Parent: c9f4415

Fix #44: show helpful error message when the token count exceeds the limit

Files changed (2)
  1. app.py +17 -0
  2. global_config.py +1 -1
app.py CHANGED
@@ -9,6 +9,7 @@ import sys
import tempfile
from typing import List, Union

+import huggingface_hub
import json5
import requests
import streamlit as st
@@ -217,6 +218,22 @@ def set_up_chat_ui():
            logger.error(msg)
            st.error(msg)
            return
+        except huggingface_hub.errors.ValidationError as ve:
+            msg = (
+                f'An error occurred while trying to generate the content: {ve}'
+                '\nPlease try again with a significantly shorter input text.'
+            )
+            logger.error(msg)
+            st.error(msg)
+            return
+        except Exception as ex:
+            msg = (
+                f'An unexpected error occurred while generating the content: {ex}'
+                '\nPlease try again later, possibly with different inputs.'
+            )
+            logger.error(msg)
+            st.error(msg)
+            return

        history.add_user_message(prompt)
        history.add_ai_message(response)
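
For reference, a minimal, self-contained sketch of the error-handling pattern these hunks introduce is shown below. It assumes the generation request goes through huggingface_hub.InferenceClient.text_generation(), which is not necessarily the call path app.py uses; the handler structure and the user-facing messages mirror the diff above.

import logging

import huggingface_hub
import streamlit as st

logger = logging.getLogger(__name__)


def generate_or_report(client: huggingface_hub.InferenceClient, prompt: str):
    """Run text generation and surface readable errors in the Streamlit UI.

    The client and the text_generation() call are assumptions for this sketch;
    the except blocks mirror the ones added to set_up_chat_ui() above.
    """
    try:
        return client.text_generation(prompt, max_new_tokens=200)
    except huggingface_hub.errors.ValidationError as ve:
        # Raised by the Inference API when the input token count exceeds the model's limit
        msg = (
            f'An error occurred while trying to generate the content: {ve}'
            '\nPlease try again with a significantly shorter input text.'
        )
        logger.error(msg)
        st.error(msg)
    except Exception as ex:
        # Generic fallback so users see a message instead of a raw traceback
        msg = (
            f'An unexpected error occurred while generating the content: {ex}'
            '\nPlease try again later, possibly with different inputs.'
        )
        logger.error(msg)
        st.error(msg)
    return None

Ordering the specific ValidationError handler before the broad Exception handler keeps the actionable "shorter input" message for the token-limit case, while any other failure still degrades to a generic message rather than a stack trace.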
global_config.py CHANGED
@@ -21,7 +21,7 @@ class GlobalConfig:
    LLM_MODEL_TEMPERATURE = 0.2
    LLM_MODEL_MIN_OUTPUT_LENGTH = 100
    LLM_MODEL_MAX_OUTPUT_LENGTH = 4 * 4096  # tokens
-    LLM_MODEL_MAX_INPUT_LENGTH = 750  # characters
+    LLM_MODEL_MAX_INPUT_LENGTH = 400  # characters

    HUGGINGFACEHUB_API_TOKEN = os.environ.get('HUGGINGFACEHUB_API_TOKEN', '')
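
The character cap lowered here (750 to 400) bounds how much topic text reaches the model; a character count is a cheap proxy for the token limit whose violation raises the ValidationError handled in app.py. The diff does not show where the constant is consumed (it may simply bound a Streamlit input widget via its max_chars parameter), so the following check is an illustrative sketch rather than the app's actual validation logic.

from global_config import GlobalConfig


def check_input_length(text: str) -> None:
    """Hypothetical helper: reject input exceeding the configured character budget."""
    if len(text) > GlobalConfig.LLM_MODEL_MAX_INPUT_LENGTH:
        raise ValueError(
            f'The input is {len(text)} characters long, but at most'
            f' {GlobalConfig.LLM_MODEL_MAX_INPUT_LENGTH} characters are allowed.'
            ' Please shorten the text and try again.'
        )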