Update
app.py CHANGED
@@ -85,7 +85,7 @@ def process_example(message: str) -> tuple[str, list[tuple[str, str]]]:
     return '', x
 
 
-def
+def check_input_token_length(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> None:
     input_token_length = get_input_token_length(message, chat_history, system_prompt)
     if input_token_length > MAX_INPUT_TOKEN_LENGTH:
         raise gr.Error(f'The accumulated input is too long ({input_token_length} > {MAX_INPUT_TOKEN_LENGTH}). Clear your chat history and try again.')
@@ -178,7 +178,7 @@ with gr.Blocks(css='style.css') as demo:
         api_name=False,
         queue=False,
     ).then(
-        fn=
+        fn=check_input_token_length,
         inputs=[saved_input, chatbot, system_prompt],
         api_name=False,
         queue=False,
@@ -210,7 +210,7 @@ with gr.Blocks(css='style.css') as demo:
         api_name=False,
         queue=False,
     ).then(
-        fn=
+        fn=check_input_token_length,
         inputs=[saved_input, chatbot, system_prompt],
         api_name=False,
         queue=False,
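For context, a minimal sketch of the pattern this diff wires up: a validation step that raises gr.Error is chained between the input-saving step and generation, so an over-long prompt is rejected before the model runs. Everything below other than check_input_token_length is a simplified stand-in for illustration (the whitespace token counter, the echo generator, the MAX_INPUT_TOKEN_LENGTH value, and the minimal UI), not the Space's actual helpers.

# Sketch of chaining a token-length guard before generation in Gradio Blocks.
# The token counter and generator are stand-ins, not the Space's real code.
import gradio as gr

MAX_INPUT_TOKEN_LENGTH = 4096  # assumed limit for illustration


def get_input_token_length(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> int:
    # Stand-in counter: whitespace tokens across system prompt, history, and message.
    texts = [system_prompt, message] + [t for pair in chat_history for t in pair]
    return sum(len(t.split()) for t in texts)


def check_input_token_length(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> None:
    input_token_length = get_input_token_length(message, chat_history, system_prompt)
    if input_token_length > MAX_INPUT_TOKEN_LENGTH:
        raise gr.Error(f'The accumulated input is too long ({input_token_length} > {MAX_INPUT_TOKEN_LENGTH}). Clear your chat history and try again.')


def generate(message: str, chat_history: list[tuple[str, str]], system_prompt: str) -> list[tuple[str, str]]:
    # Stand-in generator: echoes the message so the chain can be exercised end to end.
    return chat_history + [(message, f'echo: {message}')]


with gr.Blocks() as demo:
    system_prompt = gr.Textbox(label='System prompt', value='')
    chatbot = gr.Chatbot()
    textbox = gr.Textbox(label='Message')
    saved_input = gr.State('')

    textbox.submit(
        fn=lambda x: ('', x),  # clear the textbox and stash the submitted message
        inputs=textbox,
        outputs=[textbox, saved_input],
        api_name=False,
        queue=False,
    ).then(
        fn=check_input_token_length,  # raises gr.Error when the accumulated input is too long
        inputs=[saved_input, chatbot, system_prompt],
        api_name=False,
        queue=False,
    ).success(
        fn=generate,  # .success() only fires if the length check did not raise
        inputs=[saved_input, chatbot, system_prompt],
        outputs=chatbot,
    )

if __name__ == '__main__':
    demo.launch()

One design note: .then() fires after the previous step regardless of whether it succeeded, so the sketch attaches the generation step with .success(), which runs only when check_input_token_length did not raise; how the actual Space chains its generate step is not shown in this diff.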