Omnibus committed on
Commit
db203c4
·
verified ·
1 Parent(s): 4462aec

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -3
app.py CHANGED
@@ -74,7 +74,7 @@ def chat_inf(system_prompt,prompt,history,memory,client_choice,seed,temp,tokens,
74
  print("\n######### HIST "+str(in_len))
75
  print("\n######### TOKENS "+str(tokens))
76
  if (in_len+tokens) > 8000:
77
- history.append((prompt,"Wait, that's too many tokens, please reduce the Chat Memory value"))
78
  yield history,memory
79
  #hist=compress_history(history,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem)
80
  #yield [(prompt,"History has been compressed, processing request...")]
@@ -145,11 +145,11 @@ with gr.Blocks() as app:
145
  with gr.Group():
146
  rand = gr.Checkbox(label="Random Seed", value=True)
147
  seed=gr.Slider(label="Seed", minimum=1, maximum=1111111111111111,step=1, value=rand_val)
148
- tokens = gr.Slider(label="Max new tokens",value=3840,minimum=0,maximum=8000,step=64,interactive=True, visible=True,info="The maximum number of tokens")
149
  temp=gr.Slider(label="Temperature",step=0.01, minimum=0.01, maximum=1.0, value=0.9)
150
  top_p=gr.Slider(label="Top-P",step=0.01, minimum=0.01, maximum=1.0, value=0.9)
151
  rep_p=gr.Slider(label="Repetition Penalty",step=0.1, minimum=0.1, maximum=2.0, value=1.0)
152
- chat_mem=gr.Number(label="Chat Memory", info="Number of previous chats to retain",value=5)
153
  with gr.Accordion(label="Screenshot",open=False):
154
  with gr.Row():
155
  with gr.Column(scale=3):
 
74
  print("\n######### HIST "+str(in_len))
75
  print("\n######### TOKENS "+str(tokens))
76
  if (in_len+tokens) > 8000:
77
+ history.append((prompt,"Wait, that's too many tokens, please reduce the 'Chat Memory' value, or reduce the 'Max new tokens' value"))
78
  yield history,memory
79
  #hist=compress_history(history,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem)
80
  #yield [(prompt,"History has been compressed, processing request...")]
 
145
  with gr.Group():
146
  rand = gr.Checkbox(label="Random Seed", value=True)
147
  seed=gr.Slider(label="Seed", minimum=1, maximum=1111111111111111,step=1, value=rand_val)
148
+ tokens = gr.Slider(label="Max new tokens",value=1600,minimum=0,maximum=8000,step=64,interactive=True, visible=True,info="The maximum number of tokens")
149
  temp=gr.Slider(label="Temperature",step=0.01, minimum=0.01, maximum=1.0, value=0.9)
150
  top_p=gr.Slider(label="Top-P",step=0.01, minimum=0.01, maximum=1.0, value=0.9)
151
  rep_p=gr.Slider(label="Repetition Penalty",step=0.1, minimum=0.1, maximum=2.0, value=1.0)
152
+ chat_mem=gr.Number(label="Chat Memory", info="Number of previous chats to retain",value=4)
153
  with gr.Accordion(label="Screenshot",open=False):
154
  with gr.Row():
155
  with gr.Column(scale=3):