Allow a max of 256 tokens.
app.py
CHANGED
@@ -85,7 +85,7 @@ def evaluate_stream(msg, history, temperature, top_p):
     context = context.replace(r'<br>', '')
 
     # TODO: Avoid the tokens are too long.
-    CUTOFF =
+    CUTOFF = 256
     while len(tokenizer.encode(context)) > CUTOFF:
         # save 15 token size for the answer
         context = context[15:]
@@ -112,7 +112,7 @@ with gr.Blocks() as demo:
     state = gr.State()
     with gr.Row():
         with gr.Column(scale=2):
-            temp = gr.components.Slider(minimum=0, maximum=1.1, value=
+            temp = gr.components.Slider(minimum=0, maximum=1.1, value=0.95, label="Temperature",
                                         info="温度参数,越高的温度生成的内容越丰富,但是有可能出现语法问题。")
             top_p = gr.components.Slider(minimum=0.5, maximum=1.0, value=0.975, label="Top-p",
                                          info="top-p参数,只输出前p>top-p的文字,越大生成的内容越丰富,但也可能出现语法问题。数字越小似乎上下文的衔接性越好。")
@@ -131,4 +131,4 @@ with gr.Blocks() as demo:
     gr.HTML(footer)
 
 demo.queue()
-demo.launch(debug=False)
+demo.launch(debug=False)
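The slider info strings are in Chinese; roughly, they say that a higher temperature or top-p produces richer output but may introduce grammatical problems, and that a smaller top-p seems to keep the text more coherent with its context. The new CUTOFF logic keeps the prompt within 256 tokens by re-encoding the context and dropping 15 characters from the front until it fits, leaving room for the answer. Below is a minimal, self-contained sketch of that loop; the WhitespaceTokenizer is a hypothetical stand-in for the app's real model tokenizer.

CUTOFF = 256  # token cap introduced in this commit

class WhitespaceTokenizer:
    """Stand-in for the model tokenizer; encode() simply splits on whitespace."""
    def encode(self, text):
        return text.split()

tokenizer = WhitespaceTokenizer()

def truncate_context(context, cutoff=CUTOFF):
    # Mirror the loop in evaluate_stream: drop 15 characters from the
    # front at a time until the encoded prompt fits under the cutoff.
    while len(tokenizer.encode(context)) > cutoff:
        context = context[15:]
    return context

long_prompt = "token " * 400                 # ~400 "tokens" under this toy tokenizer
short_prompt = truncate_context(long_prompt)
print(len(tokenizer.encode(short_prompt)))   # <= 256

Because the loop trims characters rather than tokens, each pass may remove more or fewer than 15 actual tokens depending on the tokenizer; re-encoding on every iteration is what guarantees the final prompt stays under the cap.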