历史上的今天,带图片 (Today in history, with images)
- crazy_functions/高级功能函数模板.py +3 -3
- predict.py +2 -1
crazy_functions/高级功能函数模板.py
CHANGED
@@ -5,15 +5,15 @@ import datetime
 @CatchException
 def 高阶功能模板函数(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
     history = []    # 清空历史,以免输入溢出
-    for i in range(
+    for i in range(5):
         currentMonth = (datetime.date.today() + datetime.timedelta(days=i)).month
         currentDay = (datetime.date.today() + datetime.timedelta(days=i)).day
-        i_say = f'
+        i_say = f'历史中哪些事件发生在{currentMonth}月{currentDay}日?列举两条并发送相关图片。'
         chatbot.append((i_say, "[Local Message] waiting gpt response."))
         yield chatbot, history, '正常' # 由于请求gpt需要一段时间,我们先及时地做一次状态显示

         # history = [] 每次询问不携带之前的询问历史
-        gpt_say = predict_no_ui_long_connection(inputs=i_say, top_p=top_p, temperature=temperature, history=[], sys_prompt="
+        gpt_say = predict_no_ui_long_connection(inputs=i_say, top_p=top_p, temperature=temperature, history=[], sys_prompt="当你想发送一张照片时,请使用 Markdown ,并且 不要有反斜线, 不要用代码块。使用 Unsplash API (https://source.unsplash.com/1280x720/? < PUT YOUR QUERY HERE >)。") # 请求gpt,需要一段时间

         chatbot[-1] = (i_say, gpt_say)
         history.append(i_say);history.append(gpt_say)
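
Note on the new sys_prompt: https://source.unsplash.com/1280x720/?<query> redirects to a random photo matching the query, so a Markdown image link built from it renders inline in the chat window without backslashes or code blocks (which is why the prompt forbids them). A minimal, hypothetical sketch of the output pattern the prompt asks the model to produce; the helper name and example query are illustrative only, not part of this commit:

    # Sketch only: build an inline Markdown image from the Unsplash source endpoint.
    def unsplash_markdown_image(query: str, alt: str = "image") -> str:
        # Commas join multiple search terms; spaces would break the Markdown URL.
        q = query.strip().replace(" ", ",")
        return f"![{alt}](https://source.unsplash.com/1280x720/?{q})"

    print(unsplash_markdown_image("moon landing", "Apollo 11"))
    # -> ![Apollo 11](https://source.unsplash.com/1280x720/?moon,landing)

The surrounding loop keeps the existing template pattern: append the question to chatbot, yield once so the UI shows a waiting message, then overwrite chatbot[-1] with the model's answer after predict_no_ui_long_connection returns.
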
predict.py
CHANGED
@@ -83,7 +83,8 @@ def predict_no_ui_long_connection(inputs, top_p, temperature, history=[], sys_pr
 except StopIteration: break
 if len(chunk)==0: continue
 if not chunk.startswith('data:'):
-
+    chunk = get_full_error(chunk.encode('utf8'), stream_response)
+    raise ConnectionAbortedError("OpenAI拒绝了请求:" + chunk.decode())
 delta = json.loads(chunk.lstrip('data:'))['choices'][0]["delta"]
 if len(delta) == 0: break
 if "role" in delta: continue
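
For context, a chunk that does not start with 'data:' means the API returned an error payload (for example an invalid key or rate-limit message) instead of a streamed completion, so this change surfaces that error immediately rather than letting json.loads fail on the next line. A rough sketch of what a helper like get_full_error plausibly does, assuming it simply drains the rest of the response stream so the whole error body ends up in the exception message; this is an illustrative assumption, not necessarily the repo's exact implementation:

    def get_full_error(chunk: bytes, stream_response) -> bytes:
        # Keep appending the remaining streamed chunks until the stream is exhausted,
        # so the raised ConnectionAbortedError carries the complete error body.
        while True:
            try:
                chunk += next(stream_response)
            except StopIteration:
                break
        return chunk

With that helper, the caller raises ConnectionAbortedError("OpenAI拒绝了请求:" + chunk.decode()), giving wrappers such as @CatchException a readable message to report instead of a JSON parse error.
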