vansin committed on
Commit
009d48a
·
1 Parent(s): 744538e

feat: update

Browse files
Files changed (1) hide show
  1. app.py +158 -13
app.py CHANGED
@@ -1,28 +1,173 @@
1
- import os
 
2
  import subprocess
3
 
4
- # os.system("python -m mindsearch.app --lang en --model_format internlm_server")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6
 
7
 
 
8
 
9
- from flask import Flask, send_from_directory
 
 
 
 
 
 
 
 
 
 
 
 
 
10
 
 
 
 
11
 
12
- app = Flask(__name__, static_folder='dist')
 
 
 
 
 
 
 
13
 
14
- @app.route('/')
15
- def serve_index():
16
- return send_from_directory(app.static_folder, 'index.html')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
17
 
18
- @app.route('/<path:path>')
19
- def serve_file(path):
20
- return send_from_directory(app.static_folder, path)
21
 
22
- if __name__ == '__main__':
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
 
24
- subprocess.Popen(["python", "-m", "mindsearch.app", "--lang", "en", "--model_format", "internlm_server"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
 
25
 
26
- app.run(debug=False, port=7860, host="0.0.0.0")
 
 
 
 
27
 
28
 
 
 
 
 
 
 
 
1
+ # import os
2
+
3
  import subprocess
4
 
5
+ # # os.system("python -m mindsearch.app --lang en --model_format internlm_server")
6
+
7
+
8
+
9
+
10
+ # from flask import Flask, send_from_directory
11
+
12
+
13
+ # app = Flask(__name__, static_folder='dist')
14
+
15
+ # @app.route('/')
16
+ # def serve_index():
17
+ # return send_from_directory(app.static_folder, 'index.html')
18
+
19
+ # @app.route('/<path:path>')
20
+ # def serve_file(path):
21
+ # return send_from_directory(app.static_folder, path)
22
+
23
+ # if __name__ == '__main__':
24
+
25
+ # subprocess.Popen(["python", "-m", "mindsearch.app", "--lang", "en", "--model_format", "internlm_server"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
26
+
27
+ # app.run(debug=False, port=7860, host="0.0.0.0")
28
+
29
+
30
+ import json
31
+
32
+ import gradio as gr
33
+ import requests
34
+ from lagent.schema import AgentStatusCode
35
+
36
+ PLANNER_HISTORY = []
37
+ SEARCHER_HISTORY = []
38
+
39
+
40
+ def rst_mem(history_planner: list, history_searcher: list):
41
+ '''
42
+ Reset the chatbot memory.
43
+ '''
44
+ history_planner = []
45
+ history_searcher = []
46
+ if PLANNER_HISTORY:
47
+ PLANNER_HISTORY.clear()
48
+ return history_planner, history_searcher
49
+
50
 
51
+ def format_response(gr_history, agent_return):
52
+ if agent_return['state'] in [
53
+ AgentStatusCode.STREAM_ING, AgentStatusCode.ANSWER_ING
54
+ ]:
55
+ gr_history[-1][1] = agent_return['response']
56
+ elif agent_return['state'] == AgentStatusCode.PLUGIN_START:
57
+ thought = gr_history[-1][1].split('```')[0]
58
+ if agent_return['response'].startswith('```'):
59
+ gr_history[-1][1] = thought + '\n' + agent_return['response']
60
+ elif agent_return['state'] == AgentStatusCode.PLUGIN_END:
61
+ thought = gr_history[-1][1].split('```')[0]
62
+ if isinstance(agent_return['response'], dict):
63
+ gr_history[-1][
64
+ 1] = thought + '\n' + f'```json\n{json.dumps(agent_return["response"], ensure_ascii=False, indent=4)}\n```' # noqa: E501
65
+ elif agent_return['state'] == AgentStatusCode.PLUGIN_RETURN:
66
+ assert agent_return['inner_steps'][-1]['role'] == 'environment'
67
+ item = agent_return['inner_steps'][-1]
68
+ gr_history.append([
69
+ None,
70
+ f"```json\n{json.dumps(item['content'], ensure_ascii=False, indent=4)}\n```"
71
+ ])
72
+ gr_history.append([None, ''])
73
+ return
74
 
75
 
76
+ def predict(history_planner, history_searcher):
77
 
78
+ def streaming(raw_response):
79
+ for chunk in raw_response.iter_lines(chunk_size=8192,
80
+ decode_unicode=False,
81
+ delimiter=b'\n'):
82
+ if chunk:
83
+ decoded = chunk.decode('utf-8')
84
+ if decoded == '\r':
85
+ continue
86
+ if decoded[:6] == 'data: ':
87
+ decoded = decoded[6:]
88
+ elif decoded.startswith(': ping - '):
89
+ continue
90
+ response = json.loads(decoded)
91
+ yield (response['response'], response['current_node'])
92
 
93
+ global PLANNER_HISTORY
94
+ PLANNER_HISTORY.append(dict(role='user', content=history_planner[-1][0]))
95
+ new_search_turn = True
96
 
97
+ url = 'http://localhost:8002/solve'
98
+ headers = {'Content-Type': 'application/json'}
99
+ data = {'inputs': PLANNER_HISTORY}
100
+ raw_response = requests.post(url,
101
+ headers=headers,
102
+ data=json.dumps(data),
103
+ timeout=20,
104
+ stream=True)
105
 
106
+ for resp in streaming(raw_response):
107
+ agent_return, node_name = resp
108
+ if node_name:
109
+ if node_name in ['root', 'response']:
110
+ continue
111
+ agent_return = agent_return['nodes'][node_name]['detail']
112
+ if new_search_turn:
113
+ history_searcher.append([agent_return['content'], ''])
114
+ new_search_turn = False
115
+ format_response(history_searcher, agent_return)
116
+ if agent_return['state'] == AgentStatusCode.END:
117
+ new_search_turn = True
118
+ yield history_planner, history_searcher
119
+ else:
120
+ new_search_turn = True
121
+ format_response(history_planner, agent_return)
122
+ if agent_return['state'] == AgentStatusCode.END:
123
+ PLANNER_HISTORY = agent_return['inner_steps']
124
+ yield history_planner, history_searcher
125
+ return history_planner, history_searcher
126
 
 
 
 
127
 
128
+ with gr.Blocks() as demo:
129
+ gr.HTML("""<h1 align="center">WebAgent Gradio Simple Demo</h1>""")
130
+ with gr.Row():
131
+ with gr.Column(scale=10):
132
+ with gr.Row():
133
+ with gr.Column():
134
+ planner = gr.Chatbot(label='planner',
135
+ height=700,
136
+ show_label=True,
137
+ show_copy_button=True,
138
+ bubble_full_width=False,
139
+ render_markdown=True)
140
+ with gr.Column():
141
+ searcher = gr.Chatbot(label='searcher',
142
+ height=700,
143
+ show_label=True,
144
+ show_copy_button=True,
145
+ bubble_full_width=False,
146
+ render_markdown=True)
147
+ with gr.Row():
148
+ user_input = gr.Textbox(show_label=False,
149
+ placeholder='inputs...',
150
+ lines=5,
151
+ container=False)
152
+ with gr.Row():
153
+ with gr.Column(scale=2):
154
+ submitBtn = gr.Button('Submit')
155
+ with gr.Column(scale=1, min_width=20):
156
+ emptyBtn = gr.Button('Clear History')
157
 
158
+ def user(query, history):
159
+ return '', history + [[query, '']]
160
 
161
+ submitBtn.click(user, [user_input, planner], [user_input, planner],
162
+ queue=False).then(predict, [planner, searcher],
163
+ [planner, searcher])
164
+ emptyBtn.click(rst_mem, [planner, searcher], [planner, searcher],
165
+ queue=False)
166
 
167
 
168
+ subprocess.Popen(["python", "-m", "mindsearch.app", "--lang", "en", "--model_format", "internlm_server"], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
169
+ demo.queue()
170
+ demo.launch(server_name='0.0.0.0',
171
+ server_port=7860,
172
+ inbrowser=True,
173
+ share=True)