Spaces:
Running
Running
suhyun.kang
committed on
Commit
•
c73f9e9
1
Parent(s):
1371afd
comments
Browse files
app.py
CHANGED
@@ -12,13 +12,13 @@ import gradio as gr
|
|
12 |
SUPPORTED_MODELS = ["gpt-4", "gpt-4-turbo", "gpt-3.5-turbo", "gemini-pro"]
|
13 |
|
14 |
|
15 |
-
def user(
|
16 |
model_pair = sample(SUPPORTED_MODELS, 2)
|
17 |
new_state_a = gradio_web_server.State(model_pair[0])
|
18 |
new_state_b = gradio_web_server.State(model_pair[1])
|
19 |
|
20 |
for state in [new_state_a, new_state_b]:
|
21 |
-
state.conv.append_message(state.conv.roles[0],
|
22 |
state.conv.append_message(state.conv.roles[1], None)
|
23 |
state.skip_next = False
|
24 |
|
@@ -34,7 +34,7 @@ def bot(state_a, state_b, request: gr.Request):
|
|
34 |
for state in new_states:
|
35 |
try:
|
36 |
# TODO(#1): Allow user to set configuration.
|
37 |
-
# bot_response returns a generator yielding states
|
38 |
generator = bot_response(state,
|
39 |
temperature=0.9,
|
40 |
top_p=0.9,
|
@@ -55,12 +55,21 @@ def bot(state_a, state_b, request: gr.Request):
|
|
55 |
|
56 |
for i in range(2):
|
57 |
try:
|
58 |
-
|
59 |
-
|
|
|
|
|
60 |
new_states[i] = new_state
|
61 |
-
|
62 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
63 |
stop = False
|
|
|
64 |
except StopIteration:
|
65 |
pass
|
66 |
|
|
|
12 |
SUPPORTED_MODELS = ["gpt-4", "gpt-4-turbo", "gpt-3.5-turbo", "gemini-pro"]
|
13 |
|
14 |
|
15 |
+
def user(user_prompt):
|
16 |
model_pair = sample(SUPPORTED_MODELS, 2)
|
17 |
new_state_a = gradio_web_server.State(model_pair[0])
|
18 |
new_state_b = gradio_web_server.State(model_pair[1])
|
19 |
|
20 |
for state in [new_state_a, new_state_b]:
|
21 |
+
state.conv.append_message(state.conv.roles[0], user_prompt)
|
22 |
state.conv.append_message(state.conv.roles[1], None)
|
23 |
state.skip_next = False
|
24 |
|
|
|
34 |
for state in new_states:
|
35 |
try:
|
36 |
# TODO(#1): Allow user to set configuration.
|
37 |
+
# bot_response returns a generator yielding states.
|
38 |
generator = bot_response(state,
|
39 |
temperature=0.9,
|
40 |
top_p=0.9,
|
|
|
55 |
|
56 |
for i in range(2):
|
57 |
try:
|
58 |
+
yielded = next(generators[i])
|
59 |
+
|
60 |
+
# The generator yields a tuple, with the new state as the first item.
|
61 |
+
new_state = yielded[0]
|
62 |
new_states[i] = new_state
|
63 |
+
|
64 |
+
# The last item from 'messages' represents the response to the prompt.
|
65 |
+
bot_message = new_state.conv.messages[-1]
|
66 |
+
|
67 |
+
# Each message in conv.messages is structured as [role, message],
|
68 |
+
# so we extract the last message component.
|
69 |
+
new_responses[i] = bot_message[-1]
|
70 |
+
|
71 |
stop = False
|
72 |
+
|
73 |
except StopIteration:
|
74 |
pass
|
75 |
|