Update app.py
app.py CHANGED
@@ -36,46 +36,56 @@ def parse_conversation(messages):
     return system_prompt, initial_query
 
 def respond(message, history, model, approach, system_message, max_tokens, temperature, top_p):
-
-
-
-    if val[0]: messages.append({"role": "user", "content": val[0]})
-    if val[1]: messages.append({"role": "assistant", "content": val[1]})
-    messages.append({"role": "user", "content": message})
-
-    if approach == "none":
-        response = client.chat.completions.create(
-            extra_headers={
-                "HTTP-Referer": "https://github.com/codelion/optillm",
-                "X-Title": "optillm"
-            },
-            model=model,
-            messages=messages,
-            max_tokens=max_tokens,
-            temperature=temperature,
-            top_p=top_p,
-        )
-        return response.choices[0].message.content
-    else:
-        system_prompt, initial_query = parse_conversation(messages)
-        if approach == 'rto':
-            final_response, _ = round_trip_optimization(system_prompt, initial_query, client, model)
-        elif approach == 'z3':
-            z3_solver = Z3SymPySolverSystem(system_prompt, client, model)
-            final_response, _ = z3_solver.process_query(initial_query)
-        elif approach == "self_consistency":
-            final_response, _ = advanced_self_consistency_approach(system_prompt, initial_query, client, model)
-        elif approach == "cot_reflection":
-            final_response, _ = cot_reflection(system_prompt, initial_query, client, model)
-        elif approach == 'plansearch':
-            response, _ = plansearch(system_prompt, initial_query, client, model)
-            final_response = response[0]
-        elif approach == 'leap':
-            final_response, _ = leap(system_prompt, initial_query, client, model)
-        elif approach == 're2':
-            final_response, _ = re2_approach(system_prompt, initial_query, client, model)
+    try:
+        client = OpenAI(api_key=API_KEY, base_url="https://openrouter.ai/api/v1")
+        messages = [{"role": "system", "content": system_message}]
 
-
+        for val in history:
+            if val[0]:
+                messages.append({"role": "user", "content": val[0]})
+            if val[1]:
+                messages.append({"role": "assistant", "content": val[1]})
+
+        messages.append({"role": "user", "content": message})
+
+        if approach == "none":
+            response = client.chat.completions.create(
+                extra_headers={
+                    "HTTP-Referer": "https://github.com/codelion/optillm",
+                    "X-Title": "optillm"
+                },
+                model=model,
+                messages=messages,
+                max_tokens=max_tokens,
+                temperature=temperature,
+                top_p=top_p,
+            )
+            return response.choices[0].message.content
+        else:
+            system_prompt, initial_query = parse_conversation(messages)
+
+            if approach == 'rto':
+                final_response, _ = round_trip_optimization(system_prompt, initial_query, client, model)
+            elif approach == 'z3':
+                z3_solver = Z3SymPySolverSystem(system_prompt, client, model)
+                final_response, _ = z3_solver.process_query(initial_query)
+            elif approach == "self_consistency":
+                final_response, _ = advanced_self_consistency_approach(system_prompt, initial_query, client, model)
+            elif approach == "cot_reflection":
+                final_response, _ = cot_reflection(system_prompt, initial_query, client, model)
+            elif approach == 'plansearch':
+                response, _ = plansearch(system_prompt, initial_query, client, model)
+                final_response = response[0]
+            elif approach == 'leap':
+                final_response, _ = leap(system_prompt, initial_query, client, model)
+            elif approach == 're2':
+                final_response, _ = re2_approach(system_prompt, initial_query, client, model)
+
+        return final_response
+
+    except Exception as e:
+        error_message = f"Error in respond function: {str(e)}\nType: {type(e).__name__}"
+        print(error_message)
 
     # for message in client.chat_completion(
     #     messages,