codelion committed on
Commit
ca2a4e4
1 Parent(s): 3c6d9fd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -29
app.py CHANGED
@@ -1,6 +1,8 @@
1
  import os
2
  import gradio as gr
 
3
  from openai import OpenAI
 
4
  from optillm.cot_reflection import cot_reflection
5
  from optillm.rto import round_trip_optimization
6
  from optillm.z3_solver import Z3SymPySolverSystem
@@ -9,6 +11,7 @@ from optillm.plansearch import plansearch
9
  from optillm.leap import leap
10
  from optillm.reread import re2_approach
11
 
 
12
  API_KEY = os.environ.get("OPENROUTER_API_KEY")
13
 
14
  def compare_responses(message, model1, approach1, model2, approach2, system_message, max_tokens, temperature, top_p):
@@ -32,12 +35,11 @@ def parse_conversation(messages):
32
  initial_query = "\n".join(conversation)
33
  return system_prompt, initial_query
34
 
35
- def respond(message, history, model, approach, system_message, max_tokens, temperature, top_p, image=None):
36
  try:
37
  client = OpenAI(api_key=API_KEY, base_url="https://openrouter.ai/api/v1")
38
  messages = [{"role": "system", "content": system_message}]
39
 
40
- # Add history if available
41
  for val in history:
42
  if val[0]:
43
  messages.append({"role": "user", "content": val[0]})
@@ -47,29 +49,21 @@ def respond(message, history, model, approach, system_message, max_tokens, tempe
47
  messages.append({"role": "user", "content": message})
48
 
49
  if approach == "none":
50
- # Prepare the API request data
51
- data = {
52
- "model": model,
53
- "messages": messages,
54
- "max_tokens": max_tokens,
55
- "temperature": temperature,
56
- "top_p": top_p,
57
- }
58
- if image:
59
- data["image"] = image # Add image if provided
60
-
61
  response = client.chat.completions.create(
62
  extra_headers={
63
  "HTTP-Referer": "https://github.com/codelion/optillm",
64
  "X-Title": "optillm"
65
  },
66
- **data
 
 
 
 
67
  )
68
  return response.choices[0].message.content
69
  else:
70
  system_prompt, initial_query = parse_conversation(messages)
71
 
72
- # Handle different approaches
73
  if approach == 'rto':
74
  final_response, _ = round_trip_optimization(system_prompt, initial_query, client, model)
75
  elif approach == 'z3':
@@ -95,13 +89,13 @@ def respond(message, history, model, approach, system_message, max_tokens, tempe
95
 
96
  def create_model_dropdown():
97
  return gr.Dropdown(
98
- [ "meta-llama/llama-3.1-8b-instruct:free", "nousresearch/hermes-3-llama-3.1-405b:free", "meta-llama/llama-3.2-1b-instruct:free",
99
- "mistralai/mistral-7b-instruct:free", "mistralai/pixtral-12b:free", "meta-llama/llama-3.1-70b-instruct:free",
100
  "qwen/qwen-2-7b-instruct:free", "qwen/qwen-2-vl-7b-instruct:free", "google/gemma-2-9b-it:free", "liquid/lfm-40b:free", "meta-llama/llama-3.1-405b-instruct:free",
101
  "openchat/openchat-7b:free", "meta-llama/llama-3.2-90b-vision-instruct:free", "meta-llama/llama-3.2-11b-vision-instruct:free",
102
  "meta-llama/llama-3-8b-instruct:free", "meta-llama/llama-3.2-3b-instruct:free", "microsoft/phi-3-medium-128k-instruct:free",
103
  "microsoft/phi-3-mini-128k-instruct:free", "huggingfaceh4/zephyr-7b-beta:free"],
104
- value="meta-llama/llama-3.1-8b-instruct:free", label="Model"
105
  )
106
 
107
  def create_approach_dropdown():
@@ -129,25 +123,24 @@ with gr.Blocks() as demo:
129
  approach = create_approach_dropdown()
130
  chatbot = gr.Chatbot()
131
  msg = gr.Textbox()
132
- image = gr.Image(type="pil", label="Upload Image (optional)")
133
  with gr.Row():
134
  submit = gr.Button("Submit")
135
  clear = gr.Button("Clear")
136
 
137
- def user(user_message, history, uploaded_image):
138
- return "", history + [[user_message, None]], uploaded_image
139
 
140
- def bot(history, model, approach, system_message, max_tokens, temperature, top_p, uploaded_image):
141
  user_message = history[-1][0]
142
- bot_message = respond(user_message, history[:-1], model, approach, system_message, max_tokens, temperature, top_p, image=uploaded_image)
143
  history[-1][1] = bot_message
144
  return history
145
 
146
- msg.submit(user, [msg, chatbot, image], [msg, chatbot, image]).then(
147
- bot, [chatbot, model, approach, system_message, max_tokens, temperature, top_p, image], chatbot
148
  )
149
- submit.click(user, [msg, chatbot, image], [msg, chatbot, image]).then(
150
- bot, [chatbot, model, approach, system_message, max_tokens, temperature, top_p, image], chatbot
151
  )
152
  clear.click(lambda: None, None, chatbot, queue=False)
153
 
@@ -160,7 +153,6 @@ with gr.Blocks() as demo:
160
 
161
  compare_input = gr.Textbox(label="Enter your message for comparison")
162
  compare_button = gr.Button("Compare")
163
- compare_image = gr.Image(type="pil", label="Upload Image for Comparison")
164
 
165
  with gr.Row():
166
  output1 = gr.Textbox(label="Response 1")
@@ -173,4 +165,4 @@ with gr.Blocks() as demo:
173
  )
174
 
175
  if __name__ == "__main__":
176
- demo.launch()
 
1
  import os
2
  import gradio as gr
3
+
4
  from openai import OpenAI
5
+
6
  from optillm.cot_reflection import cot_reflection
7
  from optillm.rto import round_trip_optimization
8
  from optillm.z3_solver import Z3SymPySolverSystem
 
11
  from optillm.leap import leap
12
  from optillm.reread import re2_approach
13
 
14
+
15
  API_KEY = os.environ.get("OPENROUTER_API_KEY")
16
 
17
  def compare_responses(message, model1, approach1, model2, approach2, system_message, max_tokens, temperature, top_p):
 
35
  initial_query = "\n".join(conversation)
36
  return system_prompt, initial_query
37
 
38
+ def respond(message, history, model, approach, system_message, max_tokens, temperature, top_p):
39
  try:
40
  client = OpenAI(api_key=API_KEY, base_url="https://openrouter.ai/api/v1")
41
  messages = [{"role": "system", "content": system_message}]
42
 
 
43
  for val in history:
44
  if val[0]:
45
  messages.append({"role": "user", "content": val[0]})
 
49
  messages.append({"role": "user", "content": message})
50
 
51
  if approach == "none":
 
 
 
 
 
 
 
 
 
 
 
52
  response = client.chat.completions.create(
53
  extra_headers={
54
  "HTTP-Referer": "https://github.com/codelion/optillm",
55
  "X-Title": "optillm"
56
  },
57
+ model=model,
58
+ messages=messages,
59
+ max_tokens=max_tokens,
60
+ temperature=temperature,
61
+ top_p=top_p,
62
  )
63
  return response.choices[0].message.content
64
  else:
65
  system_prompt, initial_query = parse_conversation(messages)
66
 
 
67
  if approach == 'rto':
68
  final_response, _ = round_trip_optimization(system_prompt, initial_query, client, model)
69
  elif approach == 'z3':
 
89
 
90
  def create_model_dropdown():
91
  return gr.Dropdown(
92
+ [ "meta-llama/llama-3.1-8b-instruct:free", "nousresearch/hermes-3-llama-3.1-405b:free","meta-llama/llama-3.2-1b-instruct:free",
93
+ "mistralai/mistral-7b-instruct:free","mistralai/pixtral-12b:free","meta-llama/llama-3.1-70b-instruct:free",
94
  "qwen/qwen-2-7b-instruct:free", "qwen/qwen-2-vl-7b-instruct:free", "google/gemma-2-9b-it:free", "liquid/lfm-40b:free", "meta-llama/llama-3.1-405b-instruct:free",
95
  "openchat/openchat-7b:free", "meta-llama/llama-3.2-90b-vision-instruct:free", "meta-llama/llama-3.2-11b-vision-instruct:free",
96
  "meta-llama/llama-3-8b-instruct:free", "meta-llama/llama-3.2-3b-instruct:free", "microsoft/phi-3-medium-128k-instruct:free",
97
  "microsoft/phi-3-mini-128k-instruct:free", "huggingfaceh4/zephyr-7b-beta:free"],
98
+ value="nousresearch/hermes-3-llama-3.1-405b:free", label="Model"
99
  )
100
 
101
  def create_approach_dropdown():
 
123
  approach = create_approach_dropdown()
124
  chatbot = gr.Chatbot()
125
  msg = gr.Textbox()
 
126
  with gr.Row():
127
  submit = gr.Button("Submit")
128
  clear = gr.Button("Clear")
129
 
130
+ def user(user_message, history):
131
+ return "", history + [[user_message, None]]
132
 
133
+ def bot(history, model, approach, system_message, max_tokens, temperature, top_p):
134
  user_message = history[-1][0]
135
+ bot_message = respond(user_message, history[:-1], model, approach, system_message, max_tokens, temperature, top_p)
136
  history[-1][1] = bot_message
137
  return history
138
 
139
+ msg.submit(user, [msg, chatbot], [msg, chatbot]).then(
140
+ bot, [chatbot, model, approach, system_message, max_tokens, temperature, top_p], chatbot
141
  )
142
+ submit.click(user, [msg, chatbot], [msg, chatbot]).then(
143
+ bot, [chatbot, model, approach, system_message, max_tokens, temperature, top_p], chatbot
144
  )
145
  clear.click(lambda: None, None, chatbot, queue=False)
146
 
 
153
 
154
  compare_input = gr.Textbox(label="Enter your message for comparison")
155
  compare_button = gr.Button("Compare")
 
156
 
157
  with gr.Row():
158
  output1 = gr.Textbox(label="Response 1")
 
165
  )
166
 
167
  if __name__ == "__main__":
168
+ demo.launch()