Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -55,7 +55,7 @@ def predict(
|
|
55 |
|
56 |
client = chromadb.PersistentClient(path="./manifesto-database")
|
57 |
manifesto_collection = client.get_or_create_collection(name="manifesto-database", embedding_function=multilingual_embeddings)
|
58 |
-
retrieved_context = manifesto_collection.query(query_texts=[
|
59 |
contexts = [context for context in retrieved_context['documents']]
|
60 |
rag_template = f"\nHier sind Kontextinformationen:\n" + "\n".join([f"{context}" for context in contexts])
|
61 |
|
@@ -100,7 +100,7 @@ def predict(
|
|
100 |
temperature=temperature,
|
101 |
max_tokens=1000).choices[0].message.content
|
102 |
|
103 |
-
return response1, response2
|
104 |
|
105 |
def update_political_statement_options(test_type):
|
106 |
# Append an index starting from 1 before each statement
|
@@ -192,6 +192,10 @@ def main():
|
|
192 |
output1 = gr.Textbox(label="Model 1 Response")
|
193 |
output2 = gr.Textbox(label="Model 2 Response")
|
194 |
|
|
|
|
|
|
|
|
|
195 |
with gr.Tab("Settings"):
|
196 |
with gr.Row():
|
197 |
openai_api_key = gr.Textbox(label="OpenAI API Key", placeholder="Enter your OpenAI API key here", show_label=True, type="password")
|
@@ -199,14 +203,18 @@ def main():
|
|
199 |
|
200 |
with gr.Row():
|
201 |
temp_input = gr.Slider(minimum=0, maximum=1, step=0.01, label="Temperature", value=0.7)
|
|
|
|
|
202 |
top_p_input = gr.Slider(minimum=0, maximum=1, step=0.01, label="Top P", value=1)
|
|
|
|
|
203 |
num_contexts = gr.Slider(minimum=0, maximum=1, step=0.01, label="Top k retrieved contexts", value=3)
|
204 |
|
205 |
# Link settings to the predict function
|
206 |
submit_btn.click(
|
207 |
fn=predict,
|
208 |
inputs=[openai_api_key, togetherai_api_key, model_selector1, model_selector2, prompt_manipulation, direct_steering_option, ideology_test, political_statement, temp_input, top_p_input, num_contexts],
|
209 |
-
outputs=[output1, output2]
|
210 |
)
|
211 |
|
212 |
demo.launch()
|
|
|
55 |
|
56 |
client = chromadb.PersistentClient(path="./manifesto-database")
|
57 |
manifesto_collection = client.get_or_create_collection(name="manifesto-database", embedding_function=multilingual_embeddings)
|
58 |
+
retrieved_context = manifesto_collection.query(query_texts=[political_statement[3:]], n_results=int(num_contexts), where={"ideology": direct_steering_option})
|
59 |
contexts = [context for context in retrieved_context['documents']]
|
60 |
rag_template = f"\nHier sind Kontextinformationen:\n" + "\n".join([f"{context}" for context in contexts])
|
61 |
|
|
|
100 |
temperature=temperature,
|
101 |
max_tokens=1000).choices[0].message.content
|
102 |
|
103 |
+
return response1, response2, prompt
|
104 |
|
105 |
def update_political_statement_options(test_type):
|
106 |
# Append an index starting from 1 before each statement
|
|
|
192 |
output1 = gr.Textbox(label="Model 1 Response")
|
193 |
output2 = gr.Textbox(label="Model 2 Response")
|
194 |
|
195 |
+
# Place this at the end of the App tab setup
|
196 |
+
with gr.Accordion(label="Additional Information", open=False):
|
197 |
+
prompt_display = gr.Textbox(label="Used Prompt", interactive=False, placeholder="Prompt used in the last submission will appear here.")
|
198 |
+
|
199 |
with gr.Tab("Settings"):
|
200 |
with gr.Row():
|
201 |
openai_api_key = gr.Textbox(label="OpenAI API Key", placeholder="Enter your OpenAI API key here", show_label=True, type="password")
|
|
|
203 |
|
204 |
with gr.Row():
|
205 |
temp_input = gr.Slider(minimum=0, maximum=1, step=0.01, label="Temperature", value=0.7)
|
206 |
+
|
207 |
+
with gr.Row():
|
208 |
top_p_input = gr.Slider(minimum=0, maximum=1, step=0.01, label="Top P", value=1)
|
209 |
+
|
210 |
+
with gr.Row():
|
211 |
num_contexts = gr.Slider(minimum=1, maximum=10, step=1, label="Top k retrieved contexts", value=3)
|
212 |
|
213 |
# Link settings to the predict function
|
214 |
submit_btn.click(
|
215 |
fn=predict,
|
216 |
inputs=[openai_api_key, togetherai_api_key, model_selector1, model_selector2, prompt_manipulation, direct_steering_option, ideology_test, political_statement, temp_input, top_p_input, num_contexts],
|
217 |
+
outputs=[output1, output2, prompt_display]
|
218 |
)
|
219 |
|
220 |
demo.launch()
|