Richie-O3 committed on
Commit
4c87232
1 Parent(s): a640dcc

add custom prompts

Browse files
Files changed (2) hide show
  1. app.py +42 -4
  2. backend_functions.py +15 -9
app.py CHANGED
@@ -22,7 +22,8 @@ with gr.Blocks() as main_app:
22
 
23
  with gr.Column():
24
  with gr.Row():
25
- options_audio = gr.Radio(["XTTS", "Elevenlabs"], label="Audio Generation")
 
26
  output_audio = gr.Audio(interactive=False, label='Audio', autoplay=False)
27
 
28
  messages = gr.State([])
@@ -34,7 +35,44 @@ with gr.Blocks() as main_app:
34
  with gr.Row():
35
  button_text = gr.Button(value='Submit text')
36
  clear_button = gr.ClearButton([chat, messages])
37
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  with gr.Tab('Times'):
39
 
40
  columns = ["User Message", "Chatbot Response", "Standalone Question", "Create Embedding", "Query Pinecone",
@@ -49,7 +87,7 @@ with gr.Blocks() as main_app:
49
 
50
  text.submit(
51
  fn=get_answer,
52
- inputs=[text, chat, messages, output_audio, output_video, table_times, options_audio],
53
  outputs=[chat, output_audio, output_video, table_times]
54
  ).then(
55
  lambda: None, None, [text]
@@ -61,7 +99,7 @@ with gr.Blocks() as main_app:
61
 
62
  button_text.click(
63
  fn=get_answer,
64
- inputs=[text, chat, messages, output_audio, output_video, table_times, options_audio],
65
  outputs=[chat, output_audio, output_video, table_times]
66
  ).then(
67
  lambda: None, None, [text]
 
22
 
23
  with gr.Column():
24
  with gr.Row():
25
+ options_audio = gr.Radio(["XTTS", "Elevenlabs"], value="Elevenlabs", label="Audio Generation")
26
+ options_prompt = gr.Radio(["Default", "Custom"], value="Default", label="Prompts")
27
  output_audio = gr.Audio(interactive=False, label='Audio', autoplay=False)
28
 
29
  messages = gr.State([])
 
35
  with gr.Row():
36
  button_text = gr.Button(value='Submit text')
37
  clear_button = gr.ClearButton([chat, messages])
38
+
39
+ with gr.Tab('Prompts'):
40
+ general_prompt = gr.Text(
41
+ placeholder='Ingrese el prompt general del bot', label='General prompt'
42
+ )
43
+ standalone_prompt = gr.Text(
44
+ placeholder='Ingrese el prompt usado para encontrar el contexto', label='Standalone prompt'
45
+ )
46
+ _ = gr.Markdown(
47
+ "```\n"
48
+ "Recuerde dejar estos formatos en los prompts: \n"
49
+ "----------------------- General --------------------------\n"
50
+ "=========\n"
51
+ "Contexto:\n"
52
+ "CONTEXTO\n"
53
+ "=========\n"
54
+ "\n"
55
+ "----------------------- Standalone -----------------------\n"
56
+ "You are a standalone question-maker. Given the following chat history and follow-up message, rephrase "
57
+ "the follow-up phrase to be a standalone question (sometimes the follow-up is not a question, so create "
58
+ "a standalone phrase), in spanish. In the standalone message you must include all the information at the "
59
+ "moment that is known about the customer, all the important nouns and what they are looking for. In cases "
60
+ "where you think is usefully, include what is the best recommendation for the customer. To give you "
61
+ "context, the conversation is about (INGRESE INFORMACIÓN DE LA MARCA, EL NOMBRE Y DE MANERA MUY GENERAL "
62
+ "QUE ES LO QUE VENDE).\n"
63
+ "There might be moments when there isn't a question in those cases return a standalone phrase: for example "
64
+ "if the user says 'hola' (or something similar) then the output would be 'el usuario está saludando', or "
65
+ "if the user says 'gracias' or 'es muy util' (or something similar) then the output would be a phrase "
66
+ "showing that the user is grateful and what they are grateful for, or if the user say 'si' then it would "
67
+ "be a phrase encapsulating the relationship to its previous question or phrase.\n"
68
+ "Your response cannot be more than 100 words.\n"
69
+ "Chat History:\n"
70
+ "\n"
71
+ "HISTORY\n"
72
+ "Follow-up message: QUESTION\n"
73
+ "Standalone message:\n", line_breaks=True
74
+ )
75
+
76
  with gr.Tab('Times'):
77
 
78
  columns = ["User Message", "Chatbot Response", "Standalone Question", "Create Embedding", "Query Pinecone",
 
87
 
88
  text.submit(
89
  fn=get_answer,
90
+ inputs=[text, chat, messages, output_audio, output_video, table_times, options_audio, options_prompt, general_prompt, standalone_prompt],
91
  outputs=[chat, output_audio, output_video, table_times]
92
  ).then(
93
  lambda: None, None, [text]
 
99
 
100
  button_text.click(
101
  fn=get_answer,
102
+ inputs=[text, chat, messages, output_audio, output_video, table_times, options_audio, options_prompt, general_prompt, standalone_prompt],
103
  outputs=[chat, output_audio, output_video, table_times]
104
  ).then(
105
  lambda: None, None, [text]
backend_functions.py CHANGED
@@ -96,9 +96,12 @@ def _query_pinecone(embedding):
96
  return final_results
97
 
98
 
99
- def _general_prompt(context):
100
- with open("prompt_general.txt", "r") as file:
101
- file_prompt = file.read().replace("\n", "")
 
 
 
102
 
103
  context_prompt = file_prompt.replace('CONTEXT', context)
104
  print(context_prompt)
@@ -141,9 +144,12 @@ def _call_gpt_standalone(prompt: str):
141
  return response.choices[0].message.content
142
 
143
 
144
- def _get_standalone_question(question, history_messages):
145
- with open("prompt_standalone_message.txt", "r") as file:
146
- file_prompt_standalone = file.read().replace("\n", "")
 
 
 
147
 
148
  history = ''
149
  for i, msg in enumerate(history_messages):
@@ -320,7 +326,7 @@ def _create_video(link_audio: str, unique_id: str):
320
  return signed_url_video
321
 
322
 
323
- def get_answer(question: str, chatbot: list[tuple[str, str]], history_messages, comp_audio, comp_video, df_table, option_audio):
324
  """
325
  Gets the answer of the chatbot
326
  """
@@ -329,7 +335,7 @@ def get_answer(question: str, chatbot: list[tuple[str, str]], history_messages,
329
  message_output = 'Un placer haberte ayudado, hasta luego!'
330
  else:
331
  start_get_standalone_question = time.time()
332
- standalone_msg_q = _get_standalone_question(question, history_messages) # create standalone question or message
333
  end_get_standalone_question = time.time()
334
  time_get_standalone_question = end_get_standalone_question - start_get_standalone_question
335
 
@@ -344,7 +350,7 @@ def get_answer(question: str, chatbot: list[tuple[str, str]], history_messages,
344
  time_query_pinecone = end_query_pinecone - start_query_pinecone
345
 
346
  start_general_prompt = time.time()
347
- final_context_prompt = _general_prompt(best_results) # create context/general prompt
348
  end_general_prompt = time.time()
349
  time_general_prompt = end_general_prompt - start_general_prompt
350
 
 
96
  return final_results
97
 
98
 
99
+ def _general_prompt(context, option_prompt, general_prompt):
100
+ if option_prompt == "Default":
101
+ with open("prompt_general.txt", "r") as file:
102
+ file_prompt = file.read().replace("\n", "")
103
+ elif option_prompt == "Custom":
104
+ file_prompt = general_prompt
105
 
106
  context_prompt = file_prompt.replace('CONTEXT', context)
107
  print(context_prompt)
 
144
  return response.choices[0].message.content
145
 
146
 
147
+ def _get_standalone_question(question, history_messages, option_prompt, standalone_prompt):
148
+ if option_prompt == "Default":
149
+ with open("prompt_standalone_message.txt", "r") as file:
150
+ file_prompt_standalone = file.read().replace("\n", "")
151
+ elif option_prompt == "Custom":
152
+ file_prompt_standalone = standalone_prompt
153
 
154
  history = ''
155
  for i, msg in enumerate(history_messages):
 
326
  return signed_url_video
327
 
328
 
329
+ def get_answer(question: str, chatbot: list[tuple[str, str]], history_messages, comp_audio, comp_video, df_table, option_audio, option_prompt, general_prompt, standalone_prompt):
330
  """
331
  Gets the answer of the chatbot
332
  """
 
335
  message_output = 'Un placer haberte ayudado, hasta luego!'
336
  else:
337
  start_get_standalone_question = time.time()
338
+ standalone_msg_q = _get_standalone_question(question, history_messages, option_prompt, standalone_prompt) # create standalone question or message
339
  end_get_standalone_question = time.time()
340
  time_get_standalone_question = end_get_standalone_question - start_get_standalone_question
341
 
 
350
  time_query_pinecone = end_query_pinecone - start_query_pinecone
351
 
352
  start_general_prompt = time.time()
353
+ final_context_prompt = _general_prompt(best_results, option_prompt, general_prompt) # create context/general prompt
354
  end_general_prompt = time.time()
355
  time_general_prompt = end_general_prompt - start_general_prompt
356