Update app.py
app.py
CHANGED
@@ -35,6 +35,8 @@ data = {
 }
 table = pd.DataFrame.from_dict(data)
 
+step = 0
+
 def chatbot_response(user_message):
     # Generate chatbot response using the chatbot model
     #inputs = chatbot_tokenizer.encode("User: " + user_message, return_tensors="pt")
@@ -52,7 +54,8 @@ def chatbot_response(user_message):
 
     # pretty print last ouput tokens from bot
     response = "DialoGPT: {}".format(chatbot_tokenizer.decode(chat_history_ids[:, bot_input_ids.shape[-1]:][0], skip_special_tokens=True))
-
+
+    step += 1
     return response
 
 def sql_response(user_query):
@@ -72,7 +75,7 @@ chatbot_interface = gr.Interface(
     outputs=gr.Textbox(),
     live=True,
     capture_session=True,
+    title="ST Chatbot",
     description="Type your message in the box above, and the chatbot will respond.",
 )
 
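Note on the new counter: the added `step += 1` rebinds a module-level name from inside chatbot_response, which in Python requires a `global step` declaration somewhere in the function body. That declaration is not visible in the hunks above, so it may live in an unshown part of the file. A minimal sketch of the intended pattern, with the DialoGPT call stubbed out because the full function body is not part of this diff:

# Minimal sketch, assuming `step` is meant to count generated responses.
# The model call is a stand-in; the real code formats chatbot_tokenizer.decode(...) output.
step = 0

def chatbot_response(user_message):
    global step  # required so `step += 1` rebinds the module-level counter
    response = "DialoGPT: (stubbed reply to {!r})".format(user_message)
    step += 1    # increment once per generated response
    return response

print(chatbot_response("hi"), step)  # stubbed reply, counter now 1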