kendrickfff
committed on
Update app.py
app.py
CHANGED
@@ -9,46 +9,44 @@ os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./firm-catalyst-437006-s4-407500
 # Replace 'your-model-name' with the actual model you intend to use
 llm = ChatGoogleGenerativeAI(model='gemini-1.5-pro')
 
-
-
-
-
-
-
-        # Get response from the model
-        response = llm.predict(user_input)
-
-        # Append the bot's response to the chat history
-        chat_history.append(("Bot", response))
-
-        # Return the updated chat history
-        return chat_history
-    except Exception as e:
-        # In case of an error, return the error message in the chat
-        chat_history.append(("Bot", f"Error: {str(e)}"))
-        return chat_history
+def chat_with_gemini(message, chat_history):
+    # Placeholder function to simulate bot response
+    bot_response = "You said: " + message
+    chat_history.append((message, bot_response))
+    return chat_history, chat_history
 
 # Create a Gradio interface
 with gr.Blocks() as iface:
     gr.Markdown("# Ken Chatbot")
     gr.Markdown("Ask me anything!")
 
-    chatbot = gr.Chatbot()  # Initialize the chatbot
+    chatbot = gr.Chatbot(elem_id="chatbot")  # Initialize the chatbot with an element ID for styling
+    msg = gr.Textbox(label="Type your message here...", placeholder="Enter your message and press enter to send your message...")
     state = gr.State([])  # Store chat history
 
-    # Set up the layout with user input on the right
-    with gr.Row():
-        with gr.Column(scale=3):
-            gr.Markdown("### Bot")
-            bot_display = gr.Chatbot()  # Bot's chat on the left side
-
-        with gr.Column(scale=1):
-            gr.Markdown("### User")
-            msg = gr.Textbox(label="Type your message here...", placeholder="Enter your message and press enter to send your message...")  # User input on the right
-
     # Set up the interaction for when the user submits a message
-    msg.submit(chat_with_gemini, [msg, state], [chatbot])  # Update chatbot with new messages
+    msg.submit(chat_with_gemini, [msg, state], [chatbot, state])  # Update chatbot and state with new messages
     msg.submit(lambda: "", None, msg)  # Clear the input box after submission
 
+    # CSS for styling the chatbot messages
+    iface.css("""
+    #chatbot .message.user {
+        background-color: #DCF8C6;
+        border-radius: 15px;
+        padding: 8px 12px;
+        margin: 5px 50px 5px auto;
+        max-width: 70%;
+        text-align: right;
+    }
+    #chatbot .message.bot {
+        background-color: #E1E1E1;
+        border-radius: 15px;
+        padding: 8px 12px;
+        margin: 5px auto 5px 50px;
+        max-width: 70%;
+        text-align: left;
+    }
+    """)
+
 # Launch the interface with debugging enabled
 iface.launch(debug=True)
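For readers who want to see the updated file end to end, here is a minimal, self-contained sketch of how the new app.py plausibly fits together. The imports, the placeholder credential path, and the llm.invoke(...) hint are assumptions (the hunk starts at line 9, so the top of the file is not shown), and the sketch routes the custom CSS through the css= argument of gr.Blocks rather than the iface.css(...) call that appears in the commit; treat it as an illustrative sketch, not the exact committed file.

# Sketch only: imports and credential path are assumed, not shown in the diff hunk.
import os
import gradio as gr
from langchain_google_genai import ChatGoogleGenerativeAI

# Placeholder path for the service-account key (the real path is truncated in the hunk header).
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./your-service-account.json"

llm = ChatGoogleGenerativeAI(model='gemini-1.5-pro')

def chat_with_gemini(message, chat_history):
    # Placeholder response, as in the commit; swap in llm.invoke(message).content
    # to get a real model reply.
    bot_response = "You said: " + message
    chat_history.append((message, bot_response))
    # First value feeds the gr.Chatbot component, second updates gr.State.
    return chat_history, chat_history

# Same selectors as the committed CSS, passed to gr.Blocks instead of iface.css(...).
custom_css = """
#chatbot .message.user { background-color: #DCF8C6; border-radius: 15px; }
#chatbot .message.bot  { background-color: #E1E1E1; border-radius: 15px; }
"""

with gr.Blocks(css=custom_css) as iface:
    gr.Markdown("# Ken Chatbot")
    gr.Markdown("Ask me anything!")
    chatbot = gr.Chatbot(elem_id="chatbot")
    msg = gr.Textbox(label="Type your message here...",
                     placeholder="Enter your message and press enter to send your message...")
    state = gr.State([])  # Store chat history

    # Send the message and stored history to the handler, then write its two
    # return values back to the chatbot display and the state.
    msg.submit(chat_with_gemini, [msg, state], [chatbot, state])
    # Clear the input box after submission.
    msg.submit(lambda: "", None, msg)

iface.launch(debug=True)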