# ask-me-anything / app.py
import os
import gradio as gr
from langchain_google_genai.chat_models import ChatGoogleGenerativeAI

# Point Google authentication at the service account key bundled with this Space
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./firm-catalyst-437006-s4-407500537db5.json"

# Initialize the Gemini chat model
llm = ChatGoogleGenerativeAI(model='gemini-1.5-pro')
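# Note (a sketch, not part of the original setup): as an alternative to a service-account
# key file, langchain-google-genai can also authenticate with an API key, typically
# supplied via the GOOGLE_API_KEY environment variable ("<your-api-key>" is a placeholder):
#   os.environ["GOOGLE_API_KEY"] = "<your-api-key>"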

# Chat function: takes the latest user message and the running history,
# and returns the updated history
def chat_with_gemini(user_input, chat_history):
    try:
        # Ask the model for a response to the latest message
        response = llm.invoke(user_input)
        # Store the exchange as a (user, bot) pair, the format gr.Chatbot expects
        chat_history.append((user_input, response.content))
    except Exception as e:
        # Surface any error as the bot's reply instead of crashing the app
        chat_history.append((user_input, f"Error: {str(e)}"))
    # Return the history twice: once for the chatbot display, once for the stored state
    return chat_history, chat_history

# Build the Gradio interface
with gr.Blocks() as iface:
    gr.Markdown("# Ken Chatbot")
    gr.Markdown("Ask me anything!")

    chatbot = gr.Chatbot()  # Conversation display
    msg = gr.Textbox(
        label="Your message",
        placeholder="Type your message and press Enter to send...",
    )  # Text input for user messages
    state = gr.State([])  # Chat history shared across turns

    # When the user submits a message, update both the chatbot display and the stored history
    msg.submit(chat_with_gemini, [msg, state], [chatbot, state])
    # Clear the input box after submission
    msg.submit(lambda: "", None, msg)
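
# Optional (a sketch, not required): call iface.queue() before launching if you
# expect concurrent users, e.g. iface.queue().launch(debug=True).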
# Launch the interface with debugging enabled
iface.launch(debug=True)