Spaces:
Running
Running
File size: 1,723 Bytes
b0397d2 a602ed1 b0397d2 89b4d20 da9fe57 1c76710 7e0e867 4e4c8dd b0397d2 6f9707c 1c76710 4e4c8dd 7e0e867 6f9707c f4d1c4e b0397d2 6f9707c 4e4c8dd 6f9707c 4e4c8dd b0397d2 7e0e867 b0397d2 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 |
import os
import gradio as gr
from langchain_google_genai.chat_models import ChatGoogleGenerativeAI
# Point Google application-default credentials at the service-account key file.
# NOTE(review): hard-coded relative path — this breaks if the script is run
# from another working directory or the key file is absent; prefer setting
# GOOGLE_APPLICATION_CREDENTIALS in the environment outside the script.
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./firm-catalyst-437006-s4-407500537db5.json"
# Module-level Gemini chat client used by chat_with_gemini below.
# Constructing it here means credentials/model problems surface at import time.
llm = ChatGoogleGenerativeAI(model='gemini-1.5-pro')
# Chat function
def chat_with_gemini(user_input, chat_history):
    """Send *user_input* to the Gemini model and append the exchange to the history.

    Args:
        user_input: The message the user typed.
        chat_history: List of ``(speaker, message)`` tuples; mutated in place
            (it is the ``gr.State`` value, so in-place mutation persists).

    Returns:
        The updated chat history, in the (speaker, message) pair format
        accepted by ``gr.Chatbot``.
    """
    # Record the user's message first so it is shown even if the model call fails.
    chat_history.append(("User", user_input))
    try:
        # ``predict()`` is deprecated in modern LangChain; ``invoke()`` is the
        # supported entry point and returns a message whose ``.content`` holds
        # the reply text.
        response = llm.invoke(user_input)
        chat_history.append(("Bot", response.content))
    except Exception as e:
        # Surface the failure inside the chat instead of crashing the UI.
        chat_history.append(("Bot", f"Error: {str(e)}"))
    return chat_history
# Create a Gradio interface
# Assemble the Gradio UI: a title, a chat display, a text input, and
# per-session history stored in gr.State.
with gr.Blocks() as iface:
    gr.Markdown("# Chatbot with Gemini 1.5")
    gr.Markdown("Ask me anything!")

    chat_display = gr.Chatbot()  # Conversation pane fed by chat_with_gemini
    user_box = gr.Textbox(label="Type your message here...", placeholder="Enter your message...")
    history_state = gr.State([])  # One history list per browser session

    # First submit handler runs the model and refreshes the conversation pane;
    # the second clears the input box for the next message.
    user_box.submit(chat_with_gemini, [user_box, history_state], [chat_display])
    user_box.submit(lambda: "", None, user_box)

# Start the web server; debug=True prints tracebacks to the console.
iface.launch(debug=True)
|