import os

import gradio as gr
from langchain_google_genai.chat_models import ChatGoogleGenerativeAI

# Set the path to the service account key
os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "./firm-catalyst-437006-s4-407500537db5.json"

# Initialize the language model with required parameters
llm = ChatGoogleGenerativeAI(model='gemini-1.5-pro')


def chat_with_gemini(message, chat_history):
    # Generate a response using the language model
    bot_response = llm.invoke(message).content  # invoke() returns an AIMessage; .content holds the reply text
    chat_history.append((message, bot_response))  # Append the user and bot messages as a tuple
    return chat_history, chat_history


# Create a Gradio interface
with gr.Blocks() as iface:
    gr.Markdown("# Ken Chatbot")
    gr.Markdown("Ask me anything!")
    chatbot = gr.Chatbot(elem_id="chatbot")  # Give the chatbot an element ID for styling
    msg = gr.Textbox(
        label="Type your message here...",
        placeholder="Enter your message and press Enter to send it...",
    )
    state = gr.State([])  # Store the chat history

    # When the user submits a message, update the chatbot and the stored history
    msg.submit(chat_with_gemini, [msg, state], [chatbot, state])
    msg.submit(lambda: "", None, msg)  # Clear the input box after submission

    # Custom CSS for styling the chatbot messages
    gr.HTML(""" """)

# Launch the interface with debugging enabled
iface.launch(debug=True)
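
# A minimal sketch of a quick sanity check, assuming the service-account
# credentials above are valid and the gemini-1.5-pro model is accessible to
# that project. Because launch(debug=True) blocks until the server stops,
# run this in a separate session or temporarily comment out the launch call.
#
# history = []
# history, _ = chat_with_gemini("Hello! What can you do?", history)
# print(history[-1][1])  # Print the bot's latest reply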