import streamlit as st
import google.generativeai as genai
from dotenv import load_dotenv
import os
# Load environment variables
load_dotenv()
# Configure Google Generative AI with API key
api_key = os.getenv("GENERATIVEAI_API_KEY")
genai.configure(api_key=api_key)
# Initialize the session state to store chat history
if 'messages' not in st.session_state:
    st.session_state['messages'] = []
# Keep the chat object in session state so it survives Streamlit reruns
# (a module-level global would be reset on every script re-execution)
if 'chat' not in st.session_state:
    st.session_state['chat'] = None
# Generation configuration and safety settings
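# temperature controls randomness, top_p/top_k restrict sampling to the most
# likely tokens, and max_output_tokens caps the length of each reply.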
generation_config = {
"temperature": 0.9,
"top_p": 0.5,
"top_k": 5,
"max_output_tokens": 1000,
}
safety_settings = [
    {
        "category": "HARM_CATEGORY_HARASSMENT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE"
    },
    {
        "category": "HARM_CATEGORY_HATE_SPEECH",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE"
    },
    {
        "category": "HARM_CATEGORY_SEXUALLY_EXPLICIT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE"
    },
    {
        "category": "HARM_CATEGORY_DANGEROUS_CONTENT",
        "threshold": "BLOCK_MEDIUM_AND_ABOVE"
    },
]
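# BLOCK_MEDIUM_AND_ABOVE blocks any response the API rates as having a medium
# or high probability of harm in the corresponding category.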
# Function to handle text summary requests
def text_summary(text, is_new=False):
    # Start a fresh chat session when requested or when none exists yet
    if is_new or st.session_state['chat'] is None:
        model = genai.GenerativeModel(
            model_name="gemini-pro",
            generation_config=generation_config,
            safety_settings=safety_settings
        )
        chat = model.start_chat()
        # Prime the session with the formatting instructions
        chat.send_message("""
        Act as a financial advisor and generate financial summaries in a structured and tabular format. Follow these guidelines strictly:
        - Start each section with a clear title in <strong> tags.
        - For key metrics, use a table with two columns: one for the metric name and one for its value.
        - Use bullet points only for listing risks and growth prospects.
        - Ensure each section is clearly separated with line breaks.
        - Do not use bold or italic formatting (**, *), except for the specified HTML tags.
        """)
        st.session_state['chat'] = chat
    # Send the user's text in the current session and return the reply
    response = st.session_state['chat'].send_message(text)
    return response.text
# Layout for chatbot UI
st.title("Financial Summary Chatbot")
# Adding custom CSS for scrollable chat output with set dimensions
st.markdown("""
<style>
.chat-output {
    max-height: 400px;
    width: 100%;
    overflow-y: scroll;
    padding: 10px;
    border: 1px solid #ccc;
    background-color: #f5f5f5;
}
.input-container {
    position: fixed;
    bottom: 0;
    width: 100%;
    background-color: #fff;
    padding: 10px 0;
}
</style>
""", unsafe_allow_html=True)
# White box (output container) where the chat output should go
output_container = st.empty() # This will hold the dynamic content
# Function to display the chat history in the white box
def display_chat():
    with output_container.container():  # Use the white box for output
        # Loop through session messages and display them
        for message in st.session_state['messages']:
            if message['role'] == 'user':
                st.write(f"**You:** {message['content']}")
            else:
                st.write(f"**Bot:** {message['content']}")
                # Add a separator after each bot message
                st.markdown("<hr style='border:1px dashed;'>", unsafe_allow_html=True)
# Input container (This will stay at the bottom)
input_container = st.container()
# Fixed input area at the bottom using the input container
with input_container:
    st.markdown('<div class="input-container">', unsafe_allow_html=True)
    is_new_session = st.checkbox("Start new session", value=False)
    user_input = st.text_area("Type your message here:", height=100)
    send_button = st.button("Send")

# If the user presses 'Send'
if send_button and user_input:
    # Store the user's input in the chat history
    st.session_state['messages'].append({"role": "user", "content": user_input})
    # Call text_summary to get the bot's response
    bot_response = text_summary(user_input, is_new_session)
    # Store the bot's response in the chat history
    st.session_state['messages'].append({"role": "bot", "content": bot_response})
    # Reset the local copy of the input; note this does not clear the
    # text_area widget itself
    user_input = ""

# Display the chat history in the white box
display_chat()