# interview_copilot_2 / app copy 2.py
import streamlit as st
from st_pages import Page, show_pages
from openai import OpenAI
import urllib.parse
from whisper_stt import whisper_stt
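# Note: st_pages is a third-party package (published on PyPI as "st-pages").
# whisper_stt appears to be a local helper module that records microphone audio
# and returns an OpenAI Whisper transcription; both points are assumptions based
# on the imports above.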
# Set page configuration
st.set_page_config(layout="wide")
show_pages([Page("app.py", "Home", "🏠")])
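# show_pages (from st_pages) registers the listed pages in the sidebar navigation.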
# Initialize session state variables
if 'paused' not in st.session_state:
    st.session_state.paused = False
if 'question_text' not in st.session_state:
    st.session_state.question_text = ""
if 'submitted' not in st.session_state:
    st.session_state.submitted = False
if 'response_content' not in st.session_state:
    st.session_state.response_content = []
if 'stopped' not in st.session_state:
    st.session_state.stopped = False
if 'function_call_count' not in st.session_state:
    st.session_state.function_call_count = 0
if 'transcribed_text' not in st.session_state:
    st.session_state.transcribed_text = ""
if 'last_processed_text' not in st.session_state:
    st.session_state.last_processed_text = ""
if 'questions' not in st.session_state:
    st.session_state.questions = []
def on_stop():
    # Stop-button callback: flag the current stream so it can be interrupted.
    st.session_state.stopped = True

def handle_enter(key):
    # on_change callback for the question text area; Streamlit commits the edit
    # on Ctrl+Enter (or when the widget loses focus), and `key` is supplied via `args`.
    if key == "ctrl+enter":
        new_question = st.session_state.question_input
        print(f"handle_enter called. new_question: '{new_question}'")
        print(f"session state: {st.session_state}")
with st.sidebar:
    api_key = st.text_input("API Key", key="chatbot_api_key", type="password")
    col1, col2 = st.columns(2)
    with col1:
        # Record audio and transcribe it with Whisper; no callback is passed,
        # so the transcription is returned directly.
        transcribed_text = whisper_stt(
            openai_api_key=api_key,
            language='en'
        )
        # Only treat the transcription as a new question if it differs from the
        # last one we saw; otherwise a rerun would overwrite manual edits.
        if transcribed_text and transcribed_text != st.session_state.transcribed_text:
            st.session_state.transcribed_text = transcribed_text
            st.session_state.question_text = transcribed_text
            st.session_state.submitted = True
        if st.session_state.question_text:
            st.write("Current Question:", st.session_state.question_text)
        if 'question_input' in st.session_state and st.session_state.question_input:
            st.write("Current Question Input:", st.session_state.question_input)
    with col2:
        st.button(label='Stop', on_click=on_stop)
    # Create an input for the question and use new_question directly
    new_question = st.text_area("Question",
                                value=st.session_state.question_text or "",
                                height=150,
                                key="question_input",
                                on_change=handle_enter,
                                args=("ctrl+enter",)
                                )
    print(f"After text_area, new_question: '{new_question}'")
    # Check if new_question has changed and is not empty
    if new_question and new_question != st.session_state.question_text:
        st.session_state.question_text = new_question
        st.session_state.submitted = True
    st.markdown("## Navigation")
    for i, q in enumerate(st.session_state.questions):
        st.write(f"Q{i+1}: {q['question']}")
if st.session_state.question_text and not api_key:
    st.info("Please add your OpenAI API key to continue.")
    st.stop()
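# Main flow: when a new question has been submitted (and not stopped), send it to
# the Chat Completions API and stream the answer into a placeholder below.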
if st.session_state.submitted and not st.session_state.stopped:
    st.session_state.questions.append({'question': st.session_state.question_text, 'response': ''})
    client = OpenAI(api_key=api_key)
    st.session_state.messages = [{"role": "user", "content": st.session_state.question_text}]
    response = client.chat.completions.create(
        model="gpt-4o",
        messages=st.session_state.messages,
        stream=True
    )
    # Create a placeholder for the streamed response
    response_placeholder = st.empty()
    response_text = ""
    for chunk in response:
        if st.session_state.stopped:
            # Stop button pressed mid-stream: reset the flags and abort the stream
            st.session_state.stopped = False
            st.session_state.submitted = False
            break
        if chunk and chunk.choices[0].delta.content:
            response_text += chunk.choices[0].delta.content
            # Update the placeholder with the current response text
            response_placeholder.markdown(f"**Answer:** {response_text}")
    if response_text:
        st.session_state.questions[-1]['response'] = response_text
    st.session_state.submitted = False
    st.session_state.stopped = False
# Display previous questions and answers
for i, q in enumerate(st.session_state.questions[:-1]):  # Exclude the last question
    st.markdown(f"### Question {i+1}: {q['question']}")
    st.markdown(f"**Answer:** {q['response']}")
# Display the latest question separately
if st.session_state.questions:
    latest_q = st.session_state.questions[-1]
    st.markdown(f"### Question {len(st.session_state.questions)}: {latest_q['question']}")
    # The answer for the latest question is already being streamed in the placeholder
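# To run the app locally (assuming the dependencies above are installed and the
# file is saved under the name shown in the header):
#   streamlit run "app copy 2.py"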