import os
import shutil
from uuid import uuid4

import streamlit as st

from llm import load_llm, response_generator
from vector_store import load_vector_store, process_pdf
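
# GGUF checkpoint identifiers for the local LLM (Qwen2.5 1.5B Instruct, q8_0 quantisation)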
repo_id = "Qwen/Qwen2.5-1.5B-Instruct-GGUF"
filename = "qwen2.5-1.5b-instruct-q8_0.gguf"
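
# Load the LLM and the vector store through the project's local helper modules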
llm = load_llm(repo_id, filename)
vector_store = load_vector_store()
st.title("Medical Triage System")
st.subheader("Upload Referral Letters for Triage")
st.write(
    "This AI-powered system analyzes referral letters to classify them as **Urgent** or **Routine** "
    "and suggests either a **Face-to-Face** or **Virtual Appointment**."
)

# First run: reset the vector store, remove any leftover temp uploads, and start an empty chat history
if "messages" not in st.session_state:
    vector_store.reset_collection()
    if os.path.exists("./temp"):
        shutil.rmtree("./temp")
    st.session_state.messages = []

# Display chat messages
for message in st.session_state.messages:
    with st.chat_message(message["role"]):
        st.write(message["content"])

# Accept PDF uploads
with st.sidebar:
    st.title("Upload Referral Letters")
    uploaded_files = st.file_uploader(
        "Choose PDF files", accept_multiple_files=True, type="pdf"
    )
    if "processed_files" not in st.session_state:
        st.session_state.processed_files = set()
    for uploaded_file in uploaded_files or []:
        # Streamlit reruns this script on every interaction, so skip letters
        # already ingested into the vector store during this session
        if uploaded_file.name in st.session_state.processed_files:
            continue
        temp_dir = "./temp"
        os.makedirs(temp_dir, exist_ok=True)
        temp_file = f"{temp_dir}/{uploaded_file.name}-{uuid4()}.pdf"
        with open(temp_file, "wb") as file:
            file.write(uploaded_file.getvalue())
        st.write(f"Processing {uploaded_file.name}...")
        process_pdf(temp_file, vector_store)
        st.session_state.processed_files.add(uploaded_file.name)
        st.success(f"Processed {uploaded_file.name} successfully. ✅")

# Process user query
# st.chat_input clears after each submission, so reruns triggered elsewhere do not resubmit the query
if prompt := st.chat_input("Enter triage-related query (e.g., 'Is this urgent?')"):
    st.session_state.messages.append({"role": "user", "content": prompt})
    with st.chat_message("user"):
        st.markdown(prompt)
    with st.chat_message("assistant"):
        # Retrieve the three most relevant letter chunks for the query
        retriever = vector_store.as_retriever(search_kwargs={"k": 3})
        # response_generator is expected to return a dict with the generated "answer"
        # and the retrieved "context" documents
        response = response_generator(llm, st.session_state.messages, prompt, retriever)
        st.markdown(response["answer"])
        with st.expander("See Context"):
            st.write(response["context"])
    st.session_state.messages.append({"role": "assistant", "content": response["answer"]})
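
# Run locally with: streamlit run app.py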