ferferefer committed on
Commit
ded56f4
1 Parent(s): 47e3bd8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +30 -38
app.py CHANGED
@@ -5,62 +5,38 @@ from llm import load_llm, response_generator
5
  from vector_store import load_vector_store, process_pdf
6
  from uuid import uuid4
7
 
8
- # repo_id = "Qwen/Qwen2.5-0.5B-Instruct-GGUF"
9
- # filename = "qwen2.5-0.5b-instruct-q8_0.gguf"
10
  repo_id = "Qwen/Qwen2.5-1.5B-Instruct-GGUF"
11
  filename = "qwen2.5-1.5b-instruct-q8_0.gguf"
12
 
13
-
14
  llm = load_llm(repo_id, filename)
15
  vector_store = load_vector_store()
16
 
 
 
 
 
 
 
17
 
18
- st.title("PDF QA")
19
- # Initialize chat history
20
  if "messages" not in st.session_state:
21
  vector_store.reset_collection()
22
  if os.path.exists("./temp"):
23
  shutil.rmtree("./temp")
24
  st.session_state.messages = []
25
 
26
- # Display chat messages from history on app rerun
27
  for message in st.session_state.messages:
28
  with st.chat_message(message["role"]):
29
- if message["role"] == "user":
30
- st.write(message["content"])
31
- else:
32
- st.write(message["content"])
33
-
34
- # Accept user input
35
- if prompt := st.chat_input("What is up?"):
36
- # Add user message to chat history
37
- st.session_state.messages.append({"role": "user", "content": prompt})
38
- # Display user message in chat message container
39
- with st.chat_message("user"):
40
- st.markdown(prompt)
41
-
42
- # Display assistant response in chat message container
43
- with st.chat_message("assistant"):
44
- retriever = vector_store.as_retriever(search_kwargs={"k": 3})
45
- response = response_generator(llm, st.session_state.messages, prompt, retriever)
46
-
47
- st.markdown(response["answer"])
48
- with st.expander("See context"):
49
- st.write(response["context"])
50
-
51
- # Add assistant response to chat history
52
- st.session_state.messages.append(
53
- {"role": "assistant", "content": response["answer"]}
54
- )
55
 
 
56
  with st.sidebar:
57
- st.title("PDFs")
58
- st.write("Upload your pdfs here")
59
  uploaded_files = st.file_uploader(
60
- "Choose a PDF file", accept_multiple_files=True, type="pdf"
61
  )
62
  if uploaded_files is not None:
63
- st.session_state.uploaded_pdf = True
64
  for uploaded_file in uploaded_files:
65
  temp_dir = "./temp"
66
  if not os.path.exists(temp_dir):
@@ -69,6 +45,22 @@ with st.sidebar:
69
  with open(temp_file, "wb") as file:
70
  file.write(uploaded_file.getvalue())
71
 
72
- st.write("filename:", uploaded_file.name)
73
  process_pdf(temp_file, vector_store)
74
- st.success("PDFs processed successfully. ✅")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  from vector_store import load_vector_store, process_pdf
6
  from uuid import uuid4
7
 
 
 
8
  repo_id = "Qwen/Qwen2.5-1.5B-Instruct-GGUF"
9
  filename = "qwen2.5-1.5b-instruct-q8_0.gguf"
10
 
 
11
  llm = load_llm(repo_id, filename)
12
  vector_store = load_vector_store()
13
 
14
+ st.title("Medical Triage System")
15
+ st.subheader("Upload Referral Letters for Triage")
16
+ st.write(
17
+ "This AI-powered system analyzes referral letters to classify them as **Urgent** or **Routine** "
18
+ "and suggests either a **Face-to-Face** or **Virtual Appointment**."
19
+ )
20
 
21
+ # Initialize state
 
22
  if "messages" not in st.session_state:
23
  vector_store.reset_collection()
24
  if os.path.exists("./temp"):
25
  shutil.rmtree("./temp")
26
  st.session_state.messages = []
27
 
28
+ # Display chat messages
29
  for message in st.session_state.messages:
30
  with st.chat_message(message["role"]):
31
+ st.write(message["content"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
32
 
33
+ # Accept PDF uploads
34
  with st.sidebar:
35
+ st.title("Upload Referral Letters")
 
36
  uploaded_files = st.file_uploader(
37
+ "Choose PDF files", accept_multiple_files=True, type="pdf"
38
  )
39
  if uploaded_files is not None:
 
40
  for uploaded_file in uploaded_files:
41
  temp_dir = "./temp"
42
  if not os.path.exists(temp_dir):
 
45
  with open(temp_file, "wb") as file:
46
  file.write(uploaded_file.getvalue())
47
 
48
+ st.write(f"Processing {uploaded_file.name}...")
49
  process_pdf(temp_file, vector_store)
50
+ st.success(f"Processed {uploaded_file.name} successfully. ✅")
51
+
52
+ # Process user query
53
+ if prompt := st.text_input("Enter triage-related query (e.g., 'Is this urgent?')"):
54
+ st.session_state.messages.append({"role": "user", "content": prompt})
55
+ with st.chat_message("user"):
56
+ st.markdown(prompt)
57
+
58
+ with st.chat_message("assistant"):
59
+ retriever = vector_store.as_retriever(search_kwargs={"k": 3})
60
+ response = response_generator(llm, st.session_state.messages, prompt, retriever)
61
+
62
+ st.markdown(response["answer"])
63
+ with st.expander("See Context"):
64
+ st.write(response["context"])
65
+
66
+ st.session_state.messages.append({"role": "assistant", "content": response["answer"]})