Update app.py
app.py
CHANGED
@@ -2,10 +2,10 @@ import streamlit as st
 from streamlit_option_menu import option_menu
 from markup import app_intro
 import langchain
-from langchain.cache import InMemoryCache
 from query_data import chat_chain
+from memory import msgs
+
 
-langchain.llm_cache = InMemoryCache()
 
 def tab1():
     st.header("CIMA Chatbot")
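The new memory module is not included in this commit, so its contents are an assumption. A minimal sketch of what memory.py might export, assuming it wraps LangChain's StreamlitChatMessageHistory so the transcript lives in st.session_state and survives reruns (the session-state key and the optional ConversationBufferMemory wrapper are illustrative):

# memory.py -- hypothetical sketch; the real module is not shown in this diff
from langchain.memory import ConversationBufferMemory
from langchain.memory.chat_message_histories import StreamlitChatMessageHistory

# Stores the chat transcript in st.session_state["chat_messages"], so it
# persists across Streamlit reruns without the manual session_state
# bookkeeping that this commit removes from app.py.
msgs = StreamlitChatMessageHistory(key="chat_messages")

# Optionally expose a LangChain memory bound to the same history, so the
# chain in query_data.py can read and write the identical transcript.
memory = ConversationBufferMemory(
    chat_memory=msgs, return_messages=True, memory_key="chat_history"
)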
@@ -21,17 +21,13 @@ unique_metadata_list = []
 seen = set()
 
 def tab4():
-    if "messages" not in st.session_state:
-        st.session_state.messages = []
-
     st.header("🗣️ Chat with the AI about the ingested documents! 📚")
 
-    for
-
-
+    for i, msg in enumerate(msgs.messages):
+        name = "user" if i % 2 == 0 else "assistant"
+        st.chat_message(name).markdown(msg.content)
 
     if user_input := st.chat_input("User Input"):
-        st.session_state.messages.append({"role": "user", "content": user_input})
 
         with st.chat_message("user"):
             st.markdown(user_input)
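The loop added in this hunk replays the entire stored transcript on every Streamlit rerun and infers the speaker from the index, so it renders correctly only if msgs.messages strictly alternates user and assistant messages. How messages get appended is not visible here; one explicit variant, assuming the StreamlitChatMessageHistory-style msgs sketched above, would be:

import streamlit as st
from memory import msgs  # hypothetical module sketched above

# Recording one chat turn so the index-based role mapping stays valid:
# user messages land at even indices, assistant replies at odd indices.
if user_input := st.chat_input("User Input"):
    st.chat_message("user").markdown(user_input)
    msgs.add_user_message(user_input)             # even index -> rendered as "user"

    answer = "(assistant reply from chat_chain)"  # placeholder for the real chain call
    st.chat_message("assistant").write(answer)
    msgs.add_ai_message(answer)                   # odd index -> rendered as "assistant"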
@@ -57,8 +53,6 @@ def tab4():
 
         st.write(answer)
         st.write(unique_metadata_list)
-
-        st.session_state.messages.append({"role": "assistant", "content": answer})
 
 
 def main():
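Dropping the manual st.session_state append for the assistant reply only works if something else records the turn: either explicit msgs.add_user_message / msgs.add_ai_message calls as in the previous sketch, or a chain whose memory is bound to msgs, in which case the explicit calls must be omitted to avoid duplicate entries. The real chat_chain in query_data.py is not shown in this diff and presumably retrieves from the ingested documents; a stripped-down stand-in that only illustrates the memory wiring (every name beyond msgs and memory is an assumption) could look like:

# query_data.py -- hypothetical stand-in; the real chain is not in this diff
from langchain.chains import LLMChain
from langchain.chat_models import ChatOpenAI
from langchain.prompts import (
    ChatPromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
    SystemMessagePromptTemplate,
)
from memory import memory  # the ConversationBufferMemory sketched earlier (assumed)

prompt = ChatPromptTemplate.from_messages([
    SystemMessagePromptTemplate.from_template(
        "Answer questions about the ingested documents."
    ),
    MessagesPlaceholder(variable_name="chat_history"),
    HumanMessagePromptTemplate.from_template("{question}"),
])

# Because the chain owns the memory, calling it records both the question and
# the answer in msgs, so app.py no longer has to append to the history itself.
chat_chain = LLMChain(llm=ChatOpenAI(temperature=0), prompt=prompt, memory=memory)

# usage (illustrative): answer = chat_chain({"question": user_input})["text"]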