Update app.py
app.py CHANGED
@@ -58,15 +58,18 @@ st.title("Document QA by Dono")
 
 DEVICE = "cuda:0" if torch.cuda.is_available() else "cpu"
 
-loader = PyPDFDirectoryLoader("/home/user/app/pdfs/")
-docs = loader.load()
-print(len(docs))
 
+@st.cache_data
+def load_data():
+    loader = PyPDFDirectoryLoader("/home/user/app/pdfs/")
+    docs = loader.load()
+    print(len(docs))
+    return docs
 
 
 
 @st.cache_resource
-def load_model():
+def load_model(docs):
     #embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-large",model_kwargs={"device":DEVICE})
     embeddings = HuggingFaceInstructEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",model_kwargs={"device":DEVICE})
     print(DEVICE)
@@ -182,7 +185,8 @@ def get_message_history():
         yield f"{role.title()}: {content}"
 
 
-
+docs = load_data()
+qa_chain = load_model(docs)
 
 print('2')
 print(time.time())
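
For context, the net effect of this change is roughly the following structure: PDF parsing moves into an st.cache_data function, and the QA chain is built by an st.cache_resource function that consumes its output. This is a minimal sketch, assuming Streamlit >= 1.18 (which introduced st.cache_data / st.cache_resource) and the pre-0.1 langchain import path; the file's actual imports are not shown in this diff, and the embedding / chain construction inside load_model() is elided here and replaced with a placeholder.

import streamlit as st
from langchain.document_loaders import PyPDFDirectoryLoader

@st.cache_data
def load_data():
    # st.cache_data memoizes serializable return values, so the PDF
    # directory is parsed once and the Document list is reused on reruns.
    loader = PyPDFDirectoryLoader("/home/user/app/pdfs/")
    docs = loader.load()
    print(len(docs))
    return docs

@st.cache_resource
def load_model(docs):
    # st.cache_resource is intended for unserializable objects (embeddings,
    # vector stores, chains) that should be created once per process.
    qa_chain = ...  # placeholder: embeddings / vector store / QA chain setup
    return qa_chain

docs = load_data()
qa_chain = load_model(docs)

Splitting the two caches this way follows Streamlit's guidance: st.cache_data for picklable data (the loaded documents) and st.cache_resource for heavyweight global objects (the model-backed QA chain), so neither is rebuilt on every widget interaction.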