Spaces:
Sleeping
Sleeping
souravmighty
committed on
Commit
•
d307acf
1
Parent(s):
ad134c3
added logo and changed the embeddings
Browse files- .chainlit/config.toml +1 -1
- app.py +23 -23
- chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/data_level0.bin +3 -0
- chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/header.bin +3 -0
- chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/length.bin +3 -0
- chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/link_lists.bin +0 -0
- chroma_db/chroma.sqlite3 +0 -0
- public/logo_dark.png +0 -0
- public/logo_light.png +0 -0
.chainlit/config.toml
CHANGED
@@ -64,7 +64,7 @@ default_expand_messages = false
|
|
64 |
hide_cot = false
|
65 |
|
66 |
# Link to your github repo. This will add a github button in the UI's header.
|
67 |
-
|
68 |
|
69 |
# Specify a CSS file that can be used to customize the user interface.
|
70 |
# The CSS file can be served from the public directory or via an external link.
|
|
|
64 |
hide_cot = false
|
65 |
|
66 |
# Link to your github repo. This will add a github button in the UI's header.
|
67 |
+
github = "https://github.com/souravmighty"
|
68 |
|
69 |
# Specify a CSS file that can be used to customize the user interface.
|
70 |
# The CSS file can be served from the public directory or via an external link.
|
app.py
CHANGED
@@ -1,5 +1,5 @@
|
|
1 |
import PyPDF2
|
2 |
-
from langchain_community.embeddings import
|
3 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
4 |
from langchain_community.vectorstores import Chroma
|
5 |
from langchain.chains import ConversationalRetrievalChain
|
@@ -41,25 +41,6 @@ async def on_chat_start():
|
|
41 |
]
|
42 |
).send()
|
43 |
|
44 |
-
|
45 |
-
await setup_agent(settings)
|
46 |
-
|
47 |
-
|
48 |
-
@cl.on_settings_update
|
49 |
-
async def setup_agent(settings):
|
50 |
-
print("Setup agent with settings:", settings)
|
51 |
-
|
52 |
-
user_env = cl.user_session.get("env")
|
53 |
-
os.environ["GROQ_API_KEY"] = user_env.get("GROQ_API_KEY")
|
54 |
-
|
55 |
-
# embeddings = OllamaEmbeddings(model="nomic-embed-text")
|
56 |
-
# memory=get_memory()
|
57 |
-
|
58 |
-
# docsearch = await cl.make_async(Chroma)(
|
59 |
-
# persist_directory="./chroma_db",
|
60 |
-
# embedding_function=embeddings
|
61 |
-
# )
|
62 |
-
|
63 |
files = None #Initialize variable to store uploaded files
|
64 |
|
65 |
# Wait for the user to upload a file
|
@@ -92,17 +73,36 @@ async def setup_agent(settings):
|
|
92 |
metadatas = [{"source": f"{i}-pl"} for i in range(len(texts))]
|
93 |
|
94 |
# Create a Chroma vector store
|
95 |
-
embeddings = OllamaEmbeddings(model="nomic-embed-text")
|
|
|
96 |
#embeddings = OllamaEmbeddings(model="llama2:7b")
|
97 |
docsearch = await cl.make_async(Chroma.from_texts)(
|
98 |
-
texts, embeddings, metadatas=metadatas
|
99 |
)
|
100 |
-
|
|
|
101 |
# Let the user know that the system is ready
|
102 |
msg.content = f"Processing `{file.name}` done. You can now ask questions!"
|
103 |
await msg.update()
|
104 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
105 |
memory=get_memory()
|
|
|
|
|
|
|
|
|
|
|
106 |
|
107 |
|
108 |
# Create a chain that uses the Chroma vector store
|
|
|
1 |
import PyPDF2
|
2 |
+
from langchain_community.embeddings import SentenceTransformerEmbeddings
|
3 |
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
4 |
from langchain_community.vectorstores import Chroma
|
5 |
from langchain.chains import ConversationalRetrievalChain
|
|
|
41 |
]
|
42 |
).send()
|
43 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
44 |
files = None #Initialize variable to store uploaded files
|
45 |
|
46 |
# Wait for the user to upload a file
|
|
|
73 |
metadatas = [{"source": f"{i}-pl"} for i in range(len(texts))]
|
74 |
|
75 |
# Create a Chroma vector store
|
76 |
+
# embeddings = OllamaEmbeddings(model="nomic-embed-text")
|
77 |
+
embeddings = SentenceTransformerEmbeddings(model_name = "sentence-transformers/all-MiniLM-L6-v2")
|
78 |
#embeddings = OllamaEmbeddings(model="llama2:7b")
|
79 |
docsearch = await cl.make_async(Chroma.from_texts)(
|
80 |
+
texts, embeddings, metadatas=metadatas, persist_directory='./chroma_db'
|
81 |
)
|
82 |
+
docsearch.persist()
|
83 |
+
|
84 |
# Let the user know that the system is ready
|
85 |
msg.content = f"Processing `{file.name}` done. You can now ask questions!"
|
86 |
await msg.update()
|
87 |
|
88 |
+
await setup_agent(settings)
|
89 |
+
|
90 |
+
|
91 |
+
@cl.on_settings_update
|
92 |
+
async def setup_agent(settings):
|
93 |
+
print("Setup agent with settings:", settings)
|
94 |
+
|
95 |
+
user_env = cl.user_session.get("env")
|
96 |
+
os.environ["GROQ_API_KEY"] = user_env.get("GROQ_API_KEY")
|
97 |
+
|
98 |
+
# embeddings = OllamaEmbeddings(model="nomic-embed-text")
|
99 |
+
embeddings = SentenceTransformerEmbeddings(model_name = "sentence-transformers/all-MiniLM-L6-v2")
|
100 |
memory=get_memory()
|
101 |
+
|
102 |
+
docsearch = await cl.make_async(Chroma)(
|
103 |
+
persist_directory="./chroma_db",
|
104 |
+
embedding_function=embeddings
|
105 |
+
)
|
106 |
|
107 |
|
108 |
# Create a chain that uses the Chroma vector store
|
chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/data_level0.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d3c9fd302f000d7790aa403c2d0d8fec363fe46f30b07d53020b6e33b22435a9
|
3 |
+
size 1676000
|
chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/header.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:e87a1dc8bcae6f2c4bea6d5dd5005454d4dace8637dae29bff3c037ea771411e
|
3 |
+
size 100
|
chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/length.bin
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:756bb44282bd5346e3ab0f955749756f1f378b655ccd3c2ee9f8ae7cc5f92a9e
|
3 |
+
size 4000
|
chroma_db/71a07f1e-d833-4c9a-a157-bfc7ef8305f1/link_lists.bin
ADDED
File without changes
|
chroma_db/chroma.sqlite3
ADDED
Binary file (262 kB). View file
|
|
public/logo_dark.png
ADDED
public/logo_light.png
ADDED