second commit
- Dockerfile +1 -1
- app.py +9 -3
- roaringkitty.py +31 -0
- talking_app.py +9 -29
Dockerfile
CHANGED
@@ -8,4 +8,4 @@ COPY --chown=user . $HOME/app
 COPY ./requirements.txt ~/app/requirements.txt
 RUN pip install -r requirements.txt
 COPY . .
-CMD ["chainlit", "run", "
+CMD ["chainlit", "run", "talking_app.py", "--port", "7860"]
app.py
CHANGED
@@ -1,13 +1,19 @@
-
-import chainlet as cl
+import chainlit as cl
 import sys

 sys.path.append(".")

 from earnings_app import extract_information

+@cl.author_rename
+def rename(orig_author: str):
+    diamond_char = u'\U0001F537'
+    phrase = diamond_char + " Diamond Hands " + diamond_char
+    rename_dict = {"RetrievalQA": phrase}
+    return rename_dict.get(orig_author, orig_author)
+
 @cl.on_chat_start
-async def
+async def init():
     cl.user_session.set("chain", extract_information())
     await cl.Message(content="Welcome to Earnings chat!").send()

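For context on the new hook: @cl.author_rename hands Chainlit a mapping from internal author names to display names, so messages emitted by the RetrievalQA chain appear under the diamond-hands label while every other author falls through unchanged. A minimal standalone sketch of that mapping logic (outside Chainlit, just to illustrate the fall-through):

```python
# Minimal sketch of the rename mapping on its own (no Chainlit involved).
diamond_char = '\U0001F537'
phrase = diamond_char + " Diamond Hands " + diamond_char
rename_dict = {"RetrievalQA": phrase}

def rename(orig_author: str) -> str:
    # Authors not in the dict fall through unchanged.
    return rename_dict.get(orig_author, orig_author)

print(rename("RetrievalQA"))  # 🔷 Diamond Hands 🔷
print(rename("Chatbot"))      # Chatbot (unchanged)
```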
roaringkitty.py
ADDED
@@ -0,0 +1,31 @@
+from langchain.embeddings.openai import OpenAIEmbeddings
+from langchain.document_loaders.csv_loader import CSVLoader
+from langchain.embeddings import CacheBackedEmbeddings
+from langchain.vectorstores import FAISS
+from langchain.chains import RetrievalQA
+from langchain.chat_models import ChatOpenAI
+from langchain.storage import LocalFileStore
+from langchain.text_splitter import RecursiveCharacterTextSplitter
+
+text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
+
+def roaringkiity_chain():
+    # build FAISS index from csv
+    loader = CSVLoader(file_path="./data/roaringkitty.csv", source_column="Link")
+    data = loader.load()
+    documents = text_splitter.transform_documents(data)
+    store = LocalFileStore("./cache/")
+    core_embeddings_model = OpenAIEmbeddings()
+    embedder = CacheBackedEmbeddings.from_bytes_store(
+        core_embeddings_model, store, namespace=core_embeddings_model.model
+    )
+    # make async docsearch
+    docsearch = await cl.make_async(FAISS.from_documents)(documents, embedder)
+
+    chain = RetrievalQA.from_chain_type(
+        ChatOpenAI(model="gpt-4", temperature=0, streaming=True),
+        chain_type="stuff",
+        return_source_documents=True,
+        retriever=docsearch.as_retriever(),
+        chain_type_kwargs = {"prompt": prompt}
+    )
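As committed, roaringkitty.py will not import cleanly: roaringkiity_chain is a plain def but contains an await (a SyntaxError at import time), the cl (Chainlit) module it awaits through is never imported, prompt is never defined in this file, and the assembled chain is never returned. Below is a hedged sketch of one way to make the module runnable, assuming the function is meant to be async and to receive the ChatPromptTemplate that talking_app.py builds; the misspelled roaringkiity_chain name is kept because talking_app.py imports it under that name:

```python
# Hedged sketch of roaringkitty.py with the likely fixes applied; assumes the
# ChatPromptTemplate is passed in by the caller (it is defined in talking_app.py).
import chainlit as cl
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.document_loaders.csv_loader import CSVLoader
from langchain.embeddings import CacheBackedEmbeddings
from langchain.vectorstores import FAISS
from langchain.chains import RetrievalQA
from langchain.chat_models import ChatOpenAI
from langchain.storage import LocalFileStore
from langchain.text_splitter import RecursiveCharacterTextSplitter

text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)

async def roaringkiity_chain(prompt):
    # Build the FAISS index from the CSV, using the Link column as each row's source.
    loader = CSVLoader(file_path="./data/roaringkitty.csv", source_column="Link")
    data = loader.load()
    documents = text_splitter.transform_documents(data)

    # Cache embeddings on disk so repeated startups don't re-embed everything.
    store = LocalFileStore("./cache/")
    core_embeddings_model = OpenAIEmbeddings()
    embedder = CacheBackedEmbeddings.from_bytes_store(
        core_embeddings_model, store, namespace=core_embeddings_model.model
    )

    # Run the blocking index build in a worker thread so the Chainlit event loop stays responsive.
    docsearch = await cl.make_async(FAISS.from_documents)(documents, embedder)

    # Return the chain instead of dropping it at the end of the function.
    return RetrievalQA.from_chain_type(
        ChatOpenAI(model="gpt-4", temperature=0, streaming=True),
        chain_type="stuff",
        return_source_documents=True,
        retriever=docsearch.as_retriever(),
        chain_type_kwargs={"prompt": prompt},
    )
```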
talking_app.py
CHANGED
@@ -1,20 +1,18 @@
+import sys
+
+sys.path.append(".")
+
 import chainlit as cl
-
-
-from langchain.embeddings import CacheBackedEmbeddings
-from langchain.text_splitter import RecursiveCharacterTextSplitter
-from langchain.vectorstores import FAISS
-from langchain.chains import RetrievalQA
-from langchain.chat_models import ChatOpenAI
-from langchain.storage import LocalFileStore
+
+
 from langchain.prompts.chat import (
     ChatPromptTemplate,
     SystemMessagePromptTemplate,
     HumanMessagePromptTemplate,
 )
-
+from roaringkitty import roaringkiity_chain
+

-text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)

 # Please respond as if you were Ken from the movie Barbie. Ken is a well-meaning but naive character who loves to Beach. He talks like a typical Californian Beach Bro, but he doesn't use the word "Dude" so much.
 # If you don't know the answer, just say that you don't know, don't try to make up an answer.
@@ -54,25 +52,7 @@ async def init():
     msg = cl.Message(content=f"Building Index...")
     await msg.send()

-
-    loader = CSVLoader(file_path="./data/roaringkitty.csv", source_column="Link")
-    data = loader.load()
-    documents = text_splitter.transform_documents(data)
-    store = LocalFileStore("./cache/")
-    core_embeddings_model = OpenAIEmbeddings()
-    embedder = CacheBackedEmbeddings.from_bytes_store(
-        core_embeddings_model, store, namespace=core_embeddings_model.model
-    )
-    # make async docsearch
-    docsearch = await cl.make_async(FAISS.from_documents)(documents, embedder)
-
-    chain = RetrievalQA.from_chain_type(
-        ChatOpenAI(model="gpt-4", temperature=0, streaming=True),
-        chain_type="stuff",
-        return_source_documents=True,
-        retriever=docsearch.as_retriever(),
-        chain_type_kwargs = {"prompt": prompt}
-    )
+    chain = roaringkiity_chain()

     msg.content = f"Index built!"
     await msg.send()
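One loose end at the call site: init() calls roaringkiity_chain() synchronously and with no arguments, yet the module as committed awaits inside a plain def and never returns the chain it builds. If the helper is reworked into the async, prompt-taking form sketched above under roaringkitty.py, the call inside init() would presumably look like this (hypothetical, following that assumption):

```python
# Hypothetical call site inside init(), assuming the async roaringkiity_chain(prompt)
# sketched above; "prompt" is the ChatPromptTemplate assembled earlier in talking_app.py.
chain = await roaringkiity_chain(prompt)
cl.user_session.set("chain", chain)  # mirrors the pattern app.py uses for its chain
```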