import getpass
import os

import gradio as gr

from langchain_openai import OpenAI, OpenAIEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_community.document_loaders import TextLoader
from langchain.text_splitter import RecursiveCharacterTextSplitter
from langchain.chains import ConversationalRetrievalChain

# Load the source text as LangChain documents
documents = []
loader = TextLoader("sentences.txt")
documents.extend(loader.load())

text_splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=150
)

# Read the OpenAI API key from the environment, prompting for it if it is not set,
# rather than hard-coding the secret in the source file
if "OPENAI_API_KEY" not in os.environ:
    os.environ["OPENAI_API_KEY"] = getpass.getpass("OpenAI API key: ")

# Recursively split the loaded documents into overlapping chunks
splits = text_splitter.split_documents(documents)

# Debug output: inspect the first chunks
print(splits[0:50])

# Embedding model used to embed the chunks; reads OPENAI_API_KEY from the environment
embedding = OpenAIEmbeddings()

# Storing the embeddings in a persistent Chroma vector store
# (only the first 100 chunks are indexed here)
persist_directory = 'docs/chroma/'
vectordb = Chroma.from_documents(
    documents=splits[0:100],
    embedding=embedding,
    persist_directory=persist_directory
)
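
# Optional sanity check (illustrative only, not required by the app): query the
# vector store directly to confirm that similar chunks come back for a sample
# question. The query string below is a placeholder.
# hits = vectordb.similarity_search("example question", k=2)
# for doc in hits:
#     print(doc.page_content[:200])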

# Retrieve the 2 most similar chunks per query and answer with a conversational chain
retriever = vectordb.as_retriever(search_type="similarity", search_kwargs={"k": 2})
qa = ConversationalRetrievalChain.from_llm(OpenAI(temperature=0), retriever)
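
# Optional one-off query against the chain without the Gradio UI (illustrative;
# the question text is a placeholder):
# result = qa({"question": "What are these documents about?", "chat_history": []})
# print(result["answer"])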

def respond(message, history):
    # Rebuild the chain's chat history from Gradio's history (with the default
    # ChatInterface format, history is a list of [user_message, bot_message] pairs)
    chat_history = [(user, bot) for user, bot in history]

    # Get the answer from the conversational retrieval chain
    response = qa({"question": message, "chat_history": chat_history})
    return response['answer']

gr.ChatInterface(respond).launch(debug=True)