from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.vectorstores import Chroma
import os
from glob import glob
import shutil
import gradio as gr
import git
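# The OpenAI components below read OPENAI_API_KEY, and the SerpAPI search reads
# SERPAPI_API_KEY, from the environment; set both before launching the app.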
# Clone the MIT Shakespeare HTML corpus and collect every play's HTML file.
if not os.path.exists("shakespeare"):
    git.Repo.clone_from("https://github.com/TheMITTech/shakespeare", "shakespeare")
files = glob("./shakespeare/**/*.html", recursive=True)
os.makedirs('./data', exist_ok=True)
destination_folder = './data/'
for html_file in files:
    shutil.move(html_file, os.path.join(destination_folder, os.path.basename(html_file)))
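# Parse every HTML file in ./data with the BeautifulSoup-backed loader.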
from langchain.document_loaders import BSHTMLLoader, DirectoryLoader
bshtml_dir_loader = DirectoryLoader('./data/', loader_cls=BSHTMLLoader)
data = bshtml_dir_loader.load()
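# Split the plays into ~1000-character chunks with a small overlap so that
# retrieved passages stay within the model's context window.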
from langchain.text_splitter import RecursiveCharacterTextSplitter
text_splitter = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=20,
    length_function=len,
)
documents = text_splitter.split_documents(data)
# Embed each chunk with OpenAI embeddings.
embeddings = OpenAIEmbeddings()
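# Build a persistent Chroma vector store from the chunks, then reload it from disk.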
persist_directory = "vector_db"
vectordb = Chroma.from_documents(documents=documents, embedding=embeddings, persist_directory=persist_directory)
vectordb.persist()
vectordb = None
vectordb = Chroma(persist_directory=persist_directory, embedding_function=embeddings)
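# GPT-4 with temperature 0 for deterministic, grounded answers.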
from langchain.chat_models import ChatOpenAI
llm = ChatOpenAI(temperature=0, model="gpt-4")
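# Expose the vector store as a retriever and wrap it in a "stuff" RetrievalQA chain.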
doc_retriever = vectordb.as_retriever()
from langchain.chains import RetrievalQA
shakespeare_qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=doc_retriever)
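# Web search tool for questions the Shakespeare corpus cannot answer.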
from langchain.utilities import SerpAPIWrapper
search = SerpAPIWrapper()
from langchain.agents import AgentType, Tool, initialize_agent
tools = [
    Tool(
        name="Shakespeare QA System",
        func=shakespeare_qa.run,
        description="useful for when you need to answer questions about Shakespeare's works. Input should be a fully formed question."
    ),
    Tool(
        name="SERP API Search",
        func=search.run,
        description="useful for when you need to answer questions that are not covered by Shakespeare's works. Input should be a fully formed question."
    ),
]
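# A zero-shot ReAct agent chooses between the tools based on their descriptions.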
agent = initialize_agent(tools, llm, agent=AgentType.ZERO_SHOT_REACT_DESCRIPTION, verbose=True)
# Route each Gradio query through the agent.
def make_inference(query):
    return agent.run(query)
if __name__ == "__main__":
    gr.Interface(
        make_inference,
        [
            gr.Textbox(lines=2, label="Query"),
        ],
        gr.Textbox(label="Response"),
        title="Shakespeare",
        description="Ask questions about Shakespeare's works.",
    ).launch()