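# Gradio chatbot that answers questions over a Pinecone vector index ("sejarah")
# using LlamaIndex's GPTPineconeIndex and a LangChain conversational agent.
# Note: this appears to target early-2023 library APIs (llama_index < 0.6,
# langchain < 0.1, pinecone-client < 3.0, gradio 3.x), so pinned versions are assumed.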
import os
import gradio as gr
import pinecone
from llama_index import GPTIndexMemory, GPTPineconeIndex
from langchain.agents import Tool
from langchain.chains.conversation.memory import ConversationBufferMemory
from langchain import OpenAI
from langchain.agents import initialize_agent
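# API keys are read from the environment; both variables must be set before launch.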
OPENAI_API_KEY = os.environ["OPENAI_API_KEY"]
PINECONE_API_KEY = os.environ["PINECONE_API_KEY"]
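# Connect to Pinecone and wrap the existing "sejarah" index for querying with LlamaIndex.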
pinecone.init(api_key=PINECONE_API_KEY, environment="us-east1-gcp")
pindex = pinecone.Index("sejarah")
indexed_pinecone = GPTPineconeIndex([], pinecone_index=pindex)
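# Expose the index as a single LangChain tool; return_direct=True sends the tool's
# answer straight back to the user instead of looping it through the agent again.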
tools = [
    Tool(
        name="GPT Index",
        func=lambda q: str(indexed_pinecone.query(q)),
        description="useful for when you want to answer questions about the author. The input to this tool should be a complete English sentence.",
        return_direct=True,
    )
]
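# Chat memory is backed by the same index; the agent runs a conversational ReAct loop.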
memory = GPTIndexMemory(index=indexed_pinecone, memory_key="chat_history", query_kwargs={"response_mode": "compact"})
llm = OpenAI(temperature=0, model_name="gpt-3.5-turbo")
agent_chain = initialize_agent(tools, llm, agent="conversational-react-description", memory=memory, verbose=True)
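# Run the agent on the user's message and append the (message, response) pair to the chat history.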
def predict(input, history=[]):
    response = agent_chain.run(input)
    history = history + [(input, response)]
    return history, history
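# Minimal Blocks UI: a Chatbot display, per-session state, and a textbox that submits on Enter.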
with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    state = gr.State([])
    with gr.Row():
        txt = gr.Textbox(show_label=False, placeholder="Enter text and press enter").style(container=False)
    txt.submit(predict, [txt, state], [chatbot, state])

demo.launch()