dtrejopizzo committed
Commit f2d6127 · 1 Parent(s): 721ab63

Update app.py

Files changed (1)
  1. app.py +12 -12
app.py CHANGED
@@ -4,8 +4,6 @@ import gradio as gr
 import sys
 import os
 
-#from langchain.chat_models import ChatOpenAI
-
 os.environ["OPENAI_API_KEY"]
 
 def construct_index(directory_path):
@@ -16,21 +14,20 @@ def construct_index(directory_path):
 
     prompt_helper = PromptHelper(max_input_size, num_outputs, max_chunk_overlap, chunk_size_limit=chunk_size_limit)
 
-    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=1.0, model_name="gpt-4", max_tokens=num_outputs))
+    llm_predictor = LLMPredictor(llm=ChatOpenAI(temperature=0.7, model_name="gpt-4", max_tokens=num_outputs))
 
     documents = SimpleDirectoryReader(directory_path).load_data()
-
-    index = GPTVectorStoreIndex(documents, llm_predictor=llm_predictor, prompt_helper=prompt_helper)
-
-    #index.save_to_disk('index.json')
-    index.storage_context.persist(persist_dir="index.json")
+
+    index = GPTVectorStoreIndex(documents)
+
+    index.storage_context.persist()
 
     return index
 
 def chatbot(input_text):
-    #query_engine = index.as_query_engine()
-    index = GPTVectorStoreIndex.load_from_disk('index.json')
-    response = index.query(input_text, response_mode="compact")
+    query_engine = index.as_query_engine()
+    #index = GPTVectorStoreIndex.load_from_disk('index.json')
+    response = query_engine.query(input_text, response_mode="compact")
     return response.response
 
 iface = gr.Interface(fn=chatbot,
@@ -38,5 +35,8 @@ iface = gr.Interface(fn=chatbot,
                      outputs="text",
                      title="Demo Galicia")
 
-index = construct_index("docs")
+# rebuild storage context
+storage_context = StorageContext.from_defaults(persist_dir='./storage')
+# load index
+index = load_index_from_storage(storage_context)
 iface.launch(share=True, debug=True)
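
For context, this commit switches from serializing the index to index.json to persisting it in LlamaIndex's default ./storage directory and reloading it at startup via StorageContext and load_index_from_storage. Below is a minimal, self-contained sketch of that persist-and-reload round trip. It is not part of the commit: the from_documents constructor, the example query string, and passing response_mode to as_query_engine() rather than to query() are assumptions based on the llama_index API of that era.

# Sketch only (assumed, not from this commit). Requires OPENAI_API_KEY in the environment.
from llama_index import (
    GPTVectorStoreIndex,
    SimpleDirectoryReader,
    StorageContext,
    load_index_from_storage,
)

# Build the index from ./docs and persist it; persist() with no arguments
# writes to ./storage by default.
documents = SimpleDirectoryReader("docs").load_data()
index = GPTVectorStoreIndex.from_documents(documents)
index.storage_context.persist()

# At startup, rebuild the storage context and reload the persisted index
# instead of re-embedding the documents.
storage_context = StorageContext.from_defaults(persist_dir="./storage")
index = load_index_from_storage(storage_context)

# Query through a query engine; response_mode is configured on the engine,
# not passed to query().
query_engine = index.as_query_engine(response_mode="compact")
response = query_engine.query("What is in the docs?")
print(response.response)

Persisting once and reloading on later runs avoids re-reading and re-embedding the documents every time the Space starts.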