mlara committed
Commit 2a15010
1 Parent(s): fc7a3d4
Files changed (1)
  1. app.py +7 -4
app.py CHANGED
@@ -14,7 +14,7 @@ load_dotenv()
 # Add path to the root of the repo to the system path
 sys.path.append(".")
 
-from rag import retrieval_augmented_qa_pipeline
+from rag import _build_vector_db, retrieval_augmented_qa_pipeline, RetrievalAugmentedQAPipeline
 
 # ChatOpenAI Templates
 system_template = """You are a helpful assistant who always speaks in a pleasant tone!
@@ -48,11 +48,14 @@ async def main(message: cl.Message):
 
     print(message.content)
 
-
-    response = retrieval_augmented_qa_pipeline(client).run_pipeline(message.content)
+    vector_db = _build_vector_db()
+    pipeline = RetrievalAugmentedQAPipeline(
+        llm=client,
+        vector_db_retriever=vector_db
+    )
+    response = pipeline.run_pipeline(message.content)
 
     msg = cl.Message(content="")
-    msg.stream_token(response)
 
     # Update the prompt object with the completion
     msg.prompt = response
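The new import relies on a rag module that exposes _build_vector_db and RetrievalAugmentedQAPipeline. As a rough, hypothetical sketch only (the search_by_text method, the k parameter, the model name, and the OpenAI-style client usage are assumptions, not this repo's actual code), the interface the changed lines depend on might look like:

# Hypothetical sketch of the rag interface used above -- not the repo's actual code.
# Assumes client is an OpenAI-style chat client and the retriever exposes search_by_text().

def _build_vector_db():
    # Load and embed the source documents, then return a retriever object.
    # Loader, embedding model, and index details are assumptions and omitted here.
    raise NotImplementedError("placeholder: build and return a vector retriever")

class RetrievalAugmentedQAPipeline:
    def __init__(self, llm, vector_db_retriever):
        self.llm = llm
        self.vector_db_retriever = vector_db_retriever

    def run_pipeline(self, user_query: str) -> str:
        # Retrieve the chunks most similar to the query (assumed retriever API).
        results = self.vector_db_retriever.search_by_text(user_query, k=4)
        context = "\n".join(text for text, _score in results)

        # Ask the model to answer strictly from the retrieved context (assumed client API).
        completion = self.llm.chat.completions.create(
            model="gpt-3.5-turbo",
            messages=[
                {"role": "system", "content": "Use only the provided context to answer."},
                {"role": "user", "content": f"Context:\n{context}\n\nQuestion: {user_query}"},
            ],
        )
        return completion.choices[0].message.content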