Update app.py
app.py CHANGED
@@ -188,16 +188,15 @@ async def chat(query,history,sources,reports,subtype,year):
     #callbacks = [StreamingStdOutCallbackHandler()]
     llm_qa = HuggingFaceEndpoint(
         endpoint_url="https://howaqfw0lpap12sg.us-east-1.aws.endpoints.huggingface.cloud",
-
-        huggingfacehub_api_token=HF_token)
+        max_new_tokens=1024,
+        huggingfacehub_api_token=HF_token,)
 
     # create rag chain
     chat_model = ChatHuggingFace(llm=llm_qa)
-    chain = chat_model | StrOutputParser()
     ###-------------------------- get answers ---------------------------------------
     answer_lst = []
     for question, context in zip(question_lst , context_retrieved_lst):
-        answer =
+        answer = chat_model.invoke(messages)
         answer_lst.append(answer)
     docs_html = []
     for i, d in enumerate(context_retrieved, 1):
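For context, a minimal sketch of what the updated block does after this commit: the endpoint now caps generation at 1024 new tokens, and each answer is produced by calling the chat model directly instead of a separate output-parser chain. This is a sketch, not the full app.py; it assumes the langchain_huggingface package, and it assumes HF_token, question_lst, and context_retrieved_lst are defined elsewhere in the app. The prompt assembly shown here is hypothetical, since the diff does not show how `messages` is built.

# Sketch of the updated answer loop (assumptions noted above).
from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
from langchain_core.messages import HumanMessage, SystemMessage

llm_qa = HuggingFaceEndpoint(
    endpoint_url="https://howaqfw0lpap12sg.us-east-1.aws.endpoints.huggingface.cloud",
    max_new_tokens=1024,                 # newly added cap on answer length
    huggingfacehub_api_token=HF_token,
)
chat_model = ChatHuggingFace(llm=llm_qa)

answer_lst = []
for question, context in zip(question_lst, context_retrieved_lst):
    # Hypothetical prompt assembly; the real app builds `messages` elsewhere.
    messages = [
        SystemMessage(content="Answer the question using only the given context."),
        HumanMessage(content=f"Context:\n{context}\n\nQuestion: {question}"),
    ]
    answer = chat_model.invoke(messages)  # returns an AIMessage; answer.content holds the text
    answer_lst.append(answer)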