Upload folder using huggingface_hub
Test_RAG.py  +13 -8
CHANGED
@@ -613,17 +613,22 @@ def bot(history, temperature, top_p, top_k, repetition_penalty, hide_full_prompt
     llm.pipeline._forward_params["stopping_criteria"] = StoppingCriteriaList(stop_tokens)

     if do_rag:
-        t1 = Thread(target=rag_chain.invoke, args=({"input": history[-1][0]},))
+        # t1 = Thread(target=rag_chain.invoke, args=({"input": history[-1][0]},))
+        input_text = history[-1][0]
+        response = rag_chain.run(input_text)
     else:
-        input_text = rag_prompt_template.format(input=history[-1][0], context="")
-        t1 = Thread(target=llm.invoke, args=(input_text,))
-    t1.start()
+        # input_text = rag_prompt_template.format(input=history[-1][0], context="")
+        # t1 = Thread(target=llm.invoke, args=(input_text,))
+        input_text = history[-1][0]
+        response = rag_chain.run(input_text)
+    # t1.start()

     # Initialize an empty string to store the generated text
-    partial_text = ""
-    for new_text in streamer:
-        partial_text = text_processor(partial_text, new_text)
-        history[-1][1] = partial_text
+    # partial_text = ""
+    # for new_text in streamer:
+    #     partial_text = text_processor(partial_text, new_text)
+    #     history[-1][1] = partial_text
+    history[-1][1] = response
     yield history

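Net effect of the commit: bot() no longer streams partial tokens from a background thread. Both the do_rag and non-do_rag branches now block on rag_chain.run() and the generator yields the finished answer exactly once. A minimal sketch of the resulting control flow, assuming a LangChain-style chain whose run() returns the answer as a string; StubChain, the type hints, and the driver loop are illustrative stand-ins, not code from Test_RAG.py:

from typing import Iterator, List


class StubChain:
    """Illustrative stand-in for the real rag_chain built elsewhere in Test_RAG.py."""

    def run(self, query: str) -> str:
        # LangChain's legacy Chain.run() returns the final answer as a string.
        return f"(answer to: {query})"


rag_chain = StubChain()


def bot(history: List[List[str]]) -> Iterator[List[List[str]]]:
    # After this commit, both branches reduce to the same synchronous call,
    # so the generator yields exactly once, with the complete answer.
    input_text = history[-1][0]
    response = rag_chain.run(input_text)
    history[-1][1] = response
    yield history


for updated in bot([["What is OpenVINO?", ""]]):
    print(updated[-1][1])

One consequence: with the streaming loop commented out, the streamer and text_processor referenced by the removed lines go unused here, and the chat UI receives the response only after the whole chain finishes.

For contrast, the removed lines followed the common Thread-plus-streamer pattern: generation runs on a worker thread while the foreground loop drains partial text from a streamer. A self-contained sketch of that pattern, with a Queue and fake_generate standing in for the real streamer and llm.invoke (both stand-ins are assumptions for illustration):

from queue import Queue
from threading import Thread


def fake_generate(prompt: str, streamer: Queue) -> None:
    # Stand-in for llm.invoke(): emit tokens, then a None sentinel.
    for token in ("OpenVINO ", "optimizes ", "inference."):
        streamer.put(token)
    streamer.put(None)


def stream_bot(history):
    streamer: Queue = Queue()
    t1 = Thread(target=fake_generate, args=(history[-1][0], streamer))
    t1.start()

    # Mirrors the removed loop: accumulate partial text as tokens arrive.
    partial_text = ""
    while (new_text := streamer.get()) is not None:
        partial_text += new_text  # the original used text_processor() here
        history[-1][1] = partial_text
        yield history  # yielding per token is what lets a Gradio UI update live
    t1.join()


for updated in stream_bot([["What is OpenVINO?", ""]]):
    print(updated[-1][1])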