h3110Fr13nd committed · Commit 67e94fc · Parent(s): e93d5c0

main.py CHANGED
@@ -26,8 +26,8 @@ langchain.debug = True
 
 dotenv.load_dotenv()
 
-print(os.
-print(os.
+print(os.environ.get("HF_EMAIL"))
+print(os.environ.get("HF_PASS"))
 
 class GradioApp:
     def __init__(self):
@@ -69,7 +69,7 @@ Answer:
         )
         self.db = Chroma(persist_directory="./pragetx_chroma", embedding_function=HuggingFaceEmbeddings(), collection_name="pragetx")
         # self.llm = ChatOllama(model="phi3:3.8b", base_url="http://localhost:11434", num_gpu=16)
-        self.llm = HuggingChat(email = os.
+        self.llm = HuggingChat(email = os.environ.get("HF_EMAIL") , psw = os.environ.get("HF_PASS") )
         self.chain = (
             {"chat_history": self.chat_history, "context": self.db.as_retriever(search_kwargs={"k":3}), "question": RunnablePassthrough()} |
             self.prompt |