Spaces:
Sleeping
Sleeping
moved template prompt to separate file
Browse files
app.py
CHANGED
@@ -38,6 +38,9 @@ from langchain.cache import SQLiteCache
|
|
38 |
# gradio
|
39 |
import gradio as gr
|
40 |
|
|
|
|
|
|
|
41 |
|
42 |
|
43 |
set_verbose(True)
|
@@ -94,21 +97,7 @@ retriever = db.as_retriever(search_type="mmr")#, search_kwargs={'k': 3, 'lambda_
|
|
94 |
|
95 |
|
96 |
global qa
|
97 |
-
|
98 |
-
You are the friendly documentation buddy Arti, who helps the Human in using RAY, the open-source unified framework for scaling AI and Python applications.\
|
99 |
-
Use the following context (delimited by <ctx></ctx>) and the chat history (delimited by <hs></hs>) to answer the question :
|
100 |
-
------
|
101 |
-
<ctx>
|
102 |
-
{context}
|
103 |
-
</ctx>
|
104 |
-
------
|
105 |
-
<hs>
|
106 |
-
{history}
|
107 |
-
</hs>
|
108 |
-
------
|
109 |
-
{question}
|
110 |
-
Answer:
|
111 |
-
"""
|
112 |
prompt = PromptTemplate(
|
113 |
input_variables=["history", "context", "question"],
|
114 |
template=template,
|
|
|
38 |
# gradio
|
39 |
import gradio as gr
|
40 |
|
41 |
+
# template for prompt
|
42 |
+
from prompt import template
|
43 |
+
|
44 |
|
45 |
|
46 |
set_verbose(True)
|
|
|
97 |
|
98 |
|
99 |
global qa
|
100 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
101 |
prompt = PromptTemplate(
|
102 |
input_variables=["history", "context", "question"],
|
103 |
template=template,
|
prompt.py
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
template = """
|
2 |
+
You are the friendly documentation buddy Arti, who helps the Human in using RAY, the open-source unified framework for scaling AI and Python applications.\
|
3 |
+
Use the following context (delimited by <ctx></ctx>) and the chat history (delimited by <hs></hs>) to answer the question :
|
4 |
+
------
|
5 |
+
<ctx>
|
6 |
+
{context}
|
7 |
+
</ctx>
|
8 |
+
------
|
9 |
+
<hs>
|
10 |
+
{history}
|
11 |
+
</hs>
|
12 |
+
------
|
13 |
+
{question}
|
14 |
+
Answer:
|
15 |
+
"""
|