Update app.py
app.py CHANGED

@@ -57,8 +57,8 @@ def load_model(_docs):
 
     model = AutoGPTQForCausalLM.from_quantized(
         model_name_or_path,
-
-        revision="gptq-8bit-128g-actorder_True",
+        revision="gptq-8bit-128g-actorder_False",
+        #revision="gptq-8bit-128g-actorder_True",
         model_basename=model_basename,
         use_safetensors=True,
         trust_remote_code=True,
@@ -108,9 +108,9 @@ def load_model(_docs):
     # SYSTEM_PROMPT = ("Use the following pieces of context to answer the question at the end. "
     #                  "If you don't know the answer, just say that you don't know, "
     #                  "don't try to make up an answer.")
-    SYSTEM_PROMPT = ("Use the following pieces of context along with general information you possess to answer the question at the end.
+    SYSTEM_PROMPT = ("Use the following pieces of context along with general information you possess to answer the question at the end."
                      "If you don't know the answer, just say that you don't know, "
-                     "don't try to make up an answer.")
+                     "don't try to make up an answer.","Answer strictly what is asked and do not provide further information at all.")
 
     template = generate_prompt("""{context} Question: {question} """,system_prompt=SYSTEM_PROMPT,) #Enter memory here!
     prompt = PromptTemplate(template=template, input_variables=["context", "question"]) #Add history here
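For context, a minimal sketch of how the two edited spots sit together inside load_model() after this commit. The concrete model_name_or_path and model_basename values, the device argument, the tokenizer setup, and the generate_prompt() helper are not part of this diff and appear below purely as assumptions; the system prompt is also collapsed into a single implicitly concatenated string for the sketch.

# Minimal sketch of load_model() after this commit; values marked "assumed"
# are illustrative only and do not come from the diff.
from auto_gptq import AutoGPTQForCausalLM
from transformers import AutoTokenizer
from langchain.prompts import PromptTemplate

model_name_or_path = "TheBloke/Llama-2-13B-chat-GPTQ"  # assumed repo id
model_basename = "model"                                # assumed basename

tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)

# The revision argument selects which quantisation branch of the Hub repo is
# downloaded; this commit switches from the act-order variant to the
# non-act-order 8-bit/128g variant.
model = AutoGPTQForCausalLM.from_quantized(
    model_name_or_path,
    revision="gptq-8bit-128g-actorder_False",
    model_basename=model_basename,
    use_safetensors=True,
    trust_remote_code=True,
    device="cuda:0",  # assumed; not shown in the hunk
)

# Illustrative stand-in for the app's generate_prompt() helper: a typical
# Llama-2 chat wrapper that places the system prompt inside <<SYS>> tags.
def generate_prompt(prompt: str, system_prompt: str) -> str:
    return f"[INST] <<SYS>>\n{system_prompt}\n<</SYS>>\n\n{prompt} [/INST]"

SYSTEM_PROMPT = ("Use the following pieces of context along with general information you possess "
                 "to answer the question at the end. "
                 "If you don't know the answer, just say that you don't know, "
                 "don't try to make up an answer. "
                 "Answer strictly what is asked and do not provide further information at all.")

template = generate_prompt("""{context} Question: {question} """, system_prompt=SYSTEM_PROMPT)
prompt = PromptTemplate(template=template, input_variables=["context", "question"])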