update alpaca_chat prompts for instructions to explain the conversation
src/axolotl/prompt_strategies/alpaca_chat.py
@@ -20,11 +20,24 @@ def load(tokenizer, cfg):
 
 class AlpacaConcisePrompter(AlpacaPrompter):
     """
-    Alpaca Prompter extending the system prompt to ask for concise answers
+    Alpaca Prompter extending the system prompt to ask for concise chat-instruct answers
     """
 
-    system_prompt = "Below is an instruction that describes a task, paired with an input that provides further context. …
-    system_no_input_prompt = "Below is an instruction that describes a task. …
+    system_prompt = "Below is an instruction from a USER that describes a task, paired with an input that provides further context. The ASSISTANT writes a response that concisely and appropriately completes the request.\n\n"
+    system_no_input_prompt = "Below is an instruction from a USER that describes a task. The ASSISTANT writes a response that appropriately and concisely completes the request.\n\n"
+
+
+class AlpacaChatPrompter(AlpacaPrompter):
+    """
+    Alpaca Chat Prompter extending the system prompt to for chat-instruct answers
+    """
+
+    system_prompt = "Below is an instruction from a USER that describes a task, paired with an input that provides further context. The ASSISTANT writes a response that concisely and appropriately completes the request.\n\n"
+    system_no_input_prompt = "Below is an instruction from a USER that describes a task. The ASSISTANT writes a response that appropriately and concisely completes the request.\n\n"
+
+    def __init__(self):  # pylint: disable=super-init-not-called
+        self.prompt_style = PromptStyle.CHAT.value
+        self.match_prompt_style()
 
 
 class AlpacaQAPromptTokenizingStrategy(InstructionPromptTokenizingStrategy):
@@ -64,7 +77,7 @@ def load_concise(tokenizer, cfg):
 
 def load_qa(tokenizer, cfg):
     return AlpacaQAPromptTokenizingStrategy(
-        …
+        AlpacaChatPrompter(),
         tokenizer,
         cfg.train_on_inputs,
         cfg.sequence_len,
@@ -73,7 +86,7 @@ def load_qa(tokenizer, cfg):
 
 def load_camel_ai(tokenizer, cfg):
     return CamelAIPromptTokenizingStrategy(
-        …
+        AlpacaChatPrompter(),
         tokenizer,
         cfg.train_on_inputs,
         cfg.sequence_len,