Removed unnecessary `f` prefix in vLLM prompt template
PR #2 — opened by shhossain
File changed: README.md
@@ -153,7 +153,7 @@ prompts = [
     "What is 291 - 150?",
     "How much wood would a woodchuck chuck if a woodchuck could chuck wood?",
 ]
-prompt_template=f'''GPT4 Correct User: {prompt}<|end_of_turn|>GPT4 Correct Assistant:
+prompt_template='''GPT4 Correct User: {prompt}<|end_of_turn|>GPT4 Correct Assistant:
 '''

 prompts = [prompt_template.format(prompt=prompt) for prompt in prompts]