Upload llama_cpp_python_streamingllm.py
llama_cpp_python_streamingllm.py
CHANGED
@@ -155,7 +155,7 @@ class StreamingLLM(Llama):
     mirostat_eta: float = 0.1,
     mirostat_tau: float = 5.0,
     penalize_nl: bool = True,
-    logits_processor
+    logits_processor=None,
     grammar: Optional[LlamaGrammar] = None,
 ):
     last_n_tokens_data = [llama_cpp.llama_token(0)] * max(
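The change completes the previously bare `logits_processor` parameter by giving it a default of `None`, matching the optional-callable convention used by llama-cpp-python's sampling API. As a minimal sketch of what such a callable could look like, assuming the usual (input_ids, scores) -> scores signature; the function name and token id below are purely illustrative and not part of this commit:

import numpy as np

BANNED_TOKEN_ID = 13  # assumption: arbitrary token id, chosen only for illustration

def ban_token(input_ids: np.ndarray, scores: np.ndarray) -> np.ndarray:
    # Set the banned token's logit to -inf so it can never be sampled.
    scores[BANNED_TOKEN_ID] = -np.inf
    return scores

# A caller could pass a processor like ban_token through the new keyword
# argument; when nothing is supplied it defaults to None and the logits
# are left untouched.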