Upload Qwen2ForCausalLM
config.json +11 -0
@@ -20,6 +20,17 @@
   "rope_scaling": null,
   "rope_theta": 1000000.0,
   "sliding_window": null,
+  "step_separator_ids": [
+    [
+      3407
+    ],
+    [
+      2533
+    ],
+    [
+      271
+    ]
+  ],
   "tie_word_embeddings": true,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.46.3",
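The added "step_separator_ids" key is not a standard transformers configuration field, so it is presumably read by custom inference or reward-model code rather than by Qwen2ForCausalLM itself. Below is a minimal Python sketch of how such a field could be consumed, assuming each inner list is a token-id sequence that marks the end of a reasoning step; the split_steps helper is hypothetical and not part of this repository.

import json

# Load the configuration file uploaded in this commit.
with open("config.json") as f:
    config = json.load(f)

# Each entry is a token-id sequence that marks a step boundary,
# here [[3407], [2533], [271]].
separators = [tuple(seq) for seq in config["step_separator_ids"]]

def split_steps(token_ids):
    """Split generated token ids into steps, cutting after any separator."""
    steps, current = [], []
    for tok in token_ids:
        current.append(tok)
        if any(tuple(current[-len(sep):]) == sep for sep in separators):
            steps.append(current)
            current = []
    if current:
        steps.append(current)
    return steps

Because all three separators here are single tokens, this reduces to splitting after any occurrence of 3407, 2533, or 271; which surface strings those ids map to depends on the model's tokenizer.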