Add `max_length` to model config
#3 by saattrupdan - opened
config.json CHANGED +2 -1
@@ -32,5 +32,6 @@
   "torch_dtype": "float32",
   "transformers_version": "4.22.1",
   "use_cache": true,
-  "vocab_size": 64000
+  "vocab_size": 64000,
+  "max_length": 2048
 }
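For context, a minimal sketch of what this change does downstream (the checkpoint ID below is a placeholder for the repo this PR targets): `transformers` reads `max_length` from `config.json` into the model config, and `generate()` calls that set neither `max_length` nor `max_new_tokens` fall back to that value rather than the library default of 20.

```python
from transformers import AutoConfig

# Hypothetical checkpoint ID; substitute the repo this PR targets.
config = AutoConfig.from_pretrained("org/model")

# After this change, config.max_length is 2048, so generate() calls that
# pass neither max_length nor max_new_tokens stop at 2048 tokens instead
# of the library default of 20.
print(config.max_length)  # 2048
```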