added "attention_bias": false to config.json
config.json +1 -0
@@ -133,5 +133,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.40.2",
   "use_cache": true,
+  "attention_bias": false,
   "vocab_size": 32064
 }
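A quick way to verify that the added key is picked up is to load the config with transformers and read the attribute. This is a minimal sketch, assuming the repo follows the standard transformers config conventions; `"path/to/this-repo"` is a placeholder, not part of this commit.

```python
from transformers import AutoConfig

# Placeholder path: substitute the actual model repo id or local directory.
config = AutoConfig.from_pretrained("path/to/this-repo")

# Keys present in config.json are exposed as attributes on the config object,
# so after this change the field should resolve to False rather than falling
# back to the config class default.
print(getattr(config, "attention_bias", None))  # expected: False
```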