abhinavkulkarni
committed on
Commit b8dc80e
Parent(s): 987b81f

Upload config

config.json +1 -1
config.json CHANGED
@@ -22,7 +22,7 @@
     "n_head_kv": 8,
     "n_layer": 60,
     "parallel_attn": true,
-    "torch_dtype": "
+    "torch_dtype": "float16",
     "transformers_version": "4.33.1",
     "use_cache": true,
     "vocab_size": 65024
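
For context, the "torch_dtype" entry added in this commit is the field transformers reads to determine the checkpoint's stored weight dtype. A minimal sketch of how it surfaces after loading, assuming a hypothetical repo id (substitute the actual model repository):

from transformers import AutoConfig, AutoModelForCausalLM

# Hypothetical repo id for illustration only; replace with the actual repository.
repo_id = "some-user/falcon-40b-example"

# PretrainedConfig converts the "torch_dtype" string in config.json
# into a torch.dtype object when torch is installed.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.torch_dtype)  # torch.float16 after this commit

# torch_dtype="auto" tells from_pretrained to honor the dtype stored in config.json
# instead of upcasting the weights to the framework default (float32).
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    torch_dtype="auto",
    trust_remote_code=True,
)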