Upload FalconForCausalLM
config.json CHANGED +1 -13
@@ -28,19 +28,7 @@
   "num_hidden_layers": 32,
   "num_kv_heads": 71,
   "parallel_attn": true,
-  "quantization_config": {
-    "bnb_4bit_compute_dtype": "bfloat16",
-    "bnb_4bit_quant_type": "nf4",
-    "bnb_4bit_use_double_quant": true,
-    "llm_int8_enable_fp32_cpu_offload": false,
-    "llm_int8_has_fp16_weight": false,
-    "llm_int8_skip_modules": null,
-    "llm_int8_threshold": 6.0,
-    "load_in_4bit": true,
-    "load_in_8bit": false,
-    "quant_method": "bitsandbytes"
-  },
-  "torch_dtype": "float16",
+  "torch_dtype": "float32",
   "transformers_version": "4.37.0.dev0",
   "use_cache": true,
   "vocab_size": 65024
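For reference, the keys removed from config.json correspond to the arguments of transformers' BitsAndBytesConfig, so the same 4-bit quantization can still be applied at load time instead of being baked into the checkpoint config. A minimal sketch (the repository ID below is a placeholder, not this repo's actual name):

import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

# Same bitsandbytes settings that were deleted from config.json,
# expressed as a load-time quantization config instead.
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,
    bnb_4bit_compute_dtype=torch.bfloat16,
    bnb_4bit_quant_type="nf4",
    bnb_4bit_use_double_quant=True,
)

# Placeholder repo ID; substitute the actual model repository.
model = AutoModelForCausalLM.from_pretrained(
    "your-namespace/falcon-model",
    quantization_config=bnb_config,
    device_map="auto",
)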