{
  "_name_or_path": "EleutherAI/pythia-12b",
  "architectures": [
    "GPTNeoXForCausalLM"
  ],
  "attention_bias": true,
  "attention_dropout": 0.0,
  "bos_token_id": 0,
  "classifier_dropout": 0.1,
  "eos_token_id": 0,
  "hidden_act": "gelu",
  "hidden_dropout": 0.0,
  "hidden_size": 5120,
  "initializer_range": 0.02,
  "intermediate_size": 20480,
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 2048,
  "model_type": "gpt_neox",
  "num_attention_heads": 40,
  "num_hidden_layers": 36,
  "partial_rotary_factor": 0.25,
  "quantization_config": {
    "amp": false,
    "autoround_version": "0.4.1",
    "batch_size": 4,
    "bits": 4,
    "damp_percent": 0.01,
    "data_type": "int",
    "desc_act": false,
    "enable_minmax_tuning": true,
    "enable_norm_bias_tuning": false,
    "enable_quanted_input": true,
    "gradient_accumulate_steps": 1,
    "group_size": 128,
    "iters": 200,
    "low_gpu_mem_usage": false,
    "lr": 0.005,
    "minmax_lr": 0.005,
    "nsamples": 128,
    "quant_method": "gptq",
    "scale_dtype": "torch.float16",
    "seqlen": 512,
    "sym": false,
    "to_quant_block_names": [
      [
        "gpt_neox.layers.0",
        "gpt_neox.layers.1",
        "gpt_neox.layers.2",
        "gpt_neox.layers.3",
        "gpt_neox.layers.4",
        "gpt_neox.layers.5",
        "gpt_neox.layers.6",
        "gpt_neox.layers.7",
        "gpt_neox.layers.8",
        "gpt_neox.layers.9",
        "gpt_neox.layers.10",
        "gpt_neox.layers.11",
        "gpt_neox.layers.12",
        "gpt_neox.layers.13",
        "gpt_neox.layers.14",
        "gpt_neox.layers.15",
        "gpt_neox.layers.16",
        "gpt_neox.layers.17",
        "gpt_neox.layers.18",
        "gpt_neox.layers.19",
        "gpt_neox.layers.20",
        "gpt_neox.layers.21",
        "gpt_neox.layers.22",
        "gpt_neox.layers.23",
        "gpt_neox.layers.24",
        "gpt_neox.layers.25",
        "gpt_neox.layers.26",
        "gpt_neox.layers.27",
        "gpt_neox.layers.28",
        "gpt_neox.layers.29",
        "gpt_neox.layers.30",
        "gpt_neox.layers.31",
        "gpt_neox.layers.32",
        "gpt_neox.layers.33",
        "gpt_neox.layers.34",
        "gpt_neox.layers.35"
      ]
    ],
    "true_sequential": false
  },
  "rope_scaling": null,
  "rope_theta": 10000,
  "rotary_emb_base": 10000,
  "rotary_pct": 0.25,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.47.0",
  "use_cache": true,
  "use_parallel_residual": true,
  "vocab_size": 50688
}
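A checkpoint carrying this config (4-bit, GPTQ-format weights exported by AutoRound, per quant_method and autoround_version above) can typically be loaded through the standard transformers API when a GPTQ-capable backend such as gptqmodel or auto-gptq is installed. The sketch below is illustrative only: the local path "./pythia-12b-int4-autoround" is a placeholder, not something defined by this config.

# Minimal loading sketch (assumption: transformers plus a GPTQ backend
# such as gptqmodel or auto-gptq are installed; the path below is a
# hypothetical location of a checkpoint that ships this config.json).
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "./pythia-12b-int4-autoround"  # placeholder path

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(
    model_id,
    device_map="auto",   # place the quantized GPTNeoX blocks on available devices
    torch_dtype="auto",  # defer dtype selection to the checkpoint metadata
)

inputs = tokenizer("Pythia-12B is", return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))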
|
|