|
{
  "_name_or_path": "meta-llama/Llama-3.2-1B",
  "architectures": [
    "LlamaForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": 128001,
  "head_dim": 64,
  "hidden_act": "silu",
  "hidden_size": 2048,
  "id2label": {
    "0": "0",
    "1": "1",
    "2": "2",
    "3": "3",
    "4": "4",
    "5": "5",
    "6": "6",
    "7": "7",
    "8": "8",
    "9": "9",
    "10": "10",
    "11": "11",
    "12": "12",
    "13": "13",
    "14": "14",
    "15": "15",
    "16": "16",
    "17": "17",
    "18": "18",
    "19": "19",
    "20": "20",
    "21": "21",
    "22": "22",
    "23": "23",
    "24": "24",
    "25": "25",
    "26": "26",
    "27": "27",
    "28": "28"
  },
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "label2id": {
    "0": 0,
    "1": 1,
    "2": 2,
    "3": 3,
    "4": 4,
    "5": 5,
    "6": 6,
    "7": 7,
    "8": 8,
    "9": 9,
    "10": 10,
    "11": 11,
    "12": 12,
    "13": 13,
    "14": 14,
    "15": 15,
    "16": 16,
    "17": 17,
    "18": 18,
    "19": 19,
    "20": 20,
    "21": 21,
    "22": 22,
    "23": 23,
    "24": 24,
    "25": 25,
    "26": 26,
    "27": 27,
    "28": 28
  },
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 32,
  "num_hidden_layers": 16,
  "num_key_value_heads": 8,
  "pad_token_id": 128001,
  "pretraining_tp": 1,
  "problem_type": "single_label_classification",
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.48.1",
  "use_cache": true,
  "vocab_size": 128256
}
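
This is the `config.json` of a Llama-3.2-1B backbone fine-tuned as a 29-way single-label sequence classifier (`LlamaForSequenceClassification`, `problem_type: "single_label_classification"`). As a minimal sketch only, the snippet below shows how a checkpoint carrying this config could be loaded for inference with `transformers`; the `./checkpoint` path and the input string are placeholders, not part of the original file.

```python
# Minimal sketch: run inference with a checkpoint that ships this config.
# Assumptions: torch and transformers are installed, and "./checkpoint" is
# a placeholder directory holding this config.json plus fine-tuned weights.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

model = AutoModelForSequenceClassification.from_pretrained(
    "./checkpoint",               # placeholder path
    torch_dtype=torch.bfloat16,   # matches "torch_dtype": "bfloat16"
)
tokenizer = AutoTokenizer.from_pretrained("meta-llama/Llama-3.2-1B")
tokenizer.pad_token = tokenizer.eos_token  # config sets pad_token_id = eos (128001)

inputs = tokenizer("example input text", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits        # shape (1, 29): one score per label
pred = logits.argmax(dim=-1).item()
print(model.config.id2label[pred])         # id2label keys become ints on load
```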
|
|