{
  "_name_or_path": "mistralai/Mistral-7B-v0.1",
  "architectures": [
    "MistralForSequenceClassification"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "id2label": {
    "0": "loc1lay1",
    "1": "loc1lay2",
    "2": "loc1lay3",
    "3": "loc1lay4",
    "4": "loc2lay1",
    "5": "loc2lay2",
    "6": "loc2lay3",
    "7": "loc2lay4",
    "8": "loc3lay1",
    "9": "loc3lay2",
    "10": "loc3lay3",
    "11": "loc3lay4",
    "12": "loc4lay1",
    "13": "loc4lay2",
    "14": "loc4lay3",
    "15": "loc4lay4",
    "16": "loc5+",
    "17": "loc1",
    "18": "loc2",
    "19": "loc3",
    "20": "loc4",
    "21": "nfw",
    "22": "tfw",
    "23": "fwc",
    "24": "fwp",
    "25": "lay1",
    "26": "lay2",
    "27": "lay3",
    "28": "lay4"
  },
  "initializer_range": 0.02,
  "intermediate_size": 14336,
  "label2id": {
    "fwc": 23,
    "fwp": 24,
    "lay1": 25,
    "lay2": 26,
    "lay3": 27,
    "lay4": 28,
    "loc1": 17,
    "loc1lay1": 0,
    "loc1lay2": 1,
    "loc1lay3": 2,
    "loc1lay4": 3,
    "loc2": 18,
    "loc2lay1": 4,
    "loc2lay2": 5,
    "loc2lay3": 6,
    "loc2lay4": 7,
    "loc3": 19,
    "loc3lay1": 8,
    "loc3lay2": 9,
    "loc3lay3": 10,
    "loc3lay4": 11,
    "loc4": 20,
    "loc4lay1": 12,
    "loc4lay2": 13,
    "loc4lay3": 14,
    "loc4lay4": 15,
    "loc5+": 16,
    "nfw": 21,
    "tfw": 22
  },
  "max_position_embeddings": 32768,
  "model_type": "mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-05,
  "rope_theta": 10000.0,
  "sliding_window": 4096,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.42.0.dev0",
  "use_cache": true,
  "vocab_size": 32000
}