{
"model_type": "mistral",
"base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.3",
"task": "CAUSAL_LM",
"peft_type": "LORA",
"adapter_config": "adapter_config.json",
"tokenizer_class": "MistralTokenizer",
"hidden_size": 4096,
"num_attention_heads": 32,
"num_hidden_layers": 28,
"vocab_size": 50257
}
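
Note that this file mixes PEFT adapter fields (peft_type, task_type) with base-model architecture fields (hidden_size, num_attention_heads, num_hidden_layers, vocab_size). The architecture values can be cross-checked against the published base model config. Below is a minimal sketch of such a check; the local directory name mistral-lora is a hypothetical placeholder, and it assumes transformers is installed with network access to the Hugging Face Hub:

import json

from transformers import AutoConfig

# Parse the JSON file shown above (hypothetical local path).
with open("mistral-lora/config.json") as f:
    cfg = json.load(f)

# Fetch the published config of the base model and compare the architecture
# fields; a mismatch usually means the file was written against a different
# base checkpoint.
base = AutoConfig.from_pretrained(cfg["base_model_name_or_path"])
for key in ("hidden_size", "num_attention_heads", "num_hidden_layers", "vocab_size"):
    assert cfg[key] == getattr(base, key), f"{key}: {cfg[key]} vs {getattr(base, key)}"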