{
"model_type": "mistral",
"base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.3",
"task": "CAUSAL_LM",
"peft_type": "LORA",
"adapter_config": "adapter_config.json",
"tokenizer_class": "LlamaTokenizer",
"hidden_size": 4096,
"num_attention_heads": 32,
"num_hidden_layers": 32,
"vocab_size": 32768
}