Files changed (1)
  1. config.json +5 -1
config.json CHANGED
@@ -3,6 +3,10 @@
   "architectures": [
     "LlamaForCausalLM"
   ],
+  "auto_map": {
+    "AutoModelForCausalLM": "modeling_llama3.LlamaForConditionalGeneration",
+    "LlamaForCausalLM": "modeling_llama3.LlamaForConditionalGeneration"
+  },
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 128000,
@@ -25,4 +29,4 @@
   "transformers_version": "4.40.2",
   "use_cache": false,
   "vocab_size": 128256
-}
+}
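With this auto_map entry in place, loading the checkpoint through the Auto classes with trust_remote_code=True makes transformers import modeling_llama3.py from the repository and instantiate the mapped LlamaForConditionalGeneration instead of the built-in LlamaForCausalLM. A minimal sketch of such a load; the repo id is a placeholder, not the actual repository:

from transformers import AutoModelForCausalLM

# Placeholder repo id; point this at the repository that ships
# config.json together with modeling_llama3.py.
model = AutoModelForCausalLM.from_pretrained(
    "org/llama3-checkpoint",
    trust_remote_code=True,  # required so auto_map resolves to the custom class
)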