michaelbenayoun committed
Commit: cb8448f
Parent: 606afc3

Upload LlamaForCausalLM

Files changed (3):
  1. config.json +5 -5
  2. generation_config.json +7 -0
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,16 +1,16 @@
 {
-  "_name_or_path": "michaelbenayoun/llama-2-tiny-16layers-random",
+  "_name_or_path": "michaelbenayoun/llama-2-tiny-4kv-heads-4layers-random",
   "architectures": [
-    "LlamaModel"
+    "LlamaForCausalLM"
   ],
   "attention_bias": false,
   "attention_dropout": 0.0,
   "bos_token_id": 1,
   "eos_token_id": 2,
   "hidden_act": "silu",
-  "hidden_size": 32,
+  "hidden_size": 128,
   "initializer_range": 0.02,
-  "intermediate_size": 32,
+  "intermediate_size": 128,
   "is_decoder": true,
   "max_position_embeddings": 4096,
   "model_type": "llama",
@@ -24,7 +24,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": "4.40.0.dev0",
+  "transformers_version": "4.40.0",
   "use_cache": true,
   "vocab_size": 32000
 }
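The substantive changes: the checkpoint is re-exported as `LlamaForCausalLM` (so it now carries an LM head), `hidden_size` and `intermediate_size` grow from 32 to 128, and `_name_or_path` now points at the repo it was exported from. A minimal loading sketch, assuming the standard `transformers` API (the repo id is taken from `_name_or_path` above):

```python
# Minimal sketch, assuming the transformers library and Hub access;
# the repo id comes from "_name_or_path" in the diff above.
from transformers import AutoConfig, AutoModelForCausalLM

repo_id = "michaelbenayoun/llama-2-tiny-4kv-heads-4layers-random"

config = AutoConfig.from_pretrained(repo_id)
print(config.architectures)  # ["LlamaForCausalLM"]
print(config.hidden_size)    # 128

# "architectures" now names the causal-LM class, so the checkpoint includes
# lm_head weights and loads cleanly with the causal-LM auto class.
model = AutoModelForCausalLM.from_pretrained(repo_id)
print(f"{sum(p.numel() for p in model.parameters()):,} parameters")
```

With the previous `LlamaModel` export, the same `AutoModelForCausalLM` call would have had to newly initialize the missing `lm_head` weights (transformers warns about this); the re-export avoids that.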
generation_config.json ADDED
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "pad_token_id": 0,
+  "transformers_version": "4.40.0"
+}
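`generation_config.json` is new in this commit. `_from_model_config: true` records that these defaults (BOS/EOS/pad token ids) were derived from `config.json` rather than set by hand, and `model.generate()` picks them up automatically. A small sketch of reading the file directly, assuming the standard `transformers` API:

```python
# Minimal sketch, assuming the transformers library; GenerationConfig.from_pretrained
# reads the generation_config.json added in this commit.
from transformers import GenerationConfig

gen_config = GenerationConfig.from_pretrained(
    "michaelbenayoun/llama-2-tiny-4kv-heads-4layers-random"
)
print(gen_config.bos_token_id)  # 1
print(gen_config.eos_token_id)  # 2
print(gen_config.pad_token_id)  # 0
```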
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fb426491748f8f503eb347e0b534e3ad8a3a2bf9b17ad7bb58f380b67b3a2999
-size 4186920
+oid sha256:20a817fc0897bdaf27e8a20fdfb8189c32c11d7ce158324a8c7c5fcc9dadcd50
+size 34153080
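The LFS pointer swap reflects the re-export: the file grows from about 4.2 MB to about 34.2 MB. A rough consistency check from the config alone: with `tie_word_embeddings: false`, the input embedding and LM head are two separate 32000 × 128 float32 matrices, i.e. 2 × 32000 × 128 × 4 = 32,768,000 bytes, which already accounts for most of the 34,153,080-byte file; the transformer layers (four, going by the repo name) and the safetensors header make up the rest. A sketch that checks the parameter count against the downloaded file, assuming the `safetensors` and `huggingface_hub` APIs:

```python
# Rough sanity check of the new checkpoint size, assuming the safetensors and
# huggingface_hub libraries; the repo id comes from config.json above.
from huggingface_hub import hf_hub_download
from safetensors import safe_open

path = hf_hub_download(
    "michaelbenayoun/llama-2-tiny-4kv-heads-4layers-random", "model.safetensors"
)

total_params = 0
with safe_open(path, framework="np") as f:
    for name in f.keys():
        total_params += f.get_tensor(name).size  # numpy array element count

# At float32 (4 bytes/param), the tensor data should account for almost all of
# the 34,153,080-byte LFS size; the remainder is the safetensors header.
print(f"{total_params:,} params ≈ {total_params * 4:,} bytes of tensor data")
```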