Safetensors · English

HXLee committed
Commit 9c1d5dc
Parent: f26c65e

upload model files

This view is limited to 50 files because the commit contains too many changes.
Files changed (50); a loading sketch for these checkpoints follows the list:
  1. Llama-2-13b-w2a16g128/config.json +28 -0
  2. Llama-2-13b-w2a16g128/model-00001-of-00007.safetensors +3 -0
  3. Llama-2-13b-w2a16g128/model-00002-of-00007.safetensors +3 -0
  4. Llama-2-13b-w2a16g128/model-00003-of-00007.safetensors +3 -0
  5. Llama-2-13b-w2a16g128/model-00004-of-00007.safetensors +3 -0
  6. Llama-2-13b-w2a16g128/model-00005-of-00007.safetensors +3 -0
  7. Llama-2-13b-w2a16g128/model-00006-of-00007.safetensors +3 -0
  8. Llama-2-13b-w2a16g128/model-00007-of-00007.safetensors +3 -0
  9. Llama-2-13b-w2a16g128/model.safetensors.index.json +0 -0
  10. Llama-2-13b-w2a16g128/special_tokens_map.json +30 -0
  11. Llama-2-13b-w2a16g128/tokenizer.model +3 -0
  12. Llama-2-13b-w2a16g128/tokenizer_config.json +41 -0
  13. Llama-2-13b-w2a16g64/config.json +28 -0
  14. Llama-2-13b-w2a16g64/model-00001-of-00007.safetensors +3 -0
  15. Llama-2-13b-w2a16g64/model-00002-of-00007.safetensors +3 -0
  16. Llama-2-13b-w2a16g64/model-00003-of-00007.safetensors +3 -0
  17. Llama-2-13b-w2a16g64/model-00004-of-00007.safetensors +3 -0
  18. Llama-2-13b-w2a16g64/model-00005-of-00007.safetensors +3 -0
  19. Llama-2-13b-w2a16g64/model-00006-of-00007.safetensors +3 -0
  20. Llama-2-13b-w2a16g64/model-00007-of-00007.safetensors +3 -0
  21. Llama-2-13b-w2a16g64/model.safetensors.index.json +0 -0
  22. Llama-2-13b-w2a16g64/special_tokens_map.json +30 -0
  23. Llama-2-13b-w2a16g64/tokenizer.model +3 -0
  24. Llama-2-13b-w2a16g64/tokenizer_config.json +41 -0
  25. Llama-2-13b-w3a16/config.json +28 -0
  26. Llama-2-13b-w3a16/model-00001-of-00007.safetensors +3 -0
  27. Llama-2-13b-w3a16/model-00002-of-00007.safetensors +3 -0
  28. Llama-2-13b-w3a16/model-00003-of-00007.safetensors +3 -0
  29. Llama-2-13b-w3a16/model-00004-of-00007.safetensors +3 -0
  30. Llama-2-13b-w3a16/model-00005-of-00007.safetensors +3 -0
  31. Llama-2-13b-w3a16/model-00006-of-00007.safetensors +3 -0
  32. Llama-2-13b-w3a16/model-00007-of-00007.safetensors +3 -0
  33. Llama-2-13b-w3a16/model.safetensors.index.json +0 -0
  34. Llama-2-13b-w3a16/special_tokens_map.json +30 -0
  35. Llama-2-13b-w3a16/tokenizer.model +3 -0
  36. Llama-2-13b-w3a16/tokenizer_config.json +41 -0
  37. Llama-2-13b-w3a16g128/config.json +28 -0
  38. Llama-2-13b-w3a16g128/model-00001-of-00007.safetensors +3 -0
  39. Llama-2-13b-w3a16g128/model-00002-of-00007.safetensors +3 -0
  40. Llama-2-13b-w3a16g128/model-00003-of-00007.safetensors +3 -0
  41. Llama-2-13b-w3a16g128/model-00004-of-00007.safetensors +3 -0
  42. Llama-2-13b-w3a16g128/model-00005-of-00007.safetensors +3 -0
  43. Llama-2-13b-w3a16g128/model-00006-of-00007.safetensors +3 -0
  44. Llama-2-13b-w3a16g128/model-00007-of-00007.safetensors +3 -0
  45. Llama-2-13b-w3a16g128/model.safetensors.index.json +0 -0
  46. Llama-2-13b-w3a16g128/special_tokens_map.json +30 -0
  47. Llama-2-13b-w3a16g128/tokenizer.model +3 -0
  48. Llama-2-13b-w3a16g128/tokenizer_config.json +41 -0
  49. Llama-2-13b-w4a16/config.json +28 -0
  50. Llama-2-13b-w4a16/model-00001-of-00007.safetensors +3 -0
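
Each subfolder is a self-contained checkpoint. The folder names appear to encode the quantization recipe: wN presumably means N-bit weights, a16 16-bit activations, and gG per-group quantization with group size G (no g suffix suggesting per-channel). Every config.json below declares a plain float16 LlamaForCausalLM with no quantization_config, so these look like fake-quantized weights stored at fp16 width and should load with stock transformers. A minimal loading sketch, assuming a hypothetical repo id:

# Minimal loading sketch. The repo id below is hypothetical; each subfolder
# (e.g. Llama-2-13b-w2a16g128) is treated as a self-contained checkpoint,
# as its config.json declares a plain float16 LlamaForCausalLM.
from transformers import AutoModelForCausalLM, AutoTokenizer

repo = "HXLee/llama-2-13b-quant"      # hypothetical repo id
sub = "Llama-2-13b-w2a16g128"         # 2-bit weights, group size 128 (per the naming convention)

model = AutoModelForCausalLM.from_pretrained(
    repo,
    subfolder=sub,
    torch_dtype="auto",
    device_map="auto",                # requires accelerate
)
tokenizer = AutoTokenizer.from_pretrained(repo, subfolder=sub)
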
Llama-2-13b-w2a16g128/config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "/mnt/bn/mayuexiao/hf_weights/llama2/Llama-2-13b-hf",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 13824,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 40,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.35.2",
+  "use_cache": true,
+  "vocab_size": 32000
+}
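
This config matches the stock Llama-2-13b-hf release (hidden size 5120, 40 layers, 40 heads, full multi-head KV). As a quick sanity check, the parameter count implied by these fields comes out at roughly 13B; a back-of-the-envelope sketch:

# Sanity check: parameter count implied by the config fields above.
h, i, L, v = 5120, 13824, 40, 32000
attn = 4 * h * h                      # q/k/v/o projections (no bias; 40 KV heads = full MHA)
mlp = 3 * h * i                       # gate/up/down projections
norms = 2 * h                         # two RMSNorms per layer
per_layer = attn + mlp + norms
total = L * per_layer + 2 * v * h + h # + untied embeddings, lm_head, final norm
print(f"{total / 1e9:.2f}B parameters")  # ~13.02B
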
Llama-2-13b-w2a16g128/model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:13e1ec84c74eda2973fdfe2310b4af0ae004cfcd2f36c3534ba146cefab41576
+size 4983326192
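
These +3/-0 entries are Git LFS pointer files, not the tensors themselves: each records the LFS spec version, the SHA-256 of the real shard, and its byte size. Once a shard is materialized locally (e.g. via git lfs pull), it can be checked against its pointer; a small verification sketch using the oid and size from this first shard:

# Verify a downloaded shard against its LFS pointer (assumes the shard
# has been materialized locally, e.g. with `git lfs pull`).
import hashlib
from pathlib import Path

path = Path("Llama-2-13b-w2a16g128/model-00001-of-00007.safetensors")
expected = "13e1ec84c74eda2973fdfe2310b4af0ae004cfcd2f36c3534ba146cefab41576"

digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert path.stat().st_size == 4983326192, "size mismatch vs. pointer"
assert digest.hexdigest() == expected, "sha256 mismatch vs. pointer"
print("shard matches its LFS pointer")
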
Llama-2-13b-w2a16g128/model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:901ea53c22c730810d1faed44a91d547193df44c480de09e372d0941f53db3aa
+size 4980494112
Llama-2-13b-w2a16g128/model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4f3d08cead5236729a4199f64225bc5c47720ff149b68793b532b7216c4c35fe
+size 4957591640
Llama-2-13b-w2a16g128/model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:abe3ad789173fcda3b77cd5a0b96f034a02172f682942df6c9164ab6f5a8bf17
+size 4994854504
Llama-2-13b-w2a16g128/model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c479e6e2d5dd1daebf6091e54ed780884424fe4636aa4d73d742d9a9b492ed52
+size 4889944904
Llama-2-13b-w2a16g128/model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3dc28263baee05193562cbac2550f8240ec1e672d2a88b1c2c12fc16e44b4336
+size 4957601976
Llama-2-13b-w2a16g128/model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b04d678a6ce75afbfa55e0fc3b90694b3a2cb18d55fa6758961739ca569bc86
+size 863955112
Llama-2-13b-w2a16g128/model.safetensors.index.json ADDED
The diff for this file is too large to render.
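
The index itself is not rendered here, but the standard safetensors index format is a small JSON object with a metadata.total_size field and a weight_map from tensor names to shard filenames. A minimal inspection sketch, assuming this file follows that usual transformers layout:

# Inspect the (unrendered) shard index; assumes the standard
# model.safetensors.index.json layout used by transformers.
import json
from collections import Counter

with open("Llama-2-13b-w2a16g128/model.safetensors.index.json") as f:
    index = json.load(f)

print(index["metadata"]["total_size"])        # total bytes across all 7 shards
shard_of = index["weight_map"]                # tensor name -> shard filename
print(shard_of["model.embed_tokens.weight"])  # e.g. "model-00001-of-00007.safetensors"
print(Counter(shard_of.values()))             # how many tensors live in each shard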
 
Llama-2-13b-w2a16g128/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
Llama-2-13b-w2a16g128/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
Llama-2-13b-w2a16g128/tokenizer_config.json ADDED
@@ -0,0 +1,41 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<unk>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
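
tokenizer.model is the SentencePiece vocabulary (identical across the variants shown, per the shared sha256), and the config above pins the three special tokens. The huge model_max_length (1000000000000000019884624838656) is just transformers' VERY_LARGE_INTEGER sentinel, i.e. int(1e30) after float rounding, meaning "no tokenizer-side limit"; the effective context window comes from max_position_embeddings (4096) in config.json. A quick usage sketch against a local copy of this folder:

# Load the tokenizer from a local checkout of one variant folder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("Llama-2-13b-w2a16g128")
ids = tok("Hello, world!").input_ids
print(ids[0] == tok.bos_token_id)   # True: add_bos_token is set above
print(tok.decode(ids))              # roughly "<s> Hello, world!"
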
Llama-2-13b-w2a16g64/config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "/mnt/bn/mayuexiao/hf_weights/llama2/Llama-2-13b-hf",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 13824,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 40,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.35.2",
+  "use_cache": true,
+  "vocab_size": 32000
+}
Llama-2-13b-w2a16g64/model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8963eb88a45d429bb8c0265b7d091c680ce97d6b0b517244ae3a17ab00363bd0
+size 4991179816
Llama-2-13b-w2a16g64/model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e2a05a736f91f2f7f41eefb2839347375cfdc7a8155e6f269d433fa4d17b3539
+size 4955480808
Llama-2-13b-w2a16g64/model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:debf1cb0cadd37090637c7711f4c6de37536f18e13488499b424086b4e804883
+size 4968486648
Llama-2-13b-w2a16g64/model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:87c089d280de0cb7ced12a0ec319be821995148e3595aa6672708f112214895c
+size 4901403016
Llama-2-13b-w2a16g64/model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a6dafc77f9a34cf0172b8266c1144abfd8e1273b0e09d112ef421cbce3b9ecd
+size 4968486648
Llama-2-13b-w2a16g64/model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b097d99196b5ba68e461c4a86ec33e471770898356b2ccee0f9ab8b10f83dd39
+size 4901403016
Llama-2-13b-w2a16g64/model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:12d311714c99152ea8b25f8228b36139d3b0fd4e466bab8d4fc6cf62497d992b
+size 1337821536
Llama-2-13b-w2a16g64/model.safetensors.index.json ADDED
The diff for this file is too large to render.
 
Llama-2-13b-w2a16g64/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
Llama-2-13b-w2a16g64/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
Llama-2-13b-w2a16g64/tokenizer_config.json ADDED
@@ -0,0 +1,41 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<unk>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
Llama-2-13b-w3a16/config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "/mnt/bn/mayuexiao/hf_weights/llama2/Llama-2-13b-hf",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 13824,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 40,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.35.2",
+  "use_cache": true,
+  "vocab_size": 32000
+}
Llama-2-13b-w3a16/model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e6df9d5507c231935aab9efcc7cacdeebe0dcd377d6fb76c4956b518cc06483
+size 4975193912
Llama-2-13b-w3a16/model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:659dd0e677dcbc99b32f8fcb2da00be89c69bbcb1a130415ec90d2e3484665f9
+size 4967434072
Llama-2-13b-w3a16/model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4b2c065798233d6a8a93841c60b9ef0482e2e442d9849cef1ee6300f98db18b3
+size 4930723440
Llama-2-13b-w3a16/model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:409aab8209e0c87b6f2ecb89f229439c5fa7789088023c3cc8613e6371c64488
+size 4894054424
Llama-2-13b-w3a16/model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e9fcc90d38bd83b7946dccb601d5c80aea34c967ea4472e28fd266b09da4c6c1
+size 4967434216
Llama-2-13b-w3a16/model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ccc9c82822fdb258ab5aeb7b9c58253877e9b518d17cc5d94668555cac645817
+size 4930723440
Llama-2-13b-w3a16/model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6a01bd2a7fe36789bcd8fdafab01ee64b84967f980201a2c57bbc1070c609d71
+size 574230872
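
Summing the seven pointer sizes gives this variant's on-disk footprint, about 30.2 GB, i.e. the same order as the ~26 GB fp16 original rather than a packed 3-bit file (~5 GB), which again suggests fake-quantized fp16 storage (plus whatever quantization parameters ride along). A one-liner over the sizes above:

# On-disk footprint of Llama-2-13b-w3a16, from the LFS pointer sizes above.
sizes = [4975193912, 4967434072, 4930723440, 4894054424,
         4967434216, 4930723440, 574230872]
print(f"{sum(sizes) / 1e9:.2f} GB")   # 30.24 GB
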
Llama-2-13b-w3a16/model.safetensors.index.json ADDED
The diff for this file is too large to render.
 
Llama-2-13b-w3a16/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
Llama-2-13b-w3a16/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
Llama-2-13b-w3a16/tokenizer_config.json ADDED
@@ -0,0 +1,41 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<unk>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
Llama-2-13b-w3a16g128/config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "/mnt/bn/mayuexiao/hf_weights/llama2/Llama-2-13b-hf",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 13824,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 40,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.35.2",
+  "use_cache": true,
+  "vocab_size": 32000
+}
Llama-2-13b-w3a16g128/model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f62cfa7fd30adc908bef0dc285e07378f987b24f5de8ae3c334d38c313a07f4c
+size 4983326192
Llama-2-13b-w3a16g128/model-00002-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1818b028686c9efdc7662a72682addcb3a7377898c03fbafc32042b7fe65cec7
+size 4980494112
Llama-2-13b-w3a16g128/model-00003-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f9f5ad6876868079e53816b7e4986be548a70c9069f79fc35f89e4db623c377
+size 4957591640
Llama-2-13b-w3a16g128/model-00004-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f9ff147a6ba2d20ac36a992369f79bb26ac7507ce64af7dbcf467d5fada7f217
+size 4994854504
Llama-2-13b-w3a16g128/model-00005-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:492c297fa570f13e77dbae81e53b59867639688f8fdfe54005322fba2b377671
+size 4889944904
Llama-2-13b-w3a16g128/model-00006-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c07abf2779af6f049edfce6500dfb505546802cfdba6c7daa3251407f3e46a9b
+size 4957601976
Llama-2-13b-w3a16g128/model-00007-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:143d64b60981047158eef2654f26a44c6bbac95bc995f8bd26bc12256d78ffc6
+size 863955112
Llama-2-13b-w3a16g128/model.safetensors.index.json ADDED
The diff for this file is too large to render.
 
Llama-2-13b-w3a16g128/special_tokens_map.json ADDED
@@ -0,0 +1,30 @@
+{
+  "bos_token": {
+    "content": "<s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "eos_token": {
+    "content": "</s>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  },
+  "pad_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": false,
+    "rstrip": false,
+    "single_word": false
+  },
+  "unk_token": {
+    "content": "<unk>",
+    "lstrip": false,
+    "normalized": true,
+    "rstrip": false,
+    "single_word": false
+  }
+}
Llama-2-13b-w3a16g128/tokenizer.model ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
Llama-2-13b-w3a16g128/tokenizer_config.json ADDED
@@ -0,0 +1,41 @@
+{
+  "add_bos_token": true,
+  "add_eos_token": false,
+  "added_tokens_decoder": {
+    "0": {
+      "content": "<unk>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "1": {
+      "content": "<s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    },
+    "2": {
+      "content": "</s>",
+      "lstrip": false,
+      "normalized": true,
+      "rstrip": false,
+      "single_word": false,
+      "special": true
+    }
+  },
+  "bos_token": "<s>",
+  "clean_up_tokenization_spaces": false,
+  "eos_token": "</s>",
+  "legacy": false,
+  "model_max_length": 1000000000000000019884624838656,
+  "pad_token": "<unk>",
+  "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
+  "tokenizer_class": "LlamaTokenizer",
+  "unk_token": "<unk>",
+  "use_default_system_prompt": false
+}
Llama-2-13b-w4a16/config.json ADDED
@@ -0,0 +1,28 @@
+{
+  "_name_or_path": "/mnt/bn/mayuexiao/hf_weights/llama2/Llama-2-13b-hf",
+  "architectures": [
+    "LlamaForCausalLM"
+  ],
+  "attention_bias": false,
+  "bos_token_id": 1,
+  "eos_token_id": 2,
+  "hidden_act": "silu",
+  "hidden_size": 5120,
+  "initializer_range": 0.02,
+  "intermediate_size": 13824,
+  "max_position_embeddings": 4096,
+  "model_type": "llama",
+  "num_attention_heads": 40,
+  "num_hidden_layers": 40,
+  "num_key_value_heads": 40,
+  "pad_token_id": 0,
+  "pretraining_tp": 1,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": null,
+  "rope_theta": 10000.0,
+  "tie_word_embeddings": false,
+  "torch_dtype": "float16",
+  "transformers_version": "4.35.2",
+  "use_cache": true,
+  "vocab_size": 32000
+}
Llama-2-13b-w4a16/model-00001-of-00007.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a439d6ecd91267bf6f345d430ad5cb0dd2bb6d530332bf27d5559a30835fd939
+size 4975193912