RaymondAISG committed
Commit 79b43c3 · 1 parent: c47e2cf

Upload LlamaForCausalLM

README.md CHANGED
@@ -1,11 +1,11 @@
 ---
-license: llama3
 language:
 - en
 - id
 - ta
 - th
 - vi
+license: llama3
 ---
 # LLaMA3 8B SEA-LIONv2
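The README change only moves the `license: llama3` key below the language list inside the YAML front matter; the parsed metadata is identical either way. A quick way to confirm this is to parse the card with `huggingface_hub` (a minimal sketch, assuming a local checkout of the repo):

```python
from huggingface_hub import ModelCard

# Parse the YAML front matter of a local README.md (path assumed).
card = ModelCard.load("README.md")

# Key order inside the front matter does not affect the parsed fields.
print(card.data.license)   # "llama3"
print(card.data.language)  # ["en", "id", "ta", "th", "vi"]
```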
 
config.json CHANGED
@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "/data/users/raymond/hr2",
   "architectures": [
     "LlamaForCausalLM"
   ],
@@ -8,20 +9,26 @@
   "eos_token_id": 128001,
   "hidden_act": "silu",
   "hidden_size": 4096,
+  "init_device": "cpu",
   "initializer_range": 0.02,
   "intermediate_size": 14336,
   "max_position_embeddings": 8192,
+  "mlp_bias": false,
   "model_type": "llama",
+  "name": "hf_causal_lm",
   "num_attention_heads": 32,
   "num_hidden_layers": 32,
   "num_key_value_heads": 8,
+  "pretrained": true,
+  "pretrained_model_name_or_path": "/mnt/fs-arf-01/cpt_checkpoints/meta-llama/Meta-Llama-3-8B",
   "pretraining_tp": 1,
   "rms_norm_eps": 1e-05,
   "rope_scaling": null,
   "rope_theta": 500000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.43.1",
+  "transformers_version": "4.42.3",
+  "trust_remote_code": true,
   "use_cache": true,
   "use_flash_attention_2": true,
   "vocab_size": 128256
generation_config.json CHANGED
@@ -1,10 +1,6 @@
 {
+  "_from_model_config": true,
   "bos_token_id": 128000,
   "eos_token_id": 128001,
-  "do_sample": true,
-  "temperature": 0.6,
-  "max_length": 4096,
-  "top_p": 0.9,
-  "use_cache": true,
-  "transformers_version": "4.43.1"
+  "transformers_version": "4.42.3"
 }
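The new generation config drops the old sampling defaults (`do_sample`, `temperature: 0.6`, `top_p: 0.9`, `max_length: 4096`), so `generate()` now falls back to greedy decoding unless sampling arguments are supplied explicitly. Continuing the loading sketch above, the previous behaviour can be reproduced per call:

```python
# Pass the removed defaults explicitly at call time.
inputs = tokenizer("Tell me about Southeast Asia.", return_tensors="pt")
output = model.generate(
    **inputs,
    do_sample=True,
    temperature=0.6,
    top_p=0.9,
    max_new_tokens=256,  # the old config capped total length at 4096 instead
)
print(tokenizer.decode(output[0], skip_special_tokens=True))
```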
model-00001-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:185d4ce48d5864c692b111ffb4bbceabad5bae2adefb76d1e50eeeaa5547c08e
-size 4976698176
+oid sha256:43d9d421ba1c3534b3783a8bf06aab1bc9721fb0a9ef6d2752a645010068f4ff
+size 4976698672
model-00002-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b93c796116c80fcc485c267862f505a146daba0ee423b401986d99e64b4209a8
-size 4999802096
+oid sha256:57eb52fdf10560a5aff0aac261b50b6c2cb2251338c86267de8df192e20cee35
+size 4999802720
model-00003-of-00004.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:766bdc6a11e609e25735fc2e0e59d342ed560602f73d956badfca023a7d98cb6
-size 4915915576
+oid sha256:f0dcccfd909f49333a915617c85fdf9854704641a8e46926a433299ea5ee6189
+size 4915916176
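Each shard update rewrites only the Git LFS pointer: a new SHA-256 digest and a size a few hundred bytes larger, which is consistent with additional metadata in the safetensors header. A downloaded shard can be checked against its pointer locally (the path is assumed to be a local copy of the new revision):

```python
import hashlib
import os

# Local copy of one shard from the new revision (path assumed).
path = "model-00001-of-00004.safetensors"

# Stream the file so multi-GB shards don't need to fit in memory.
digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print(digest.hexdigest())     # should match the pointer's "oid sha256:" value
print(os.path.getsize(path))  # should match the pointer's "size" value
```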