Model save
- README.md +135 -0
- all_results.json +21 -0
- eval_results.json +16 -0
- generation_config.json +10 -0
- model-00001-of-00003.safetensors +3 -0
- model-00002-of-00003.safetensors +3 -0
- model-00003-of-00003.safetensors +3 -0
- model.safetensors.index.json +298 -0
- train_results.json +8 -0
- trainer_state.json +0 -0
README.md
ADDED
@@ -0,0 +1,135 @@
---
tags:
- generated_from_trainer
datasets:
- arrow
model-index:
- name: PE_Llama_2_7b_sft_rlhf
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# PE_Llama_2_7b_sft_rlhf

This model was trained from scratch on the arrow dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0093
- Rewards/chosen: -7.0331
- Rewards/rejected: -29.3861
- Rewards/accuracies: 0.9916
- Rewards/margins: 22.3530
- Logps/rejected: -118.6765
- Logps/chosen: -90.0482
- Logits/rejected: -1.3495
- Logits/chosen: -1.4301

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 3e-07
- train_batch_size: 1
- eval_batch_size: 2
- seed: 42
- distributed_type: multi-GPU
- num_devices: 8
- gradient_accumulation_steps: 8
- total_train_batch_size: 64
- total_eval_batch_size: 16
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 3

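A quick sanity check (not part of the auto-generated card): the derived totals above follow from the per-device settings, and they also predict the step counts seen in the results table below. A minimal sketch:

```python
# Sanity check: how the reported totals combine (values taken from this card
# and from train_results.json further down this commit).
train_batch_size = 1              # per-device micro-batch
num_devices = 8
gradient_accumulation_steps = 8
train_samples = 140201
num_epochs = 3

total_train_batch_size = train_batch_size * num_devices * gradient_accumulation_steps
assert total_train_batch_size == 64  # matches the card

steps_per_epoch = train_samples // total_train_batch_size   # ~2190
total_steps = steps_per_epoch * num_epochs                  # ~6570, consistent with
print(steps_per_epoch, total_steps)                         # the table's final step 6500
```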
### Training results

| Training Loss | Epoch | Step | Validation Loss | Rewards/chosen | Rewards/rejected | Rewards/accuracies | Rewards/margins | Logps/rejected | Logps/chosen | Logits/rejected | Logits/chosen |
|:-------------:|:-----:|:----:|:---------------:|:--------------:|:----------------:|:------------------:|:---------------:|:--------------:|:------------:|:---------------:|:-------------:|
| 0.5577 | 0.05 | 100 | 0.5743 | -0.0890 | -0.3528 | 0.9022 | 0.2638 | -60.6098 | -76.1599 | -1.3076 | -1.3716 |
| 0.1502 | 0.09 | 200 | 0.1761 | -0.5864 | -2.4951 | 0.9804 | 1.9086 | -64.8944 | -77.1548 | -1.3397 | -1.4091 |
| 0.0367 | 0.14 | 300 | 0.0640 | -1.1815 | -4.8466 | 0.9860 | 3.6651 | -69.5975 | -78.3450 | -1.3685 | -1.4428 |
| 0.0195 | 0.18 | 400 | 0.0419 | -1.6306 | -6.4153 | 0.9832 | 4.7847 | -72.7348 | -79.2431 | -1.3875 | -1.4648 |
| 0.0128 | 0.23 | 500 | 0.0321 | -2.1351 | -8.0395 | 0.9860 | 5.9044 | -75.9833 | -80.2522 | -1.4045 | -1.4847 |
| 0.0078 | 0.27 | 600 | 0.0294 | -2.8235 | -9.6992 | 0.9860 | 6.8757 | -79.3027 | -81.6291 | -1.4163 | -1.4986 |
| 0.0074 | 0.32 | 700 | 0.0177 | -2.7718 | -10.7772 | 0.9832 | 8.0054 | -81.4587 | -81.5256 | -1.4251 | -1.5079 |
| 0.0051 | 0.37 | 800 | 0.0144 | -2.4805 | -11.3179 | 0.9832 | 8.8374 | -82.5400 | -80.9429 | -1.4353 | -1.5181 |
| 0.003 | 0.41 | 900 | 0.0160 | -2.8352 | -12.2817 | 0.9860 | 9.4465 | -84.4677 | -81.6525 | -1.4421 | -1.5261 |
| 0.0031 | 0.46 | 1000 | 0.0122 | -2.8873 | -13.0359 | 0.9860 | 10.1487 | -85.9761 | -81.7565 | -1.4514 | -1.5345 |
| 0.0107 | 0.5 | 1100 | 0.0110 | -2.8383 | -13.0784 | 0.9888 | 10.2401 | -86.0611 | -81.6586 | -1.4506 | -1.5334 |
| 0.0065 | 0.55 | 1200 | 0.0130 | -3.3682 | -13.9857 | 0.9860 | 10.6176 | -87.8757 | -82.7184 | -1.4603 | -1.5441 |
| 0.0054 | 0.59 | 1300 | 0.0123 | -3.6048 | -14.8999 | 0.9888 | 11.2951 | -89.7041 | -83.1916 | -1.4576 | -1.5403 |
| 0.0048 | 0.64 | 1400 | 0.0091 | -3.3176 | -15.0505 | 0.9860 | 11.7329 | -90.0053 | -82.6172 | -1.4598 | -1.5418 |
| 0.0017 | 0.68 | 1500 | 0.0087 | -3.3081 | -15.5642 | 0.9860 | 12.2561 | -91.0327 | -82.5982 | -1.4671 | -1.5494 |
| 0.0042 | 0.73 | 1600 | 0.0091 | -3.5315 | -16.2814 | 0.9860 | 12.7498 | -92.4670 | -83.0451 | -1.4722 | -1.5560 |
| 0.0035 | 0.78 | 1700 | 0.0078 | -3.1483 | -15.9040 | 0.9916 | 12.7557 | -91.7122 | -82.2786 | -1.4664 | -1.5481 |
| 0.0094 | 0.82 | 1800 | 0.0071 | -2.9923 | -15.9175 | 0.9888 | 12.9251 | -91.7391 | -81.9667 | -1.4572 | -1.5390 |
| 0.0024 | 0.87 | 1900 | 0.0066 | -2.9861 | -16.5288 | 0.9916 | 13.5427 | -92.9619 | -81.9542 | -1.4690 | -1.5511 |
| 0.0067 | 0.91 | 2000 | 0.0076 | -3.2851 | -16.0301 | 0.9916 | 12.7450 | -91.9644 | -82.5522 | -1.4577 | -1.5391 |
| 0.0044 | 0.96 | 2100 | 0.0064 | -3.3414 | -16.8752 | 0.9944 | 13.5338 | -93.6545 | -82.6647 | -1.4617 | -1.5440 |
| 0.0025 | 1.0 | 2200 | 0.0060 | -3.1967 | -16.8252 | 0.9944 | 13.6285 | -93.5546 | -82.3753 | -1.4630 | -1.5444 |
| 0.0023 | 1.05 | 2300 | 0.0063 | -3.5595 | -17.6105 | 0.9916 | 14.0510 | -95.1253 | -83.1011 | -1.4645 | -1.5467 |
| 0.0055 | 1.1 | 2400 | 0.0070 | -4.0460 | -18.6662 | 0.9944 | 14.6201 | -97.2365 | -84.0740 | -1.4606 | -1.5441 |
| 0.0052 | 1.14 | 2500 | 0.0067 | -3.3185 | -17.6030 | 0.9944 | 14.2844 | -95.1102 | -82.6191 | -1.4679 | -1.5507 |
| 0.0023 | 1.19 | 2600 | 0.0064 | -3.4071 | -18.2406 | 0.9944 | 14.8335 | -96.3854 | -82.7962 | -1.4667 | -1.5501 |
| 0.0044 | 1.23 | 2700 | 0.0090 | -4.3343 | -19.6985 | 0.9916 | 15.3642 | -99.3012 | -84.6506 | -1.4647 | -1.5496 |
| 0.0033 | 1.28 | 2800 | 0.0113 | -4.6406 | -19.7381 | 0.9916 | 15.0976 | -99.3805 | -85.2631 | -1.4569 | -1.5408 |
| 0.0023 | 1.32 | 2900 | 0.0070 | -3.9341 | -19.4138 | 0.9944 | 15.4797 | -98.7318 | -83.8501 | -1.4612 | -1.5449 |
| 0.0034 | 1.37 | 3000 | 0.0066 | -3.7082 | -18.5209 | 0.9916 | 14.8127 | -96.9460 | -83.3983 | -1.4587 | -1.5399 |
| 0.0033 | 1.42 | 3100 | 0.0064 | -3.6694 | -18.6338 | 0.9972 | 14.9644 | -97.1717 | -83.3208 | -1.4480 | -1.5297 |
| 0.0034 | 1.46 | 3200 | 0.0059 | -3.7376 | -19.1673 | 0.9944 | 15.4298 | -98.2389 | -83.4571 | -1.4483 | -1.5307 |
| 0.0019 | 1.51 | 3300 | 0.0061 | -3.9735 | -19.7068 | 0.9916 | 15.7332 | -99.3178 | -83.9291 | -1.4459 | -1.5285 |
| 0.0011 | 1.55 | 3400 | 0.0066 | -4.3242 | -20.4806 | 0.9944 | 16.1564 | -100.8654 | -84.6304 | -1.4412 | -1.5245 |
| 0.0001 | 1.6 | 3500 | 0.0093 | -4.7847 | -21.0204 | 0.9916 | 16.2357 | -101.9450 | -85.5513 | -1.4308 | -1.5145 |
| 0.0037 | 1.64 | 3600 | 0.0076 | -4.5704 | -20.9595 | 0.9888 | 16.3891 | -101.8232 | -85.1228 | -1.4373 | -1.5209 |
| 0.003 | 1.69 | 3700 | 0.0087 | -4.7965 | -21.6522 | 0.9916 | 16.8557 | -103.2086 | -85.5750 | -1.4300 | -1.5148 |
| 0.0056 | 1.73 | 3800 | 0.0093 | -5.1262 | -22.2592 | 0.9916 | 17.1330 | -104.4226 | -86.2344 | -1.4213 | -1.5058 |
| 0.0024 | 1.78 | 3900 | 0.0113 | -5.8601 | -23.7638 | 0.9888 | 17.9037 | -107.4319 | -87.7022 | -1.4014 | -1.4856 |
| 0.0034 | 1.83 | 4000 | 0.0056 | -4.7077 | -22.5264 | 0.9944 | 17.8187 | -104.9570 | -85.3974 | -1.4252 | -1.5084 |
| 0.0044 | 1.87 | 4100 | 0.0055 | -4.2834 | -21.6926 | 0.9972 | 17.4092 | -103.2894 | -84.5488 | -1.4342 | -1.5165 |
| 0.0001 | 1.92 | 4200 | 0.0068 | -5.2542 | -23.4097 | 0.9916 | 18.1555 | -106.7237 | -86.4905 | -1.4219 | -1.5052 |
| 0.0044 | 1.96 | 4300 | 0.0075 | -5.2492 | -23.2824 | 0.9888 | 18.0332 | -106.4690 | -86.4804 | -1.4098 | -1.4921 |
| 0.0022 | 2.01 | 4400 | 0.0082 | -5.6200 | -23.9342 | 0.9944 | 18.3142 | -107.7725 | -87.2220 | -1.4087 | -1.4906 |
| 0.0033 | 2.05 | 4500 | 0.0091 | -5.9484 | -24.5607 | 0.9916 | 18.6123 | -109.0256 | -87.8787 | -1.4036 | -1.4857 |
| 0.0022 | 2.1 | 4600 | 0.0091 | -6.0570 | -25.0424 | 0.9916 | 18.9853 | -109.9890 | -88.0961 | -1.3980 | -1.4804 |
| 0.0011 | 2.15 | 4700 | 0.0100 | -6.3832 | -25.6097 | 0.9888 | 19.2265 | -111.1236 | -88.7484 | -1.3907 | -1.4732 |
| 0.0065 | 2.19 | 4800 | 0.0073 | -5.7898 | -25.1360 | 0.9916 | 19.3462 | -110.1763 | -87.5616 | -1.4006 | -1.4827 |
| 0.0022 | 2.24 | 4900 | 0.0091 | -6.1379 | -25.9334 | 0.9916 | 19.7955 | -111.7710 | -88.2578 | -1.3907 | -1.4732 |
| 0.0022 | 2.28 | 5000 | 0.0147 | -7.3728 | -27.6080 | 0.9888 | 20.2352 | -115.1203 | -90.7277 | -1.3738 | -1.4564 |
| 0.0033 | 2.33 | 5100 | 0.0120 | -6.9056 | -27.3057 | 0.9888 | 20.4002 | -114.5157 | -89.7931 | -1.3780 | -1.4604 |
| 0.0043 | 2.37 | 5200 | 0.0097 | -6.5949 | -27.6154 | 0.9888 | 21.0205 | -115.1350 | -89.1717 | -1.3772 | -1.4593 |
| 0.0022 | 2.42 | 5300 | 0.0152 | -7.5122 | -28.6578 | 0.9888 | 21.1456 | -117.2199 | -91.0065 | -1.3647 | -1.4465 |
| 0.0022 | 2.46 | 5400 | 0.0149 | -7.7072 | -29.4467 | 0.9888 | 21.7395 | -118.7977 | -91.3965 | -1.3515 | -1.4331 |
| 0.0001 | 2.51 | 5500 | 0.0137 | -7.6730 | -29.4473 | 0.9916 | 21.7743 | -118.7989 | -91.3281 | -1.3483 | -1.4293 |
| 0.0022 | 2.56 | 5600 | 0.0133 | -7.6989 | -29.6686 | 0.9916 | 21.9697 | -119.2415 | -91.3798 | -1.3485 | -1.4299 |
| 0.0011 | 2.6 | 5700 | 0.0095 | -6.8592 | -28.9672 | 0.9888 | 22.1080 | -117.8385 | -89.7003 | -1.3553 | -1.4366 |
| 0.0054 | 2.65 | 5800 | 0.0077 | -6.4136 | -28.4244 | 0.9916 | 22.0108 | -116.7531 | -88.8093 | -1.3637 | -1.4450 |
| 0.0033 | 2.69 | 5900 | 0.0115 | -7.6490 | -30.1521 | 0.9888 | 22.5031 | -120.2085 | -91.2800 | -1.3400 | -1.4208 |
| 0.0011 | 2.74 | 6000 | 0.0086 | -6.8537 | -29.1407 | 0.9888 | 22.2870 | -118.1857 | -89.6894 | -1.3510 | -1.4317 |
| 0.0011 | 2.78 | 6100 | 0.0095 | -7.1201 | -29.6324 | 0.9888 | 22.5123 | -119.1690 | -90.2221 | -1.3452 | -1.4257 |
| 0.0022 | 2.83 | 6200 | 0.0086 | -6.8942 | -29.1673 | 0.9916 | 22.2731 | -118.2387 | -89.7703 | -1.3531 | -1.4335 |
| 0.0013 | 2.88 | 6300 | 0.0086 | -6.8366 | -29.0334 | 0.9916 | 22.1968 | -117.9710 | -89.6551 | -1.3543 | -1.4349 |
| 0.0033 | 2.92 | 6400 | 0.0096 | -7.0073 | -29.2913 | 0.9916 | 22.2840 | -118.4869 | -89.9966 | -1.3494 | -1.4303 |
| 0.0011 | 2.97 | 6500 | 0.0092 | -6.9778 | -29.3366 | 0.9916 | 22.3588 | -118.5774 | -89.9376 | -1.3494 | -1.4297 |


### Framework versions

- Transformers 4.35.0
- Pytorch 2.1.1+cu121
- Datasets 2.14.6
- Tokenizers 0.14.1
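For completeness, a minimal usage sketch, assuming the checkpoint is consumed like any Llama-2-style causal LM; the model path below is a placeholder, not a confirmed Hub repo id:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

model_path = "PE_Llama_2_7b_sft_rlhf"  # hypothetical local dir or Hub repo id
tokenizer = AutoTokenizer.from_pretrained(model_path)
model = AutoModelForCausalLM.from_pretrained(model_path, device_map="auto")

inputs = tokenizer("Hello, how are you?", return_tensors="pt").to(model.device)
# generation_config.json in this commit supplies the sampling defaults
# (do_sample=True, temperature=0.6, top_p=0.9).
outputs = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```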
all_results.json
ADDED
@@ -0,0 +1,21 @@
{
    "epoch": 3.0,
    "eval_logits/chosen": -1.4300650358200073,
    "eval_logits/rejected": -1.3494709730148315,
    "eval_logps/chosen": -90.04821014404297,
    "eval_logps/rejected": -118.67648315429688,
    "eval_loss": 0.009333602152764797,
    "eval_rewards/accuracies": 0.9916201233863831,
    "eval_rewards/chosen": -7.033120155334473,
    "eval_rewards/margins": 22.35300636291504,
    "eval_rewards/rejected": -29.386123657226562,
    "eval_runtime": 171.6798,
    "eval_samples": 2862,
    "eval_samples_per_second": 16.671,
    "eval_steps_per_second": 1.043,
    "train_loss": 0.019671504644591626,
    "train_runtime": 60672.7973,
    "train_samples": 140201,
    "train_samples_per_second": 6.932,
    "train_steps_per_second": 0.108
}
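As a rough cross-check (assuming the usual Trainer bookkeeping of samples × epochs ÷ runtime), the reported throughput is internally consistent:

```python
# Throughput cross-check; formula is an assumption about how the HF Trainer
# derives train_samples_per_second, not taken from this repo's code.
train_samples, epochs, runtime = 140201, 3.0, 60672.7973
print(train_samples * epochs / runtime)   # ~6.93, matching the reported 6.932
```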
eval_results.json
ADDED
@@ -0,0 +1,16 @@
{
    "epoch": 3.0,
    "eval_logits/chosen": -1.4300650358200073,
    "eval_logits/rejected": -1.3494709730148315,
    "eval_logps/chosen": -90.04821014404297,
    "eval_logps/rejected": -118.67648315429688,
    "eval_loss": 0.009333602152764797,
    "eval_rewards/accuracies": 0.9916201233863831,
    "eval_rewards/chosen": -7.033120155334473,
    "eval_rewards/margins": 22.35300636291504,
    "eval_rewards/rejected": -29.386123657226562,
    "eval_runtime": 171.6798,
    "eval_samples": 2862,
    "eval_samples_per_second": 16.671,
    "eval_steps_per_second": 1.043
}
generation_config.json
ADDED
@@ -0,0 +1,10 @@
{
  "bos_token_id": 1,
  "do_sample": true,
  "eos_token_id": 2,
  "max_length": 4096,
  "pad_token_id": 0,
  "temperature": 0.6,
  "top_p": 0.9,
  "transformers_version": "4.35.0"
}
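These values act as the model's default sampling settings at generation time. A small sketch of inspecting and overriding them, assuming the file sits alongside the weights (the path below is a placeholder):

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("PE_Llama_2_7b_sft_rlhf")  # hypothetical path
print(gen_cfg.do_sample, gen_cfg.temperature, gen_cfg.top_p)  # True 0.6 0.9

# Per-call kwargs take precedence over the stored defaults, e.g.:
# outputs = model.generate(**inputs, generation_config=gen_cfg, temperature=1.0)
```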
model-00001-of-00003.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:449f4d14fdc10733750e6f9398de4e78c77c1a5d2039710d1029a1eb01222e6a
size 4938985352
model-00002-of-00003.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0b9591e3562c5a8e7c9b56d2b8ba636095707cf3806deb8d7bcc1b17b4918501
size 4947390880
model-00003-of-00003.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d3ce4fd687e1615b0709403209c3a519ca34a9bf34388b53e3feedd3b7f5ec38
size 3590488816
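The three entries above are Git LFS pointers (spec v1), not the weights themselves. A sketch of verifying a downloaded shard against its pointer's oid and size:

```python
import hashlib
import os

def verify_shard(path: str, expected_oid: str, expected_size: int) -> bool:
    """Check a downloaded shard against its Git LFS pointer fields."""
    if os.path.getsize(path) != expected_size:
        return False
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
            h.update(chunk)
    return h.hexdigest() == expected_oid

# e.g. verify_shard("model-00001-of-00003.safetensors",
#                   "449f4d14fdc10733750e6f9398de4e78c77c1a5d2039710d1029a1eb01222e6a",
#                   4938985352)
```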
model.safetensors.index.json
ADDED
@@ -0,0 +1,298 @@
{
  "metadata": {
    "total_size": 13476831232
  },
  "weight_map": {
    "lm_head.weight": "model-00003-of-00003.safetensors",
    "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
    "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
    "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
    "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
    "model.norm.weight": "model-00003-of-00003.safetensors"
  }
}
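The weight map lets a reader pull a single tensor without loading all ~13.5 GB. A minimal sketch using the safetensors library, assuming the shards have been downloaded locally:

```python
import json
from safetensors import safe_open

# Resolve which shard holds a given tensor, then read just that tensor.
with open("model.safetensors.index.json") as f:
    index = json.load(f)

name = "model.embed_tokens.weight"
shard = index["weight_map"][name]          # -> "model-00001-of-00003.safetensors"
with safe_open(shard, framework="pt") as f:
    tensor = f.get_tensor(name)
print(shard, tuple(tensor.shape))
```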
train_results.json
ADDED
@@ -0,0 +1,8 @@
{
    "epoch": 3.0,
    "train_loss": 0.019671504644591626,
    "train_runtime": 60672.7973,
    "train_samples": 140201,
    "train_samples_per_second": 6.932,
    "train_steps_per_second": 0.108
}
trainer_state.json
ADDED
The diff for this file is too large to render.