daemonkiller committed
Commit 5c7f1f8 • 1 Parent(s): 695e934
End of training
- README.md +207 -9
- config.json +1 -1
- generation_config.json +1 -1
- model.safetensors +1 -1
- runs/Dec14_08-16-04_d966d963c9e6/events.out.tfevents.1702541764.d966d963c9e6.1203.0 +3 -0
- runs/Dec14_08-19-59_d966d963c9e6/events.out.tfevents.1702542003.d966d963c9e6.2803.0 +3 -0
- special_tokens_map.json +21 -3
- training_args.bin +1 -1
README.md
CHANGED
@@ -17,9 +17,9 @@ should probably proofread and complete it, then remove this comment. -->
 
 This model is a fine-tuned version of [t5-small](https://huggingface.co/t5-small) on an unknown dataset.
 It achieves the following results on the evaluation set:
-- Loss:
-- Bleu:
-- Gen Len:
+- Loss: 0.0130
+- Bleu: 100.0
+- Gen Len: 13.0
 
 ## Model description
 
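The Loss, Bleu, and Gen Len figures filled in above come from the Trainer's evaluation loop; the card does not say which dataset or which BLEU implementation produced them. A minimal sketch, assuming sacrebleu via the `evaluate` library and placeholder predictions/references, of how these two metrics are usually computed for a seq2seq fine-tune:

```python
# Hedged sketch: recompute Bleu / Gen Len the way the HF seq2seq examples typically do.
# The predictions and references below are placeholders, not the model's actual eval data.
import evaluate
import numpy as np

predictions = ["a placeholder translation"]    # decoded model outputs
references = [["a placeholder translation"]]   # one list of reference strings per prediction

sacrebleu = evaluate.load("sacrebleu")
bleu = sacrebleu.compute(predictions=predictions, references=references)["score"]

# "Gen Len" in Trainer-generated cards is usually the mean length of the generated
# sequences with padding excluded; approximated here with a whitespace split.
gen_len = np.mean([len(p.split()) for p in predictions])

print(f"Bleu: {bleu:.1f}, Gen Len: {gen_len:.1f}")
```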
@@ -44,20 +44,218 @@ The following hyperparameters were used during training:
 - seed: 42
 - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
 - lr_scheduler_type: linear
-- num_epochs:
+- num_epochs: 200
 - mixed_precision_training: Native AMP
 
 ### Training results
 
-| Training Loss | Epoch | Step | Validation Loss | Bleu
-
-| No log | 1.0 | 1 | 5.3172 | 0.0
-| No log | 2.0 | 2 | 5.2144 | 0.0
+| Training Loss | Epoch | Step | Validation Loss | Bleu | Gen Len |
+|:-------------:|:-----:|:----:|:---------------:|:------:|:-------:|
+| No log | 1.0 | 1 | 5.3172 | 0.0 | 19.0 |
+| No log | 2.0 | 2 | 5.2144 | 0.0 | 19.0 |
+| No log | 3.0 | 3 | 5.0701 | 0.0 | 19.0 |
+| No log | 4.0 | 4 | 4.9647 | 0.0 | 19.0 |
+| No log | 5.0 | 5 | 4.8118 | 0.0 | 19.0 |
+| No log | 6.0 | 6 | 4.6864 | 0.0 | 19.0 |
+| No log | 7.0 | 7 | 4.5744 | 0.0 | 19.0 |
+| No log | 8.0 | 8 | 4.5744 | 0.0 | 19.0 |
+| No log | 9.0 | 9 | 4.3982 | 0.0 | 19.0 |
+| No log | 10.0 | 10 | 4.2774 | 0.0 | 19.0 |
+| No log | 11.0 | 11 | 4.2774 | 0.0 | 19.0 |
+| No log | 12.0 | 12 | 4.1643 | 0.0 | 19.0 |
+| No log | 13.0 | 13 | 4.0517 | 0.0 | 19.0 |
+| No log | 14.0 | 14 | 4.0517 | 0.0 | 19.0 |
+| No log | 15.0 | 15 | 3.9429 | 0.0 | 19.0 |
+| No log | 16.0 | 16 | 3.8468 | 0.0 | 19.0 |
+| No log | 17.0 | 17 | 3.7367 | 0.0 | 19.0 |
+| No log | 18.0 | 18 | 3.5792 | 0.0 | 19.0 |
+| No log | 19.0 | 19 | 3.4629 | 0.0 | 19.0 |
+| No log | 20.0 | 20 | 3.3615 | 0.0 | 19.0 |
+| No log | 21.0 | 21 | 3.2668 | 0.0 | 19.0 |
+| No log | 22.0 | 22 | 3.1780 | 0.0 | 19.0 |
+| No log | 23.0 | 23 | 3.0935 | 0.0 | 19.0 |
+| No log | 24.0 | 24 | 3.0095 | 0.0 | 19.0 |
+| No log | 25.0 | 25 | 2.9206 | 0.0 | 19.0 |
+| No log | 26.0 | 26 | 2.8406 | 0.0 | 19.0 |
+| No log | 27.0 | 27 | 2.7719 | 0.0 | 19.0 |
+| No log | 28.0 | 28 | 2.7076 | 0.0 | 19.0 |
+| No log | 29.0 | 29 | 2.6483 | 0.0 | 19.0 |
+| No log | 30.0 | 30 | 2.5892 | 0.0 | 19.0 |
+| No log | 31.0 | 31 | 2.5263 | 0.0 | 19.0 |
+| No log | 32.0 | 32 | 2.4594 | 0.0 | 19.0 |
+| No log | 33.0 | 33 | 2.3968 | 0.0 | 19.0 |
+| No log | 34.0 | 34 | 2.3354 | 0.0 | 19.0 |
+| No log | 35.0 | 35 | 2.2768 | 0.0 | 19.0 |
+| No log | 36.0 | 36 | 2.2195 | 0.0 | 19.0 |
+| No log | 37.0 | 37 | 2.1600 | 0.0 | 19.0 |
+| No log | 38.0 | 38 | 2.0993 | 0.0 | 19.0 |
+| No log | 39.0 | 39 | 2.0412 | 0.0 | 19.0 |
+| No log | 40.0 | 40 | 1.9845 | 0.0 | 19.0 |
+| No log | 41.0 | 41 | 1.9296 | 0.0 | 19.0 |
+| No log | 42.0 | 42 | 1.8756 | 0.0 | 19.0 |
+| No log | 43.0 | 43 | 1.8229 | 0.0 | 19.0 |
+| No log | 44.0 | 44 | 1.7675 | 0.0 | 19.0 |
+| No log | 45.0 | 45 | 1.7102 | 1.9525 | 13.0 |
+| No log | 46.0 | 46 | 1.6531 | 1.9525 | 13.0 |
+| No log | 47.0 | 47 | 1.5962 | 1.9525 | 13.0 |
+| No log | 48.0 | 48 | 1.5414 | 0.0 | 4.0 |
+| No log | 49.0 | 49 | 1.4880 | 0.0 | 4.0 |
+| No log | 50.0 | 50 | 1.4359 | 0.0 | 4.0 |
+| No log | 51.0 | 51 | 1.3848 | 0.0 | 4.0 |
+| No log | 52.0 | 52 | 1.3357 | 0.0 | 4.0 |
+| No log | 53.0 | 53 | 1.2897 | 0.0 | 4.0 |
+| No log | 54.0 | 54 | 1.2446 | 1.5757 | 19.0 |
+| No log | 55.0 | 55 | 1.2016 | 1.7438 | 16.0 |
+| No log | 56.0 | 56 | 1.1599 | 1.7438 | 16.0 |
+| No log | 57.0 | 57 | 1.1233 | 1.7438 | 16.0 |
+| No log | 58.0 | 58 | 1.0875 | 1.7438 | 16.0 |
+| No log | 59.0 | 59 | 1.0514 | 1.7438 | 16.0 |
+| No log | 60.0 | 60 | 1.0139 | 1.7438 | 16.0 |
+| No log | 61.0 | 61 | 0.9764 | 1.7438 | 16.0 |
+| No log | 62.0 | 62 | 0.9385 | 1.7438 | 16.0 |
+| No log | 63.0 | 63 | 0.9002 | 1.5757 | 18.0 |
+| No log | 64.0 | 64 | 0.8637 | 1.5757 | 18.0 |
+| No log | 65.0 | 65 | 0.8288 | 1.7438 | 16.0 |
+| No log | 66.0 | 66 | 0.7973 | 1.7438 | 16.0 |
+| No log | 67.0 | 67 | 0.7670 | 1.7438 | 16.0 |
+| No log | 68.0 | 68 | 0.7366 | 1.7438 | 16.0 |
+| No log | 69.0 | 69 | 0.7065 | 1.7438 | 16.0 |
+| No log | 70.0 | 70 | 0.6762 | 1.7438 | 16.0 |
+| No log | 71.0 | 71 | 0.6464 | 1.7438 | 16.0 |
+| No log | 72.0 | 72 | 0.6207 | 1.7438 | 16.0 |
+| No log | 73.0 | 73 | 0.5970 | 1.3214 | 19.0 |
+| No log | 74.0 | 74 | 0.5729 | 1.3214 | 19.0 |
+| No log | 75.0 | 75 | 0.5499 | 1.3214 | 19.0 |
+| No log | 76.0 | 76 | 0.5274 | 1.3214 | 19.0 |
+| No log | 77.0 | 77 | 0.5048 | 1.194 | 19.0 |
+| No log | 78.0 | 78 | 0.4828 | 1.194 | 19.0 |
+| No log | 79.0 | 79 | 0.4609 | 100.0 | 13.0 |
+| No log | 80.0 | 80 | 0.4389 | 100.0 | 13.0 |
+| No log | 81.0 | 81 | 0.4186 | 100.0 | 13.0 |
+| No log | 82.0 | 82 | 0.3998 | 100.0 | 13.0 |
+| No log | 83.0 | 83 | 0.3815 | 100.0 | 13.0 |
+| No log | 84.0 | 84 | 0.3634 | 100.0 | 13.0 |
+| No log | 85.0 | 85 | 0.3460 | 100.0 | 13.0 |
+| No log | 86.0 | 86 | 0.3291 | 100.0 | 13.0 |
+| No log | 87.0 | 87 | 0.3125 | 100.0 | 13.0 |
+| No log | 88.0 | 88 | 0.2972 | 100.0 | 13.0 |
+| No log | 89.0 | 89 | 0.2824 | 100.0 | 13.0 |
+| No log | 90.0 | 90 | 0.2668 | 100.0 | 13.0 |
+| No log | 91.0 | 91 | 0.2523 | 100.0 | 13.0 |
+| No log | 92.0 | 92 | 0.2386 | 100.0 | 13.0 |
+| No log | 93.0 | 93 | 0.2272 | 100.0 | 13.0 |
+| No log | 94.0 | 94 | 0.2159 | 100.0 | 13.0 |
+| No log | 95.0 | 95 | 0.2043 | 100.0 | 13.0 |
+| No log | 96.0 | 96 | 0.1932 | 100.0 | 13.0 |
+| No log | 97.0 | 97 | 0.1827 | 100.0 | 13.0 |
+| No log | 98.0 | 98 | 0.1725 | 100.0 | 13.0 |
+| No log | 99.0 | 99 | 0.1638 | 100.0 | 13.0 |
+| No log | 100.0 | 100 | 0.1554 | 100.0 | 13.0 |
+| No log | 101.0 | 101 | 0.1473 | 100.0 | 13.0 |
+| No log | 102.0 | 102 | 0.1401 | 100.0 | 13.0 |
+| No log | 103.0 | 103 | 0.1334 | 100.0 | 13.0 |
+| No log | 104.0 | 104 | 0.1273 | 100.0 | 13.0 |
+| No log | 105.0 | 105 | 0.1212 | 100.0 | 13.0 |
+| No log | 106.0 | 106 | 0.1157 | 100.0 | 13.0 |
+| No log | 107.0 | 107 | 0.1100 | 100.0 | 13.0 |
+| No log | 108.0 | 108 | 0.1046 | 100.0 | 13.0 |
+| No log | 109.0 | 109 | 0.0998 | 100.0 | 13.0 |
+| No log | 110.0 | 110 | 0.0950 | 100.0 | 13.0 |
+| No log | 111.0 | 111 | 0.0950 | 100.0 | 13.0 |
+| No log | 112.0 | 112 | 0.0907 | 100.0 | 13.0 |
+| No log | 113.0 | 113 | 0.0865 | 100.0 | 13.0 |
+| No log | 114.0 | 114 | 0.0825 | 100.0 | 13.0 |
+| No log | 115.0 | 115 | 0.0789 | 100.0 | 13.0 |
+| No log | 116.0 | 116 | 0.0755 | 100.0 | 13.0 |
+| No log | 117.0 | 117 | 0.0722 | 100.0 | 13.0 |
+| No log | 118.0 | 118 | 0.0691 | 100.0 | 13.0 |
+| No log | 119.0 | 119 | 0.0666 | 100.0 | 13.0 |
+| No log | 120.0 | 120 | 0.0642 | 100.0 | 13.0 |
+| No log | 121.0 | 121 | 0.0619 | 100.0 | 13.0 |
+| No log | 122.0 | 122 | 0.0596 | 100.0 | 13.0 |
+| No log | 123.0 | 123 | 0.0575 | 100.0 | 13.0 |
+| No log | 124.0 | 124 | 0.0554 | 100.0 | 13.0 |
+| No log | 125.0 | 125 | 0.0536 | 100.0 | 13.0 |
+| No log | 126.0 | 126 | 0.0517 | 100.0 | 13.0 |
+| No log | 127.0 | 127 | 0.0499 | 100.0 | 13.0 |
+| No log | 128.0 | 128 | 0.0484 | 100.0 | 13.0 |
+| No log | 129.0 | 129 | 0.0468 | 100.0 | 13.0 |
+| No log | 130.0 | 130 | 0.0454 | 100.0 | 13.0 |
+| No log | 131.0 | 131 | 0.0439 | 100.0 | 13.0 |
+| No log | 132.0 | 132 | 0.0425 | 100.0 | 13.0 |
+| No log | 133.0 | 133 | 0.0411 | 100.0 | 13.0 |
+| No log | 134.0 | 134 | 0.0398 | 100.0 | 13.0 |
+| No log | 135.0 | 135 | 0.0386 | 100.0 | 13.0 |
+| No log | 136.0 | 136 | 0.0375 | 100.0 | 13.0 |
+| No log | 137.0 | 137 | 0.0365 | 100.0 | 13.0 |
+| No log | 138.0 | 138 | 0.0354 | 100.0 | 13.0 |
+| No log | 139.0 | 139 | 0.0344 | 100.0 | 13.0 |
+| No log | 140.0 | 140 | 0.0335 | 100.0 | 13.0 |
+| No log | 141.0 | 141 | 0.0326 | 100.0 | 13.0 |
+| No log | 142.0 | 142 | 0.0317 | 100.0 | 13.0 |
+| No log | 143.0 | 143 | 0.0309 | 100.0 | 13.0 |
+| No log | 144.0 | 144 | 0.0300 | 100.0 | 13.0 |
+| No log | 145.0 | 145 | 0.0292 | 100.0 | 13.0 |
+| No log | 146.0 | 146 | 0.0284 | 100.0 | 13.0 |
+| No log | 147.0 | 147 | 0.0276 | 100.0 | 13.0 |
+| No log | 148.0 | 148 | 0.0268 | 100.0 | 13.0 |
+| No log | 149.0 | 149 | 0.0261 | 100.0 | 13.0 |
+| No log | 150.0 | 150 | 0.0255 | 100.0 | 13.0 |
+| No log | 151.0 | 151 | 0.0248 | 100.0 | 13.0 |
+| No log | 152.0 | 152 | 0.0242 | 100.0 | 13.0 |
+| No log | 153.0 | 153 | 0.0236 | 100.0 | 13.0 |
+| No log | 154.0 | 154 | 0.0230 | 100.0 | 13.0 |
+| No log | 155.0 | 155 | 0.0224 | 100.0 | 13.0 |
+| No log | 156.0 | 156 | 0.0219 | 100.0 | 13.0 |
+| No log | 157.0 | 157 | 0.0214 | 100.0 | 13.0 |
+| No log | 158.0 | 158 | 0.0210 | 100.0 | 13.0 |
+| No log | 159.0 | 159 | 0.0205 | 100.0 | 13.0 |
+| No log | 160.0 | 160 | 0.0200 | 100.0 | 13.0 |
+| No log | 161.0 | 161 | 0.0196 | 100.0 | 13.0 |
+| No log | 162.0 | 162 | 0.0192 | 100.0 | 13.0 |
+| No log | 163.0 | 163 | 0.0189 | 100.0 | 13.0 |
+| No log | 164.0 | 164 | 0.0185 | 100.0 | 13.0 |
+| No log | 165.0 | 165 | 0.0182 | 100.0 | 13.0 |
+| No log | 166.0 | 166 | 0.0178 | 100.0 | 13.0 |
+| No log | 167.0 | 167 | 0.0175 | 100.0 | 13.0 |
+| No log | 168.0 | 168 | 0.0172 | 100.0 | 13.0 |
+| No log | 169.0 | 169 | 0.0170 | 100.0 | 13.0 |
+| No log | 170.0 | 170 | 0.0167 | 100.0 | 13.0 |
+| No log | 171.0 | 171 | 0.0165 | 100.0 | 13.0 |
+| No log | 172.0 | 172 | 0.0162 | 100.0 | 13.0 |
+| No log | 173.0 | 173 | 0.0160 | 100.0 | 13.0 |
+| No log | 174.0 | 174 | 0.0158 | 100.0 | 13.0 |
+| No log | 175.0 | 175 | 0.0156 | 100.0 | 13.0 |
+| No log | 176.0 | 176 | 0.0154 | 100.0 | 13.0 |
+| No log | 177.0 | 177 | 0.0152 | 100.0 | 13.0 |
+| No log | 178.0 | 178 | 0.0150 | 100.0 | 13.0 |
+| No log | 179.0 | 179 | 0.0148 | 100.0 | 13.0 |
+| No log | 180.0 | 180 | 0.0147 | 100.0 | 13.0 |
+| No log | 181.0 | 181 | 0.0145 | 100.0 | 13.0 |
+| No log | 182.0 | 182 | 0.0144 | 100.0 | 13.0 |
+| No log | 183.0 | 183 | 0.0142 | 100.0 | 13.0 |
+| No log | 184.0 | 184 | 0.0141 | 100.0 | 13.0 |
+| No log | 185.0 | 185 | 0.0139 | 100.0 | 13.0 |
+| No log | 186.0 | 186 | 0.0138 | 100.0 | 13.0 |
+| No log | 187.0 | 187 | 0.0137 | 100.0 | 13.0 |
+| No log | 188.0 | 188 | 0.0136 | 100.0 | 13.0 |
+| No log | 189.0 | 189 | 0.0135 | 100.0 | 13.0 |
+| No log | 190.0 | 190 | 0.0135 | 100.0 | 13.0 |
+| No log | 191.0 | 191 | 0.0134 | 100.0 | 13.0 |
+| No log | 192.0 | 192 | 0.0133 | 100.0 | 13.0 |
+| No log | 193.0 | 193 | 0.0133 | 100.0 | 13.0 |
+| No log | 194.0 | 194 | 0.0132 | 100.0 | 13.0 |
+| No log | 195.0 | 195 | 0.0132 | 100.0 | 13.0 |
+| No log | 196.0 | 196 | 0.0131 | 100.0 | 13.0 |
+| No log | 197.0 | 197 | 0.0131 | 100.0 | 13.0 |
+| No log | 198.0 | 198 | 0.0131 | 100.0 | 13.0 |
+| No log | 199.0 | 199 | 0.0130 | 100.0 | 13.0 |
+| No log | 200.0 | 200 | 0.0130 | 100.0 | 13.0 |
 
 
 ### Framework versions
 
-- Transformers 4.36.
+- Transformers 4.36.1
 - Pytorch 2.1.0+cu118
 - Datasets 2.15.0
 - Tokenizers 0.15.0
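The second hunk pins down the run: 200 epochs, seed 42, Adam with the listed betas and epsilon, a linear schedule, and native AMP. A minimal sketch of Seq2SeqTrainingArguments matching those values; the learning rate, batch sizes, and output directory are not visible in this diff, so the ones below are placeholders:

```python
# Hedged sketch of training arguments matching the card's listed hyperparameters.
# learning_rate, batch sizes, and output_dir are placeholders; they are not shown in this diff.
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="t5-small-finetune",   # placeholder output directory
    num_train_epochs=200,             # num_epochs: 200
    seed=42,                          # seed: 42
    lr_scheduler_type="linear",       # lr_scheduler_type: linear
    adam_beta1=0.9,                   # optimizer: Adam with betas=(0.9, 0.999)
    adam_beta2=0.999,
    adam_epsilon=1e-8,                # epsilon=1e-08
    fp16=True,                        # mixed_precision_training: Native AMP
    predict_with_generate=True,       # needed to report Bleu / Gen Len during eval
    learning_rate=2e-5,               # placeholder, not from the card
)
```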
config.json
CHANGED
@@ -55,7 +55,7 @@
 }
 },
 "torch_dtype": "float32",
-"transformers_version": "4.36.
+"transformers_version": "4.36.1",
 "use_cache": true,
 "vocab_size": 32128
 }
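Only the transformers_version field changes in config.json. For reference, a hedged sketch of reading the fields visible in this hunk once the repository is cloned locally (the path is a placeholder):

```python
# Hedged sketch: inspect the fields visible in this config.json hunk.
# "./checkpoint" is a placeholder for a local clone of this repository.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("./checkpoint")
print(config.torch_dtype)            # float32
print(config.vocab_size)             # 32128
print(config.transformers_version)   # 4.36.1 after this commit
```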
generation_config.json
CHANGED
@@ -2,5 +2,5 @@
 "decoder_start_token_id": 0,
 "eos_token_id": 1,
 "pad_token_id": 0,
-"transformers_version": "4.36.
+"transformers_version": "4.36.1"
 }
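Again only the version string changes here; the rest of the file is T5's usual decoding setup (decoding starts from the pad token, id 0, and stops at the end-of-sequence token, id 1). A minimal equivalent built explicitly:

```python
# Hedged sketch: the generation settings in this file, constructed by hand.
from transformers import GenerationConfig

gen_config = GenerationConfig(
    decoder_start_token_id=0,  # T5 starts decoding from <pad>
    eos_token_id=1,            # </s>
    pad_token_id=0,            # <pad>
)
print(gen_config)
```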
model.safetensors
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:1fbc87a5f76263d6aaa17fa0d8262c32ab4a00b33d43306b23ae1eac737e9fa8
 size 242041896
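The weights themselves are tracked through Git LFS, so the diff only shows the pointer file (spec version, sha256 oid, size). A small stdlib sketch for checking a downloaded model.safetensors against this pointer, assuming the resolved file sits in the working directory:

```python
# Hedged sketch: verify a downloaded model.safetensors against the LFS pointer above.
# Assumes the real (non-pointer) file has been fetched into the current directory.
import hashlib
from pathlib import Path

path = Path("model.safetensors")
digest = hashlib.sha256(path.read_bytes()).hexdigest()

assert path.stat().st_size == 242041896, "size does not match the pointer"
assert digest == "1fbc87a5f76263d6aaa17fa0d8262c32ab4a00b33d43306b23ae1eac737e9fa8", \
    "sha256 does not match the pointer's oid"
print("pointer and file agree")
```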
runs/Dec14_08-16-04_d966d963c9e6/events.out.tfevents.1702541764.d966d963c9e6.1203.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:502452da74d845b1b15f0f9ad16b62b084bf97e3baa6c95cb469aef125bb12ff
+size 42017
runs/Dec14_08-19-59_d966d963c9e6/events.out.tfevents.1702542003.d966d963c9e6.2803.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0871110e95bbc959ba76a422a2ca4afa8d22b045cf55f93dcfa0d247a3118f83
+size 78833
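The two added files are TensorBoard event logs written during the two runs on this machine. A hedged sketch of reading scalars back out with TensorBoard's EventAccumulator; the tag names depend on what the Trainer actually logged, so they are assumptions here and the available tags are listed first:

```python
# Hedged sketch: read scalars from one of the added TensorBoard event files.
# The tag names ("eval/bleu", ...) are assumptions about what was logged.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator("runs/Dec14_08-19-59_d966d963c9e6")
ea.Reload()

print(ea.Tags()["scalars"])            # list the scalar tags actually present
for event in ea.Scalars("eval/bleu"):  # assumed tag; swap in one of the listed tags
    print(event.step, event.value)
```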
special_tokens_map.json
CHANGED
@@ -101,7 +101,25 @@
 "<extra_id_98>",
 "<extra_id_99>"
 ],
-"eos_token":
-
-
+"eos_token": {
+  "content": "</s>",
+  "lstrip": false,
+  "normalized": false,
+  "rstrip": false,
+  "single_word": false
+},
+"pad_token": {
+  "content": "<pad>",
+  "lstrip": false,
+  "normalized": false,
+  "rstrip": false,
+  "single_word": false
+},
+"unk_token": {
+  "content": "<unk>",
+  "lstrip": false,
+  "normalized": false,
+  "rstrip": false,
+  "single_word": false
+}
 }
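The commit expands the special-token entries into full token dicts (content plus lstrip/rstrip/normalized/single_word flags). A hedged look at the same tokens through the tokenizer, using the base t5-small tokenizer since the fine-tuned repo id is not named in this diff:

```python
# Hedged sketch: the special tokens described by special_tokens_map.json,
# inspected through the tokenizer. Uses the base "t5-small" tokenizer because
# this diff does not name the fine-tuned repository itself.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("t5-small")

print(tokenizer.special_tokens_map)                  # eos/unk/pad plus the <extra_id_*> tokens
print(tokenizer.eos_token, tokenizer.eos_token_id)   # </s> 1
print(tokenizer.pad_token, tokenizer.pad_token_id)   # <pad> 0
print(tokenizer.unk_token, tokenizer.unk_token_id)   # <unk> 2
```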
training_args.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:116913ab98a09a1ffb8bd4e0fad6dbefc9709513166268025ec51202f7aba81c
 size 4856
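training_args.bin is the TrainingArguments object the Trainer pickles next to the model; only its LFS oid changes here. A hedged sketch of loading it back, assuming the file is local and the installed transformers version is compatible with the one that wrote it:

```python
# Hedged sketch: training_args.bin is the TrainingArguments object pickled by the Trainer.
# Loading it back requires a compatible transformers install.
import torch

training_args = torch.load("training_args.bin")
print(training_args.num_train_epochs)   # expected: 200, per the model card
print(training_args.seed)               # expected: 42
print(training_args.lr_scheduler_type)  # expected: linear
```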