pritamdeka committed on
Commit
7ee1dd8
1 Parent(s): 4adde1c

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ tokenizer.json filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,73 @@
+ ---
+ language:
+ - en
+ - bgc
+ license: apache-2.0
+ base_model: google/mt5-base
+ tags:
+ - generated_from_trainer
+ metrics:
+ - bleu
+ model-index:
+ - name: tst-translation
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # tst-translation
+
+ This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 2.8421
+ - Bleu: 13.1948
+ - Gen Len: 49.9179
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
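+ Below is a minimal, hedged usage sketch rather than a documented recipe: `MODEL_PATH` is a placeholder for this repository's id or a local checkout, and neither the language pair implied by the `en`/`bgc` metadata tags nor the translation direction is confirmed elsewhere in this card.
+
+ ```python
+ from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
+
+ MODEL_PATH = "MODEL_PATH"  # placeholder: repo id or local path of this model
+
+ tokenizer = AutoTokenizer.from_pretrained(MODEL_PATH)
+ model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_PATH)
+
+ text = "How are you?"  # illustrative source sentence; direction and any task prefix are undocumented
+ inputs = tokenizer(text, return_tensors="pt")
+ outputs = model.generate(**inputs, max_new_tokens=64, num_beams=4)
+ print(tokenizer.decode(outputs[0], skip_special_tokens=True))
+ ```
+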
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.0005
+ - train_batch_size: 8
+ - eval_batch_size: 16
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 20.0
+
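+ A minimal sketch of how the hyperparameters above might map onto `Seq2SeqTrainingArguments`, assuming a standard Transformers seq2seq fine-tuning setup (the training script itself is not part of this upload; the Adam betas and epsilon listed above are already the library defaults):
+
+ ```python
+ from transformers import Seq2SeqTrainingArguments
+
+ training_args = Seq2SeqTrainingArguments(
+     output_dir="tst-translation",
+     learning_rate=5e-4,
+     per_device_train_batch_size=8,
+     per_device_eval_batch_size=16,
+     seed=42,
+     lr_scheduler_type="linear",
+     num_train_epochs=20.0,
+     predict_with_generate=True,  # required so evaluation can report BLEU and generation length
+ )
+ ```
+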
+ ### Training results
+
+ | Training Loss | Epoch | Step | Validation Loss | Bleu | Gen Len |
+ |:-------------:|:-------:|:----:|:---------------:|:-------:|:-------:|
+ | 3.4257 | 1.9900 | 400 | 2.1087 | 4.1008 | 77.8284 |
+ | 1.8571 | 3.9801 | 800 | 1.9292 | 8.6198 | 61.1418 |
+ | 1.2467 | 5.9701 | 1200 | 1.9779 | 10.7074 | 48.3184 |
+ | 0.8749 | 7.9602 | 1600 | 2.0539 | 11.8538 | 49.3483 |
+ | 0.6141 | 9.9502 | 2000 | 2.1948 | 12.4452 | 51.1269 |
+ | 0.4446 | 11.9403 | 2400 | 2.3902 | 12.3052 | 48.0995 |
+ | 0.3251 | 13.9303 | 2800 | 2.5698 | 12.5824 | 49.1244 |
+ | 0.2501 | 15.9204 | 3200 | 2.6631 | 13.0619 | 50.6095 |
+ | 0.1986 | 17.9104 | 3600 | 2.7877 | 13.0557 | 51.1443 |
+ | 0.1692 | 19.9005 | 4000 | 2.8421 | 13.1948 | 49.9179 |
+
+
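+ The Bleu and Gen Len columns are presumably the sacreBLEU score and mean generated length from the generation-based evaluation loop. A minimal sketch of that metric computation with the `evaluate` library (the sentences below are illustrative, not drawn from the actual evaluation set):
+
+ ```python
+ import evaluate
+
+ sacrebleu = evaluate.load("sacrebleu")
+ predictions = ["The cat sat on the mat."]          # decoded model outputs
+ references = [["The cat is sitting on the mat."]]  # one list of reference(s) per prediction
+ print(sacrebleu.compute(predictions=predictions, references=references)["score"])
+ ```
+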
+ ### Framework versions
+
+ - Transformers 4.43.0.dev0
+ - Pytorch 2.3.0+cu121
+ - Datasets 2.20.0
+ - Tokenizers 0.19.1
all_results.json ADDED
@@ -0,0 +1,16 @@
+ {
+   "epoch": 20.0,
+   "eval_bleu": 13.1948,
+   "eval_gen_len": 49.9179,
+   "eval_loss": 2.8421177864074707,
+   "eval_runtime": 82.86,
+   "eval_samples": 402,
+   "eval_samples_per_second": 4.852,
+   "eval_steps_per_second": 0.314,
+   "total_flos": 4364211764689920.0,
+   "train_loss": 0.9366569099141591,
+   "train_runtime": 4542.9296,
+   "train_samples": 1607,
+   "train_samples_per_second": 7.075,
+   "train_steps_per_second": 0.885
+ }
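
As a quick sanity check (not part of the uploaded files), the throughput figures above are consistent with the sample counts and runtimes they sit next to; a minimal sketch:

```python
# Re-deriving the *_per_second figures in all_results.json from its own fields.
train_samples, num_epochs, train_runtime = 1607, 20.0, 4542.9296
print(train_samples * num_epochs / train_runtime)  # ~7.075 (train_samples_per_second)

eval_samples, eval_runtime = 402, 82.86
print(eval_samples / eval_runtime)                 # ~4.852 (eval_samples_per_second)
```
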
config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "_name_or_path": "google/mt5-base",
+   "architectures": [
+     "MT5ForConditionalGeneration"
+   ],
+   "classifier_dropout": 0.0,
+   "d_ff": 2048,
+   "d_kv": 64,
+   "d_model": 768,
+   "decoder_start_token_id": 0,
+   "dense_act_fn": "gelu_new",
+   "dropout_rate": 0.1,
+   "eos_token_id": 1,
+   "feed_forward_proj": "gated-gelu",
+   "initializer_factor": 1.0,
+   "is_encoder_decoder": true,
+   "is_gated_act": true,
+   "layer_norm_epsilon": 1e-06,
+   "model_type": "mt5",
+   "num_decoder_layers": 12,
+   "num_heads": 12,
+   "num_layers": 12,
+   "output_past": true,
+   "pad_token_id": 0,
+   "relative_attention_max_distance": 128,
+   "relative_attention_num_buckets": 32,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "T5Tokenizer",
+   "torch_dtype": "float32",
+   "transformers_version": "4.43.0.dev0",
+   "use_cache": true,
+   "vocab_size": 250112
+ }
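
For reference, the architecture fields above can be read back through the `transformers` config API; a minimal sketch using the `google/mt5-base` base config, whose values match those shown here:

```python
from transformers import MT5Config

config = MT5Config.from_pretrained("google/mt5-base")  # downloads the base config from the Hub
print(config.d_model, config.num_layers, config.num_heads, config.vocab_size)  # 768 12 12 250112
```
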
eval_results.json ADDED
@@ -0,0 +1,10 @@
+ {
+   "epoch": 20.0,
+   "eval_bleu": 13.1948,
+   "eval_gen_len": 49.9179,
+   "eval_loss": 2.8421177864074707,
+   "eval_runtime": 82.86,
+   "eval_samples": 402,
+   "eval_samples_per_second": 4.852,
+   "eval_steps_per_second": 0.314
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
+ {
+   "decoder_start_token_id": 0,
+   "eos_token_id": 1,
+   "pad_token_id": 0,
+   "transformers_version": "4.43.0.dev0"
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:826d8270f3ef5c20774e6b122f5cd8667645329c6d77efa3cb4a0654ad02fb03
+ size 2329638768
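
The three lines above are a Git LFS pointer, not the weights themselves; after fetching the real file (for example with `git lfs pull`), its SHA-256 digest should equal the pointer's `oid`. A minimal verification sketch:

```python
import hashlib

def sha256_of(path, chunk_size=1 << 20):
    """Stream a file from disk and return its hex SHA-256 digest."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for block in iter(lambda: f.read(chunk_size), b""):
            digest.update(block)
    return digest.hexdigest()

expected = "826d8270f3ef5c20774e6b122f5cd8667645329c6d77efa3cb4a0654ad02fb03"
print(sha256_of("model.safetensors") == expected)  # True if the download is intact
```
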
runs/Jul09_14-45-13_015fb6e6cd9b/events.out.tfevents.1720536323.015fb6e6cd9b.53266.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f65b96360cdd6409d863465baefaef8957ce6b54726c16a85bbb24191baae3cd
+ size 11412
runs/Jul09_14-45-13_015fb6e6cd9b/events.out.tfevents.1720541097.015fb6e6cd9b.53266.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a8403c3c87cdaa60d181966da216d3fe77d374e9295e37a6b2b5982470071f90
+ size 458
special_tokens_map.json ADDED
@@ -0,0 +1,23 @@
+ {
+   "eos_token": {
+     "content": "</s>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": {
+     "content": "<pad>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "unk_token": {
+     "content": "<unk>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
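
These special tokens line up with the ids declared in `config.json` (`pad_token_id: 0`, `eos_token_id: 1`); a minimal sketch of checking that through the tokenizer API, assuming a local checkout of this repository:

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained(".")  # "." is a placeholder for this repo's path
print(tokenizer.pad_token, tokenizer.eos_token, tokenizer.unk_token)  # <pad> </s> <unk>
print(tokenizer.convert_tokens_to_ids(["<pad>", "</s>", "<unk>"]))    # [0, 1, 2]
```
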
spiece.model ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef78f86560d809067d12bac6c09f19a462cb3af3f54d2b8acbba26e1433125d6
+ size 4309802
tokenizer.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5f47eb5c71090da540a205198ab25536a203255a5d4500e51f861050302a22c1
+ size 16350028
tokenizer_config.json ADDED
@@ -0,0 +1,839 @@
1
+ {
2
+ "add_prefix_space": true,
3
+ "added_tokens_decoder": {
4
+ "0": {
5
+ "content": "<pad>",
6
+ "lstrip": false,
7
+ "normalized": false,
8
+ "rstrip": false,
9
+ "single_word": false,
10
+ "special": true
11
+ },
12
+ "1": {
13
+ "content": "</s>",
14
+ "lstrip": false,
15
+ "normalized": false,
16
+ "rstrip": false,
17
+ "single_word": false,
18
+ "special": true
19
+ },
20
+ "2": {
21
+ "content": "<unk>",
22
+ "lstrip": false,
23
+ "normalized": false,
24
+ "rstrip": false,
25
+ "single_word": false,
26
+ "special": true
27
+ },
28
+ "250000": {
29
+ "content": "▁<extra_id_99>",
30
+ "lstrip": false,
31
+ "normalized": false,
32
+ "rstrip": false,
33
+ "single_word": false,
34
+ "special": false
35
+ },
36
+ "250001": {
37
+ "content": "▁<extra_id_98>",
38
+ "lstrip": false,
39
+ "normalized": false,
40
+ "rstrip": false,
41
+ "single_word": false,
42
+ "special": false
43
+ },
44
+ "250002": {
45
+ "content": "▁<extra_id_97>",
46
+ "lstrip": false,
47
+ "normalized": false,
48
+ "rstrip": false,
49
+ "single_word": false,
50
+ "special": false
51
+ },
52
+ "250003": {
53
+ "content": "▁<extra_id_96>",
54
+ "lstrip": false,
55
+ "normalized": false,
56
+ "rstrip": false,
57
+ "single_word": false,
58
+ "special": false
59
+ },
60
+ "250004": {
61
+ "content": "▁<extra_id_95>",
62
+ "lstrip": false,
63
+ "normalized": false,
64
+ "rstrip": false,
65
+ "single_word": false,
66
+ "special": false
67
+ },
68
+ "250005": {
69
+ "content": "▁<extra_id_94>",
70
+ "lstrip": false,
71
+ "normalized": false,
72
+ "rstrip": false,
73
+ "single_word": false,
74
+ "special": false
75
+ },
76
+ "250006": {
77
+ "content": "▁<extra_id_93>",
78
+ "lstrip": false,
79
+ "normalized": false,
80
+ "rstrip": false,
81
+ "single_word": false,
82
+ "special": false
83
+ },
84
+ "250007": {
85
+ "content": "▁<extra_id_92>",
86
+ "lstrip": false,
87
+ "normalized": false,
88
+ "rstrip": false,
89
+ "single_word": false,
90
+ "special": false
91
+ },
92
+ "250008": {
93
+ "content": "▁<extra_id_91>",
94
+ "lstrip": false,
95
+ "normalized": false,
96
+ "rstrip": false,
97
+ "single_word": false,
98
+ "special": false
99
+ },
100
+ "250009": {
101
+ "content": "▁<extra_id_90>",
102
+ "lstrip": false,
103
+ "normalized": false,
104
+ "rstrip": false,
105
+ "single_word": false,
106
+ "special": false
107
+ },
108
+ "250010": {
109
+ "content": "▁<extra_id_89>",
110
+ "lstrip": false,
111
+ "normalized": false,
112
+ "rstrip": false,
113
+ "single_word": false,
114
+ "special": false
115
+ },
116
+ "250011": {
117
+ "content": "▁<extra_id_88>",
118
+ "lstrip": false,
119
+ "normalized": false,
120
+ "rstrip": false,
121
+ "single_word": false,
122
+ "special": false
123
+ },
124
+ "250012": {
125
+ "content": "▁<extra_id_87>",
126
+ "lstrip": false,
127
+ "normalized": false,
128
+ "rstrip": false,
129
+ "single_word": false,
130
+ "special": false
131
+ },
132
+ "250013": {
133
+ "content": "▁<extra_id_86>",
134
+ "lstrip": false,
135
+ "normalized": false,
136
+ "rstrip": false,
137
+ "single_word": false,
138
+ "special": false
139
+ },
140
+ "250014": {
141
+ "content": "▁<extra_id_85>",
142
+ "lstrip": false,
143
+ "normalized": false,
144
+ "rstrip": false,
145
+ "single_word": false,
146
+ "special": false
147
+ },
148
+ "250015": {
149
+ "content": "▁<extra_id_84>",
150
+ "lstrip": false,
151
+ "normalized": false,
152
+ "rstrip": false,
153
+ "single_word": false,
154
+ "special": false
155
+ },
156
+ "250016": {
157
+ "content": "▁<extra_id_83>",
158
+ "lstrip": false,
159
+ "normalized": false,
160
+ "rstrip": false,
161
+ "single_word": false,
162
+ "special": false
163
+ },
164
+ "250017": {
165
+ "content": "▁<extra_id_82>",
166
+ "lstrip": false,
167
+ "normalized": false,
168
+ "rstrip": false,
169
+ "single_word": false,
170
+ "special": false
171
+ },
172
+ "250018": {
173
+ "content": "▁<extra_id_81>",
174
+ "lstrip": false,
175
+ "normalized": false,
176
+ "rstrip": false,
177
+ "single_word": false,
178
+ "special": false
179
+ },
180
+ "250019": {
181
+ "content": "▁<extra_id_80>",
182
+ "lstrip": false,
183
+ "normalized": false,
184
+ "rstrip": false,
185
+ "single_word": false,
186
+ "special": false
187
+ },
188
+ "250020": {
189
+ "content": "▁<extra_id_79>",
190
+ "lstrip": false,
191
+ "normalized": false,
192
+ "rstrip": false,
193
+ "single_word": false,
194
+ "special": false
195
+ },
196
+ "250021": {
197
+ "content": "▁<extra_id_78>",
198
+ "lstrip": false,
199
+ "normalized": false,
200
+ "rstrip": false,
201
+ "single_word": false,
202
+ "special": false
203
+ },
204
+ "250022": {
205
+ "content": "▁<extra_id_77>",
206
+ "lstrip": false,
207
+ "normalized": false,
208
+ "rstrip": false,
209
+ "single_word": false,
210
+ "special": false
211
+ },
212
+ "250023": {
213
+ "content": "▁<extra_id_76>",
214
+ "lstrip": false,
215
+ "normalized": false,
216
+ "rstrip": false,
217
+ "single_word": false,
218
+ "special": false
219
+ },
220
+ "250024": {
221
+ "content": "▁<extra_id_75>",
222
+ "lstrip": false,
223
+ "normalized": false,
224
+ "rstrip": false,
225
+ "single_word": false,
226
+ "special": false
227
+ },
228
+ "250025": {
229
+ "content": "▁<extra_id_74>",
230
+ "lstrip": false,
231
+ "normalized": false,
232
+ "rstrip": false,
233
+ "single_word": false,
234
+ "special": false
235
+ },
236
+ "250026": {
237
+ "content": "▁<extra_id_73>",
238
+ "lstrip": false,
239
+ "normalized": false,
240
+ "rstrip": false,
241
+ "single_word": false,
242
+ "special": false
243
+ },
244
+ "250027": {
245
+ "content": "▁<extra_id_72>",
246
+ "lstrip": false,
247
+ "normalized": false,
248
+ "rstrip": false,
249
+ "single_word": false,
250
+ "special": false
251
+ },
252
+ "250028": {
253
+ "content": "▁<extra_id_71>",
254
+ "lstrip": false,
255
+ "normalized": false,
256
+ "rstrip": false,
257
+ "single_word": false,
258
+ "special": false
259
+ },
260
+ "250029": {
261
+ "content": "▁<extra_id_70>",
262
+ "lstrip": false,
263
+ "normalized": false,
264
+ "rstrip": false,
265
+ "single_word": false,
266
+ "special": false
267
+ },
268
+ "250030": {
269
+ "content": "▁<extra_id_69>",
270
+ "lstrip": false,
271
+ "normalized": false,
272
+ "rstrip": false,
273
+ "single_word": false,
274
+ "special": false
275
+ },
276
+ "250031": {
277
+ "content": "▁<extra_id_68>",
278
+ "lstrip": false,
279
+ "normalized": false,
280
+ "rstrip": false,
281
+ "single_word": false,
282
+ "special": false
283
+ },
284
+ "250032": {
285
+ "content": "▁<extra_id_67>",
286
+ "lstrip": false,
287
+ "normalized": false,
288
+ "rstrip": false,
289
+ "single_word": false,
290
+ "special": false
291
+ },
292
+ "250033": {
293
+ "content": "▁<extra_id_66>",
294
+ "lstrip": false,
295
+ "normalized": false,
296
+ "rstrip": false,
297
+ "single_word": false,
298
+ "special": false
299
+ },
300
+ "250034": {
301
+ "content": "▁<extra_id_65>",
302
+ "lstrip": false,
303
+ "normalized": false,
304
+ "rstrip": false,
305
+ "single_word": false,
306
+ "special": false
307
+ },
308
+ "250035": {
309
+ "content": "▁<extra_id_64>",
310
+ "lstrip": false,
311
+ "normalized": false,
312
+ "rstrip": false,
313
+ "single_word": false,
314
+ "special": false
315
+ },
316
+ "250036": {
317
+ "content": "▁<extra_id_63>",
318
+ "lstrip": false,
319
+ "normalized": false,
320
+ "rstrip": false,
321
+ "single_word": false,
322
+ "special": false
323
+ },
324
+ "250037": {
325
+ "content": "▁<extra_id_62>",
326
+ "lstrip": false,
327
+ "normalized": false,
328
+ "rstrip": false,
329
+ "single_word": false,
330
+ "special": false
331
+ },
332
+ "250038": {
333
+ "content": "▁<extra_id_61>",
334
+ "lstrip": false,
335
+ "normalized": false,
336
+ "rstrip": false,
337
+ "single_word": false,
338
+ "special": false
339
+ },
340
+ "250039": {
341
+ "content": "▁<extra_id_60>",
342
+ "lstrip": false,
343
+ "normalized": false,
344
+ "rstrip": false,
345
+ "single_word": false,
346
+ "special": false
347
+ },
348
+ "250040": {
349
+ "content": "▁<extra_id_59>",
350
+ "lstrip": false,
351
+ "normalized": false,
352
+ "rstrip": false,
353
+ "single_word": false,
354
+ "special": false
355
+ },
356
+ "250041": {
357
+ "content": "▁<extra_id_58>",
358
+ "lstrip": false,
359
+ "normalized": false,
360
+ "rstrip": false,
361
+ "single_word": false,
362
+ "special": false
363
+ },
364
+ "250042": {
365
+ "content": "▁<extra_id_57>",
366
+ "lstrip": false,
367
+ "normalized": false,
368
+ "rstrip": false,
369
+ "single_word": false,
370
+ "special": false
371
+ },
372
+ "250043": {
373
+ "content": "▁<extra_id_56>",
374
+ "lstrip": false,
375
+ "normalized": false,
376
+ "rstrip": false,
377
+ "single_word": false,
378
+ "special": false
379
+ },
380
+ "250044": {
381
+ "content": "▁<extra_id_55>",
382
+ "lstrip": false,
383
+ "normalized": false,
384
+ "rstrip": false,
385
+ "single_word": false,
386
+ "special": false
387
+ },
388
+ "250045": {
389
+ "content": "▁<extra_id_54>",
390
+ "lstrip": false,
391
+ "normalized": false,
392
+ "rstrip": false,
393
+ "single_word": false,
394
+ "special": false
395
+ },
396
+ "250046": {
397
+ "content": "▁<extra_id_53>",
398
+ "lstrip": false,
399
+ "normalized": false,
400
+ "rstrip": false,
401
+ "single_word": false,
402
+ "special": false
403
+ },
404
+ "250047": {
405
+ "content": "▁<extra_id_52>",
406
+ "lstrip": false,
407
+ "normalized": false,
408
+ "rstrip": false,
409
+ "single_word": false,
410
+ "special": false
411
+ },
412
+ "250048": {
413
+ "content": "▁<extra_id_51>",
414
+ "lstrip": false,
415
+ "normalized": false,
416
+ "rstrip": false,
417
+ "single_word": false,
418
+ "special": false
419
+ },
420
+ "250049": {
421
+ "content": "▁<extra_id_50>",
422
+ "lstrip": false,
423
+ "normalized": false,
424
+ "rstrip": false,
425
+ "single_word": false,
426
+ "special": false
427
+ },
428
+ "250050": {
429
+ "content": "���<extra_id_49>",
430
+ "lstrip": false,
431
+ "normalized": false,
432
+ "rstrip": false,
433
+ "single_word": false,
434
+ "special": false
435
+ },
436
+ "250051": {
437
+ "content": "▁<extra_id_48>",
438
+ "lstrip": false,
439
+ "normalized": false,
440
+ "rstrip": false,
441
+ "single_word": false,
442
+ "special": false
443
+ },
444
+ "250052": {
445
+ "content": "▁<extra_id_47>",
446
+ "lstrip": false,
447
+ "normalized": false,
448
+ "rstrip": false,
449
+ "single_word": false,
450
+ "special": false
451
+ },
452
+ "250053": {
453
+ "content": "▁<extra_id_46>",
454
+ "lstrip": false,
455
+ "normalized": false,
456
+ "rstrip": false,
457
+ "single_word": false,
458
+ "special": false
459
+ },
460
+ "250054": {
461
+ "content": "▁<extra_id_45>",
462
+ "lstrip": false,
463
+ "normalized": false,
464
+ "rstrip": false,
465
+ "single_word": false,
466
+ "special": false
467
+ },
468
+ "250055": {
469
+ "content": "▁<extra_id_44>",
470
+ "lstrip": false,
471
+ "normalized": false,
472
+ "rstrip": false,
473
+ "single_word": false,
474
+ "special": false
475
+ },
476
+ "250056": {
477
+ "content": "▁<extra_id_43>",
478
+ "lstrip": false,
479
+ "normalized": false,
480
+ "rstrip": false,
481
+ "single_word": false,
482
+ "special": false
483
+ },
484
+ "250057": {
485
+ "content": "▁<extra_id_42>",
486
+ "lstrip": false,
487
+ "normalized": false,
488
+ "rstrip": false,
489
+ "single_word": false,
490
+ "special": false
491
+ },
492
+ "250058": {
493
+ "content": "▁<extra_id_41>",
494
+ "lstrip": false,
495
+ "normalized": false,
496
+ "rstrip": false,
497
+ "single_word": false,
498
+ "special": false
499
+ },
500
+ "250059": {
501
+ "content": "▁<extra_id_40>",
502
+ "lstrip": false,
503
+ "normalized": false,
504
+ "rstrip": false,
505
+ "single_word": false,
506
+ "special": false
507
+ },
508
+ "250060": {
509
+ "content": "▁<extra_id_39>",
510
+ "lstrip": false,
511
+ "normalized": false,
512
+ "rstrip": false,
513
+ "single_word": false,
514
+ "special": false
515
+ },
516
+ "250061": {
517
+ "content": "▁<extra_id_38>",
518
+ "lstrip": false,
519
+ "normalized": false,
520
+ "rstrip": false,
521
+ "single_word": false,
522
+ "special": false
523
+ },
524
+ "250062": {
525
+ "content": "▁<extra_id_37>",
526
+ "lstrip": false,
527
+ "normalized": false,
528
+ "rstrip": false,
529
+ "single_word": false,
530
+ "special": false
531
+ },
532
+ "250063": {
533
+ "content": "▁<extra_id_36>",
534
+ "lstrip": false,
535
+ "normalized": false,
536
+ "rstrip": false,
537
+ "single_word": false,
538
+ "special": false
539
+ },
540
+ "250064": {
541
+ "content": "▁<extra_id_35>",
542
+ "lstrip": false,
543
+ "normalized": false,
544
+ "rstrip": false,
545
+ "single_word": false,
546
+ "special": false
547
+ },
548
+ "250065": {
549
+ "content": "▁<extra_id_34>",
550
+ "lstrip": false,
551
+ "normalized": false,
552
+ "rstrip": false,
553
+ "single_word": false,
554
+ "special": false
555
+ },
556
+ "250066": {
557
+ "content": "▁<extra_id_33>",
558
+ "lstrip": false,
559
+ "normalized": false,
560
+ "rstrip": false,
561
+ "single_word": false,
562
+ "special": false
563
+ },
564
+ "250067": {
565
+ "content": "▁<extra_id_32>",
566
+ "lstrip": false,
567
+ "normalized": false,
568
+ "rstrip": false,
569
+ "single_word": false,
570
+ "special": false
571
+ },
572
+ "250068": {
573
+ "content": "▁<extra_id_31>",
574
+ "lstrip": false,
575
+ "normalized": false,
576
+ "rstrip": false,
577
+ "single_word": false,
578
+ "special": false
579
+ },
580
+ "250069": {
581
+ "content": "▁<extra_id_30>",
582
+ "lstrip": false,
583
+ "normalized": false,
584
+ "rstrip": false,
585
+ "single_word": false,
586
+ "special": false
587
+ },
588
+ "250070": {
589
+ "content": "▁<extra_id_29>",
590
+ "lstrip": false,
591
+ "normalized": false,
592
+ "rstrip": false,
593
+ "single_word": false,
594
+ "special": false
595
+ },
596
+ "250071": {
597
+ "content": "▁<extra_id_28>",
598
+ "lstrip": false,
599
+ "normalized": false,
600
+ "rstrip": false,
601
+ "single_word": false,
602
+ "special": false
603
+ },
604
+ "250072": {
605
+ "content": "▁<extra_id_27>",
606
+ "lstrip": false,
607
+ "normalized": false,
608
+ "rstrip": false,
609
+ "single_word": false,
610
+ "special": false
611
+ },
612
+ "250073": {
613
+ "content": "▁<extra_id_26>",
614
+ "lstrip": false,
615
+ "normalized": false,
616
+ "rstrip": false,
617
+ "single_word": false,
618
+ "special": false
619
+ },
620
+ "250074": {
621
+ "content": "▁<extra_id_25>",
622
+ "lstrip": false,
623
+ "normalized": false,
624
+ "rstrip": false,
625
+ "single_word": false,
626
+ "special": false
627
+ },
628
+ "250075": {
629
+ "content": "▁<extra_id_24>",
630
+ "lstrip": false,
631
+ "normalized": false,
632
+ "rstrip": false,
633
+ "single_word": false,
634
+ "special": false
635
+ },
636
+ "250076": {
637
+ "content": "▁<extra_id_23>",
638
+ "lstrip": false,
639
+ "normalized": false,
640
+ "rstrip": false,
641
+ "single_word": false,
642
+ "special": false
643
+ },
644
+ "250077": {
645
+ "content": "▁<extra_id_22>",
646
+ "lstrip": false,
647
+ "normalized": false,
648
+ "rstrip": false,
649
+ "single_word": false,
650
+ "special": false
651
+ },
652
+ "250078": {
653
+ "content": "▁<extra_id_21>",
654
+ "lstrip": false,
655
+ "normalized": false,
656
+ "rstrip": false,
657
+ "single_word": false,
658
+ "special": false
659
+ },
660
+ "250079": {
661
+ "content": "▁<extra_id_20>",
662
+ "lstrip": false,
663
+ "normalized": false,
664
+ "rstrip": false,
665
+ "single_word": false,
666
+ "special": false
667
+ },
668
+ "250080": {
669
+ "content": "▁<extra_id_19>",
670
+ "lstrip": false,
671
+ "normalized": false,
672
+ "rstrip": false,
673
+ "single_word": false,
674
+ "special": false
675
+ },
676
+ "250081": {
677
+ "content": "▁<extra_id_18>",
678
+ "lstrip": false,
679
+ "normalized": false,
680
+ "rstrip": false,
681
+ "single_word": false,
682
+ "special": false
683
+ },
684
+ "250082": {
685
+ "content": "▁<extra_id_17>",
686
+ "lstrip": false,
687
+ "normalized": false,
688
+ "rstrip": false,
689
+ "single_word": false,
690
+ "special": false
691
+ },
692
+ "250083": {
693
+ "content": "▁<extra_id_16>",
694
+ "lstrip": false,
695
+ "normalized": false,
696
+ "rstrip": false,
697
+ "single_word": false,
698
+ "special": false
699
+ },
700
+ "250084": {
701
+ "content": "▁<extra_id_15>",
702
+ "lstrip": false,
703
+ "normalized": false,
704
+ "rstrip": false,
705
+ "single_word": false,
706
+ "special": false
707
+ },
708
+ "250085": {
709
+ "content": "▁<extra_id_14>",
710
+ "lstrip": false,
711
+ "normalized": false,
712
+ "rstrip": false,
713
+ "single_word": false,
714
+ "special": false
715
+ },
716
+ "250086": {
717
+ "content": "▁<extra_id_13>",
718
+ "lstrip": false,
719
+ "normalized": false,
720
+ "rstrip": false,
721
+ "single_word": false,
722
+ "special": false
723
+ },
724
+ "250087": {
725
+ "content": "▁<extra_id_12>",
726
+ "lstrip": false,
727
+ "normalized": false,
728
+ "rstrip": false,
729
+ "single_word": false,
730
+ "special": false
731
+ },
732
+ "250088": {
733
+ "content": "▁<extra_id_11>",
734
+ "lstrip": false,
735
+ "normalized": false,
736
+ "rstrip": false,
737
+ "single_word": false,
738
+ "special": false
739
+ },
740
+ "250089": {
741
+ "content": "▁<extra_id_10>",
742
+ "lstrip": false,
743
+ "normalized": false,
744
+ "rstrip": false,
745
+ "single_word": false,
746
+ "special": false
747
+ },
748
+ "250090": {
749
+ "content": "▁<extra_id_9>",
750
+ "lstrip": false,
751
+ "normalized": false,
752
+ "rstrip": false,
753
+ "single_word": false,
754
+ "special": false
755
+ },
756
+ "250091": {
757
+ "content": "▁<extra_id_8>",
758
+ "lstrip": false,
759
+ "normalized": false,
760
+ "rstrip": false,
761
+ "single_word": false,
762
+ "special": false
763
+ },
764
+ "250092": {
765
+ "content": "▁<extra_id_7>",
766
+ "lstrip": false,
767
+ "normalized": false,
768
+ "rstrip": false,
769
+ "single_word": false,
770
+ "special": false
771
+ },
772
+ "250093": {
773
+ "content": "▁<extra_id_6>",
774
+ "lstrip": false,
775
+ "normalized": false,
776
+ "rstrip": false,
777
+ "single_word": false,
778
+ "special": false
779
+ },
780
+ "250094": {
781
+ "content": "▁<extra_id_5>",
782
+ "lstrip": false,
783
+ "normalized": false,
784
+ "rstrip": false,
785
+ "single_word": false,
786
+ "special": false
787
+ },
788
+ "250095": {
789
+ "content": "▁<extra_id_4>",
790
+ "lstrip": false,
791
+ "normalized": false,
792
+ "rstrip": false,
793
+ "single_word": false,
794
+ "special": false
795
+ },
796
+ "250096": {
797
+ "content": "▁<extra_id_3>",
798
+ "lstrip": false,
799
+ "normalized": false,
800
+ "rstrip": false,
801
+ "single_word": false,
802
+ "special": false
803
+ },
804
+ "250097": {
805
+ "content": "▁<extra_id_2>",
806
+ "lstrip": false,
807
+ "normalized": false,
808
+ "rstrip": false,
809
+ "single_word": false,
810
+ "special": false
811
+ },
812
+ "250098": {
813
+ "content": "▁<extra_id_1>",
814
+ "lstrip": false,
815
+ "normalized": false,
816
+ "rstrip": false,
817
+ "single_word": false,
818
+ "special": false
819
+ },
820
+ "250099": {
821
+ "content": "▁<extra_id_0>",
822
+ "lstrip": false,
823
+ "normalized": false,
824
+ "rstrip": false,
825
+ "single_word": false,
826
+ "special": false
827
+ }
828
+ },
829
+ "additional_special_tokens": [],
830
+ "clean_up_tokenization_spaces": true,
831
+ "eos_token": "</s>",
832
+ "extra_ids": 0,
833
+ "legacy": true,
834
+ "model_max_length": 1000000000000000019884624838656,
835
+ "pad_token": "<pad>",
836
+ "sp_model_kwargs": {},
837
+ "tokenizer_class": "T5Tokenizer",
838
+ "unk_token": "<unk>"
839
+ }
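
Apart from the three core special tokens, the long `added_tokens_decoder` block above follows a single pattern: token id `250000 + k` maps to the sentinel `▁<extra_id_{99 - k}>` for `k` from 0 to 99. A minimal sketch that reproduces the mapping:

```python
# Reproducing the sentinel-token pattern of added_tokens_decoder above.
sentinels = {250000 + k: f"▁<extra_id_{99 - k}>" for k in range(100)}
print(sentinels[250000])  # ▁<extra_id_99>
print(sentinels[250099])  # ▁<extra_id_0>
```
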
train_results.json ADDED
@@ -0,0 +1,9 @@
+ {
+   "epoch": 20.0,
+   "total_flos": 4364211764689920.0,
+   "train_loss": 0.9366569099141591,
+   "train_runtime": 4542.9296,
+   "train_samples": 1607,
+   "train_samples_per_second": 7.075,
+   "train_steps_per_second": 0.885
+ }
trainer_state.json ADDED
@@ -0,0 +1,212 @@
1
+ {
2
+ "best_metric": 13.1948,
3
+ "best_model_checkpoint": "/content/tst-translation/checkpoint-4000",
4
+ "epoch": 20.0,
5
+ "eval_steps": 400,
6
+ "global_step": 4020,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 1.9900497512437811,
13
+ "grad_norm": 2.333070993423462,
14
+ "learning_rate": 0.00045024875621890546,
15
+ "loss": 3.4257,
16
+ "step": 400
17
+ },
18
+ {
19
+ "epoch": 1.9900497512437811,
20
+ "eval_bleu": 4.1008,
21
+ "eval_gen_len": 77.8284,
22
+ "eval_loss": 2.1086840629577637,
23
+ "eval_runtime": 201.8226,
24
+ "eval_samples_per_second": 1.992,
25
+ "eval_steps_per_second": 0.129,
26
+ "step": 400
27
+ },
28
+ {
29
+ "epoch": 3.9800995024875623,
30
+ "grad_norm": 2.2885234355926514,
31
+ "learning_rate": 0.00040049751243781097,
32
+ "loss": 1.8571,
33
+ "step": 800
34
+ },
35
+ {
36
+ "epoch": 3.9800995024875623,
37
+ "eval_bleu": 8.6198,
38
+ "eval_gen_len": 61.1418,
39
+ "eval_loss": 1.9292024374008179,
40
+ "eval_runtime": 149.5469,
41
+ "eval_samples_per_second": 2.688,
42
+ "eval_steps_per_second": 0.174,
43
+ "step": 800
44
+ },
45
+ {
46
+ "epoch": 5.970149253731344,
47
+ "grad_norm": 1.9040518999099731,
48
+ "learning_rate": 0.0003507462686567164,
49
+ "loss": 1.2467,
50
+ "step": 1200
51
+ },
52
+ {
53
+ "epoch": 5.970149253731344,
54
+ "eval_bleu": 10.7074,
55
+ "eval_gen_len": 48.3184,
56
+ "eval_loss": 1.9778931140899658,
57
+ "eval_runtime": 82.3257,
58
+ "eval_samples_per_second": 4.883,
59
+ "eval_steps_per_second": 0.316,
60
+ "step": 1200
61
+ },
62
+ {
63
+ "epoch": 7.960199004975125,
64
+ "grad_norm": 1.6420375108718872,
65
+ "learning_rate": 0.00030099502487562194,
66
+ "loss": 0.8749,
67
+ "step": 1600
68
+ },
69
+ {
70
+ "epoch": 7.960199004975125,
71
+ "eval_bleu": 11.8538,
72
+ "eval_gen_len": 49.3483,
73
+ "eval_loss": 2.0539379119873047,
74
+ "eval_runtime": 80.462,
75
+ "eval_samples_per_second": 4.996,
76
+ "eval_steps_per_second": 0.323,
77
+ "step": 1600
78
+ },
79
+ {
80
+ "epoch": 9.950248756218905,
81
+ "grad_norm": 1.6268000602722168,
82
+ "learning_rate": 0.0002512437810945274,
83
+ "loss": 0.6141,
84
+ "step": 2000
85
+ },
86
+ {
87
+ "epoch": 9.950248756218905,
88
+ "eval_bleu": 12.4452,
89
+ "eval_gen_len": 51.1269,
90
+ "eval_loss": 2.1948225498199463,
91
+ "eval_runtime": 88.7102,
92
+ "eval_samples_per_second": 4.532,
93
+ "eval_steps_per_second": 0.293,
94
+ "step": 2000
95
+ },
96
+ {
97
+ "epoch": 11.940298507462687,
98
+ "grad_norm": 1.412307858467102,
99
+ "learning_rate": 0.00020149253731343284,
100
+ "loss": 0.4446,
101
+ "step": 2400
102
+ },
103
+ {
104
+ "epoch": 11.940298507462687,
105
+ "eval_bleu": 12.3052,
106
+ "eval_gen_len": 48.0995,
107
+ "eval_loss": 2.3901803493499756,
108
+ "eval_runtime": 67.4373,
109
+ "eval_samples_per_second": 5.961,
110
+ "eval_steps_per_second": 0.386,
111
+ "step": 2400
112
+ },
113
+ {
114
+ "epoch": 13.930348258706468,
115
+ "grad_norm": 1.7401809692382812,
116
+ "learning_rate": 0.00015174129353233832,
117
+ "loss": 0.3251,
118
+ "step": 2800
119
+ },
120
+ {
121
+ "epoch": 13.930348258706468,
122
+ "eval_bleu": 12.5824,
123
+ "eval_gen_len": 49.1244,
124
+ "eval_loss": 2.5697591304779053,
125
+ "eval_runtime": 74.1886,
126
+ "eval_samples_per_second": 5.419,
127
+ "eval_steps_per_second": 0.35,
128
+ "step": 2800
129
+ },
130
+ {
131
+ "epoch": 15.92039800995025,
132
+ "grad_norm": 1.2750484943389893,
133
+ "learning_rate": 0.00010199004975124378,
134
+ "loss": 0.2501,
135
+ "step": 3200
136
+ },
137
+ {
138
+ "epoch": 15.92039800995025,
139
+ "eval_bleu": 13.0619,
140
+ "eval_gen_len": 50.6095,
141
+ "eval_loss": 2.663144588470459,
142
+ "eval_runtime": 80.5868,
143
+ "eval_samples_per_second": 4.988,
144
+ "eval_steps_per_second": 0.323,
145
+ "step": 3200
146
+ },
147
+ {
148
+ "epoch": 17.91044776119403,
149
+ "grad_norm": 0.941927433013916,
150
+ "learning_rate": 5.223880597014925e-05,
151
+ "loss": 0.1986,
152
+ "step": 3600
153
+ },
154
+ {
155
+ "epoch": 17.91044776119403,
156
+ "eval_bleu": 13.0557,
157
+ "eval_gen_len": 51.1443,
158
+ "eval_loss": 2.787724494934082,
159
+ "eval_runtime": 83.6803,
160
+ "eval_samples_per_second": 4.804,
161
+ "eval_steps_per_second": 0.311,
162
+ "step": 3600
163
+ },
164
+ {
165
+ "epoch": 19.90049751243781,
166
+ "grad_norm": 1.0183725357055664,
167
+ "learning_rate": 2.4875621890547264e-06,
168
+ "loss": 0.1692,
169
+ "step": 4000
170
+ },
171
+ {
172
+ "epoch": 19.90049751243781,
173
+ "eval_bleu": 13.1948,
174
+ "eval_gen_len": 49.9179,
175
+ "eval_loss": 2.8421177864074707,
176
+ "eval_runtime": 82.7516,
177
+ "eval_samples_per_second": 4.858,
178
+ "eval_steps_per_second": 0.314,
179
+ "step": 4000
180
+ },
181
+ {
182
+ "epoch": 20.0,
183
+ "step": 4020,
184
+ "total_flos": 4364211764689920.0,
185
+ "train_loss": 0.9366569099141591,
186
+ "train_runtime": 4542.9296,
187
+ "train_samples_per_second": 7.075,
188
+ "train_steps_per_second": 0.885
189
+ }
190
+ ],
191
+ "logging_steps": 400,
192
+ "max_steps": 4020,
193
+ "num_input_tokens_seen": 0,
194
+ "num_train_epochs": 20,
195
+ "save_steps": 400,
196
+ "stateful_callbacks": {
197
+ "TrainerControl": {
198
+ "args": {
199
+ "should_epoch_stop": false,
200
+ "should_evaluate": false,
201
+ "should_log": false,
202
+ "should_save": true,
203
+ "should_training_stop": true
204
+ },
205
+ "attributes": {}
206
+ }
207
+ },
208
+ "total_flos": 4364211764689920.0,
209
+ "train_batch_size": 8,
210
+ "trial_name": null,
211
+ "trial_params": null
212
+ }
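
The `learning_rate` values logged above follow a plain linear decay from the configured peak of 5e-4 down to 0 over `max_steps` = 4020, apparently with no warmup; a minimal sketch that reproduces the logged values under that assumption:

```python
# Linear LR decay with no warmup (assumption), matching the trainer_state.json log.
peak_lr, max_steps = 5e-4, 4020

def lr_at(step):
    return peak_lr * max(0.0, (max_steps - step) / max_steps)

print(lr_at(400))   # ~4.5025e-04, as logged at step 400
print(lr_at(4000))  # ~2.4876e-06, as logged at step 4000
```
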
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:195e6ee3a6f8fee17f85303741e2fd95897667f915f800df04603d859f6b5d15
+ size 5368