TinyPixel committed
Commit 8482335 · verified · 1 Parent(s): e88de56

Upload folder using huggingface_hub

adapter_config.json CHANGED
@@ -19,10 +19,10 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "dense",
     "query_key_value",
     "dense_4h_to_h",
-    "dense_h_to_4h"
+    "dense_h_to_4h",
+    "dense"
   ],
   "task_type": "CAUSAL_LM",
   "use_rslora": false
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:26149c03ac38ca0744de5f731d962ba8c20df12aaf1f059141df31b63003dae7
+oid sha256:479253fb407c2b773aa0e4708b46fb1b01738cf207a50c9732ee33cfcb20e962
 size 134235712
optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:3aa8e3c073cb2bd0c53f3dabed66d7fbd57b30558341b3933a8685df94cc4903
+oid sha256:e6d2a29607b68480488d20bc0ebae500791673df1131ed8ca6b8ed447c29c3c6
 size 268514874
rng_state.pth CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b92b6afc22aa3d8e3b84032cd942b44e926135489b08fc919c11ca57c0acefe2
+oid sha256:6723a233677874772314ee161275d22ba0b66952553e0d4e124483f43b2dc4f0
 size 14244
scheduler.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:b64e03a349d65a4a265ddcde43b6866f14518090a6d33cf547d68471d9df8eba
+oid sha256:824a429c5f95423b97fd2ff7dc1e1dffcd96691286babb37392f00bfcecaeb1b
 size 1064
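adapter_model.safetensors, optimizer.pt, rng_state.pth and scheduler.pt are tracked with Git LFS, so the hunks above only touch the pointer files: each sha256 oid changes while the reported size stays the same. Below is a small illustrative sketch of how such a pointer file could be parsed; the parse_lfs_pointer helper is hypothetical and not part of this repository or any library.

# Hypothetical helper: parse a Git LFS pointer (version / oid / size lines as shown above).
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:479253fb407c2b773aa0e4708b46fb1b01738cf207a50c9732ee33cfcb20e962
size 134235712"""
print(parse_lfs_pointer(pointer)["size"])  # -> 134235712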
trainer_state.json CHANGED
@@ -1,9 +1,9 @@
 {
   "best_metric": null,
   "best_model_checkpoint": null,
-  "epoch": 2.976,
+  "epoch": 0.9990680335507922,
   "eval_steps": 500,
-  "global_step": 186,
+  "global_step": 67,
   "is_hyper_param_search": false,
   "is_local_process_zero": true,
   "is_world_process_zero": true,
@@ -11,568 +11,208 @@
     {
       "epoch": 0.03,
       "learning_rate": 0.0002,
-      "loss": 2.0276,
+      "loss": 2.5126,
       "step": 2
     },
     {
       "epoch": 0.06,
       "learning_rate": 0.0002,
-      "loss": 1.8115,
+      "loss": 2.2003,
       "step": 4
     },
     {
-      "epoch": 0.1,
+      "epoch": 0.09,
       "learning_rate": 0.0002,
-      "loss": 1.7782,
+      "loss": 1.757,
       "step": 6
     },
     {
-      "epoch": 0.13,
+      "epoch": 0.12,
       "learning_rate": 0.0002,
-      "loss": 1.5044,
+      "loss": 1.6726,
       "step": 8
     },
     {
-      "epoch": 0.16,
+      "epoch": 0.15,
       "learning_rate": 0.0002,
-      "loss": 1.5173,
+      "loss": 1.6462,
       "step": 10
     },
     {
-      "epoch": 0.19,
+      "epoch": 0.18,
       "learning_rate": 0.0002,
-      "loss": 1.941,
+      "loss": 1.5562,
       "step": 12
     },
     {
-      "epoch": 0.22,
+      "epoch": 0.21,
       "learning_rate": 0.0002,
-      "loss": 2.1896,
+      "loss": 1.9898,
       "step": 14
     },
     {
-      "epoch": 0.26,
+      "epoch": 0.24,
       "learning_rate": 0.0002,
-      "loss": 2.4559,
+      "loss": 1.9362,
       "step": 16
     },
     {
-      "epoch": 0.29,
+      "epoch": 0.27,
       "learning_rate": 0.0002,
-      "loss": 1.7852,
+      "loss": 2.2908,
       "step": 18
     },
     {
-      "epoch": 0.32,
+      "epoch": 0.3,
       "learning_rate": 0.0002,
-      "loss": 1.6024,
+      "loss": 1.9168,
       "step": 20
     },
     {
-      "epoch": 0.35,
+      "epoch": 0.33,
       "learning_rate": 0.0002,
-      "loss": 1.6719,
+      "loss": 1.7947,
       "step": 22
     },
     {
-      "epoch": 0.38,
+      "epoch": 0.36,
       "learning_rate": 0.0002,
-      "loss": 1.4681,
+      "loss": 1.8443,
       "step": 24
     },
     {
-      "epoch": 0.42,
+      "epoch": 0.39,
       "learning_rate": 0.0002,
-      "loss": 1.581,
+      "loss": 1.5734,
       "step": 26
     },
     {
-      "epoch": 0.45,
+      "epoch": 0.42,
       "learning_rate": 0.0002,
-      "loss": 2.3833,
+      "loss": 1.6823,
       "step": 28
     },
     {
-      "epoch": 0.48,
+      "epoch": 0.45,
       "learning_rate": 0.0002,
-      "loss": 2.4453,
+      "loss": 1.7899,
       "step": 30
     },
     {
-      "epoch": 0.51,
+      "epoch": 0.48,
       "learning_rate": 0.0002,
-      "loss": 1.8454,
+      "loss": 1.8474,
       "step": 32
     },
     {
-      "epoch": 0.54,
+      "epoch": 0.51,
       "learning_rate": 0.0002,
-      "loss": 1.605,
+      "loss": 2.0992,
       "step": 34
     },
     {
-      "epoch": 0.58,
+      "epoch": 0.54,
       "learning_rate": 0.0002,
-      "loss": 1.7273,
+      "loss": 1.8281,
       "step": 36
     },
     {
-      "epoch": 0.61,
+      "epoch": 0.57,
       "learning_rate": 0.0002,
-      "loss": 1.4928,
+      "loss": 1.737,
       "step": 38
     },
     {
-      "epoch": 0.64,
+      "epoch": 0.6,
       "learning_rate": 0.0002,
-      "loss": 1.3592,
+      "loss": 1.7205,
       "step": 40
     },
     {
-      "epoch": 0.67,
+      "epoch": 0.63,
       "learning_rate": 0.0002,
-      "loss": 1.4555,
+      "loss": 1.5048,
       "step": 42
     },
     {
-      "epoch": 0.7,
+      "epoch": 0.66,
       "learning_rate": 0.0002,
-      "loss": 1.7814,
+      "loss": 1.7679,
       "step": 44
     },
     {
-      "epoch": 0.74,
+      "epoch": 0.69,
       "learning_rate": 0.0002,
-      "loss": 1.9702,
+      "loss": 1.8433,
       "step": 46
     },
     {
-      "epoch": 0.77,
+      "epoch": 0.72,
       "learning_rate": 0.0002,
-      "loss": 1.7386,
+      "loss": 1.6205,
       "step": 48
     },
     {
-      "epoch": 0.8,
+      "epoch": 0.75,
       "learning_rate": 0.0002,
-      "loss": 1.5286,
+      "loss": 2.5937,
       "step": 50
     },
     {
-      "epoch": 0.83,
+      "epoch": 0.78,
       "learning_rate": 0.0002,
-      "loss": 1.4299,
+      "loss": 1.5589,
       "step": 52
     },
     {
-      "epoch": 0.86,
+      "epoch": 0.81,
       "learning_rate": 0.0002,
-      "loss": 1.3375,
+      "loss": 1.7151,
       "step": 54
     },
     {
-      "epoch": 0.9,
+      "epoch": 0.84,
       "learning_rate": 0.0002,
-      "loss": 1.3909,
+      "loss": 1.7156,
       "step": 56
     },
     {
-      "epoch": 0.93,
+      "epoch": 0.86,
       "learning_rate": 0.0002,
-      "loss": 1.6683,
+      "loss": 1.418,
       "step": 58
     },
     {
-      "epoch": 0.96,
+      "epoch": 0.89,
       "learning_rate": 0.0002,
-      "loss": 1.8606,
+      "loss": 1.4181,
       "step": 60
     },
     {
-      "epoch": 0.99,
+      "epoch": 0.92,
       "learning_rate": 0.0002,
-      "loss": 1.4344,
+      "loss": 1.6132,
       "step": 62
     },
     {
-      "epoch": 1.02,
+      "epoch": 0.95,
       "learning_rate": 0.0002,
-      "loss": 1.9833,
+      "loss": 1.4498,
       "step": 64
     },
     {
-      "epoch": 1.06,
+      "epoch": 0.98,
       "learning_rate": 0.0002,
-      "loss": 1.6625,
+      "loss": 1.8295,
       "step": 66
-    },
-    {
-      "epoch": 1.09,
-      "learning_rate": 0.0002,
-      "loss": 1.6241,
-      "step": 68
-    },
-    {
-      "epoch": 1.12,
-      "learning_rate": 0.0002,
-      "loss": 1.2086,
-      "step": 70
-    },
-    {
-      "epoch": 1.15,
-      "learning_rate": 0.0002,
-      "loss": 1.2527,
-      "step": 72
-    },
-    {
-      "epoch": 1.18,
-      "learning_rate": 0.0002,
-      "loss": 1.231,
-      "step": 74
-    },
-    {
-      "epoch": 1.22,
-      "learning_rate": 0.0002,
-      "loss": 1.6654,
-      "step": 76
-    },
-    {
-      "epoch": 1.25,
-      "learning_rate": 0.0002,
-      "loss": 1.6974,
-      "step": 78
-    },
-    {
-      "epoch": 1.28,
-      "learning_rate": 0.0002,
-      "loss": 1.7472,
-      "step": 80
-    },
-    {
-      "epoch": 1.31,
-      "learning_rate": 0.0002,
-      "loss": 1.7494,
-      "step": 82
-    },
-    {
-      "epoch": 1.34,
-      "learning_rate": 0.0002,
-      "loss": 1.3061,
-      "step": 84
-    },
-    {
-      "epoch": 1.38,
-      "learning_rate": 0.0002,
-      "loss": 1.2908,
-      "step": 86
-    },
-    {
-      "epoch": 1.41,
-      "learning_rate": 0.0002,
-      "loss": 1.2471,
-      "step": 88
-    },
-    {
-      "epoch": 1.44,
-      "learning_rate": 0.0002,
-      "loss": 1.4795,
-      "step": 90
-    },
-    {
-      "epoch": 1.47,
-      "learning_rate": 0.0002,
-      "loss": 1.6725,
-      "step": 92
-    },
-    {
-      "epoch": 1.5,
-      "learning_rate": 0.0002,
-      "loss": 1.7614,
-      "step": 94
-    },
-    {
-      "epoch": 1.54,
-      "learning_rate": 0.0002,
-      "loss": 1.452,
-      "step": 96
-    },
-    {
-      "epoch": 1.57,
-      "learning_rate": 0.0002,
-      "loss": 1.52,
-      "step": 98
-    },
-    {
-      "epoch": 1.6,
-      "learning_rate": 0.0002,
-      "loss": 1.4723,
-      "step": 100
-    },
-    {
-      "epoch": 1.63,
-      "learning_rate": 0.0002,
-      "loss": 1.3179,
-      "step": 102
-    },
-    {
-      "epoch": 1.66,
-      "learning_rate": 0.0002,
-      "loss": 1.2664,
-      "step": 104
-    },
-    {
-      "epoch": 1.7,
-      "learning_rate": 0.0002,
-      "loss": 1.9138,
-      "step": 106
-    },
-    {
-      "epoch": 1.73,
-      "learning_rate": 0.0002,
-      "loss": 1.6798,
-      "step": 108
-    },
-    {
-      "epoch": 1.76,
-      "learning_rate": 0.0002,
-      "loss": 1.6716,
-      "step": 110
-    },
-    {
-      "epoch": 1.79,
-      "learning_rate": 0.0002,
-      "loss": 1.5811,
-      "step": 112
-    },
-    {
-      "epoch": 1.82,
-      "learning_rate": 0.0002,
-      "loss": 1.4369,
-      "step": 114
-    },
-    {
-      "epoch": 1.86,
-      "learning_rate": 0.0002,
-      "loss": 1.0898,
-      "step": 116
-    },
-    {
-      "epoch": 1.89,
-      "learning_rate": 0.0002,
-      "loss": 1.1264,
-      "step": 118
-    },
-    {
-      "epoch": 1.92,
-      "learning_rate": 0.0002,
-      "loss": 1.3378,
-      "step": 120
-    },
-    {
-      "epoch": 1.95,
-      "learning_rate": 0.0002,
-      "loss": 1.4177,
-      "step": 122
-    },
-    {
-      "epoch": 1.98,
-      "learning_rate": 0.0002,
-      "loss": 1.5868,
-      "step": 124
-    },
-    {
-      "epoch": 2.02,
-      "learning_rate": 0.0002,
-      "loss": 1.7034,
-      "step": 126
-    },
-    {
-      "epoch": 2.05,
-      "learning_rate": 0.0002,
-      "loss": 1.5494,
-      "step": 128
-    },
-    {
-      "epoch": 2.08,
-      "learning_rate": 0.0002,
-      "loss": 1.4917,
-      "step": 130
-    },
-    {
-      "epoch": 2.11,
-      "learning_rate": 0.0002,
-      "loss": 1.4009,
-      "step": 132
-    },
-    {
-      "epoch": 2.14,
-      "learning_rate": 0.0002,
-      "loss": 1.1435,
-      "step": 134
-    },
-    {
-      "epoch": 2.18,
-      "learning_rate": 0.0002,
-      "loss": 1.2246,
-      "step": 136
-    },
-    {
-      "epoch": 2.21,
-      "learning_rate": 0.0002,
-      "loss": 1.2992,
-      "step": 138
-    },
-    {
-      "epoch": 2.24,
-      "learning_rate": 0.0002,
-      "loss": 1.1456,
-      "step": 140
-    },
-    {
-      "epoch": 2.27,
-      "learning_rate": 0.0002,
-      "loss": 1.6365,
-      "step": 142
-    },
-    {
-      "epoch": 2.3,
-      "learning_rate": 0.0002,
-      "loss": 1.5837,
-      "step": 144
-    },
-    {
-      "epoch": 2.34,
-      "learning_rate": 0.0002,
-      "loss": 1.3038,
-      "step": 146
-    },
-    {
-      "epoch": 2.37,
-      "learning_rate": 0.0002,
-      "loss": 1.1338,
-      "step": 148
-    },
-    {
-      "epoch": 2.4,
-      "learning_rate": 0.0002,
-      "loss": 1.1426,
-      "step": 150
-    },
-    {
-      "epoch": 2.43,
-      "learning_rate": 0.0002,
-      "loss": 1.3578,
-      "step": 152
-    },
-    {
-      "epoch": 2.46,
-      "learning_rate": 0.0002,
-      "loss": 1.2643,
-      "step": 154
-    },
-    {
-      "epoch": 2.5,
-      "learning_rate": 0.0002,
-      "loss": 1.4978,
-      "step": 156
-    },
-    {
-      "epoch": 2.53,
-      "learning_rate": 0.0002,
-      "loss": 1.7535,
-      "step": 158
-    },
-    {
-      "epoch": 2.56,
-      "learning_rate": 0.0002,
-      "loss": 1.5326,
-      "step": 160
-    },
-    {
-      "epoch": 2.59,
-      "learning_rate": 0.0002,
-      "loss": 1.4421,
-      "step": 162
-    },
-    {
-      "epoch": 2.62,
-      "learning_rate": 0.0002,
-      "loss": 1.0652,
-      "step": 164
-    },
-    {
-      "epoch": 2.66,
-      "learning_rate": 0.0002,
-      "loss": 1.1587,
-      "step": 166
-    },
-    {
-      "epoch": 2.69,
-      "learning_rate": 0.0002,
-      "loss": 1.4067,
-      "step": 168
-    },
-    {
-      "epoch": 2.72,
-      "learning_rate": 0.0002,
-      "loss": 1.3189,
-      "step": 170
-    },
-    {
-      "epoch": 2.75,
-      "learning_rate": 0.0002,
-      "loss": 1.8257,
-      "step": 172
-    },
-    {
-      "epoch": 2.78,
-      "learning_rate": 0.0002,
-      "loss": 1.3862,
-      "step": 174
-    },
-    {
-      "epoch": 2.82,
-      "learning_rate": 0.0002,
-      "loss": 1.3753,
-      "step": 176
-    },
-    {
-      "epoch": 2.85,
-      "learning_rate": 0.0002,
-      "loss": 1.1937,
-      "step": 178
-    },
-    {
-      "epoch": 2.88,
-      "learning_rate": 0.0002,
-      "loss": 1.1223,
-      "step": 180
-    },
-    {
-      "epoch": 2.91,
-      "learning_rate": 0.0002,
-      "loss": 0.9919,
-      "step": 182
-    },
-    {
-      "epoch": 2.94,
-      "learning_rate": 0.0002,
-      "loss": 1.3945,
-      "step": 184
-    },
-    {
-      "epoch": 2.98,
-      "learning_rate": 0.0002,
-      "loss": 1.2668,
-      "step": 186
     }
   ],
   "logging_steps": 2,
-  "max_steps": 186,
+  "max_steps": 201,
   "num_input_tokens_seen": 0,
   "num_train_epochs": 3,
   "save_steps": 500,
-  "total_flos": 6323175937130496.0,
+  "total_flos": 2957918219354112.0,
   "train_batch_size": 1,
   "trial_name": null,
   "trial_params": null
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:0ff44e03440ef4f895c3d0e1b40cc60ba65d4e53e1a2dafb7bbff3b709e2f8eb
+oid sha256:83e9a2a1c97c8be5dac62bc06704782b1571af93ae84e801bd429c75183ffaae
 size 4728