|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9996810207336523, |
|
"eval_steps": 500, |
|
"global_step": 1567, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0006379585326953748, |
|
"grad_norm": 210.7223358154297, |
|
"learning_rate": 1.910828025477707e-06, |
|
"loss": 45.3725, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.003189792663476874, |
|
"grad_norm": 226.75299072265625, |
|
"learning_rate": 9.554140127388534e-06, |
|
"loss": 48.3317, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.006379585326953748, |
|
"grad_norm": 153.64804077148438, |
|
"learning_rate": 1.9108280254777068e-05, |
|
"loss": 46.1831, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.009569377990430622, |
|
"grad_norm": 100.49105072021484, |
|
"learning_rate": 2.8662420382165606e-05, |
|
"loss": 35.4306, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.012759170653907496, |
|
"grad_norm": 76.70584106445312, |
|
"learning_rate": 3.8216560509554137e-05, |
|
"loss": 29.7582, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01594896331738437, |
|
"grad_norm": 18.13677215576172, |
|
"learning_rate": 4.777070063694267e-05, |
|
"loss": 24.9798, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.019138755980861243, |
|
"grad_norm": 14.398043632507324, |
|
"learning_rate": 5.732484076433121e-05, |
|
"loss": 22.6081, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.022328548644338118, |
|
"grad_norm": 10.421934127807617, |
|
"learning_rate": 6.687898089171974e-05, |
|
"loss": 20.6468, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.025518341307814992, |
|
"grad_norm": 6.249530792236328, |
|
"learning_rate": 7.643312101910827e-05, |
|
"loss": 20.6855, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.028708133971291867, |
|
"grad_norm": 4.4145917892456055, |
|
"learning_rate": 8.59872611464968e-05, |
|
"loss": 18.4075, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03189792663476874, |
|
"grad_norm": 4.664849281311035, |
|
"learning_rate": 9.554140127388533e-05, |
|
"loss": 18.931, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.03508771929824561, |
|
"grad_norm": 5.491772174835205, |
|
"learning_rate": 0.00010509554140127387, |
|
"loss": 18.2993, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.03827751196172249, |
|
"grad_norm": 11.026978492736816, |
|
"learning_rate": 0.00011464968152866242, |
|
"loss": 17.9978, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04146730462519936, |
|
"grad_norm": 16.8328914642334, |
|
"learning_rate": 0.00012420382165605095, |
|
"loss": 16.1541, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.044657097288676235, |
|
"grad_norm": 24.968544006347656, |
|
"learning_rate": 0.00013375796178343948, |
|
"loss": 13.5497, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.04784688995215311, |
|
"grad_norm": 34.1843147277832, |
|
"learning_rate": 0.00014331210191082802, |
|
"loss": 10.0932, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.051036682615629984, |
|
"grad_norm": 26.95475196838379, |
|
"learning_rate": 0.00015286624203821655, |
|
"loss": 6.0162, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.05422647527910686, |
|
"grad_norm": 89.71574401855469, |
|
"learning_rate": 0.00016242038216560508, |
|
"loss": 3.1634, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.05741626794258373, |
|
"grad_norm": 10.840144157409668, |
|
"learning_rate": 0.0001719745222929936, |
|
"loss": 2.7482, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.06060606060606061, |
|
"grad_norm": 5.291273593902588, |
|
"learning_rate": 0.00018152866242038214, |
|
"loss": 2.3438, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.06379585326953748, |
|
"grad_norm": 2.17537260055542, |
|
"learning_rate": 0.00019108280254777067, |
|
"loss": 2.0583, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.06698564593301436, |
|
"grad_norm": 1.8088818788528442, |
|
"learning_rate": 0.0002006369426751592, |
|
"loss": 1.9395, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07017543859649122, |
|
"grad_norm": 1.0773868560791016, |
|
"learning_rate": 0.00021019108280254773, |
|
"loss": 1.8029, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.0733652312599681, |
|
"grad_norm": 2.8027162551879883, |
|
"learning_rate": 0.00021974522292993626, |
|
"loss": 1.734, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.07655502392344497, |
|
"grad_norm": 1.8964555263519287, |
|
"learning_rate": 0.00022929936305732485, |
|
"loss": 1.6716, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.07974481658692185, |
|
"grad_norm": 1.0901538133621216, |
|
"learning_rate": 0.00023885350318471338, |
|
"loss": 1.6162, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.08293460925039872, |
|
"grad_norm": 1.5070509910583496, |
|
"learning_rate": 0.0002484076433121019, |
|
"loss": 1.6087, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.0861244019138756, |
|
"grad_norm": 1.8919157981872559, |
|
"learning_rate": 0.00025796178343949044, |
|
"loss": 1.5317, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.08931419457735247, |
|
"grad_norm": 1.317736268043518, |
|
"learning_rate": 0.00026751592356687897, |
|
"loss": 1.4964, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.09250398724082935, |
|
"grad_norm": 2.569183111190796, |
|
"learning_rate": 0.0002770700636942675, |
|
"loss": 1.5564, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.09569377990430622, |
|
"grad_norm": 2.390894651412964, |
|
"learning_rate": 0.00028662420382165603, |
|
"loss": 1.4957, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.09888357256778309, |
|
"grad_norm": 0.9399936199188232, |
|
"learning_rate": 0.00029617834394904456, |
|
"loss": 1.484, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.10207336523125997, |
|
"grad_norm": 2.9535129070281982, |
|
"learning_rate": 0.0002999966490829603, |
|
"loss": 1.4503, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.10526315789473684, |
|
"grad_norm": 1.0490965843200684, |
|
"learning_rate": 0.00029997617179878324, |
|
"loss": 1.4321, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.10845295055821372, |
|
"grad_norm": 4.929164409637451, |
|
"learning_rate": 0.0002999370813893047, |
|
"loss": 1.4327, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.11164274322169059, |
|
"grad_norm": 1.0862678289413452, |
|
"learning_rate": 0.00029987938270592676, |
|
"loss": 1.4237, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.11483253588516747, |
|
"grad_norm": 2.1204566955566406, |
|
"learning_rate": 0.0002998030829094724, |
|
"loss": 1.418, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.11802232854864433, |
|
"grad_norm": 2.93302583694458, |
|
"learning_rate": 0.00029970819146929694, |
|
"loss": 1.4072, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.12121212121212122, |
|
"grad_norm": 2.221076011657715, |
|
"learning_rate": 0.0002995947201621131, |
|
"loss": 1.4132, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.12440191387559808, |
|
"grad_norm": 1.8285828828811646, |
|
"learning_rate": 0.00029946268307052886, |
|
"loss": 1.4, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.12759170653907495, |
|
"grad_norm": 1.0585997104644775, |
|
"learning_rate": 0.0002993120965813003, |
|
"loss": 1.374, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.13078149920255183, |
|
"grad_norm": 2.2776687145233154, |
|
"learning_rate": 0.0002991429793832975, |
|
"loss": 1.3503, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.1339712918660287, |
|
"grad_norm": 2.238420009613037, |
|
"learning_rate": 0.00029895535246518524, |
|
"loss": 1.3755, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.1371610845295056, |
|
"grad_norm": 0.9944085478782654, |
|
"learning_rate": 0.0002987492391128182, |
|
"loss": 1.3629, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.14035087719298245, |
|
"grad_norm": 1.7009069919586182, |
|
"learning_rate": 0.0002985246649063509, |
|
"loss": 1.3613, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.14354066985645933, |
|
"grad_norm": 1.9367361068725586, |
|
"learning_rate": 0.0002982816577170631, |
|
"loss": 1.3585, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.1467304625199362, |
|
"grad_norm": 0.831079363822937, |
|
"learning_rate": 0.00029802024770390087, |
|
"loss": 1.3393, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.14992025518341306, |
|
"grad_norm": 1.10426664352417, |
|
"learning_rate": 0.00029774046730973334, |
|
"loss": 1.3418, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.15311004784688995, |
|
"grad_norm": 1.438004493713379, |
|
"learning_rate": 0.00029744235125732664, |
|
"loss": 1.3439, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.15629984051036683, |
|
"grad_norm": 4.208047866821289, |
|
"learning_rate": 0.0002971259365450344, |
|
"loss": 1.3377, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.1594896331738437, |
|
"grad_norm": 2.597606897354126, |
|
"learning_rate": 0.00029679126244220596, |
|
"loss": 1.2973, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.16267942583732056, |
|
"grad_norm": 3.7126145362854004, |
|
"learning_rate": 0.00029643837048431293, |
|
"loss": 1.336, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.16586921850079744, |
|
"grad_norm": 2.6157450675964355, |
|
"learning_rate": 0.0002960673044677939, |
|
"loss": 1.3273, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.16905901116427433, |
|
"grad_norm": 1.1743855476379395, |
|
"learning_rate": 0.00029567811044461977, |
|
"loss": 1.2961, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.1722488038277512, |
|
"grad_norm": 2.6032323837280273, |
|
"learning_rate": 0.00029527083671657746, |
|
"loss": 1.308, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.17543859649122806, |
|
"grad_norm": 1.2666016817092896, |
|
"learning_rate": 0.00029484553382927594, |
|
"loss": 1.2867, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.17862838915470494, |
|
"grad_norm": 2.308387279510498, |
|
"learning_rate": 0.000294402254565873, |
|
"loss": 1.3159, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.18181818181818182, |
|
"grad_norm": 2.5119080543518066, |
|
"learning_rate": 0.00029394105394052434, |
|
"loss": 1.3205, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.1850079744816587, |
|
"grad_norm": 2.0319368839263916, |
|
"learning_rate": 0.00029346198919155616, |
|
"loss": 1.3144, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.18819776714513556, |
|
"grad_norm": 1.0513851642608643, |
|
"learning_rate": 0.00029296511977436107, |
|
"loss": 1.2852, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.19138755980861244, |
|
"grad_norm": 1.9946495294570923, |
|
"learning_rate": 0.0002924505073540198, |
|
"loss": 1.2848, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.19457735247208932, |
|
"grad_norm": 0.9887159466743469, |
|
"learning_rate": 0.0002919182157976476, |
|
"loss": 1.2786, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.19776714513556617, |
|
"grad_norm": 2.8245911598205566, |
|
"learning_rate": 0.00029136831116646815, |
|
"loss": 1.2801, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.20095693779904306, |
|
"grad_norm": 1.0787121057510376, |
|
"learning_rate": 0.000290800861707615, |
|
"loss": 1.2676, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.20414673046251994, |
|
"grad_norm": 1.7965648174285889, |
|
"learning_rate": 0.00029021593784566113, |
|
"loss": 1.2793, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.20733652312599682, |
|
"grad_norm": 3.4487974643707275, |
|
"learning_rate": 0.0002896136121738793, |
|
"loss": 1.2753, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.21052631578947367, |
|
"grad_norm": 2.19303035736084, |
|
"learning_rate": 0.0002889939594452323, |
|
"loss": 1.2631, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.21371610845295055, |
|
"grad_norm": 1.1072807312011719, |
|
"learning_rate": 0.00028835705656309583, |
|
"loss": 1.2789, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.21690590111642744, |
|
"grad_norm": 3.124202251434326, |
|
"learning_rate": 0.0002877029825717142, |
|
"loss": 1.3006, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.22009569377990432, |
|
"grad_norm": 1.0346769094467163, |
|
"learning_rate": 0.0002870318186463901, |
|
"loss": 1.2368, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.22328548644338117, |
|
"grad_norm": 5.13249397277832, |
|
"learning_rate": 0.0002863436480834105, |
|
"loss": 1.2836, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.22647527910685805, |
|
"grad_norm": 0.9209335446357727, |
|
"learning_rate": 0.00028563855628970886, |
|
"loss": 1.259, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.22966507177033493, |
|
"grad_norm": 1.4709413051605225, |
|
"learning_rate": 0.0002849166307722653, |
|
"loss": 1.2966, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.23285486443381181, |
|
"grad_norm": 0.9667356610298157, |
|
"learning_rate": 0.00028417796112724684, |
|
"loss": 1.2771, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.23604465709728867, |
|
"grad_norm": 0.8139356374740601, |
|
"learning_rate": 0.0002834226390288873, |
|
"loss": 1.2409, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.23923444976076555, |
|
"grad_norm": 1.1209094524383545, |
|
"learning_rate": 0.0002826507582181103, |
|
"loss": 1.2499, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.24242424242424243, |
|
"grad_norm": 1.3784332275390625, |
|
"learning_rate": 0.00028186241449089524, |
|
"loss": 1.2648, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.24561403508771928, |
|
"grad_norm": 4.830300807952881, |
|
"learning_rate": 0.000281057705686388, |
|
"loss": 1.2524, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.24880382775119617, |
|
"grad_norm": 1.3136839866638184, |
|
"learning_rate": 0.0002802367316747589, |
|
"loss": 1.2545, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.25199362041467305, |
|
"grad_norm": 0.94329434633255, |
|
"learning_rate": 0.0002793995943448078, |
|
"loss": 1.2159, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.2551834130781499, |
|
"grad_norm": 1.6266140937805176, |
|
"learning_rate": 0.00027854639759131893, |
|
"loss": 1.2211, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.2583732057416268, |
|
"grad_norm": 1.1264214515686035, |
|
"learning_rate": 0.00027767724730216696, |
|
"loss": 1.252, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.26156299840510366, |
|
"grad_norm": 1.0141706466674805, |
|
"learning_rate": 0.0002767922513451754, |
|
"loss": 1.2613, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.2647527910685805, |
|
"grad_norm": 1.6436514854431152, |
|
"learning_rate": 0.00027589151955472965, |
|
"loss": 1.2489, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.2679425837320574, |
|
"grad_norm": 1.739797592163086, |
|
"learning_rate": 0.00027497516371814543, |
|
"loss": 1.2377, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.2711323763955343, |
|
"grad_norm": 1.399665117263794, |
|
"learning_rate": 0.00027404329756179537, |
|
"loss": 1.2445, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.2743221690590112, |
|
"grad_norm": 2.8554205894470215, |
|
"learning_rate": 0.0002730960367369949, |
|
"loss": 1.2909, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.27751196172248804, |
|
"grad_norm": 3.405055046081543, |
|
"learning_rate": 0.00027213349880564873, |
|
"loss": 1.2489, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.2807017543859649, |
|
"grad_norm": 0.9365301728248596, |
|
"learning_rate": 0.0002711558032256607, |
|
"loss": 1.236, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.2838915470494418, |
|
"grad_norm": 2.3004956245422363, |
|
"learning_rate": 0.0002701630713361085, |
|
"loss": 1.2398, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.28708133971291866, |
|
"grad_norm": 1.1962858438491821, |
|
"learning_rate": 0.00026915542634218403, |
|
"loss": 1.2487, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2902711323763955, |
|
"grad_norm": 1.5624754428863525, |
|
"learning_rate": 0.00026813299329990335, |
|
"loss": 1.2467, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.2934609250398724, |
|
"grad_norm": 1.7164920568466187, |
|
"learning_rate": 0.0002670958991005859, |
|
"loss": 1.2415, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.2966507177033493, |
|
"grad_norm": 1.4587774276733398, |
|
"learning_rate": 0.0002660442724551065, |
|
"loss": 1.221, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.29984051036682613, |
|
"grad_norm": 3.389615774154663, |
|
"learning_rate": 0.00026497824387792146, |
|
"loss": 1.2282, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.30303030303030304, |
|
"grad_norm": 1.0039730072021484, |
|
"learning_rate": 0.00026389794567087085, |
|
"loss": 1.2331, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.3062200956937799, |
|
"grad_norm": 1.3434313535690308, |
|
"learning_rate": 0.0002628035119067586, |
|
"loss": 1.212, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.3094098883572568, |
|
"grad_norm": 1.2468559741973877, |
|
"learning_rate": 0.0002616950784127135, |
|
"loss": 1.1965, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.31259968102073366, |
|
"grad_norm": 1.0449252128601074, |
|
"learning_rate": 0.00026057278275333165, |
|
"loss": 1.2279, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.3157894736842105, |
|
"grad_norm": 6.562210559844971, |
|
"learning_rate": 0.00025943676421360395, |
|
"loss": 1.2084, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.3189792663476874, |
|
"grad_norm": 2.9650490283966064, |
|
"learning_rate": 0.00025828716378163, |
|
"loss": 1.2578, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.32216905901116427, |
|
"grad_norm": 2.523132801055908, |
|
"learning_rate": 0.00025712412413112006, |
|
"loss": 1.2443, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.3253588516746411, |
|
"grad_norm": 1.9372252225875854, |
|
"learning_rate": 0.00025594778960368844, |
|
"loss": 1.2412, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.32854864433811803, |
|
"grad_norm": 2.129570722579956, |
|
"learning_rate": 0.0002547583061909396, |
|
"loss": 1.2089, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.3317384370015949, |
|
"grad_norm": 1.6949142217636108, |
|
"learning_rate": 0.00025355582151634956, |
|
"loss": 1.2235, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.3349282296650718, |
|
"grad_norm": 3.6476926803588867, |
|
"learning_rate": 0.00025234048481694477, |
|
"loss": 1.1841, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.33811802232854865, |
|
"grad_norm": 2.0265157222747803, |
|
"learning_rate": 0.0002511124469247809, |
|
"loss": 1.2114, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.3413078149920255, |
|
"grad_norm": 0.7946969270706177, |
|
"learning_rate": 0.00024987186024822295, |
|
"loss": 1.2057, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.3444976076555024, |
|
"grad_norm": 2.393213987350464, |
|
"learning_rate": 0.0002486188787530309, |
|
"loss": 1.2209, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.34768740031897927, |
|
"grad_norm": 1.4503653049468994, |
|
"learning_rate": 0.00024735365794325117, |
|
"loss": 1.2303, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.3508771929824561, |
|
"grad_norm": 2.4336180686950684, |
|
"learning_rate": 0.0002460763548419172, |
|
"loss": 1.205, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.35406698564593303, |
|
"grad_norm": 1.3000552654266357, |
|
"learning_rate": 0.0002447871279715624, |
|
"loss": 1.2139, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.3572567783094099, |
|
"grad_norm": 1.1987704038619995, |
|
"learning_rate": 0.00024348613733454565, |
|
"loss": 1.2001, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.36044657097288674, |
|
"grad_norm": 1.4307914972305298, |
|
"learning_rate": 0.00024217354439319427, |
|
"loss": 1.1822, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.36363636363636365, |
|
"grad_norm": 2.28100848197937, |
|
"learning_rate": 0.00024084951204976528, |
|
"loss": 1.1826, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.3668261562998405, |
|
"grad_norm": 1.1329950094223022, |
|
"learning_rate": 0.0002395142046262281, |
|
"loss": 1.2024, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.3700159489633174, |
|
"grad_norm": 1.3367254734039307, |
|
"learning_rate": 0.00023816778784387094, |
|
"loss": 1.1933, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.37320574162679426, |
|
"grad_norm": 0.9757080078125, |
|
"learning_rate": 0.0002368104288027336, |
|
"loss": 1.1826, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.3763955342902711, |
|
"grad_norm": 2.52244234085083, |
|
"learning_rate": 0.0002354422959608692, |
|
"loss": 1.2129, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.379585326953748, |
|
"grad_norm": 2.156867265701294, |
|
"learning_rate": 0.00023406355911343717, |
|
"loss": 1.2185, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.3827751196172249, |
|
"grad_norm": 3.1043102741241455, |
|
"learning_rate": 0.00023267438937163077, |
|
"loss": 1.1834, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.38596491228070173, |
|
"grad_norm": 2.570702314376831, |
|
"learning_rate": 0.00023127495914144051, |
|
"loss": 1.2028, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.38915470494417864, |
|
"grad_norm": 0.836580753326416, |
|
"learning_rate": 0.00022986544210225774, |
|
"loss": 1.1895, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.3923444976076555, |
|
"grad_norm": 1.2737889289855957, |
|
"learning_rate": 0.00022844601318531955, |
|
"loss": 1.1641, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.39553429027113235, |
|
"grad_norm": 1.6395554542541504, |
|
"learning_rate": 0.00022701684855199857, |
|
"loss": 1.2172, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.39872408293460926, |
|
"grad_norm": 0.8626974821090698, |
|
"learning_rate": 0.00022557812557194, |
|
"loss": 1.1785, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.4019138755980861, |
|
"grad_norm": 2.091778039932251, |
|
"learning_rate": 0.00022413002280104915, |
|
"loss": 1.1817, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.405103668261563, |
|
"grad_norm": 1.2207390069961548, |
|
"learning_rate": 0.00022267271995933074, |
|
"loss": 1.2138, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.4082934609250399, |
|
"grad_norm": 1.2744272947311401, |
|
"learning_rate": 0.00022120639790858482, |
|
"loss": 1.1901, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.41148325358851673, |
|
"grad_norm": 1.1834518909454346, |
|
"learning_rate": 0.00021973123862996044, |
|
"loss": 1.2143, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.41467304625199364, |
|
"grad_norm": 1.2653913497924805, |
|
"learning_rate": 0.00021824742520137026, |
|
"loss": 1.2088, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.4178628389154705, |
|
"grad_norm": 1.1800236701965332, |
|
"learning_rate": 0.00021675514177476945, |
|
"loss": 1.1787, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.42105263157894735, |
|
"grad_norm": 1.9899177551269531, |
|
"learning_rate": 0.0002152545735533012, |
|
"loss": 1.1963, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.42424242424242425, |
|
"grad_norm": 1.0640288591384888, |
|
"learning_rate": 0.00021374590676831136, |
|
"loss": 1.1854, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.4274322169059011, |
|
"grad_norm": 1.9654450416564941, |
|
"learning_rate": 0.00021222932865623605, |
|
"loss": 1.1844, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.430622009569378, |
|
"grad_norm": 2.232189893722534, |
|
"learning_rate": 0.00021070502743536414, |
|
"loss": 1.1608, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.43381180223285487, |
|
"grad_norm": 2.0131752490997314, |
|
"learning_rate": 0.00020917319228247805, |
|
"loss": 1.1726, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.4370015948963317, |
|
"grad_norm": 3.602222204208374, |
|
"learning_rate": 0.00020763401330937555, |
|
"loss": 1.1757, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.44019138755980863, |
|
"grad_norm": 2.3335506916046143, |
|
"learning_rate": 0.00020608768153927546, |
|
"loss": 1.184, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.4433811802232855, |
|
"grad_norm": 1.028072714805603, |
|
"learning_rate": 0.00020453438888311042, |
|
"loss": 1.1648, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.44657097288676234, |
|
"grad_norm": 3.57364559173584, |
|
"learning_rate": 0.00020297432811570916, |
|
"loss": 1.1965, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.44976076555023925, |
|
"grad_norm": 1.4527466297149658, |
|
"learning_rate": 0.00020140769285187187, |
|
"loss": 1.1597, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.4529505582137161, |
|
"grad_norm": 1.855742335319519, |
|
"learning_rate": 0.00019983467752234132, |
|
"loss": 1.1677, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.45614035087719296, |
|
"grad_norm": 1.952112078666687, |
|
"learning_rate": 0.0001982554773496723, |
|
"loss": 1.2063, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.45933014354066987, |
|
"grad_norm": 1.2015854120254517, |
|
"learning_rate": 0.00019667028832400345, |
|
"loss": 1.1462, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.4625199362041467, |
|
"grad_norm": 3.997166872024536, |
|
"learning_rate": 0.00019507930717873313, |
|
"loss": 1.1694, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.46570972886762363, |
|
"grad_norm": 2.941664934158325, |
|
"learning_rate": 0.00019348273136610364, |
|
"loss": 1.1751, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.4688995215311005, |
|
"grad_norm": 1.275882601737976, |
|
"learning_rate": 0.00019188075903269587, |
|
"loss": 1.1763, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.47208931419457734, |
|
"grad_norm": 1.3392881155014038, |
|
"learning_rate": 0.00019027358899483776, |
|
"loss": 1.1659, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.47527910685805425, |
|
"grad_norm": 1.5464389324188232, |
|
"learning_rate": 0.00018866142071393013, |
|
"loss": 1.1615, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.4784688995215311, |
|
"grad_norm": 0.8039942979812622, |
|
"learning_rate": 0.00018704445427169156, |
|
"loss": 1.1703, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.48165869218500795, |
|
"grad_norm": 1.1516778469085693, |
|
"learning_rate": 0.00018542289034532733, |
|
"loss": 1.1602, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.48484848484848486, |
|
"grad_norm": 1.191139578819275, |
|
"learning_rate": 0.00018379693018262349, |
|
"loss": 1.1802, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.4880382775119617, |
|
"grad_norm": 1.7178115844726562, |
|
"learning_rate": 0.00018216677557697083, |
|
"loss": 1.1797, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.49122807017543857, |
|
"grad_norm": 2.011737585067749, |
|
"learning_rate": 0.00018053262884232078, |
|
"loss": 1.1788, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.4944178628389155, |
|
"grad_norm": 0.8188173174858093, |
|
"learning_rate": 0.0001788946927880768, |
|
"loss": 1.1602, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.49760765550239233, |
|
"grad_norm": 2.8875205516815186, |
|
"learning_rate": 0.00017725317069392418, |
|
"loss": 1.1633, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.5007974481658692, |
|
"grad_norm": 2.8602237701416016, |
|
"learning_rate": 0.00017560826628460182, |
|
"loss": 1.1598, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.5039872408293461, |
|
"grad_norm": 6.106576919555664, |
|
"learning_rate": 0.00017396018370461808, |
|
"loss": 1.1752, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.507177033492823, |
|
"grad_norm": 1.2913662195205688, |
|
"learning_rate": 0.00017230912749291547, |
|
"loss": 1.1865, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.5103668261562998, |
|
"grad_norm": 1.422013521194458, |
|
"learning_rate": 0.00017065530255748557, |
|
"loss": 1.1697, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.5135566188197768, |
|
"grad_norm": 1.5933501720428467, |
|
"learning_rate": 0.00016899891414993854, |
|
"loss": 1.1669, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.5167464114832536, |
|
"grad_norm": 1.5201044082641602, |
|
"learning_rate": 0.00016734016784002994, |
|
"loss": 1.1658, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.5199362041467305, |
|
"grad_norm": 1.0443668365478516, |
|
"learning_rate": 0.000165679269490148, |
|
"loss": 1.1562, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.5231259968102073, |
|
"grad_norm": 0.9849405884742737, |
|
"learning_rate": 0.0001640164252297648, |
|
"loss": 1.1485, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.5263157894736842, |
|
"grad_norm": 1.2747652530670166, |
|
"learning_rate": 0.00016235184142985367, |
|
"loss": 1.1511, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.529505582137161, |
|
"grad_norm": 1.8182193040847778, |
|
"learning_rate": 0.00016068572467727762, |
|
"loss": 1.1778, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.532695374800638, |
|
"grad_norm": 1.002661108970642, |
|
"learning_rate": 0.00015901828174915005, |
|
"loss": 1.1604, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.5358851674641149, |
|
"grad_norm": 1.4460569620132446, |
|
"learning_rate": 0.00015734971958717228, |
|
"loss": 1.1688, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.5390749601275917, |
|
"grad_norm": 1.7245659828186035, |
|
"learning_rate": 0.00015568024527195067, |
|
"loss": 1.1794, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.5422647527910686, |
|
"grad_norm": 1.0630652904510498, |
|
"learning_rate": 0.0001540100659972963, |
|
"loss": 1.133, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.5454545454545454, |
|
"grad_norm": 1.1989563703536987, |
|
"learning_rate": 0.000152339389044511, |
|
"loss": 1.163, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.5486443381180224, |
|
"grad_norm": 8.01758861541748, |
|
"learning_rate": 0.00015066842175666186, |
|
"loss": 1.1632, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.5518341307814992, |
|
"grad_norm": 0.8911394476890564, |
|
"learning_rate": 0.0001489973715128487, |
|
"loss": 1.136, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.5550239234449761, |
|
"grad_norm": 1.598703145980835, |
|
"learning_rate": 0.00014732644570246675, |
|
"loss": 1.1476, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.5582137161084529, |
|
"grad_norm": 1.122660756111145, |
|
"learning_rate": 0.00014565585169946824, |
|
"loss": 1.1598, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.5614035087719298, |
|
"grad_norm": 3.6311142444610596, |
|
"learning_rate": 0.0001439857968366256, |
|
"loss": 1.158, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.5645933014354066, |
|
"grad_norm": 2.771517515182495, |
|
"learning_rate": 0.00014231648837980022, |
|
"loss": 1.1269, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.5677830940988836, |
|
"grad_norm": 1.8766001462936401, |
|
"learning_rate": 0.00014064813350221894, |
|
"loss": 1.1855, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.5709728867623605, |
|
"grad_norm": 1.0254100561141968, |
|
"learning_rate": 0.00013898093925876267, |
|
"loss": 1.1206, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.5741626794258373, |
|
"grad_norm": 1.0249429941177368, |
|
"learning_rate": 0.00013731511256026913, |
|
"loss": 1.1452, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.5773524720893142, |
|
"grad_norm": 1.6923766136169434, |
|
"learning_rate": 0.00013565086014785406, |
|
"loss": 1.1558, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.580542264752791, |
|
"grad_norm": 1.1442967653274536, |
|
"learning_rate": 0.00013398838856725257, |
|
"loss": 1.1555, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.583732057416268, |
|
"grad_norm": 1.3362458944320679, |
|
"learning_rate": 0.00013232790414318608, |
|
"loss": 1.1294, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.5869218500797448, |
|
"grad_norm": 2.405693531036377, |
|
"learning_rate": 0.0001306696129537553, |
|
"loss": 1.1569, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.5901116427432217, |
|
"grad_norm": 1.8836421966552734, |
|
"learning_rate": 0.00012901372080486472, |
|
"loss": 1.1246, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.5933014354066986, |
|
"grad_norm": 1.0528597831726074, |
|
"learning_rate": 0.00012736043320468073, |
|
"loss": 1.1255, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.5964912280701754, |
|
"grad_norm": 1.1211520433425903, |
|
"learning_rate": 0.0001257099553381262, |
|
"loss": 1.1451, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.5996810207336523, |
|
"grad_norm": 1.0666625499725342, |
|
"learning_rate": 0.00012406249204141603, |
|
"loss": 1.1326, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.6028708133971292, |
|
"grad_norm": 1.0989271402359009, |
|
"learning_rate": 0.000122418247776635, |
|
"loss": 1.1428, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.6060606060606061, |
|
"grad_norm": 1.260853886604309, |
|
"learning_rate": 0.00012077742660636299, |
|
"loss": 1.1376, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.6092503987240829, |
|
"grad_norm": 0.7593328356742859, |
|
"learning_rate": 0.00011914023216834904, |
|
"loss": 1.114, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.6124401913875598, |
|
"grad_norm": 1.079769253730774, |
|
"learning_rate": 0.0001175068676502386, |
|
"loss": 1.1034, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.6156299840510366, |
|
"grad_norm": 3.5690054893493652, |
|
"learning_rate": 0.00011587753576435634, |
|
"loss": 1.1128, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.6188197767145136, |
|
"grad_norm": 1.176243543624878, |
|
"learning_rate": 0.00011425243872254835, |
|
"loss": 1.1106, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.6220095693779905, |
|
"grad_norm": 2.7429394721984863, |
|
"learning_rate": 0.00011263177821108573, |
|
"loss": 1.1347, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.6251993620414673, |
|
"grad_norm": 1.3033727407455444, |
|
"learning_rate": 0.00011101575536563433, |
|
"loss": 1.1124, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.6283891547049442, |
|
"grad_norm": 1.5057191848754883, |
|
"learning_rate": 0.00010940457074629217, |
|
"loss": 1.1176, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.631578947368421, |
|
"grad_norm": 1.3368103504180908, |
|
"learning_rate": 0.00010779842431269843, |
|
"loss": 1.1189, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.6347687400318979, |
|
"grad_norm": 1.240006685256958, |
|
"learning_rate": 0.0001061975153992172, |
|
"loss": 1.1093, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.6379585326953748, |
|
"grad_norm": 3.0179250240325928, |
|
"learning_rate": 0.00010460204269019829, |
|
"loss": 1.1025, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.6411483253588517, |
|
"grad_norm": 1.1859713792800903, |
|
"learning_rate": 0.0001030122041953196, |
|
"loss": 1.1334, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.6443381180223285, |
|
"grad_norm": 1.4847084283828735, |
|
"learning_rate": 0.0001014281972250121, |
|
"loss": 1.1351, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.6475279106858054, |
|
"grad_norm": 1.057880163192749, |
|
"learning_rate": 9.985021836597273e-05, |
|
"loss": 1.1072, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.6507177033492823, |
|
"grad_norm": 0.9342829585075378, |
|
"learning_rate": 9.827846345676614e-05, |
|
"loss": 1.1045, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.6539074960127592, |
|
"grad_norm": 0.9297033548355103, |
|
"learning_rate": 9.671312756351998e-05, |
|
"loss": 1.1009, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.6570972886762361, |
|
"grad_norm": 1.077148675918579, |
|
"learning_rate": 9.515440495571569e-05, |
|
"loss": 1.1088, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.6602870813397129, |
|
"grad_norm": 1.4433211088180542, |
|
"learning_rate": 9.360248908207813e-05, |
|
"loss": 1.1026, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.6634768740031898, |
|
"grad_norm": 1.0340611934661865, |
|
"learning_rate": 9.205757254656755e-05, |
|
"loss": 1.0965, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.6666666666666666, |
|
"grad_norm": 1.4647072553634644, |
|
"learning_rate": 9.05198470844756e-05, |
|
"loss": 1.095, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.6698564593301436, |
|
"grad_norm": 5.960877895355225, |
|
"learning_rate": 8.898950353862998e-05, |
|
"loss": 1.107, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.6730462519936204, |
|
"grad_norm": 3.8718395233154297, |
|
"learning_rate": 8.746673183570923e-05, |
|
"loss": 1.108, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.6762360446570973, |
|
"grad_norm": 1.6608846187591553, |
|
"learning_rate": 8.595172096267157e-05, |
|
"loss": 1.1405, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.6794258373205742, |
|
"grad_norm": 1.0977705717086792, |
|
"learning_rate": 8.444465894330024e-05, |
|
"loss": 1.0896, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.682615629984051, |
|
"grad_norm": 1.2941663265228271, |
|
"learning_rate": 8.294573281486828e-05, |
|
"loss": 1.099, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.6858054226475279, |
|
"grad_norm": 1.8003350496292114, |
|
"learning_rate": 8.145512860492596e-05, |
|
"loss": 1.0998, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.6889952153110048, |
|
"grad_norm": 8.684834480285645, |
|
"learning_rate": 7.997303130821362e-05, |
|
"loss": 1.1139, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.6921850079744817, |
|
"grad_norm": 1.1457107067108154, |
|
"learning_rate": 7.849962486370206e-05, |
|
"loss": 1.1079, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.6953748006379585, |
|
"grad_norm": 1.4086178541183472, |
|
"learning_rate": 7.703509213176451e-05, |
|
"loss": 1.1205, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.6985645933014354, |
|
"grad_norm": 8.64991283416748, |
|
"learning_rate": 7.557961487148272e-05, |
|
"loss": 1.12, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.7017543859649122, |
|
"grad_norm": 0.8322669267654419, |
|
"learning_rate": 7.413337371808884e-05, |
|
"loss": 1.0927, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.7049441786283892, |
|
"grad_norm": 1.1140292882919312, |
|
"learning_rate": 7.269654816054756e-05, |
|
"loss": 1.0876, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.7081339712918661, |
|
"grad_norm": 1.5485320091247559, |
|
"learning_rate": 7.126931651928012e-05, |
|
"loss": 1.0904, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.7113237639553429, |
|
"grad_norm": 0.8744588494300842, |
|
"learning_rate": 6.985185592403367e-05, |
|
"loss": 1.1068, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.7145135566188198, |
|
"grad_norm": 1.0260374546051025, |
|
"learning_rate": 6.844434229189787e-05, |
|
"loss": 1.1022, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.7177033492822966, |
|
"grad_norm": 1.5165791511535645, |
|
"learning_rate": 6.704695030547252e-05, |
|
"loss": 1.0818, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.7208931419457735, |
|
"grad_norm": 1.1698341369628906, |
|
"learning_rate": 6.56598533911881e-05, |
|
"loss": 1.0937, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.7240829346092504, |
|
"grad_norm": 5.878746032714844, |
|
"learning_rate": 6.428322369778254e-05, |
|
"loss": 1.1108, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 0.82503342628479, |
|
"learning_rate": 6.291723207493577e-05, |
|
"loss": 1.0722, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.7304625199362041, |
|
"grad_norm": 0.8583028316497803, |
|
"learning_rate": 6.15620480520666e-05, |
|
"loss": 1.0876, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.733652312599681, |
|
"grad_norm": 0.9114026427268982, |
|
"learning_rate": 6.0217839817292765e-05, |
|
"loss": 1.0782, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.7368421052631579, |
|
"grad_norm": 1.1162232160568237, |
|
"learning_rate": 5.888477419655733e-05, |
|
"loss": 1.0968, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.7400318979266348, |
|
"grad_norm": 0.9924772381782532, |
|
"learning_rate": 5.7563016632924555e-05, |
|
"loss": 1.0821, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.7432216905901117, |
|
"grad_norm": 0.9202504754066467, |
|
"learning_rate": 5.625273116604728e-05, |
|
"loss": 1.1216, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.7464114832535885, |
|
"grad_norm": 1.0332766771316528, |
|
"learning_rate": 5.495408041180829e-05, |
|
"loss": 1.1094, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.7496012759170654, |
|
"grad_norm": 1.1432628631591797, |
|
"learning_rate": 5.3667225542138507e-05, |
|
"loss": 1.0725, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.7527910685805422, |
|
"grad_norm": 1.2031751871109009, |
|
"learning_rate": 5.239232626501464e-05, |
|
"loss": 1.1096, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.7559808612440191, |
|
"grad_norm": 1.1352425813674927, |
|
"learning_rate": 5.1129540804637906e-05, |
|
"loss": 1.0949, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.759170653907496, |
|
"grad_norm": 4.166080474853516, |
|
"learning_rate": 4.9879025881797466e-05, |
|
"loss": 1.0776, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.7623604465709729, |
|
"grad_norm": 1.7659157514572144, |
|
"learning_rate": 4.8640936694420245e-05, |
|
"loss": 1.0777, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.7655502392344498, |
|
"grad_norm": 0.734951376914978, |
|
"learning_rate": 4.7415426898309704e-05, |
|
"loss": 1.0787, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.7687400318979266, |
|
"grad_norm": 1.0419098138809204, |
|
"learning_rate": 4.62026485880761e-05, |
|
"loss": 1.0811, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.7719298245614035, |
|
"grad_norm": 0.9413526654243469, |
|
"learning_rate": 4.50027522782603e-05, |
|
"loss": 1.0771, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.7751196172248804, |
|
"grad_norm": 1.194632649421692, |
|
"learning_rate": 4.3815886884654136e-05, |
|
"loss": 1.0781, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.7783094098883573, |
|
"grad_norm": 1.6069307327270508, |
|
"learning_rate": 4.264219970581854e-05, |
|
"loss": 1.1017, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.7814992025518341, |
|
"grad_norm": 1.064291000366211, |
|
"learning_rate": 4.148183640480293e-05, |
|
"loss": 1.0943, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.784688995215311, |
|
"grad_norm": 1.8470526933670044, |
|
"learning_rate": 4.0334940991067276e-05, |
|
"loss": 1.1089, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.7878787878787878, |
|
"grad_norm": 0.8893584609031677, |
|
"learning_rate": 3.920165580260973e-05, |
|
"loss": 1.0944, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.7910685805422647, |
|
"grad_norm": 1.3273100852966309, |
|
"learning_rate": 3.808212148830095e-05, |
|
"loss": 1.0753, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.7942583732057417, |
|
"grad_norm": 1.6681938171386719, |
|
"learning_rate": 3.697647699042918e-05, |
|
"loss": 1.0755, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.7974481658692185, |
|
"grad_norm": 1.1112189292907715, |
|
"learning_rate": 3.5884859527455995e-05, |
|
"loss": 1.0852, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.8006379585326954, |
|
"grad_norm": 0.9270617365837097, |
|
"learning_rate": 3.48074045769868e-05, |
|
"loss": 1.0873, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.8038277511961722, |
|
"grad_norm": 1.1638352870941162, |
|
"learning_rate": 3.3744245858956967e-05, |
|
"loss": 1.1141, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.8070175438596491, |
|
"grad_norm": 1.0835789442062378, |
|
"learning_rate": 3.2695515319036186e-05, |
|
"loss": 1.097, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.810207336523126, |
|
"grad_norm": 0.7704162001609802, |
|
"learning_rate": 3.1661343112253304e-05, |
|
"loss": 1.081, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.8133971291866029, |
|
"grad_norm": 0.9477359056472778, |
|
"learning_rate": 3.064185758684265e-05, |
|
"loss": 1.0632, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.8165869218500797, |
|
"grad_norm": 1.0834468603134155, |
|
"learning_rate": 2.96371852683157e-05, |
|
"loss": 1.0939, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.8197767145135566, |
|
"grad_norm": 1.0074337720870972, |
|
"learning_rate": 2.8647450843757897e-05, |
|
"loss": 1.1007, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.8229665071770335, |
|
"grad_norm": 0.9465036392211914, |
|
"learning_rate": 2.7672777146354246e-05, |
|
"loss": 1.095, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.8261562998405104, |
|
"grad_norm": 0.9678279161453247, |
|
"learning_rate": 2.6713285140144802e-05, |
|
"loss": 1.0927, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.8293460925039873, |
|
"grad_norm": 1.2122068405151367, |
|
"learning_rate": 2.5769093905012333e-05, |
|
"loss": 1.0784, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.8325358851674641, |
|
"grad_norm": 1.0039793252944946, |
|
"learning_rate": 2.4840320621903253e-05, |
|
"loss": 1.0766, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.835725677830941, |
|
"grad_norm": 1.5479018688201904, |
|
"learning_rate": 2.392708055828495e-05, |
|
"loss": 1.0768, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.8389154704944178, |
|
"grad_norm": 0.8418304324150085, |
|
"learning_rate": 2.3029487053840295e-05, |
|
"loss": 1.0777, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.8421052631578947, |
|
"grad_norm": 2.277839183807373, |
|
"learning_rate": 2.214765150640108e-05, |
|
"loss": 1.0962, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.8452950558213717, |
|
"grad_norm": 1.1263689994812012, |
|
"learning_rate": 2.1281683358122996e-05, |
|
"loss": 1.0756, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.8484848484848485, |
|
"grad_norm": 1.8724466562271118, |
|
"learning_rate": 2.043169008190289e-05, |
|
"loss": 1.092, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.8516746411483254, |
|
"grad_norm": 0.8570544123649597, |
|
"learning_rate": 1.9597777168040872e-05, |
|
"loss": 1.0956, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.8548644338118022, |
|
"grad_norm": 0.7986017465591431, |
|
"learning_rate": 1.8780048111147776e-05, |
|
"loss": 1.0964, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.8580542264752791, |
|
"grad_norm": 0.973429799079895, |
|
"learning_rate": 1.797860439730126e-05, |
|
"loss": 1.0466, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.861244019138756, |
|
"grad_norm": 1.8588981628417969, |
|
"learning_rate": 1.7193545491450183e-05, |
|
"loss": 1.0906, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.8644338118022329, |
|
"grad_norm": 0.8553722500801086, |
|
"learning_rate": 1.6424968825070567e-05, |
|
"loss": 1.0621, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.8676236044657097, |
|
"grad_norm": 0.7932642698287964, |
|
"learning_rate": 1.567296978407353e-05, |
|
"loss": 1.0786, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.8708133971291866, |
|
"grad_norm": 0.7676968574523926, |
|
"learning_rate": 1.4937641696967245e-05, |
|
"loss": 1.0959, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.8740031897926634, |
|
"grad_norm": 0.8007521033287048, |
|
"learning_rate": 1.4219075823274251e-05, |
|
"loss": 1.075, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.8771929824561403, |
|
"grad_norm": 0.7606749534606934, |
|
"learning_rate": 1.3517361342205295e-05, |
|
"loss": 1.0748, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.8803827751196173, |
|
"grad_norm": 0.9343269467353821, |
|
"learning_rate": 1.2832585341591844e-05, |
|
"loss": 1.0499, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.8835725677830941, |
|
"grad_norm": 1.2631272077560425, |
|
"learning_rate": 1.2164832807077585e-05, |
|
"loss": 1.0824, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.886762360446571, |
|
"grad_norm": 0.8094949126243591, |
|
"learning_rate": 1.151418661157122e-05, |
|
"loss": 1.069, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.8899521531100478, |
|
"grad_norm": 0.9011691808700562, |
|
"learning_rate": 1.0880727504961339e-05, |
|
"loss": 1.0696, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.8931419457735247, |
|
"grad_norm": 0.710112988948822, |
|
"learning_rate": 1.0264534104094812e-05, |
|
"loss": 1.0808, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.8963317384370016, |
|
"grad_norm": 0.9122905135154724, |
|
"learning_rate": 9.665682883019732e-06, |
|
"loss": 1.0729, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.8995215311004785, |
|
"grad_norm": 0.9680069088935852, |
|
"learning_rate": 9.084248163494511e-06, |
|
"loss": 1.0741, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.9027113237639554, |
|
"grad_norm": 1.0660099983215332, |
|
"learning_rate": 8.520302105764148e-06, |
|
"loss": 1.0786, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.9059011164274322, |
|
"grad_norm": 0.8283365964889526, |
|
"learning_rate": 7.973914699604367e-06, |
|
"loss": 1.0749, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 0.7764257192611694, |
|
"learning_rate": 7.445153755635569e-06, |
|
"loss": 1.0869, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.9122807017543859, |
|
"grad_norm": 0.7744048237800598, |
|
"learning_rate": 6.934084896906983e-06, |
|
"loss": 1.081, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.9154704944178629, |
|
"grad_norm": 0.8904666900634766, |
|
"learning_rate": 6.440771550752377e-06, |
|
"loss": 1.0909, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 0.9186602870813397, |
|
"grad_norm": 0.731381893157959, |
|
"learning_rate": 5.965274940918274e-06, |
|
"loss": 1.0837, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.9218500797448166, |
|
"grad_norm": 2.323331117630005, |
|
"learning_rate": 5.507654079965612e-06, |
|
"loss": 1.0651, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 0.9250398724082934, |
|
"grad_norm": 0.8549768328666687, |
|
"learning_rate": 5.067965761945869e-06, |
|
"loss": 1.0887, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.9282296650717703, |
|
"grad_norm": 0.8538034558296204, |
|
"learning_rate": 4.646264555352586e-06, |
|
"loss": 1.0685, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 0.9314194577352473, |
|
"grad_norm": 0.756767988204956, |
|
"learning_rate": 4.242602796348915e-06, |
|
"loss": 1.0661, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.9346092503987241, |
|
"grad_norm": 1.35940682888031, |
|
"learning_rate": 3.857030582272369e-06, |
|
"loss": 1.062, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 0.937799043062201, |
|
"grad_norm": 0.9812383651733398, |
|
"learning_rate": 3.489595765417441e-06, |
|
"loss": 1.0341, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.9409888357256778, |
|
"grad_norm": 0.8831263780593872, |
|
"learning_rate": 3.140343947096624e-06, |
|
"loss": 1.0486, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 0.9441786283891547, |
|
"grad_norm": 0.7819539904594421, |
|
"learning_rate": 2.80931847198117e-06, |
|
"loss": 1.1105, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.9473684210526315, |
|
"grad_norm": 0.9179317355155945, |
|
"learning_rate": 2.4965604227215774e-06, |
|
"loss": 1.0849, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 0.9505582137161085, |
|
"grad_norm": 0.8770614266395569, |
|
"learning_rate": 2.202108614848885e-06, |
|
"loss": 1.0684, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.9537480063795853, |
|
"grad_norm": 0.7987916469573975, |
|
"learning_rate": 1.925999591957561e-06, |
|
"loss": 1.0725, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 0.9569377990430622, |
|
"grad_norm": 0.754940927028656, |
|
"learning_rate": 1.6682676211700107e-06, |
|
"loss": 1.0815, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.960127591706539, |
|
"grad_norm": 0.941967248916626, |
|
"learning_rate": 1.4289446888838652e-06, |
|
"loss": 1.0887, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 0.9633173843700159, |
|
"grad_norm": 0.9065337181091309, |
|
"learning_rate": 1.2080604968022378e-06, |
|
"loss": 1.0604, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.9665071770334929, |
|
"grad_norm": 0.7943056225776672, |
|
"learning_rate": 1.0056424582474575e-06, |
|
"loss": 1.0627, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 0.9696969696969697, |
|
"grad_norm": 0.7137452960014343, |
|
"learning_rate": 8.217156947590064e-07, |
|
"loss": 1.0576, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.9728867623604466, |
|
"grad_norm": 0.7708435654640198, |
|
"learning_rate": 6.563030329755969e-07, |
|
"loss": 1.0446, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 0.9760765550239234, |
|
"grad_norm": 0.9459482431411743, |
|
"learning_rate": 5.094250018023715e-07, |
|
"loss": 1.0921, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.9792663476874003, |
|
"grad_norm": 1.0283273458480835, |
|
"learning_rate": 3.8109982986300747e-07, |
|
"loss": 1.0788, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 0.9824561403508771, |
|
"grad_norm": 0.7605658769607544, |
|
"learning_rate": 2.7134344323747616e-07, |
|
"loss": 1.0686, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.9856459330143541, |
|
"grad_norm": 0.7354826331138611, |
|
"learning_rate": 1.801694634854578e-07, |
|
"loss": 1.0946, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 0.988835725677831, |
|
"grad_norm": 1.082857608795166, |
|
"learning_rate": 1.075892059558603e-07, |
|
"loss": 1.0849, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.9920255183413078, |
|
"grad_norm": 0.8959941864013672, |
|
"learning_rate": 5.3611678382442516e-08, |
|
"loss": 1.0725, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 0.9952153110047847, |
|
"grad_norm": 0.8628972172737122, |
|
"learning_rate": 1.824357976594193e-08, |
|
"loss": 1.0438, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.9984051036682615, |
|
"grad_norm": 0.8962126970291138, |
|
"learning_rate": 1.4892995426396548e-09, |
|
"loss": 1.0731, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 0.9996810207336523, |
|
"eval_loss": 2.480717420578003, |
|
"eval_runtime": 1.759, |
|
"eval_samples_per_second": 5.685, |
|
"eval_steps_per_second": 1.705, |
|
"step": 1567 |
|
}, |
|
{ |
|
"epoch": 0.9996810207336523, |
|
"step": 1567, |
|
"total_flos": 5.97257971054936e+17, |
|
"train_loss": 2.314191315838997, |
|
"train_runtime": 7526.9596, |
|
"train_samples_per_second": 1.666, |
|
"train_steps_per_second": 0.208 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 1567, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 5.97257971054936e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |