{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9955780164245104,
  "eval_steps": 50,
  "global_step": 197,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00505369551484523,
      "grad_norm": 5.525087356567383,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 11.0327,
      "step": 1
    },
    {
      "epoch": 0.00505369551484523,
      "eval_loss": 10.8840913772583,
      "eval_runtime": 8.456,
      "eval_samples_per_second": 39.499,
      "eval_steps_per_second": 9.934,
      "step": 1
    },
    {
      "epoch": 0.01010739102969046,
      "grad_norm": 5.266482830047607,
      "learning_rate": 4.000000000000001e-06,
      "loss": 10.9714,
      "step": 2
    },
    {
      "epoch": 0.015161086544535692,
      "grad_norm": 4.918356895446777,
      "learning_rate": 6e-06,
      "loss": 10.9057,
      "step": 3
    },
    {
      "epoch": 0.02021478205938092,
      "grad_norm": 4.777329921722412,
      "learning_rate": 8.000000000000001e-06,
      "loss": 10.939,
      "step": 4
    },
    {
      "epoch": 0.025268477574226154,
      "grad_norm": 4.73073148727417,
      "learning_rate": 1e-05,
      "loss": 11.0369,
      "step": 5
    },
    {
      "epoch": 0.030322173089071383,
      "grad_norm": 4.6360931396484375,
      "learning_rate": 1.2e-05,
      "loss": 10.8651,
      "step": 6
    },
    {
      "epoch": 0.035375868603916616,
      "grad_norm": 4.678157329559326,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 10.8567,
      "step": 7
    },
    {
      "epoch": 0.04042956411876184,
      "grad_norm": 4.721307754516602,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 10.8247,
      "step": 8
    },
    {
      "epoch": 0.045483259633607075,
      "grad_norm": 4.473278045654297,
      "learning_rate": 1.8e-05,
      "loss": 10.7567,
      "step": 9
    },
    {
      "epoch": 0.05053695514845231,
      "grad_norm": 4.979541301727295,
      "learning_rate": 2e-05,
      "loss": 10.8797,
      "step": 10
    },
    {
      "epoch": 0.05559065066329753,
      "grad_norm": 4.539080619812012,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 10.8282,
      "step": 11
    },
    {
      "epoch": 0.060644346178142766,
      "grad_norm": 4.77044153213501,
      "learning_rate": 2.4e-05,
      "loss": 10.9363,
      "step": 12
    },
    {
      "epoch": 0.065698041692988,
      "grad_norm": 4.023128509521484,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 10.8104,
      "step": 13
    },
    {
      "epoch": 0.07075173720783323,
      "grad_norm": 4.127583026885986,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 10.772,
      "step": 14
    },
    {
      "epoch": 0.07580543272267846,
      "grad_norm": 4.209392070770264,
      "learning_rate": 3e-05,
      "loss": 10.819,
      "step": 15
    },
    {
      "epoch": 0.08085912823752368,
      "grad_norm": 4.107909202575684,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 10.8561,
      "step": 16
    },
    {
      "epoch": 0.08591282375236892,
      "grad_norm": 3.9726462364196777,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 10.702,
      "step": 17
    },
    {
      "epoch": 0.09096651926721415,
      "grad_norm": 3.9486467838287354,
      "learning_rate": 3.6e-05,
      "loss": 10.86,
      "step": 18
    },
    {
      "epoch": 0.09602021478205938,
      "grad_norm": 4.086248397827148,
      "learning_rate": 3.8e-05,
      "loss": 10.6329,
      "step": 19
    },
    {
      "epoch": 0.10107391029690461,
      "grad_norm": 4.198414325714111,
      "learning_rate": 4e-05,
      "loss": 10.6256,
      "step": 20
    },
    {
      "epoch": 0.10612760581174985,
      "grad_norm": 4.264832496643066,
      "learning_rate": 4.2e-05,
      "loss": 10.668,
      "step": 21
    },
    {
      "epoch": 0.11118130132659507,
      "grad_norm": 4.420608997344971,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 10.703,
      "step": 22
    },
    {
      "epoch": 0.1162349968414403,
      "grad_norm": 4.989047050476074,
      "learning_rate": 4.600000000000001e-05,
      "loss": 10.7338,
      "step": 23
    },
    {
      "epoch": 0.12128869235628553,
      "grad_norm": 6.062208652496338,
      "learning_rate": 4.8e-05,
      "loss": 10.8503,
      "step": 24
    },
    {
      "epoch": 0.12634238787113075,
      "grad_norm": 7.2560038566589355,
      "learning_rate": 5e-05,
      "loss": 10.6961,
      "step": 25
    },
    {
      "epoch": 0.131396083385976,
      "grad_norm": 5.42380428314209,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 10.654,
      "step": 26
    },
    {
      "epoch": 0.13644977890082122,
      "grad_norm": 4.605566501617432,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 10.6335,
      "step": 27
    },
    {
      "epoch": 0.14150347441566646,
      "grad_norm": 4.439408779144287,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 10.5825,
      "step": 28
    },
    {
      "epoch": 0.14655716993051168,
      "grad_norm": 4.211907386779785,
      "learning_rate": 5.8e-05,
      "loss": 10.5156,
      "step": 29
    },
    {
      "epoch": 0.15161086544535693,
      "grad_norm": 3.8451991081237793,
      "learning_rate": 6e-05,
      "loss": 10.3591,
      "step": 30
    },
    {
      "epoch": 0.15666456096020215,
      "grad_norm": 3.6254827976226807,
      "learning_rate": 6.2e-05,
      "loss": 10.5195,
      "step": 31
    },
    {
      "epoch": 0.16171825647504737,
      "grad_norm": 3.743040084838867,
      "learning_rate": 6.400000000000001e-05,
      "loss": 10.4644,
      "step": 32
    },
    {
      "epoch": 0.16677195198989261,
      "grad_norm": 3.687465190887451,
      "learning_rate": 6.6e-05,
      "loss": 10.2991,
      "step": 33
    },
    {
      "epoch": 0.17182564750473783,
      "grad_norm": 3.4375765323638916,
      "learning_rate": 6.800000000000001e-05,
      "loss": 10.3176,
      "step": 34
    },
    {
      "epoch": 0.17687934301958308,
      "grad_norm": 3.485527753829956,
      "learning_rate": 7e-05,
      "loss": 10.2831,
      "step": 35
    },
    {
      "epoch": 0.1819330385344283,
      "grad_norm": 3.4168219566345215,
      "learning_rate": 7.2e-05,
      "loss": 10.1887,
      "step": 36
    },
    {
      "epoch": 0.18698673404927352,
      "grad_norm": 2.9851391315460205,
      "learning_rate": 7.4e-05,
      "loss": 10.2611,
      "step": 37
    },
    {
      "epoch": 0.19204042956411876,
      "grad_norm": 3.173752784729004,
      "learning_rate": 7.6e-05,
      "loss": 10.0005,
      "step": 38
    },
    {
      "epoch": 0.19709412507896398,
      "grad_norm": 3.038637161254883,
      "learning_rate": 7.800000000000001e-05,
      "loss": 10.1846,
      "step": 39
    },
    {
      "epoch": 0.20214782059380923,
      "grad_norm": 3.0100479125976562,
      "learning_rate": 8e-05,
      "loss": 10.121,
      "step": 40
    },
    {
      "epoch": 0.20720151610865445,
      "grad_norm": 3.1173861026763916,
      "learning_rate": 8.2e-05,
      "loss": 10.055,
      "step": 41
    },
    {
      "epoch": 0.2122552116234997,
      "grad_norm": 2.935340166091919,
      "learning_rate": 8.4e-05,
      "loss": 9.9977,
      "step": 42
    },
    {
      "epoch": 0.21730890713834491,
      "grad_norm": 2.8017685413360596,
      "learning_rate": 8.6e-05,
      "loss": 9.9614,
      "step": 43
    },
    {
      "epoch": 0.22236260265319013,
      "grad_norm": 2.6707160472869873,
      "learning_rate": 8.800000000000001e-05,
      "loss": 10.0095,
      "step": 44
    },
    {
      "epoch": 0.22741629816803538,
      "grad_norm": 2.65998911857605,
      "learning_rate": 9e-05,
      "loss": 9.9824,
      "step": 45
    },
    {
      "epoch": 0.2324699936828806,
      "grad_norm": 2.622680902481079,
      "learning_rate": 9.200000000000001e-05,
      "loss": 9.8679,
      "step": 46
    },
    {
      "epoch": 0.23752368919772585,
      "grad_norm": 2.7016119956970215,
      "learning_rate": 9.4e-05,
      "loss": 9.8601,
      "step": 47
    },
    {
      "epoch": 0.24257738471257106,
      "grad_norm": 2.5022776126861572,
      "learning_rate": 9.6e-05,
      "loss": 10.012,
      "step": 48
    },
    {
      "epoch": 0.2476310802274163,
      "grad_norm": 2.6630473136901855,
      "learning_rate": 9.8e-05,
      "loss": 9.919,
      "step": 49
    },
    {
      "epoch": 0.2526847757422615,
      "grad_norm": 3.5715386867523193,
      "learning_rate": 0.0001,
      "loss": 9.9513,
      "step": 50
    },
    {
      "epoch": 0.2526847757422615,
      "eval_loss": 9.72451114654541,
      "eval_runtime": 8.5338,
      "eval_samples_per_second": 39.139,
      "eval_steps_per_second": 9.843,
      "step": 50
    },
    {
      "epoch": 0.25773847125710675,
      "grad_norm": 2.438196897506714,
      "learning_rate": 9.99885820390154e-05,
      "loss": 9.9308,
      "step": 51
    },
    {
      "epoch": 0.262792166771952,
      "grad_norm": 2.2800676822662354,
      "learning_rate": 9.995433337085491e-05,
      "loss": 9.7762,
      "step": 52
    },
    {
      "epoch": 0.26784586228679724,
      "grad_norm": 2.2999722957611084,
      "learning_rate": 9.989726963751682e-05,
      "loss": 9.6314,
      "step": 53
    },
    {
      "epoch": 0.27289955780164243,
      "grad_norm": 2.0864381790161133,
      "learning_rate": 9.981741690106034e-05,
      "loss": 9.5644,
      "step": 54
    },
    {
      "epoch": 0.2779532533164877,
      "grad_norm": 1.9628472328186035,
      "learning_rate": 9.971481163170268e-05,
      "loss": 9.6852,
      "step": 55
    },
    {
      "epoch": 0.2830069488313329,
      "grad_norm": 2.0100128650665283,
      "learning_rate": 9.95895006911623e-05,
      "loss": 9.532,
      "step": 56
    },
    {
      "epoch": 0.2880606443461781,
      "grad_norm": 1.903132438659668,
      "learning_rate": 9.944154131125642e-05,
      "loss": 9.5968,
      "step": 57
    },
    {
      "epoch": 0.29311433986102337,
      "grad_norm": 1.8894416093826294,
      "learning_rate": 9.927100106776212e-05,
      "loss": 9.45,
      "step": 58
    },
    {
      "epoch": 0.2981680353758686,
      "grad_norm": 1.8624144792556763,
      "learning_rate": 9.907795784955327e-05,
      "loss": 9.462,
      "step": 59
    },
    {
      "epoch": 0.30322173089071386,
      "grad_norm": 1.88225519657135,
      "learning_rate": 9.88624998230272e-05,
      "loss": 9.3977,
      "step": 60
    },
    {
      "epoch": 0.30827542640555905,
      "grad_norm": 1.9469467401504517,
      "learning_rate": 9.862472539183756e-05,
      "loss": 9.2517,
      "step": 61
    },
    {
      "epoch": 0.3133291219204043,
      "grad_norm": 1.6683619022369385,
      "learning_rate": 9.836474315195147e-05,
      "loss": 9.4305,
      "step": 62
    },
    {
      "epoch": 0.31838281743524954,
      "grad_norm": 1.663127064704895,
      "learning_rate": 9.808267184205183e-05,
      "loss": 9.2934,
      "step": 63
    },
    {
      "epoch": 0.32343651295009473,
      "grad_norm": 1.6440531015396118,
      "learning_rate": 9.777864028930705e-05,
      "loss": 9.3013,
      "step": 64
    },
    {
      "epoch": 0.32849020846494,
      "grad_norm": 1.6826865673065186,
      "learning_rate": 9.745278735053343e-05,
      "loss": 9.2594,
      "step": 65
    },
    {
      "epoch": 0.33354390397978523,
      "grad_norm": 1.596401572227478,
      "learning_rate": 9.710526184877667e-05,
      "loss": 9.2143,
      "step": 66
    },
    {
      "epoch": 0.3385975994946305,
      "grad_norm": 1.764570951461792,
      "learning_rate": 9.673622250534156e-05,
      "loss": 9.0152,
      "step": 67
    },
    {
      "epoch": 0.34365129500947567,
      "grad_norm": 1.5638481378555298,
      "learning_rate": 9.63458378673011e-05,
      "loss": 9.1172,
      "step": 68
    },
    {
      "epoch": 0.3487049905243209,
      "grad_norm": 1.5338134765625,
      "learning_rate": 9.593428623051792e-05,
      "loss": 9.0363,
      "step": 69
    },
    {
      "epoch": 0.35375868603916616,
      "grad_norm": 1.6992957592010498,
      "learning_rate": 9.550175555821333e-05,
      "loss": 9.0962,
      "step": 70
    },
    {
      "epoch": 0.35881238155401135,
      "grad_norm": 1.6055617332458496,
      "learning_rate": 9.504844339512095e-05,
      "loss": 9.1217,
      "step": 71
    },
    {
      "epoch": 0.3638660770688566,
      "grad_norm": 1.7240350246429443,
      "learning_rate": 9.457455677726448e-05,
      "loss": 9.2245,
      "step": 72
    },
    {
      "epoch": 0.36891977258370184,
      "grad_norm": 1.9936858415603638,
      "learning_rate": 9.408031213740045e-05,
      "loss": 9.3585,
      "step": 73
    },
    {
      "epoch": 0.37397346809854703,
      "grad_norm": 2.1292333602905273,
      "learning_rate": 9.356593520616948e-05,
      "loss": 9.4107,
      "step": 74
    },
    {
      "epoch": 0.3790271636133923,
      "grad_norm": 3.4360055923461914,
      "learning_rate": 9.303166090900082e-05,
      "loss": 9.1299,
      "step": 75
    },
    {
      "epoch": 0.38408085912823753,
      "grad_norm": 1.3017970323562622,
      "learning_rate": 9.24777332588177e-05,
      "loss": 9.224,
      "step": 76
    },
    {
      "epoch": 0.3891345546430828,
      "grad_norm": 1.1683531999588013,
      "learning_rate": 9.190440524459203e-05,
      "loss": 9.2729,
      "step": 77
    },
    {
      "epoch": 0.39418825015792797,
      "grad_norm": 1.235701084136963,
      "learning_rate": 9.131193871579975e-05,
      "loss": 9.0347,
      "step": 78
    },
    {
      "epoch": 0.3992419456727732,
      "grad_norm": 1.1109133958816528,
      "learning_rate": 9.070060426282925e-05,
      "loss": 9.0375,
      "step": 79
    },
    {
      "epoch": 0.40429564118761846,
      "grad_norm": 1.2668910026550293,
      "learning_rate": 9.007068109339784e-05,
      "loss": 8.9948,
      "step": 80
    },
    {
      "epoch": 0.40934933670246365,
      "grad_norm": 1.3104698657989502,
      "learning_rate": 8.942245690503239e-05,
      "loss": 8.9623,
      "step": 81
    },
    {
      "epoch": 0.4144030322173089,
      "grad_norm": 1.198638916015625,
      "learning_rate": 8.87562277536726e-05,
      "loss": 9.0571,
      "step": 82
    },
    {
      "epoch": 0.41945672773215414,
      "grad_norm": 1.2889971733093262,
      "learning_rate": 8.807229791845673e-05,
      "loss": 8.9898,
      "step": 83
    },
    {
      "epoch": 0.4245104232469994,
      "grad_norm": 1.2756685018539429,
      "learning_rate": 8.737097976275178e-05,
      "loss": 8.896,
      "step": 84
    },
    {
      "epoch": 0.4295641187618446,
      "grad_norm": 1.1489578485488892,
      "learning_rate": 8.665259359149132e-05,
      "loss": 9.067,
      "step": 85
    },
    {
      "epoch": 0.43461781427668983,
      "grad_norm": 1.1491981744766235,
      "learning_rate": 8.591746750488639e-05,
      "loss": 8.8872,
      "step": 86
    },
    {
      "epoch": 0.4396715097915351,
      "grad_norm": 1.072772741317749,
      "learning_rate": 8.516593724857598e-05,
      "loss": 8.887,
      "step": 87
    },
    {
      "epoch": 0.44472520530638027,
      "grad_norm": 1.0014466047286987,
      "learning_rate": 8.439834606028594e-05,
      "loss": 8.8939,
      "step": 88
    },
    {
      "epoch": 0.4497789008212255,
      "grad_norm": 1.1188455820083618,
      "learning_rate": 8.361504451306585e-05,
      "loss": 8.7898,
      "step": 89
    },
    {
      "epoch": 0.45483259633607076,
      "grad_norm": 1.039220929145813,
      "learning_rate": 8.28163903551759e-05,
      "loss": 8.8616,
      "step": 90
    },
    {
      "epoch": 0.459886291850916,
      "grad_norm": 1.1389069557189941,
      "learning_rate": 8.200274834669675e-05,
      "loss": 8.7895,
      "step": 91
    },
    {
      "epoch": 0.4649399873657612,
      "grad_norm": 1.1462750434875488,
      "learning_rate": 8.117449009293668e-05,
      "loss": 8.8468,
      "step": 92
    },
    {
      "epoch": 0.46999368288060644,
      "grad_norm": 1.1461669206619263,
      "learning_rate": 8.033199387471277e-05,
      "loss": 8.7652,
      "step": 93
    },
    {
      "epoch": 0.4750473783954517,
      "grad_norm": 1.254766583442688,
      "learning_rate": 7.9475644475583e-05,
      "loss": 8.612,
      "step": 94
    },
    {
      "epoch": 0.4801010739102969,
      "grad_norm": 1.3156293630599976,
      "learning_rate": 7.860583300610849e-05,
      "loss": 8.7556,
      "step": 95
    },
    {
      "epoch": 0.48515476942514213,
      "grad_norm": 1.2289044857025146,
      "learning_rate": 7.772295672522615e-05,
      "loss": 8.8208,
      "step": 96
    },
    {
      "epoch": 0.4902084649399874,
      "grad_norm": 1.7439113855361938,
      "learning_rate": 7.682741885881315e-05,
      "loss": 9.0702,
      "step": 97
    },
    {
      "epoch": 0.4952621604548326,
      "grad_norm": 1.3451626300811768,
      "learning_rate": 7.591962841552627e-05,
      "loss": 8.8565,
      "step": 98
    },
    {
      "epoch": 0.5003158559696779,
      "grad_norm": 1.7298027276992798,
      "learning_rate": 7.500000000000001e-05,
      "loss": 9.0691,
      "step": 99
    },
    {
      "epoch": 0.505369551484523,
      "grad_norm": 2.368654727935791,
      "learning_rate": 7.406895362348916e-05,
      "loss": 8.8959,
      "step": 100
    },
    {
      "epoch": 0.505369551484523,
      "eval_loss": 8.748159408569336,
      "eval_runtime": 8.2562,
      "eval_samples_per_second": 40.455,
      "eval_steps_per_second": 10.174,
      "step": 100
    },
    {
      "epoch": 0.5104232469993683,
      "grad_norm": 1.6569247245788574,
      "learning_rate": 7.312691451204178e-05,
      "loss": 8.9028,
      "step": 101
    },
    {
      "epoch": 0.5154769425142135,
      "grad_norm": 1.122266173362732,
      "learning_rate": 7.217431291229067e-05,
      "loss": 8.8732,
      "step": 102
    },
    {
      "epoch": 0.5205306380290587,
      "grad_norm": 0.9974745512008667,
      "learning_rate": 7.121158389495186e-05,
      "loss": 8.9486,
      "step": 103
    },
    {
      "epoch": 0.525584333543904,
      "grad_norm": 1.1596142053604126,
      "learning_rate": 7.023916715611969e-05,
      "loss": 8.7343,
      "step": 104
    },
    {
      "epoch": 0.5306380290587492,
      "grad_norm": 1.1720257997512817,
      "learning_rate": 6.925750681644953e-05,
      "loss": 8.7557,
      "step": 105
    },
    {
      "epoch": 0.5356917245735945,
      "grad_norm": 1.1216539144515991,
      "learning_rate": 6.826705121831976e-05,
      "loss": 8.7399,
      "step": 106
    },
    {
      "epoch": 0.5407454200884396,
      "grad_norm": 1.0456095933914185,
      "learning_rate": 6.726825272106538e-05,
      "loss": 8.8259,
      "step": 107
    },
    {
      "epoch": 0.5457991156032849,
      "grad_norm": 1.1412315368652344,
      "learning_rate": 6.626156749437736e-05,
      "loss": 8.7779,
      "step": 108
    },
    {
      "epoch": 0.5508528111181301,
      "grad_norm": 1.144679069519043,
      "learning_rate": 6.524745530996137e-05,
      "loss": 8.7377,
      "step": 109
    },
    {
      "epoch": 0.5559065066329754,
      "grad_norm": 1.1518090963363647,
      "learning_rate": 6.422637933155162e-05,
      "loss": 8.5458,
      "step": 110
    },
    {
      "epoch": 0.5609602021478206,
      "grad_norm": 1.0327682495117188,
      "learning_rate": 6.319880590337549e-05,
      "loss": 8.7034,
      "step": 111
    },
    {
      "epoch": 0.5660138976626659,
      "grad_norm": 0.9186742901802063,
      "learning_rate": 6.216520433716545e-05,
      "loss": 8.7843,
      "step": 112
    },
    {
      "epoch": 0.5710675931775111,
      "grad_norm": 0.9329281449317932,
      "learning_rate": 6.112604669781572e-05,
      "loss": 8.7471,
      "step": 113
    },
    {
      "epoch": 0.5761212886923562,
      "grad_norm": 1.0598094463348389,
      "learning_rate": 6.008180758778167e-05,
      "loss": 8.6698,
      "step": 114
    },
    {
      "epoch": 0.5811749842072015,
      "grad_norm": 1.1156866550445557,
      "learning_rate": 5.903296393031995e-05,
      "loss": 8.6572,
      "step": 115
    },
    {
      "epoch": 0.5862286797220467,
      "grad_norm": 1.0410085916519165,
      "learning_rate": 5.7979994751668964e-05,
      "loss": 8.505,
      "step": 116
    },
    {
      "epoch": 0.591282375236892,
      "grad_norm": 1.126705288887024,
      "learning_rate": 5.69233809622687e-05,
      "loss": 8.7812,
      "step": 117
    },
    {
      "epoch": 0.5963360707517372,
      "grad_norm": 1.1464685201644897,
      "learning_rate": 5.58636051371201e-05,
      "loss": 8.5813,
      "step": 118
    },
    {
      "epoch": 0.6013897662665825,
      "grad_norm": 1.146330714225769,
      "learning_rate": 5.480115129538409e-05,
      "loss": 8.4167,
      "step": 119
    },
    {
      "epoch": 0.6064434617814277,
      "grad_norm": 1.079012393951416,
      "learning_rate": 5.373650467932122e-05,
      "loss": 8.5148,
      "step": 120
    },
    {
      "epoch": 0.6114971572962729,
      "grad_norm": 1.281872272491455,
      "learning_rate": 5.267015153267245e-05,
      "loss": 8.4182,
      "step": 121
    },
    {
      "epoch": 0.6165508528111181,
      "grad_norm": 1.1823097467422485,
      "learning_rate": 5.1602578878582776e-05,
      "loss": 8.6779,
      "step": 122
    },
    {
      "epoch": 0.6216045483259633,
      "grad_norm": 1.620906114578247,
      "learning_rate": 5.053427429716867e-05,
      "loss": 8.8053,
      "step": 123
    },
    {
      "epoch": 0.6266582438408086,
      "grad_norm": 1.704972505569458,
      "learning_rate": 4.9465725702831346e-05,
      "loss": 8.9209,
      "step": 124
    },
    {
      "epoch": 0.6317119393556538,
      "grad_norm": 2.0735604763031006,
      "learning_rate": 4.839742112141724e-05,
      "loss": 8.7427,
      "step": 125
    },
    {
      "epoch": 0.6367656348704991,
      "grad_norm": 1.3401007652282715,
      "learning_rate": 4.732984846732755e-05,
      "loss": 8.7623,
      "step": 126
    },
    {
      "epoch": 0.6418193303853443,
      "grad_norm": 1.1091989278793335,
      "learning_rate": 4.626349532067879e-05,
      "loss": 8.7094,
      "step": 127
    },
    {
      "epoch": 0.6468730259001895,
      "grad_norm": 1.1784418821334839,
      "learning_rate": 4.5198848704615914e-05,
      "loss": 8.5668,
      "step": 128
    },
    {
      "epoch": 0.6519267214150347,
      "grad_norm": 1.323731541633606,
      "learning_rate": 4.4136394862879914e-05,
      "loss": 8.4429,
      "step": 129
    },
    {
      "epoch": 0.65698041692988,
      "grad_norm": 0.8502984642982483,
      "learning_rate": 4.307661903773129e-05,
      "loss": 8.7092,
      "step": 130
    },
    {
      "epoch": 0.6620341124447252,
      "grad_norm": 1.1920008659362793,
      "learning_rate": 4.2020005248331054e-05,
      "loss": 8.4987,
      "step": 131
    },
    {
      "epoch": 0.6670878079595705,
      "grad_norm": 1.0251978635787964,
      "learning_rate": 4.096703606968006e-05,
      "loss": 8.6157,
      "step": 132
    },
    {
      "epoch": 0.6721415034744157,
      "grad_norm": 1.0642647743225098,
      "learning_rate": 3.991819241221835e-05,
      "loss": 8.4535,
      "step": 133
    },
    {
      "epoch": 0.677195198989261,
      "grad_norm": 1.3170267343521118,
      "learning_rate": 3.887395330218429e-05,
      "loss": 8.4171,
      "step": 134
    },
    {
      "epoch": 0.6822488945041061,
      "grad_norm": 1.0994036197662354,
      "learning_rate": 3.783479566283457e-05,
      "loss": 8.6795,
      "step": 135
    },
    {
      "epoch": 0.6873025900189513,
      "grad_norm": 1.2040021419525146,
      "learning_rate": 3.680119409662452e-05,
      "loss": 8.5518,
      "step": 136
    },
    {
      "epoch": 0.6923562855337966,
      "grad_norm": 1.1834477186203003,
      "learning_rate": 3.5773620668448384e-05,
      "loss": 8.6813,
      "step": 137
    },
    {
      "epoch": 0.6974099810486418,
      "grad_norm": 0.9281287789344788,
      "learning_rate": 3.4752544690038647e-05,
      "loss": 8.535,
      "step": 138
    },
    {
      "epoch": 0.7024636765634871,
      "grad_norm": 1.0375516414642334,
      "learning_rate": 3.373843250562265e-05,
      "loss": 8.5263,
      "step": 139
    },
    {
      "epoch": 0.7075173720783323,
      "grad_norm": 0.9735792279243469,
      "learning_rate": 3.273174727893463e-05,
      "loss": 8.4796,
      "step": 140
    },
    {
      "epoch": 0.7125710675931776,
      "grad_norm": 1.0425305366516113,
      "learning_rate": 3.173294878168025e-05,
      "loss": 8.4304,
      "step": 141
    },
    {
      "epoch": 0.7176247631080227,
      "grad_norm": 1.076398491859436,
      "learning_rate": 3.074249318355046e-05,
      "loss": 8.4417,
      "step": 142
    },
    {
      "epoch": 0.722678458622868,
      "grad_norm": 1.0114437341690063,
      "learning_rate": 2.976083284388031e-05,
      "loss": 8.3947,
      "step": 143
    },
    {
      "epoch": 0.7277321541377132,
      "grad_norm": 1.0746240615844727,
      "learning_rate": 2.8788416105048122e-05,
      "loss": 8.3048,
      "step": 144
    },
    {
      "epoch": 0.7327858496525584,
      "grad_norm": 1.0370659828186035,
      "learning_rate": 2.7825687087709328e-05,
      "loss": 8.3032,
      "step": 145
    },
    {
      "epoch": 0.7378395451674037,
      "grad_norm": 1.1819736957550049,
      "learning_rate": 2.687308548795825e-05,
      "loss": 8.3238,
      "step": 146
    },
    {
      "epoch": 0.7428932406822489,
      "grad_norm": 1.3127453327178955,
      "learning_rate": 2.5931046376510877e-05,
      "loss": 8.5473,
      "step": 147
    },
    {
      "epoch": 0.7479469361970941,
      "grad_norm": 1.3957887887954712,
      "learning_rate": 2.500000000000001e-05,
      "loss": 8.4965,
      "step": 148
    },
    {
      "epoch": 0.7530006317119393,
      "grad_norm": 1.6841113567352295,
      "learning_rate": 2.4080371584473748e-05,
      "loss": 8.5345,
      "step": 149
    },
    {
      "epoch": 0.7580543272267846,
      "grad_norm": 2.3189311027526855,
      "learning_rate": 2.317258114118686e-05,
      "loss": 8.6134,
      "step": 150
    },
    {
      "epoch": 0.7580543272267846,
      "eval_loss": 8.470457077026367,
      "eval_runtime": 8.309,
      "eval_samples_per_second": 40.198,
      "eval_steps_per_second": 10.11,
      "step": 150
    },
    {
      "epoch": 0.7631080227416298,
      "grad_norm": 1.57240629196167,
      "learning_rate": 2.2277043274773857e-05,
      "loss": 8.6231,
      "step": 151
    },
    {
      "epoch": 0.7681617182564751,
      "grad_norm": 1.579606294631958,
      "learning_rate": 2.139416699389153e-05,
      "loss": 8.7382,
      "step": 152
    },
    {
      "epoch": 0.7732154137713203,
      "grad_norm": 1.3761146068572998,
      "learning_rate": 2.0524355524417017e-05,
      "loss": 8.6744,
      "step": 153
    },
    {
      "epoch": 0.7782691092861656,
      "grad_norm": 1.1861869096755981,
      "learning_rate": 1.966800612528723e-05,
      "loss": 8.5027,
      "step": 154
    },
    {
      "epoch": 0.7833228048010107,
      "grad_norm": 1.0866706371307373,
      "learning_rate": 1.8825509907063327e-05,
      "loss": 8.4262,
      "step": 155
    },
    {
      "epoch": 0.7883765003158559,
      "grad_norm": 1.204898476600647,
      "learning_rate": 1.7997251653303248e-05,
      "loss": 8.3984,
      "step": 156
    },
    {
      "epoch": 0.7934301958307012,
      "grad_norm": 1.0119705200195312,
      "learning_rate": 1.7183609644824096e-05,
      "loss": 8.56,
      "step": 157
    },
    {
      "epoch": 0.7984838913455464,
      "grad_norm": 0.953957200050354,
      "learning_rate": 1.6384955486934156e-05,
      "loss": 8.5253,
      "step": 158
    },
    {
      "epoch": 0.8035375868603917,
      "grad_norm": 1.0408637523651123,
      "learning_rate": 1.5601653939714074e-05,
      "loss": 8.3057,
      "step": 159
    },
    {
      "epoch": 0.8085912823752369,
      "grad_norm": 0.88932204246521,
      "learning_rate": 1.4834062751424015e-05,
      "loss": 8.4848,
      "step": 160
    },
    {
      "epoch": 0.8136449778900822,
      "grad_norm": 0.8721324801445007,
      "learning_rate": 1.4082532495113626e-05,
      "loss": 8.5545,
      "step": 161
    },
    {
      "epoch": 0.8186986734049273,
      "grad_norm": 0.8064855933189392,
      "learning_rate": 1.3347406408508695e-05,
      "loss": 8.4817,
      "step": 162
    },
    {
      "epoch": 0.8237523689197725,
      "grad_norm": 0.9848885536193848,
      "learning_rate": 1.262902023724824e-05,
      "loss": 8.5021,
      "step": 163
    },
    {
      "epoch": 0.8288060644346178,
      "grad_norm": 1.0922009944915771,
      "learning_rate": 1.1927702081543279e-05,
      "loss": 8.1272,
      "step": 164
    },
    {
      "epoch": 0.833859759949463,
      "grad_norm": 0.9150477051734924,
      "learning_rate": 1.1243772246327416e-05,
      "loss": 8.3211,
      "step": 165
    },
    {
      "epoch": 0.8389134554643083,
      "grad_norm": 1.0169014930725098,
      "learning_rate": 1.0577543094967612e-05,
      "loss": 8.3955,
      "step": 166
    },
    {
      "epoch": 0.8439671509791535,
      "grad_norm": 1.039054036140442,
      "learning_rate": 9.929318906602175e-06,
      "loss": 8.3066,
      "step": 167
    },
    {
      "epoch": 0.8490208464939988,
      "grad_norm": 1.2704006433486938,
      "learning_rate": 9.299395737170757e-06,
      "loss": 8.1375,
      "step": 168
    },
    {
      "epoch": 0.8540745420088439,
      "grad_norm": 1.1755458116531372,
      "learning_rate": 8.688061284200266e-06,
      "loss": 8.1842,
      "step": 169
    },
    {
      "epoch": 0.8591282375236892,
      "grad_norm": 1.1061482429504395,
      "learning_rate": 8.09559475540797e-06,
      "loss": 8.4689,
      "step": 170
    },
    {
      "epoch": 0.8641819330385344,
      "grad_norm": 1.089499831199646,
      "learning_rate": 7.522266741182305e-06,
      "loss": 8.2855,
      "step": 171
    },
    {
      "epoch": 0.8692356285533797,
      "grad_norm": 1.1633696556091309,
      "learning_rate": 6.968339090999187e-06,
      "loss": 8.4485,
      "step": 172
    },
    {
      "epoch": 0.8742893240682249,
      "grad_norm": 1.172793984413147,
      "learning_rate": 6.43406479383053e-06,
      "loss": 8.4008,
      "step": 173
    },
    {
      "epoch": 0.8793430195830702,
      "grad_norm": 1.4023175239562988,
      "learning_rate": 5.919687862599549e-06,
      "loss": 8.603,
      "step": 174
    },
    {
      "epoch": 0.8843967150979154,
      "grad_norm": 2.343562602996826,
      "learning_rate": 5.425443222735527e-06,
      "loss": 8.4168,
      "step": 175
    },
    {
      "epoch": 0.8894504106127605,
      "grad_norm": 1.2631791830062866,
      "learning_rate": 4.951556604879048e-06,
      "loss": 8.6435,
      "step": 176
    },
    {
      "epoch": 0.8945041061276058,
      "grad_norm": 1.2387241125106812,
      "learning_rate": 4.498244441786675e-06,
      "loss": 8.5938,
      "step": 177
    },
    {
      "epoch": 0.899557801642451,
      "grad_norm": 0.9759954214096069,
      "learning_rate": 4.065713769482082e-06,
      "loss": 8.7213,
      "step": 178
    },
    {
      "epoch": 0.9046114971572963,
      "grad_norm": 0.8986900448799133,
      "learning_rate": 3.654162132698918e-06,
      "loss": 8.5998,
      "step": 179
    },
    {
      "epoch": 0.9096651926721415,
      "grad_norm": 0.8717695474624634,
      "learning_rate": 3.2637774946584486e-06,
      "loss": 8.6061,
      "step": 180
    },
    {
      "epoch": 0.9147188881869868,
      "grad_norm": 0.9505631327629089,
      "learning_rate": 2.894738151223331e-06,
      "loss": 8.5173,
      "step": 181
    },
    {
      "epoch": 0.919772583701832,
      "grad_norm": 0.8661491870880127,
      "learning_rate": 2.547212649466568e-06,
      "loss": 8.5595,
      "step": 182
    },
    {
      "epoch": 0.9248262792166771,
      "grad_norm": 0.8975613117218018,
      "learning_rate": 2.221359710692961e-06,
      "loss": 8.483,
      "step": 183
    },
    {
      "epoch": 0.9298799747315224,
      "grad_norm": 0.8268275856971741,
      "learning_rate": 1.9173281579481892e-06,
      "loss": 8.4253,
      "step": 184
    },
    {
      "epoch": 0.9349336702463676,
      "grad_norm": 0.9205915927886963,
      "learning_rate": 1.6352568480485276e-06,
      "loss": 8.5825,
      "step": 185
    },
    {
      "epoch": 0.9399873657612129,
      "grad_norm": 0.8719313740730286,
      "learning_rate": 1.3752746081624467e-06,
      "loss": 8.3617,
      "step": 186
    },
    {
      "epoch": 0.9450410612760581,
      "grad_norm": 0.9109625816345215,
      "learning_rate": 1.1375001769727999e-06,
      "loss": 8.3006,
      "step": 187
    },
    {
      "epoch": 0.9500947567909034,
      "grad_norm": 0.8622517585754395,
      "learning_rate": 9.220421504467281e-07,
      "loss": 8.4956,
      "step": 188
    },
    {
      "epoch": 0.9551484523057486,
      "grad_norm": 0.9728400707244873,
      "learning_rate": 7.289989322378732e-07,
      "loss": 8.4565,
      "step": 189
    },
    {
      "epoch": 0.9602021478205938,
      "grad_norm": 0.9318665862083435,
      "learning_rate": 5.584586887435739e-07,
      "loss": 8.3316,
      "step": 190
    },
    {
      "epoch": 0.965255843335439,
      "grad_norm": 0.9625034332275391,
      "learning_rate": 4.104993088376974e-07,
      "loss": 8.3455,
      "step": 191
    },
    {
      "epoch": 0.9703095388502843,
      "grad_norm": 1.1241123676300049,
      "learning_rate": 2.851883682973233e-07,
      "loss": 8.0974,
      "step": 192
    },
    {
      "epoch": 0.9753632343651295,
      "grad_norm": 1.197749137878418,
      "learning_rate": 1.8258309893965375e-07,
      "loss": 8.19,
      "step": 193
    },
    {
      "epoch": 0.9804169298799748,
      "grad_norm": 0.9713776111602783,
      "learning_rate": 1.0273036248318324e-07,
      "loss": 8.4274,
      "step": 194
    },
    {
      "epoch": 0.98547062539482,
      "grad_norm": 1.0657098293304443,
      "learning_rate": 4.566662914508579e-08,
      "loss": 8.2398,
      "step": 195
    },
    {
      "epoch": 0.9905243209096652,
      "grad_norm": 1.2770274877548218,
      "learning_rate": 1.1417960984605458e-08,
      "loss": 8.5457,
      "step": 196
    },
    {
      "epoch": 0.9955780164245104,
      "grad_norm": 1.4860167503356934,
      "learning_rate": 0.0,
      "loss": 8.5989,
      "step": 197
    }
  ],
  "logging_steps": 1,
  "max_steps": 197,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 223101181231104.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}