{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9994632313472893,
  "eval_steps": 500,
  "global_step": 931,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010735373054213634,
      "grad_norm": 22.789785654510972,
      "learning_rate": 1.0638297872340426e-07,
      "loss": 1.3534,
      "step": 1
    },
    {
      "epoch": 0.005367686527106817,
      "grad_norm": 21.426953387742742,
      "learning_rate": 5.319148936170213e-07,
      "loss": 1.315,
      "step": 5
    },
    {
      "epoch": 0.010735373054213635,
      "grad_norm": 8.681854043380088,
      "learning_rate": 1.0638297872340427e-06,
      "loss": 1.2029,
      "step": 10
    },
    {
      "epoch": 0.01610305958132045,
      "grad_norm": 6.879600686982149,
      "learning_rate": 1.595744680851064e-06,
      "loss": 1.0391,
      "step": 15
    },
    {
      "epoch": 0.02147074610842727,
      "grad_norm": 3.0275647149692793,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 0.9127,
      "step": 20
    },
    {
      "epoch": 0.026838432635534086,
      "grad_norm": 2.4868622752512706,
      "learning_rate": 2.6595744680851065e-06,
      "loss": 0.8614,
      "step": 25
    },
    {
      "epoch": 0.0322061191626409,
      "grad_norm": 2.260349279940821,
      "learning_rate": 3.191489361702128e-06,
      "loss": 0.8364,
      "step": 30
    },
    {
      "epoch": 0.03757380568974772,
      "grad_norm": 2.3883199937027952,
      "learning_rate": 3.723404255319149e-06,
      "loss": 0.8104,
      "step": 35
    },
    {
      "epoch": 0.04294149221685454,
      "grad_norm": 2.2131867254029656,
      "learning_rate": 4.255319148936171e-06,
      "loss": 0.799,
      "step": 40
    },
    {
      "epoch": 0.04830917874396135,
      "grad_norm": 2.192168023131458,
      "learning_rate": 4.787234042553192e-06,
      "loss": 0.7889,
      "step": 45
    },
    {
      "epoch": 0.05367686527106817,
      "grad_norm": 2.3212514172557133,
      "learning_rate": 5.319148936170213e-06,
      "loss": 0.7614,
      "step": 50
    },
    {
      "epoch": 0.059044551798174985,
      "grad_norm": 2.3347499759614307,
      "learning_rate": 5.851063829787235e-06,
      "loss": 0.7482,
      "step": 55
    },
    {
      "epoch": 0.0644122383252818,
      "grad_norm": 2.285910235729559,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.7444,
      "step": 60
    },
    {
      "epoch": 0.06977992485238863,
      "grad_norm": 2.4172902264287255,
      "learning_rate": 6.914893617021278e-06,
      "loss": 0.7238,
      "step": 65
    },
    {
      "epoch": 0.07514761137949544,
      "grad_norm": 2.3034733454455596,
      "learning_rate": 7.446808510638298e-06,
      "loss": 0.7231,
      "step": 70
    },
    {
      "epoch": 0.08051529790660225,
      "grad_norm": 2.4091543127728756,
      "learning_rate": 7.97872340425532e-06,
      "loss": 0.7036,
      "step": 75
    },
    {
      "epoch": 0.08588298443370908,
      "grad_norm": 2.2455472355866837,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.7073,
      "step": 80
    },
    {
      "epoch": 0.09125067096081589,
      "grad_norm": 2.4464200044770936,
      "learning_rate": 9.042553191489362e-06,
      "loss": 0.7078,
      "step": 85
    },
    {
      "epoch": 0.0966183574879227,
      "grad_norm": 2.35752750364654,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.6963,
      "step": 90
    },
    {
      "epoch": 0.10198604401502952,
      "grad_norm": 2.1996004538409473,
      "learning_rate": 9.999964780082996e-06,
      "loss": 0.6979,
      "step": 95
    },
    {
      "epoch": 0.10735373054213634,
      "grad_norm": 2.203322438503073,
      "learning_rate": 9.998732135085665e-06,
      "loss": 0.6906,
      "step": 100
    },
    {
      "epoch": 0.11272141706924316,
      "grad_norm": 2.264863963599694,
      "learning_rate": 9.995738990383743e-06,
      "loss": 0.6866,
      "step": 105
    },
    {
      "epoch": 0.11808910359634997,
      "grad_norm": 2.2286418377556236,
      "learning_rate": 9.990986400130607e-06,
      "loss": 0.6782,
      "step": 110
    },
    {
      "epoch": 0.12345679012345678,
      "grad_norm": 2.1240149641244175,
      "learning_rate": 9.984476038137437e-06,
      "loss": 0.6697,
      "step": 115
    },
    {
      "epoch": 0.1288244766505636,
      "grad_norm": 2.0873242470478854,
      "learning_rate": 9.97621019728372e-06,
      "loss": 0.6688,
      "step": 120
    },
    {
      "epoch": 0.13419216317767044,
      "grad_norm": 2.4812498111624612,
      "learning_rate": 9.966191788709716e-06,
      "loss": 0.6631,
      "step": 125
    },
    {
      "epoch": 0.13955984970477725,
      "grad_norm": 2.343955721850536,
      "learning_rate": 9.954424340791195e-06,
      "loss": 0.6644,
      "step": 130
    },
    {
      "epoch": 0.14492753623188406,
      "grad_norm": 2.339967843619562,
      "learning_rate": 9.940911997896774e-06,
      "loss": 0.6642,
      "step": 135
    },
    {
      "epoch": 0.15029522275899088,
      "grad_norm": 2.2056817559033837,
      "learning_rate": 9.925659518928316e-06,
      "loss": 0.667,
      "step": 140
    },
    {
      "epoch": 0.1556629092860977,
      "grad_norm": 1.9903249362862974,
      "learning_rate": 9.908672275644898e-06,
      "loss": 0.6444,
      "step": 145
    },
    {
      "epoch": 0.1610305958132045,
      "grad_norm": 2.2039128348943744,
      "learning_rate": 9.889956250770933e-06,
      "loss": 0.6488,
      "step": 150
    },
    {
      "epoch": 0.16639828234031132,
      "grad_norm": 2.2078157899902706,
      "learning_rate": 9.86951803588912e-06,
      "loss": 0.6499,
      "step": 155
    },
    {
      "epoch": 0.17176596886741816,
      "grad_norm": 2.0109828014248774,
      "learning_rate": 9.847364829118963e-06,
      "loss": 0.6519,
      "step": 160
    },
    {
      "epoch": 0.17713365539452497,
      "grad_norm": 1.9664484718503512,
      "learning_rate": 9.82350443258166e-06,
      "loss": 0.6401,
      "step": 165
    },
    {
      "epoch": 0.18250134192163178,
      "grad_norm": 2.0667728485558308,
      "learning_rate": 9.797945249652295e-06,
      "loss": 0.6437,
      "step": 170
    },
    {
      "epoch": 0.1878690284487386,
      "grad_norm": 2.0065876578800346,
      "learning_rate": 9.770696282000245e-06,
      "loss": 0.6346,
      "step": 175
    },
    {
      "epoch": 0.1932367149758454,
      "grad_norm": 1.987617668179378,
      "learning_rate": 9.741767126418898e-06,
      "loss": 0.6356,
      "step": 180
    },
    {
      "epoch": 0.19860440150295222,
      "grad_norm": 2.04140704556592,
      "learning_rate": 9.711167971445766e-06,
      "loss": 0.6349,
      "step": 185
    },
    {
      "epoch": 0.20397208803005903,
      "grad_norm": 2.047535907923479,
      "learning_rate": 9.67890959377418e-06,
      "loss": 0.6298,
      "step": 190
    },
    {
      "epoch": 0.20933977455716588,
      "grad_norm": 1.9558416597814228,
      "learning_rate": 9.645003354457872e-06,
      "loss": 0.6349,
      "step": 195
    },
    {
      "epoch": 0.2147074610842727,
      "grad_norm": 1.8984622795525588,
      "learning_rate": 9.60946119490972e-06,
      "loss": 0.6142,
      "step": 200
    },
    {
      "epoch": 0.2200751476113795,
      "grad_norm": 2.3906003365767496,
      "learning_rate": 9.57229563269612e-06,
      "loss": 0.6222,
      "step": 205
    },
    {
      "epoch": 0.22544283413848631,
      "grad_norm": 1.8921023328729312,
      "learning_rate": 9.533519757128426e-06,
      "loss": 0.6186,
      "step": 210
    },
    {
      "epoch": 0.23081052066559313,
      "grad_norm": 1.9626099660140686,
      "learning_rate": 9.49314722465304e-06,
      "loss": 0.6258,
      "step": 215
    },
    {
      "epoch": 0.23617820719269994,
      "grad_norm": 2.0411979584264266,
      "learning_rate": 9.451192254041759e-06,
      "loss": 0.5979,
      "step": 220
    },
    {
      "epoch": 0.24154589371980675,
      "grad_norm": 2.1889414515575183,
      "learning_rate": 9.407669621384073e-06,
      "loss": 0.6083,
      "step": 225
    },
    {
      "epoch": 0.24691358024691357,
      "grad_norm": 2.085807605093685,
      "learning_rate": 9.362594654883185e-06,
      "loss": 0.5919,
      "step": 230
    },
    {
      "epoch": 0.2522812667740204,
      "grad_norm": 2.0627204109631543,
      "learning_rate": 9.31598322945759e-06,
      "loss": 0.6034,
      "step": 235
    },
    {
      "epoch": 0.2576489533011272,
      "grad_norm": 1.9378180140246337,
      "learning_rate": 9.267851761150092e-06,
      "loss": 0.6038,
      "step": 240
    },
    {
      "epoch": 0.26301663982823403,
      "grad_norm": 3.1058701754069924,
      "learning_rate": 9.218217201346251e-06,
      "loss": 0.595,
      "step": 245
    },
    {
      "epoch": 0.2683843263553409,
      "grad_norm": 1.9991169619012592,
      "learning_rate": 9.167097030804289e-06,
      "loss": 0.5874,
      "step": 250
    },
    {
      "epoch": 0.27375201288244766,
      "grad_norm": 2.061863952952721,
      "learning_rate": 9.114509253498554e-06,
      "loss": 0.5969,
      "step": 255
    },
    {
      "epoch": 0.2791196994095545,
      "grad_norm": 2.0907726187864926,
      "learning_rate": 9.060472390278717e-06,
      "loss": 0.5814,
      "step": 260
    },
    {
      "epoch": 0.2844873859366613,
      "grad_norm": 2.7768778562710397,
      "learning_rate": 9.005005472346923e-06,
      "loss": 0.593,
      "step": 265
    },
    {
      "epoch": 0.2898550724637681,
      "grad_norm": 2.0516068085641574,
      "learning_rate": 8.948128034555212e-06,
      "loss": 0.5849,
      "step": 270
    },
    {
      "epoch": 0.2952227589908749,
      "grad_norm": 1.8867496167590758,
      "learning_rate": 8.889860108525544e-06,
      "loss": 0.5863,
      "step": 275
    },
    {
      "epoch": 0.30059044551798175,
      "grad_norm": 1.942421908800686,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.5672,
      "step": 280
    },
    {
      "epoch": 0.3059581320450886,
      "grad_norm": 2.0272368493119393,
      "learning_rate": 8.76923535958783e-06,
      "loss": 0.5623,
      "step": 285
    },
    {
      "epoch": 0.3113258185721954,
      "grad_norm": 2.1343642849157787,
      "learning_rate": 8.706921019419237e-06,
      "loss": 0.5732,
      "step": 290
    },
    {
      "epoch": 0.3166935050993022,
      "grad_norm": 1.9323875772249934,
      "learning_rate": 8.643301141529619e-06,
      "loss": 0.5656,
      "step": 295
    },
    {
      "epoch": 0.322061191626409,
      "grad_norm": 2.037757830855184,
      "learning_rate": 8.578398132155846e-06,
      "loss": 0.5611,
      "step": 300
    },
    {
      "epoch": 0.32742887815351585,
      "grad_norm": 1.9398690166595873,
      "learning_rate": 8.512234849439887e-06,
      "loss": 0.5645,
      "step": 305
    },
    {
      "epoch": 0.33279656468062263,
      "grad_norm": 2.190033028948571,
      "learning_rate": 8.444834595378434e-06,
      "loss": 0.5622,
      "step": 310
    },
    {
      "epoch": 0.33816425120772947,
      "grad_norm": 2.221598674029084,
      "learning_rate": 8.376221107616187e-06,
      "loss": 0.546,
      "step": 315
    },
    {
      "epoch": 0.3435319377348363,
      "grad_norm": 2.021210342324902,
      "learning_rate": 8.306418551085707e-06,
      "loss": 0.5524,
      "step": 320
    },
    {
      "epoch": 0.3488996242619431,
      "grad_norm": 1.87069475903427,
      "learning_rate": 8.23545150949679e-06,
      "loss": 0.5447,
      "step": 325
    },
    {
      "epoch": 0.35426731078904994,
      "grad_norm": 1.880521268570416,
      "learning_rate": 8.163344976678342e-06,
      "loss": 0.5392,
      "step": 330
    },
    {
      "epoch": 0.3596349973161567,
      "grad_norm": 2.14764020394395,
      "learning_rate": 8.090124347775837e-06,
      "loss": 0.547,
      "step": 335
    },
    {
      "epoch": 0.36500268384326356,
      "grad_norm": 1.9583788892413105,
      "learning_rate": 8.0158154103074e-06,
      "loss": 0.5487,
      "step": 340
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 2.053310730502184,
      "learning_rate": 7.940444335081733e-06,
      "loss": 0.5408,
      "step": 345
    },
    {
      "epoch": 0.3757380568974772,
      "grad_norm": 1.9003460290083882,
      "learning_rate": 7.864037666981037e-06,
      "loss": 0.543,
      "step": 350
    },
    {
      "epoch": 0.38110574342458403,
      "grad_norm": 1.9850745541543715,
      "learning_rate": 7.786622315612182e-06,
      "loss": 0.5405,
      "step": 355
    },
    {
      "epoch": 0.3864734299516908,
      "grad_norm": 2.071517945939152,
      "learning_rate": 7.708225545829446e-06,
      "loss": 0.5331,
      "step": 360
    },
    {
      "epoch": 0.39184111647879766,
      "grad_norm": 1.9667341052567295,
      "learning_rate": 7.6288749681321115e-06,
      "loss": 0.5276,
      "step": 365
    },
    {
      "epoch": 0.39720880300590444,
      "grad_norm": 1.9124777050070318,
      "learning_rate": 7.548598528940354e-06,
      "loss": 0.5205,
      "step": 370
    },
    {
      "epoch": 0.4025764895330113,
      "grad_norm": 1.9540497432924613,
      "learning_rate": 7.4674245007528135e-06,
      "loss": 0.5293,
      "step": 375
    },
    {
      "epoch": 0.40794417606011807,
      "grad_norm": 2.0287532132914716,
      "learning_rate": 7.385381472189321e-06,
      "loss": 0.5183,
      "step": 380
    },
    {
      "epoch": 0.4133118625872249,
      "grad_norm": 2.021246551625832,
      "learning_rate": 7.302498337922293e-06,
      "loss": 0.5214,
      "step": 385
    },
    {
      "epoch": 0.41867954911433175,
      "grad_norm": 2.075086153282224,
      "learning_rate": 7.218804288500343e-06,
      "loss": 0.5051,
      "step": 390
    },
    {
      "epoch": 0.42404723564143854,
      "grad_norm": 1.9853201300706842,
      "learning_rate": 7.134328800067684e-06,
      "loss": 0.5143,
      "step": 395
    },
    {
      "epoch": 0.4294149221685454,
      "grad_norm": 1.9075844135415194,
      "learning_rate": 7.049101623982938e-06,
      "loss": 0.5076,
      "step": 400
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 1.9963194922766025,
      "learning_rate": 6.963152776341044e-06,
      "loss": 0.5087,
      "step": 405
    },
    {
      "epoch": 0.440150295222759,
      "grad_norm": 2.0510112086805083,
      "learning_rate": 6.876512527401897e-06,
      "loss": 0.5092,
      "step": 410
    },
    {
      "epoch": 0.4455179817498658,
      "grad_norm": 1.8923092211852284,
      "learning_rate": 6.789211390929497e-06,
      "loss": 0.4966,
      "step": 415
    },
    {
      "epoch": 0.45088566827697263,
      "grad_norm": 2.015790172637043,
      "learning_rate": 6.701280113445324e-06,
      "loss": 0.4967,
      "step": 420
    },
    {
      "epoch": 0.4562533548040794,
      "grad_norm": 1.9134734917899814,
      "learning_rate": 6.6127496633997475e-06,
      "loss": 0.5039,
      "step": 425
    },
    {
      "epoch": 0.46162104133118625,
      "grad_norm": 1.9388380453029543,
      "learning_rate": 6.523651220265269e-06,
      "loss": 0.4762,
      "step": 430
    },
    {
      "epoch": 0.4669887278582931,
      "grad_norm": 1.838971696143469,
      "learning_rate": 6.434016163555452e-06,
      "loss": 0.4894,
      "step": 435
    },
    {
      "epoch": 0.4723564143853999,
      "grad_norm": 1.9058911845628344,
      "learning_rate": 6.343876061773385e-06,
      "loss": 0.4858,
      "step": 440
    },
    {
      "epoch": 0.4777241009125067,
      "grad_norm": 2.027908561619023,
      "learning_rate": 6.2532626612936035e-06,
      "loss": 0.4829,
      "step": 445
    },
    {
      "epoch": 0.4830917874396135,
      "grad_norm": 1.9646108142819012,
      "learning_rate": 6.162207875181354e-06,
      "loss": 0.4838,
      "step": 450
    },
    {
      "epoch": 0.48845947396672035,
      "grad_norm": 1.891035535041024,
      "learning_rate": 6.070743771953157e-06,
      "loss": 0.4815,
      "step": 455
    },
    {
      "epoch": 0.49382716049382713,
      "grad_norm": 1.9516352660221894,
      "learning_rate": 5.978902564282616e-06,
      "loss": 0.4811,
      "step": 460
    },
    {
      "epoch": 0.499194847020934,
      "grad_norm": 2.07837231336269,
      "learning_rate": 5.886716597655472e-06,
      "loss": 0.4676,
      "step": 465
    },
    {
      "epoch": 0.5045625335480408,
      "grad_norm": 2.0338000671858443,
      "learning_rate": 5.7942183389778536e-06,
      "loss": 0.4751,
      "step": 470
    },
    {
      "epoch": 0.5099302200751477,
      "grad_norm": 1.9544187569972904,
      "learning_rate": 5.701440365141799e-06,
      "loss": 0.4747,
      "step": 475
    },
    {
      "epoch": 0.5152979066022544,
      "grad_norm": 2.0013443075194517,
      "learning_rate": 5.608415351552014e-06,
      "loss": 0.4737,
      "step": 480
    },
    {
      "epoch": 0.5206655931293612,
      "grad_norm": 2.392764011448066,
      "learning_rate": 5.515176060617945e-06,
      "loss": 0.4661,
      "step": 485
    },
    {
      "epoch": 0.5260332796564681,
      "grad_norm": 2.191779753390301,
      "learning_rate": 5.421755330215223e-06,
      "loss": 0.4633,
      "step": 490
    },
    {
      "epoch": 0.5314009661835749,
      "grad_norm": 1.9807734795602283,
      "learning_rate": 5.328186062120509e-06,
      "loss": 0.4675,
      "step": 495
    },
    {
      "epoch": 0.5367686527106817,
      "grad_norm": 1.8845793314459505,
      "learning_rate": 5.23450121042383e-06,
      "loss": 0.4485,
      "step": 500
    },
    {
      "epoch": 0.5421363392377885,
      "grad_norm": 1.9504286157116426,
      "learning_rate": 5.140733769922525e-06,
      "loss": 0.4492,
      "step": 505
    },
    {
      "epoch": 0.5475040257648953,
      "grad_norm": 1.9251970055484684,
      "learning_rate": 5.0469167645008245e-06,
      "loss": 0.4537,
      "step": 510
    },
    {
      "epoch": 0.5528717122920022,
      "grad_norm": 2.0900201692813503,
      "learning_rate": 4.953083235499177e-06,
      "loss": 0.4541,
      "step": 515
    },
    {
      "epoch": 0.558239398819109,
      "grad_norm": 1.914274695560957,
      "learning_rate": 4.859266230077474e-06,
      "loss": 0.4416,
      "step": 520
    },
    {
      "epoch": 0.5636070853462157,
      "grad_norm": 2.0363927763394685,
      "learning_rate": 4.7654987895761705e-06,
      "loss": 0.4578,
      "step": 525
    },
    {
      "epoch": 0.5689747718733226,
      "grad_norm": 1.906876750353807,
      "learning_rate": 4.671813937879494e-06,
      "loss": 0.4445,
      "step": 530
    },
    {
      "epoch": 0.5743424584004294,
      "grad_norm": 1.9970794776707403,
      "learning_rate": 4.5782446697847775e-06,
      "loss": 0.4432,
      "step": 535
    },
    {
      "epoch": 0.5797101449275363,
      "grad_norm": 1.9626727656883254,
      "learning_rate": 4.484823939382056e-06,
      "loss": 0.436,
      "step": 540
    },
    {
      "epoch": 0.5850778314546431,
      "grad_norm": 1.936046454009477,
      "learning_rate": 4.391584648447989e-06,
      "loss": 0.4433,
      "step": 545
    },
    {
      "epoch": 0.5904455179817498,
      "grad_norm": 1.919712775449399,
      "learning_rate": 4.298559634858202e-06,
      "loss": 0.4397,
      "step": 550
    },
    {
      "epoch": 0.5958132045088567,
      "grad_norm": 1.882081042043698,
      "learning_rate": 4.205781661022146e-06,
      "loss": 0.4274,
      "step": 555
    },
    {
      "epoch": 0.6011808910359635,
      "grad_norm": 1.9706554854402294,
      "learning_rate": 4.1132834023445304e-06,
      "loss": 0.4358,
      "step": 560
    },
    {
      "epoch": 0.6065485775630703,
      "grad_norm": 1.8892003217320965,
      "learning_rate": 4.021097435717386e-06,
      "loss": 0.4203,
      "step": 565
    },
    {
      "epoch": 0.6119162640901772,
      "grad_norm": 1.9387503523379348,
      "learning_rate": 3.929256228046845e-06,
      "loss": 0.4188,
      "step": 570
    },
    {
      "epoch": 0.6172839506172839,
      "grad_norm": 1.9263320255464111,
      "learning_rate": 3.837792124818647e-06,
      "loss": 0.4197,
      "step": 575
    },
    {
      "epoch": 0.6226516371443908,
      "grad_norm": 2.0679571176329636,
      "learning_rate": 3.7467373387063973e-06,
      "loss": 0.4104,
      "step": 580
    },
    {
      "epoch": 0.6280193236714976,
      "grad_norm": 1.88133441550826,
      "learning_rate": 3.656123938226618e-06,
      "loss": 0.4094,
      "step": 585
    },
    {
      "epoch": 0.6333870101986044,
      "grad_norm": 1.8992502262176936,
      "learning_rate": 3.5659838364445505e-06,
      "loss": 0.4126,
      "step": 590
    },
    {
      "epoch": 0.6387546967257112,
      "grad_norm": 1.8928527924901577,
      "learning_rate": 3.476348779734732e-06,
      "loss": 0.4245,
      "step": 595
    },
    {
      "epoch": 0.644122383252818,
      "grad_norm": 1.8811095797880513,
      "learning_rate": 3.387250336600254e-06,
      "loss": 0.4157,
      "step": 600
    },
    {
      "epoch": 0.6494900697799249,
      "grad_norm": 2.1867786519525736,
      "learning_rate": 3.298719886554677e-06,
      "loss": 0.4029,
      "step": 605
    },
    {
      "epoch": 0.6548577563070317,
      "grad_norm": 1.9359388771852108,
      "learning_rate": 3.2107886090705035e-06,
      "loss": 0.4073,
      "step": 610
    },
    {
      "epoch": 0.6602254428341385,
      "grad_norm": 2.0767122537438834,
      "learning_rate": 3.1234874725981045e-06,
      "loss": 0.4105,
      "step": 615
    },
    {
      "epoch": 0.6655931293612453,
      "grad_norm": 1.9568967104623627,
      "learning_rate": 3.036847223658958e-06,
      "loss": 0.4166,
      "step": 620
    },
    {
      "epoch": 0.6709608158883521,
      "grad_norm": 1.8330033161355852,
      "learning_rate": 2.950898376017064e-06,
      "loss": 0.4007,
      "step": 625
    },
    {
      "epoch": 0.6763285024154589,
      "grad_norm": 1.9534116085299529,
      "learning_rate": 2.865671199932318e-06,
      "loss": 0.4094,
      "step": 630
    },
    {
      "epoch": 0.6816961889425658,
      "grad_norm": 1.824576200330308,
      "learning_rate": 2.781195711499658e-06,
      "loss": 0.3975,
      "step": 635
    },
    {
      "epoch": 0.6870638754696726,
      "grad_norm": 1.9397944016656081,
      "learning_rate": 2.697501662077707e-06,
      "loss": 0.3938,
      "step": 640
    },
    {
      "epoch": 0.6924315619967794,
      "grad_norm": 1.9405147376762144,
      "learning_rate": 2.6146185278106807e-06,
      "loss": 0.3903,
      "step": 645
    },
    {
      "epoch": 0.6977992485238862,
      "grad_norm": 1.8735166891783446,
      "learning_rate": 2.5325754992471886e-06,
      "loss": 0.3844,
      "step": 650
    },
    {
      "epoch": 0.703166935050993,
      "grad_norm": 1.8550788085650558,
      "learning_rate": 2.4514014710596467e-06,
      "loss": 0.3895,
      "step": 655
    },
    {
      "epoch": 0.7085346215780999,
      "grad_norm": 1.9768445053781223,
      "learning_rate": 2.3711250318678906e-06,
      "loss": 0.3976,
      "step": 660
    },
    {
      "epoch": 0.7139023081052066,
      "grad_norm": 1.8242433586161713,
      "learning_rate": 2.2917744541705544e-06,
      "loss": 0.3896,
      "step": 665
    },
    {
      "epoch": 0.7192699946323134,
      "grad_norm": 1.8766511465002484,
      "learning_rate": 2.2133776843878185e-06,
      "loss": 0.3881,
      "step": 670
    },
    {
      "epoch": 0.7246376811594203,
      "grad_norm": 1.8567022542334322,
      "learning_rate": 2.1359623330189655e-06,
      "loss": 0.3826,
      "step": 675
    },
    {
      "epoch": 0.7300053676865271,
      "grad_norm": 1.8349926895031767,
      "learning_rate": 2.059555664918268e-06,
      "loss": 0.3888,
      "step": 680
    },
    {
      "epoch": 0.735373054213634,
      "grad_norm": 1.7933412905107806,
      "learning_rate": 1.9841845896926022e-06,
      "loss": 0.3861,
      "step": 685
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 1.8516583877916601,
      "learning_rate": 1.9098756522241634e-06,
      "loss": 0.3793,
      "step": 690
    },
    {
      "epoch": 0.7461084272678475,
      "grad_norm": 1.8073808501807826,
      "learning_rate": 1.8366550233216584e-06,
      "loss": 0.3825,
      "step": 695
    },
    {
      "epoch": 0.7514761137949544,
      "grad_norm": 2.0053953972861183,
      "learning_rate": 1.7645484905032129e-06,
      "loss": 0.3795,
      "step": 700
    },
    {
      "epoch": 0.7568438003220612,
      "grad_norm": 1.7686721996317474,
      "learning_rate": 1.6935814489142937e-06,
      "loss": 0.3726,
      "step": 705
    },
    {
      "epoch": 0.7622114868491681,
      "grad_norm": 1.7884368236538568,
      "learning_rate": 1.6237788923838149e-06,
      "loss": 0.3681,
      "step": 710
    },
    {
      "epoch": 0.7675791733762748,
      "grad_norm": 1.8578617970297808,
      "learning_rate": 1.555165404621567e-06,
      "loss": 0.37,
      "step": 715
    },
    {
      "epoch": 0.7729468599033816,
      "grad_norm": 1.8740014923246073,
      "learning_rate": 1.487765150560116e-06,
      "loss": 0.3736,
      "step": 720
    },
    {
      "epoch": 0.7783145464304885,
      "grad_norm": 1.9238343180583926,
      "learning_rate": 1.4216018678441558e-06,
      "loss": 0.376,
      "step": 725
    },
    {
      "epoch": 0.7836822329575953,
      "grad_norm": 1.907761388210502,
      "learning_rate": 1.3566988584703817e-06,
      "loss": 0.3749,
      "step": 730
    },
    {
      "epoch": 0.789049919484702,
      "grad_norm": 1.8440526734882297,
      "learning_rate": 1.293078980580766e-06,
      "loss": 0.3656,
      "step": 735
    },
    {
      "epoch": 0.7944176060118089,
      "grad_norm": 1.8162595531294223,
      "learning_rate": 1.2307646404121692e-06,
      "loss": 0.3635,
      "step": 740
    },
    {
      "epoch": 0.7997852925389157,
      "grad_norm": 1.8090188411422603,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.3706,
      "step": 745
    },
    {
      "epoch": 0.8051529790660226,
      "grad_norm": 1.8548359213105685,
      "learning_rate": 1.1101398914744565e-06,
      "loss": 0.3541,
      "step": 750
    },
    {
      "epoch": 0.8105206655931294,
      "grad_norm": 1.7486366206671473,
      "learning_rate": 1.0518719654447896e-06,
      "loss": 0.3566,
      "step": 755
    },
    {
      "epoch": 0.8158883521202361,
      "grad_norm": 1.7220786134987836,
      "learning_rate": 9.949945276530782e-07,
      "loss": 0.3537,
      "step": 760
    },
    {
      "epoch": 0.821256038647343,
      "grad_norm": 1.8563952966735642,
      "learning_rate": 9.395276097212841e-07,
      "loss": 0.377,
      "step": 765
    },
    {
      "epoch": 0.8266237251744498,
      "grad_norm": 1.8261975975646805,
      "learning_rate": 8.854907465014479e-07,
      "loss": 0.3564,
      "step": 770
    },
    {
      "epoch": 0.8319914117015567,
      "grad_norm": 1.6700870770757226,
      "learning_rate": 8.329029691957124e-07,
      "loss": 0.3533,
      "step": 775
    },
    {
      "epoch": 0.8373590982286635,
      "grad_norm": 1.8714115240328053,
      "learning_rate": 7.817827986537508e-07,
      "loss": 0.3598,
      "step": 780
    },
    {
      "epoch": 0.8427267847557702,
      "grad_norm": 1.7265092801300406,
      "learning_rate": 7.321482388499096e-07,
      "loss": 0.3574,
      "step": 785
    },
    {
      "epoch": 0.8480944712828771,
      "grad_norm": 1.7192645034643335,
      "learning_rate": 6.840167705424106e-07,
      "loss": 0.3626,
      "step": 790
    },
    {
      "epoch": 0.8534621578099839,
      "grad_norm": 1.8788914839483877,
      "learning_rate": 6.374053451168166e-07,
      "loss": 0.3569,
      "step": 795
    },
    {
      "epoch": 0.8588298443370908,
      "grad_norm": 1.7223384963954047,
      "learning_rate": 5.92330378615929e-07,
      "loss": 0.3567,
      "step": 800
    },
    {
      "epoch": 0.8641975308641975,
      "grad_norm": 1.84633207462375,
      "learning_rate": 5.488077459582425e-07,
      "loss": 0.3568,
      "step": 805
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 1.8156905089011117,
      "learning_rate": 5.068527753469604e-07,
      "loss": 0.3553,
      "step": 810
    },
    {
      "epoch": 0.8749329039184112,
      "grad_norm": 1.76653198854091,
      "learning_rate": 4.664802428715753e-07,
      "loss": 0.353,
      "step": 815
    },
    {
      "epoch": 0.880300590445518,
      "grad_norm": 1.746120045538455,
      "learning_rate": 4.2770436730388166e-07,
      "loss": 0.3548,
      "step": 820
    },
    {
      "epoch": 0.8856682769726248,
      "grad_norm": 1.7810142936647677,
      "learning_rate": 3.9053880509028086e-07,
      "loss": 0.3546,
      "step": 825
    },
    {
      "epoch": 0.8910359634997316,
      "grad_norm": 1.7315549203684755,
      "learning_rate": 3.549966455421305e-07,
      "loss": 0.3555,
      "step": 830
    },
    {
      "epoch": 0.8964036500268384,
      "grad_norm": 1.9272563135988092,
      "learning_rate": 3.2109040622582186e-07,
      "loss": 0.3521,
      "step": 835
    },
    {
      "epoch": 0.9017713365539453,
      "grad_norm": 1.7798988754363725,
      "learning_rate": 2.8883202855423676e-07,
      "loss": 0.3506,
      "step": 840
    },
    {
      "epoch": 0.9071390230810521,
      "grad_norm": 1.7625732551828204,
      "learning_rate": 2.582328735811029e-07,
      "loss": 0.3534,
      "step": 845
    },
    {
      "epoch": 0.9125067096081588,
      "grad_norm": 1.827775623748613,
      "learning_rate": 2.2930371799975593e-07,
      "loss": 0.3469,
      "step": 850
    },
    {
      "epoch": 0.9178743961352657,
      "grad_norm": 1.7729865240896066,
      "learning_rate": 2.0205475034770606e-07,
      "loss": 0.3478,
      "step": 855
    },
    {
      "epoch": 0.9232420826623725,
      "grad_norm": 1.7459386763584364,
      "learning_rate": 1.7649556741833995e-07,
      "loss": 0.3506,
      "step": 860
    },
    {
      "epoch": 0.9286097691894794,
      "grad_norm": 1.7638105650910396,
      "learning_rate": 1.5263517088103862e-07,
      "loss": 0.3501,
      "step": 865
    },
    {
      "epoch": 0.9339774557165862,
      "grad_norm": 1.6634385736641513,
      "learning_rate": 1.304819641108801e-07,
      "loss": 0.3443,
      "step": 870
    },
    {
      "epoch": 0.9393451422436929,
      "grad_norm": 1.7138313811653363,
      "learning_rate": 1.1004374922906846e-07,
      "loss": 0.3519,
      "step": 875
    },
    {
      "epoch": 0.9447128287707998,
      "grad_norm": 1.7315956523188014,
      "learning_rate": 9.132772435510362e-08,
      "loss": 0.3518,
      "step": 880
    },
    {
      "epoch": 0.9500805152979066,
      "grad_norm": 1.83104419897327,
      "learning_rate": 7.434048107168523e-08,
      "loss": 0.3534,
      "step": 885
    },
    {
      "epoch": 0.9554482018250134,
      "grad_norm": 1.8614069199591219,
      "learning_rate": 5.908800210322696e-08,
      "loss": 0.3529,
      "step": 890
    },
    {
      "epoch": 0.9608158883521203,
      "grad_norm": 1.791303700726749,
      "learning_rate": 4.55756592088058e-08,
      "loss": 0.3458,
      "step": 895
    },
    {
      "epoch": 0.966183574879227,
      "grad_norm": 1.766280788851407,
      "learning_rate": 3.3808211290284886e-08,
      "loss": 0.3506,
      "step": 900
    },
    {
      "epoch": 0.9715512614063339,
      "grad_norm": 1.7183411966725215,
      "learning_rate": 2.378980271628195e-08,
      "loss": 0.3459,
      "step": 905
    },
    {
      "epoch": 0.9769189479334407,
      "grad_norm": 1.6247222017291731,
      "learning_rate": 1.552396186256411e-08,
      "loss": 0.3469,
      "step": 910
    },
    {
      "epoch": 0.9822866344605475,
      "grad_norm": 1.737776722109284,
      "learning_rate": 9.013599869394096e-09,
      "loss": 0.3432,
      "step": 915
    },
    {
      "epoch": 0.9876543209876543,
      "grad_norm": 1.6970200431542546,
      "learning_rate": 4.261009616257638e-09,
      "loss": 0.3502,
      "step": 920
    },
    {
      "epoch": 0.9930220075147611,
      "grad_norm": 1.700070440972857,
      "learning_rate": 1.2678649143349485e-09,
      "loss": 0.3439,
      "step": 925
    },
    {
      "epoch": 0.998389694041868,
      "grad_norm": 1.7621651797235254,
      "learning_rate": 3.5219917003948003e-11,
      "loss": 0.3562,
      "step": 930
    },
    {
      "epoch": 0.9994632313472893,
      "eval_loss": 0.2994081974029541,
      "eval_runtime": 1.1873,
      "eval_samples_per_second": 1.684,
      "eval_steps_per_second": 0.842,
      "step": 931
    },
    {
      "epoch": 0.9994632313472893,
      "step": 931,
      "total_flos": 194827769610240.0,
      "train_loss": 0.5084374696496478,
      "train_runtime": 18282.9087,
      "train_samples_per_second": 1.63,
      "train_steps_per_second": 0.051
    }
  ],
  "logging_steps": 5,
  "max_steps": 931,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 194827769610240.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}