{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.9994632313472893, |
|
"eval_steps": 500, |
|
"global_step": 931, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0010735373054213634, |
|
"grad_norm": 22.07959914956398, |
|
"learning_rate": 1.0638297872340426e-07, |
|
"loss": 1.3304, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.005367686527106817, |
|
"grad_norm": 20.362741832883, |
|
"learning_rate": 5.319148936170213e-07, |
|
"loss": 1.3199, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.010735373054213635, |
|
"grad_norm": 9.022624585805783, |
|
"learning_rate": 1.0638297872340427e-06, |
|
"loss": 1.1933, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01610305958132045, |
|
"grad_norm": 7.978147779985442, |
|
"learning_rate": 1.595744680851064e-06, |
|
"loss": 1.0373, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.02147074610842727, |
|
"grad_norm": 2.9506039855166453, |
|
"learning_rate": 2.1276595744680853e-06, |
|
"loss": 0.9114, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.026838432635534086, |
|
"grad_norm": 2.3851062480608025, |
|
"learning_rate": 2.6595744680851065e-06, |
|
"loss": 0.8679, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.0322061191626409, |
|
"grad_norm": 2.2663674501530258, |
|
"learning_rate": 3.191489361702128e-06, |
|
"loss": 0.8324, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03757380568974772, |
|
"grad_norm": 2.12295130188594, |
|
"learning_rate": 3.723404255319149e-06, |
|
"loss": 0.8086, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.04294149221685454, |
|
"grad_norm": 2.156011284979002, |
|
"learning_rate": 4.255319148936171e-06, |
|
"loss": 0.8121, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04830917874396135, |
|
"grad_norm": 2.4610831728611906, |
|
"learning_rate": 4.787234042553192e-06, |
|
"loss": 0.7828, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.05367686527106817, |
|
"grad_norm": 2.3689072302884027, |
|
"learning_rate": 5.319148936170213e-06, |
|
"loss": 0.7646, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.059044551798174985, |
|
"grad_norm": 2.429397980263518, |
|
"learning_rate": 5.851063829787235e-06, |
|
"loss": 0.7568, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.0644122383252818, |
|
"grad_norm": 2.37804165752358, |
|
"learning_rate": 6.382978723404256e-06, |
|
"loss": 0.7398, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06977992485238863, |
|
"grad_norm": 2.524295711780105, |
|
"learning_rate": 6.914893617021278e-06, |
|
"loss": 0.7251, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.07514761137949544, |
|
"grad_norm": 2.4359530361530624, |
|
"learning_rate": 7.446808510638298e-06, |
|
"loss": 0.7183, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.08051529790660225, |
|
"grad_norm": 2.3732088149554795, |
|
"learning_rate": 7.97872340425532e-06, |
|
"loss": 0.7165, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.08588298443370908, |
|
"grad_norm": 2.319404881166218, |
|
"learning_rate": 8.510638297872341e-06, |
|
"loss": 0.7049, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.09125067096081589, |
|
"grad_norm": 2.467420301262588, |
|
"learning_rate": 9.042553191489362e-06, |
|
"loss": 0.6982, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.0966183574879227, |
|
"grad_norm": 2.327678008320516, |
|
"learning_rate": 9.574468085106385e-06, |
|
"loss": 0.7016, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.10198604401502952, |
|
"grad_norm": 2.336742003872521, |
|
"learning_rate": 9.999964780082996e-06, |
|
"loss": 0.6895, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.10735373054213634, |
|
"grad_norm": 2.261847003362599, |
|
"learning_rate": 9.998732135085665e-06, |
|
"loss": 0.7028, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.11272141706924316, |
|
"grad_norm": 2.0345743029930397, |
|
"learning_rate": 9.995738990383743e-06, |
|
"loss": 0.6877, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.11808910359634997, |
|
"grad_norm": 2.424929767270555, |
|
"learning_rate": 9.990986400130607e-06, |
|
"loss": 0.676, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.12345679012345678, |
|
"grad_norm": 2.1689597645668814, |
|
"learning_rate": 9.984476038137437e-06, |
|
"loss": 0.6863, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.1288244766505636, |
|
"grad_norm": 2.0449283955231192, |
|
"learning_rate": 9.97621019728372e-06, |
|
"loss": 0.6746, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.13419216317767044, |
|
"grad_norm": 2.0697918308040144, |
|
"learning_rate": 9.966191788709716e-06, |
|
"loss": 0.6741, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.13955984970477725, |
|
"grad_norm": 2.0070574066142184, |
|
"learning_rate": 9.954424340791195e-06, |
|
"loss": 0.6586, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.14492753623188406, |
|
"grad_norm": 2.0705850233290386, |
|
"learning_rate": 9.940911997896774e-06, |
|
"loss": 0.6696, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.15029522275899088, |
|
"grad_norm": 2.0521551046671624, |
|
"learning_rate": 9.925659518928316e-06, |
|
"loss": 0.6581, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1556629092860977, |
|
"grad_norm": 2.085647742610215, |
|
"learning_rate": 9.908672275644898e-06, |
|
"loss": 0.6538, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.1610305958132045, |
|
"grad_norm": 2.0748876889210024, |
|
"learning_rate": 9.889956250770933e-06, |
|
"loss": 0.6582, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.16639828234031132, |
|
"grad_norm": 1.9231602777551962, |
|
"learning_rate": 9.86951803588912e-06, |
|
"loss": 0.6547, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.17176596886741816, |
|
"grad_norm": 2.1450314001651023, |
|
"learning_rate": 9.847364829118963e-06, |
|
"loss": 0.6437, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.17713365539452497, |
|
"grad_norm": 2.100281454841468, |
|
"learning_rate": 9.82350443258166e-06, |
|
"loss": 0.6369, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.18250134192163178, |
|
"grad_norm": 1.911217221900268, |
|
"learning_rate": 9.797945249652295e-06, |
|
"loss": 0.6348, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.1878690284487386, |
|
"grad_norm": 2.09075575294963, |
|
"learning_rate": 9.770696282000245e-06, |
|
"loss": 0.6488, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.1932367149758454, |
|
"grad_norm": 2.0986149327599795, |
|
"learning_rate": 9.741767126418898e-06, |
|
"loss": 0.6301, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.19860440150295222, |
|
"grad_norm": 1.9725424031547765, |
|
"learning_rate": 9.711167971445766e-06, |
|
"loss": 0.6215, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.20397208803005903, |
|
"grad_norm": 1.9386685500016003, |
|
"learning_rate": 9.67890959377418e-06, |
|
"loss": 0.6316, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.20933977455716588, |
|
"grad_norm": 2.00224716719208, |
|
"learning_rate": 9.645003354457872e-06, |
|
"loss": 0.624, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.2147074610842727, |
|
"grad_norm": 1.9315585498716268, |
|
"learning_rate": 9.60946119490972e-06, |
|
"loss": 0.6194, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.2200751476113795, |
|
"grad_norm": 2.140910573853744, |
|
"learning_rate": 9.57229563269612e-06, |
|
"loss": 0.6174, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.22544283413848631, |
|
"grad_norm": 2.1739763171596884, |
|
"learning_rate": 9.533519757128426e-06, |
|
"loss": 0.6231, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.23081052066559313, |
|
"grad_norm": 1.9791887683055185, |
|
"learning_rate": 9.49314722465304e-06, |
|
"loss": 0.5963, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.23617820719269994, |
|
"grad_norm": 2.0935279890007785, |
|
"learning_rate": 9.451192254041759e-06, |
|
"loss": 0.6165, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.24154589371980675, |
|
"grad_norm": 1.9389350149885696, |
|
"learning_rate": 9.407669621384073e-06, |
|
"loss": 0.6077, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.24691358024691357, |
|
"grad_norm": 1.9673443855756516, |
|
"learning_rate": 9.362594654883185e-06, |
|
"loss": 0.5986, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.2522812667740204, |
|
"grad_norm": 1.9267038189193195, |
|
"learning_rate": 9.31598322945759e-06, |
|
"loss": 0.6209, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.2576489533011272, |
|
"grad_norm": 1.9705398985631108, |
|
"learning_rate": 9.267851761150092e-06, |
|
"loss": 0.5949, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.26301663982823403, |
|
"grad_norm": 2.763348732615502, |
|
"learning_rate": 9.218217201346251e-06, |
|
"loss": 0.6003, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.2683843263553409, |
|
"grad_norm": 1.9828807479273434, |
|
"learning_rate": 9.167097030804289e-06, |
|
"loss": 0.5967, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.27375201288244766, |
|
"grad_norm": 2.067267771380607, |
|
"learning_rate": 9.114509253498554e-06, |
|
"loss": 0.5906, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.2791196994095545, |
|
"grad_norm": 2.1736682143187553, |
|
"learning_rate": 9.060472390278717e-06, |
|
"loss": 0.5786, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.2844873859366613, |
|
"grad_norm": 2.3050896737016333, |
|
"learning_rate": 9.005005472346923e-06, |
|
"loss": 0.5752, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.2898550724637681, |
|
"grad_norm": 2.1110962605389147, |
|
"learning_rate": 8.948128034555212e-06, |
|
"loss": 0.5941, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.2952227589908749, |
|
"grad_norm": 2.276509382528488, |
|
"learning_rate": 8.889860108525544e-06, |
|
"loss": 0.5666, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.30059044551798175, |
|
"grad_norm": 2.0249679866823644, |
|
"learning_rate": 8.83022221559489e-06, |
|
"loss": 0.5834, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.3059581320450886, |
|
"grad_norm": 2.0546291551649976, |
|
"learning_rate": 8.76923535958783e-06, |
|
"loss": 0.5675, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.3113258185721954, |
|
"grad_norm": 2.2236300247259697, |
|
"learning_rate": 8.706921019419237e-06, |
|
"loss": 0.563, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.3166935050993022, |
|
"grad_norm": 1.8037385365479262, |
|
"learning_rate": 8.643301141529619e-06, |
|
"loss": 0.5748, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.322061191626409, |
|
"grad_norm": 1.985529414306051, |
|
"learning_rate": 8.578398132155846e-06, |
|
"loss": 0.5578, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.32742887815351585, |
|
"grad_norm": 2.0083187215038674, |
|
"learning_rate": 8.512234849439887e-06, |
|
"loss": 0.5722, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.33279656468062263, |
|
"grad_norm": 1.968155782313349, |
|
"learning_rate": 8.444834595378434e-06, |
|
"loss": 0.5674, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.33816425120772947, |
|
"grad_norm": 2.0307121236920613, |
|
"learning_rate": 8.376221107616187e-06, |
|
"loss": 0.5596, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.3435319377348363, |
|
"grad_norm": 2.150965816694603, |
|
"learning_rate": 8.306418551085707e-06, |
|
"loss": 0.5505, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.3488996242619431, |
|
"grad_norm": 1.9333182701879525, |
|
"learning_rate": 8.23545150949679e-06, |
|
"loss": 0.5407, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.35426731078904994, |
|
"grad_norm": 2.0408489065141673, |
|
"learning_rate": 8.163344976678342e-06, |
|
"loss": 0.5341, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.3596349973161567, |
|
"grad_norm": 2.039571243808379, |
|
"learning_rate": 8.090124347775837e-06, |
|
"loss": 0.5386, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.36500268384326356, |
|
"grad_norm": 1.907167090419826, |
|
"learning_rate": 8.0158154103074e-06, |
|
"loss": 0.5481, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.37037037037037035, |
|
"grad_norm": 1.960372580403973, |
|
"learning_rate": 7.940444335081733e-06, |
|
"loss": 0.5399, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.3757380568974772, |
|
"grad_norm": 2.055565141792339, |
|
"learning_rate": 7.864037666981037e-06, |
|
"loss": 0.5329, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.38110574342458403, |
|
"grad_norm": 1.9530633167577272, |
|
"learning_rate": 7.786622315612182e-06, |
|
"loss": 0.5281, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.3864734299516908, |
|
"grad_norm": 1.9983415429313947, |
|
"learning_rate": 7.708225545829446e-06, |
|
"loss": 0.5332, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.39184111647879766, |
|
"grad_norm": 2.074941439605804, |
|
"learning_rate": 7.6288749681321115e-06, |
|
"loss": 0.5182, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.39720880300590444, |
|
"grad_norm": 1.9934314007469667, |
|
"learning_rate": 7.548598528940354e-06, |
|
"loss": 0.526, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.4025764895330113, |
|
"grad_norm": 2.033407744493333, |
|
"learning_rate": 7.4674245007528135e-06, |
|
"loss": 0.5263, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.40794417606011807, |
|
"grad_norm": 1.9620705282952844, |
|
"learning_rate": 7.385381472189321e-06, |
|
"loss": 0.518, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.4133118625872249, |
|
"grad_norm": 1.9020125127771519, |
|
"learning_rate": 7.302498337922293e-06, |
|
"loss": 0.5154, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.41867954911433175, |
|
"grad_norm": 2.0750218270909966, |
|
"learning_rate": 7.218804288500343e-06, |
|
"loss": 0.5147, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.42404723564143854, |
|
"grad_norm": 2.040651657120084, |
|
"learning_rate": 7.134328800067684e-06, |
|
"loss": 0.5099, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.4294149221685454, |
|
"grad_norm": 1.9898945970825554, |
|
"learning_rate": 7.049101623982938e-06, |
|
"loss": 0.4993, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.43478260869565216, |
|
"grad_norm": 1.9951295882452424, |
|
"learning_rate": 6.963152776341044e-06, |
|
"loss": 0.5104, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.440150295222759, |
|
"grad_norm": 1.8963421029024186, |
|
"learning_rate": 6.876512527401897e-06, |
|
"loss": 0.4934, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.4455179817498658, |
|
"grad_norm": 2.0348217650465785, |
|
"learning_rate": 6.789211390929497e-06, |
|
"loss": 0.5073, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.45088566827697263, |
|
"grad_norm": 2.1752799116340986, |
|
"learning_rate": 6.701280113445324e-06, |
|
"loss": 0.4968, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.4562533548040794, |
|
"grad_norm": 1.9168080270884666, |
|
"learning_rate": 6.6127496633997475e-06, |
|
"loss": 0.4881, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.46162104133118625, |
|
"grad_norm": 1.8791343828515361, |
|
"learning_rate": 6.523651220265269e-06, |
|
"loss": 0.4917, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.4669887278582931, |
|
"grad_norm": 1.9398738713179469, |
|
"learning_rate": 6.434016163555452e-06, |
|
"loss": 0.4842, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.4723564143853999, |
|
"grad_norm": 1.8681466257367256, |
|
"learning_rate": 6.343876061773385e-06, |
|
"loss": 0.4888, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.4777241009125067, |
|
"grad_norm": 1.8826036572462372, |
|
"learning_rate": 6.2532626612936035e-06, |
|
"loss": 0.4922, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.4830917874396135, |
|
"grad_norm": 1.9400538781987646, |
|
"learning_rate": 6.162207875181354e-06, |
|
"loss": 0.4783, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.48845947396672035, |
|
"grad_norm": 1.9864272178376912, |
|
"learning_rate": 6.070743771953157e-06, |
|
"loss": 0.4754, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.49382716049382713, |
|
"grad_norm": 1.943057752510615, |
|
"learning_rate": 5.978902564282616e-06, |
|
"loss": 0.4811, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.499194847020934, |
|
"grad_norm": 2.0041710832979547, |
|
"learning_rate": 5.886716597655472e-06, |
|
"loss": 0.4717, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.5045625335480408, |
|
"grad_norm": 1.9473535928377121, |
|
"learning_rate": 5.7942183389778536e-06, |
|
"loss": 0.4766, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.5099302200751477, |
|
"grad_norm": 2.061547109286419, |
|
"learning_rate": 5.701440365141799e-06, |
|
"loss": 0.4667, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.5152979066022544, |
|
"grad_norm": 1.9952943514062236, |
|
"learning_rate": 5.608415351552014e-06, |
|
"loss": 0.461, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.5206655931293612, |
|
"grad_norm": 2.159739403315762, |
|
"learning_rate": 5.515176060617945e-06, |
|
"loss": 0.4817, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.5260332796564681, |
|
"grad_norm": 2.098839883318929, |
|
"learning_rate": 5.421755330215223e-06, |
|
"loss": 0.4626, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.5314009661835749, |
|
"grad_norm": 1.893289381010737, |
|
"learning_rate": 5.328186062120509e-06, |
|
"loss": 0.4493, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.5367686527106817, |
|
"grad_norm": 2.034865472461479, |
|
"learning_rate": 5.23450121042383e-06, |
|
"loss": 0.461, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.5421363392377885, |
|
"grad_norm": 1.913832222398215, |
|
"learning_rate": 5.140733769922525e-06, |
|
"loss": 0.4555, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.5475040257648953, |
|
"grad_norm": 1.9315440299324362, |
|
"learning_rate": 5.0469167645008245e-06, |
|
"loss": 0.4594, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.5528717122920022, |
|
"grad_norm": 2.0634590710960263, |
|
"learning_rate": 4.953083235499177e-06, |
|
"loss": 0.4485, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.558239398819109, |
|
"grad_norm": 1.8668954388857975, |
|
"learning_rate": 4.859266230077474e-06, |
|
"loss": 0.4483, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.5636070853462157, |
|
"grad_norm": 1.844866191674477, |
|
"learning_rate": 4.7654987895761705e-06, |
|
"loss": 0.4436, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.5689747718733226, |
|
"grad_norm": 1.9794533025528265, |
|
"learning_rate": 4.671813937879494e-06, |
|
"loss": 0.441, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.5743424584004294, |
|
"grad_norm": 1.883767510221611, |
|
"learning_rate": 4.5782446697847775e-06, |
|
"loss": 0.4432, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.5797101449275363, |
|
"grad_norm": 1.976673677149353, |
|
"learning_rate": 4.484823939382056e-06, |
|
"loss": 0.4358, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5850778314546431, |
|
"grad_norm": 1.9433175492111152, |
|
"learning_rate": 4.391584648447989e-06, |
|
"loss": 0.4319, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5904455179817498, |
|
"grad_norm": 1.985070698744384, |
|
"learning_rate": 4.298559634858202e-06, |
|
"loss": 0.4354, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5958132045088567, |
|
"grad_norm": 2.01070608130013, |
|
"learning_rate": 4.205781661022146e-06, |
|
"loss": 0.4267, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.6011808910359635, |
|
"grad_norm": 1.9373278557153377, |
|
"learning_rate": 4.1132834023445304e-06, |
|
"loss": 0.4208, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.6065485775630703, |
|
"grad_norm": 1.896977030964561, |
|
"learning_rate": 4.021097435717386e-06, |
|
"loss": 0.4254, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.6119162640901772, |
|
"grad_norm": 1.9694900821692567, |
|
"learning_rate": 3.929256228046845e-06, |
|
"loss": 0.4178, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.6172839506172839, |
|
"grad_norm": 1.9130375537620703, |
|
"learning_rate": 3.837792124818647e-06, |
|
"loss": 0.4256, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.6226516371443908, |
|
"grad_norm": 2.148799842524908, |
|
"learning_rate": 3.7467373387063973e-06, |
|
"loss": 0.4172, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.6280193236714976, |
|
"grad_norm": 1.9520935395346655, |
|
"learning_rate": 3.656123938226618e-06, |
|
"loss": 0.4209, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.6333870101986044, |
|
"grad_norm": 1.9284426230497334, |
|
"learning_rate": 3.5659838364445505e-06, |
|
"loss": 0.4135, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.6387546967257112, |
|
"grad_norm": 3.083878858254763, |
|
"learning_rate": 3.476348779734732e-06, |
|
"loss": 0.4067, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.644122383252818, |
|
"grad_norm": 1.956096320950966, |
|
"learning_rate": 3.387250336600254e-06, |
|
"loss": 0.4193, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.6494900697799249, |
|
"grad_norm": 1.7978680707200165, |
|
"learning_rate": 3.298719886554677e-06, |
|
"loss": 0.4082, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.6548577563070317, |
|
"grad_norm": 1.8632209883906246, |
|
"learning_rate": 3.2107886090705035e-06, |
|
"loss": 0.4009, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.6602254428341385, |
|
"grad_norm": 1.9564221459543165, |
|
"learning_rate": 3.1234874725981045e-06, |
|
"loss": 0.4116, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.6655931293612453, |
|
"grad_norm": 2.1099975602879217, |
|
"learning_rate": 3.036847223658958e-06, |
|
"loss": 0.4046, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.6709608158883521, |
|
"grad_norm": 1.923676982794732, |
|
"learning_rate": 2.950898376017064e-06, |
|
"loss": 0.4022, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.6763285024154589, |
|
"grad_norm": 1.9781538947455772, |
|
"learning_rate": 2.865671199932318e-06, |
|
"loss": 0.4084, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.6816961889425658, |
|
"grad_norm": 1.8903547449677651, |
|
"learning_rate": 2.781195711499658e-06, |
|
"loss": 0.399, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.6870638754696726, |
|
"grad_norm": 1.812136545065083, |
|
"learning_rate": 2.697501662077707e-06, |
|
"loss": 0.3981, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.6924315619967794, |
|
"grad_norm": 1.8634900584603342, |
|
"learning_rate": 2.6146185278106807e-06, |
|
"loss": 0.4009, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6977992485238862, |
|
"grad_norm": 1.8908371985643206, |
|
"learning_rate": 2.5325754992471886e-06, |
|
"loss": 0.3873, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.703166935050993, |
|
"grad_norm": 1.825726975173959, |
|
"learning_rate": 2.4514014710596467e-06, |
|
"loss": 0.3956, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.7085346215780999, |
|
"grad_norm": 1.8145985790587735, |
|
"learning_rate": 2.3711250318678906e-06, |
|
"loss": 0.387, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.7139023081052066, |
|
"grad_norm": 1.9318642386352745, |
|
"learning_rate": 2.2917744541705544e-06, |
|
"loss": 0.3915, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.7192699946323134, |
|
"grad_norm": 1.8371335401848725, |
|
"learning_rate": 2.2133776843878185e-06, |
|
"loss": 0.3913, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.7246376811594203, |
|
"grad_norm": 1.8857468128113828, |
|
"learning_rate": 2.1359623330189655e-06, |
|
"loss": 0.382, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.7300053676865271, |
|
"grad_norm": 1.847380544620127, |
|
"learning_rate": 2.059555664918268e-06, |
|
"loss": 0.3768, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.735373054213634, |
|
"grad_norm": 1.7802526887131536, |
|
"learning_rate": 1.9841845896926022e-06, |
|
"loss": 0.3842, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.7407407407407407, |
|
"grad_norm": 1.8355700135485964, |
|
"learning_rate": 1.9098756522241634e-06, |
|
"loss": 0.3803, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.7461084272678475, |
|
"grad_norm": 1.8378538582720527, |
|
"learning_rate": 1.8366550233216584e-06, |
|
"loss": 0.3774, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.7514761137949544, |
|
"grad_norm": 1.8843628517686122, |
|
"learning_rate": 1.7645484905032129e-06, |
|
"loss": 0.3804, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.7568438003220612, |
|
"grad_norm": 1.842487842009676, |
|
"learning_rate": 1.6935814489142937e-06, |
|
"loss": 0.3697, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.7622114868491681, |
|
"grad_norm": 1.8129216270941833, |
|
"learning_rate": 1.6237788923838149e-06, |
|
"loss": 0.374, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.7675791733762748, |
|
"grad_norm": 1.9032083523775989, |
|
"learning_rate": 1.555165404621567e-06, |
|
"loss": 0.3755, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.7729468599033816, |
|
"grad_norm": 1.8048406689197223, |
|
"learning_rate": 1.487765150560116e-06, |
|
"loss": 0.37, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.7783145464304885, |
|
"grad_norm": 1.7679512404980078, |
|
"learning_rate": 1.4216018678441558e-06, |
|
"loss": 0.371, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.7836822329575953, |
|
"grad_norm": 1.7580438160255716, |
|
"learning_rate": 1.3566988584703817e-06, |
|
"loss": 0.3744, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.789049919484702, |
|
"grad_norm": 1.9423775433056052, |
|
"learning_rate": 1.293078980580766e-06, |
|
"loss": 0.3648, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.7944176060118089, |
|
"grad_norm": 1.8075229160199813, |
|
"learning_rate": 1.2307646404121692e-06, |
|
"loss": 0.3793, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.7997852925389157, |
|
"grad_norm": 1.7960898775042207, |
|
"learning_rate": 1.1697777844051105e-06, |
|
"loss": 0.3658, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.8051529790660226, |
|
"grad_norm": 1.714976428918269, |
|
"learning_rate": 1.1101398914744565e-06, |
|
"loss": 0.3539, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.8105206655931294, |
|
"grad_norm": 1.9437004560812625, |
|
"learning_rate": 1.0518719654447896e-06, |
|
"loss": 0.3672, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.8158883521202361, |
|
"grad_norm": 1.8231928241724982, |
|
"learning_rate": 9.949945276530782e-07, |
|
"loss": 0.3623, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.821256038647343, |
|
"grad_norm": 1.768854699968928, |
|
"learning_rate": 9.395276097212841e-07, |
|
"loss": 0.3644, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.8266237251744498, |
|
"grad_norm": 1.7546007013596414, |
|
"learning_rate": 8.854907465014479e-07, |
|
"loss": 0.3612, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.8319914117015567, |
|
"grad_norm": 1.7931543962107859, |
|
"learning_rate": 8.329029691957124e-07, |
|
"loss": 0.3651, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.8373590982286635, |
|
"grad_norm": 1.8334730517443687, |
|
"learning_rate": 7.817827986537508e-07, |
|
"loss": 0.3572, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.8427267847557702, |
|
"grad_norm": 1.9211319549733363, |
|
"learning_rate": 7.321482388499096e-07, |
|
"loss": 0.3547, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.8480944712828771, |
|
"grad_norm": 1.8336015586916503, |
|
"learning_rate": 6.840167705424106e-07, |
|
"loss": 0.3591, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.8534621578099839, |
|
"grad_norm": 1.7443053171841683, |
|
"learning_rate": 6.374053451168166e-07, |
|
"loss": 0.3557, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.8588298443370908, |
|
"grad_norm": 1.8189610501841909, |
|
"learning_rate": 5.92330378615929e-07, |
|
"loss": 0.3629, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.8641975308641975, |
|
"grad_norm": 1.8372980732446615, |
|
"learning_rate": 5.488077459582425e-07, |
|
"loss": 0.355, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.8695652173913043, |
|
"grad_norm": 1.7662263372996434, |
|
"learning_rate": 5.068527753469604e-07, |
|
"loss": 0.3457, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.8749329039184112, |
|
"grad_norm": 1.884357397297957, |
|
"learning_rate": 4.664802428715753e-07, |
|
"loss": 0.3555, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.880300590445518, |
|
"grad_norm": 1.7648193673189987, |
|
"learning_rate": 4.2770436730388166e-07, |
|
"loss": 0.3533, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.8856682769726248, |
|
"grad_norm": 1.8350106858958837, |
|
"learning_rate": 3.9053880509028086e-07, |
|
"loss": 0.35, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.8910359634997316, |
|
"grad_norm": 1.860048919453916, |
|
"learning_rate": 3.549966455421305e-07, |
|
"loss": 0.3513, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.8964036500268384, |
|
"grad_norm": 1.7816738115824102, |
|
"learning_rate": 3.2109040622582186e-07, |
|
"loss": 0.3491, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.9017713365539453, |
|
"grad_norm": 1.7638165624694198, |
|
"learning_rate": 2.8883202855423676e-07, |
|
"loss": 0.3533, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.9071390230810521, |
|
"grad_norm": 1.8839660726806369, |
|
"learning_rate": 2.582328735811029e-07, |
|
"loss": 0.3553, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.9125067096081588, |
|
"grad_norm": 1.8407087550617782, |
|
"learning_rate": 2.2930371799975593e-07, |
|
"loss": 0.3627, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.9178743961352657, |
|
"grad_norm": 1.801970350663046, |
|
"learning_rate": 2.0205475034770606e-07, |
|
"loss": 0.3552, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.9232420826623725, |
|
"grad_norm": 1.685025912596546, |
|
"learning_rate": 1.7649556741833995e-07, |
|
"loss": 0.3432, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.9286097691894794, |
|
"grad_norm": 1.675226625798938, |
|
"learning_rate": 1.5263517088103862e-07, |
|
"loss": 0.3531, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.9339774557165862, |
|
"grad_norm": 1.7824714528651928, |
|
"learning_rate": 1.304819641108801e-07, |
|
"loss": 0.3408, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.9393451422436929, |
|
"grad_norm": 1.7589068127764829, |
|
"learning_rate": 1.1004374922906846e-07, |
|
"loss": 0.3436, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.9447128287707998, |
|
"grad_norm": 1.8043041133322923, |
|
"learning_rate": 9.132772435510362e-08, |
|
"loss": 0.3557, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.9500805152979066, |
|
"grad_norm": 1.8271300591957, |
|
"learning_rate": 7.434048107168523e-08, |
|
"loss": 0.3413, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.9554482018250134, |
|
"grad_norm": 1.8377353760781505, |
|
"learning_rate": 5.908800210322696e-08, |
|
"loss": 0.3477, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.9608158883521203, |
|
"grad_norm": 1.7930991363661997, |
|
"learning_rate": 4.55756592088058e-08, |
|
"loss": 0.3494, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.966183574879227, |
|
"grad_norm": 1.7208695719091334, |
|
"learning_rate": 3.3808211290284886e-08, |
|
"loss": 0.3537, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.9715512614063339, |
|
"grad_norm": 1.7750738665351566, |
|
"learning_rate": 2.378980271628195e-08, |
|
"loss": 0.3455, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.9769189479334407, |
|
"grad_norm": 1.7676906709440423, |
|
"learning_rate": 1.552396186256411e-08, |
|
"loss": 0.3522, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.9822866344605475, |
|
"grad_norm": 1.7765229197521504, |
|
"learning_rate": 9.013599869394096e-09, |
|
"loss": 0.3372, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.9876543209876543, |
|
"grad_norm": 1.7778750895811917, |
|
"learning_rate": 4.261009616257638e-09, |
|
"loss": 0.354, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.9930220075147611, |
|
"grad_norm": 1.7318086433483029, |
|
"learning_rate": 1.2678649143349485e-09, |
|
"loss": 0.3479, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.998389694041868, |
|
"grad_norm": 1.7750655216541529, |
|
"learning_rate": 3.5219917003948003e-11, |
|
"loss": 0.3526, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.9994632313472893, |
|
"eval_loss": 0.29608339071273804, |
|
"eval_runtime": 1.1911, |
|
"eval_samples_per_second": 1.679, |
|
"eval_steps_per_second": 0.84, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.9994632313472893, |
|
"step": 931, |
|
"total_flos": 194880114524160.0, |
|
"train_loss": 0.5082121759685102, |
|
"train_runtime": 20748.1716, |
|
"train_samples_per_second": 1.437, |
|
"train_steps_per_second": 0.045 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 931, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 194880114524160.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |