{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9997315436241611,
  "eval_steps": 500,
  "global_step": 931,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010738255033557046,
      "grad_norm": 22.873438223486826,
      "learning_rate": 1.0638297872340426e-07,
      "loss": 1.3013,
      "step": 1
    },
    {
      "epoch": 0.005369127516778523,
      "grad_norm": 21.159527745955735,
      "learning_rate": 5.319148936170213e-07,
      "loss": 1.3281,
      "step": 5
    },
    {
      "epoch": 0.010738255033557046,
      "grad_norm": 8.15243855646548,
      "learning_rate": 1.0638297872340427e-06,
      "loss": 1.1934,
      "step": 10
    },
    {
      "epoch": 0.016107382550335572,
      "grad_norm": 7.876837115931939,
      "learning_rate": 1.595744680851064e-06,
      "loss": 1.0403,
      "step": 15
    },
    {
      "epoch": 0.021476510067114093,
      "grad_norm": 3.0351073481295128,
      "learning_rate": 2.1276595744680853e-06,
      "loss": 0.9152,
      "step": 20
    },
    {
      "epoch": 0.026845637583892617,
      "grad_norm": 2.4261598045634685,
      "learning_rate": 2.6595744680851065e-06,
      "loss": 0.8749,
      "step": 25
    },
    {
      "epoch": 0.032214765100671144,
      "grad_norm": 2.216819042623432,
      "learning_rate": 3.191489361702128e-06,
      "loss": 0.8366,
      "step": 30
    },
    {
      "epoch": 0.03758389261744966,
      "grad_norm": 2.282450409349314,
      "learning_rate": 3.723404255319149e-06,
      "loss": 0.818,
      "step": 35
    },
    {
      "epoch": 0.042953020134228186,
      "grad_norm": 2.2660596456862643,
      "learning_rate": 4.255319148936171e-06,
      "loss": 0.7985,
      "step": 40
    },
    {
      "epoch": 0.04832214765100671,
      "grad_norm": 2.367380807091964,
      "learning_rate": 4.787234042553192e-06,
      "loss": 0.7886,
      "step": 45
    },
    {
      "epoch": 0.053691275167785234,
      "grad_norm": 2.385525049214499,
      "learning_rate": 5.319148936170213e-06,
      "loss": 0.7777,
      "step": 50
    },
    {
      "epoch": 0.05906040268456376,
      "grad_norm": 2.5856947730866446,
      "learning_rate": 5.851063829787235e-06,
      "loss": 0.7546,
      "step": 55
    },
    {
      "epoch": 0.06442953020134229,
      "grad_norm": 2.4085256812489084,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.7524,
      "step": 60
    },
    {
      "epoch": 0.0697986577181208,
      "grad_norm": 2.4401920680003157,
      "learning_rate": 6.914893617021278e-06,
      "loss": 0.7291,
      "step": 65
    },
    {
      "epoch": 0.07516778523489932,
      "grad_norm": 2.3253305278745215,
      "learning_rate": 7.446808510638298e-06,
      "loss": 0.7272,
      "step": 70
    },
    {
      "epoch": 0.08053691275167785,
      "grad_norm": 2.45450168917715,
      "learning_rate": 7.97872340425532e-06,
      "loss": 0.7127,
      "step": 75
    },
    {
      "epoch": 0.08590604026845637,
      "grad_norm": 2.127447895073466,
      "learning_rate": 8.510638297872341e-06,
      "loss": 0.7041,
      "step": 80
    },
    {
      "epoch": 0.0912751677852349,
      "grad_norm": 2.3028444099891705,
      "learning_rate": 9.042553191489362e-06,
      "loss": 0.7037,
      "step": 85
    },
    {
      "epoch": 0.09664429530201342,
      "grad_norm": 2.5309118167527616,
      "learning_rate": 9.574468085106385e-06,
      "loss": 0.6988,
      "step": 90
    },
    {
      "epoch": 0.10201342281879194,
      "grad_norm": 2.4219930767340743,
      "learning_rate": 9.999964780082996e-06,
      "loss": 0.6903,
      "step": 95
    },
    {
      "epoch": 0.10738255033557047,
      "grad_norm": 2.111757405385662,
      "learning_rate": 9.998732135085665e-06,
      "loss": 0.6801,
      "step": 100
    },
    {
      "epoch": 0.11275167785234899,
      "grad_norm": 2.2202581015134077,
      "learning_rate": 9.995738990383743e-06,
      "loss": 0.6907,
      "step": 105
    },
    {
      "epoch": 0.11812080536912752,
      "grad_norm": 2.626765418959801,
      "learning_rate": 9.990986400130607e-06,
      "loss": 0.686,
      "step": 110
    },
    {
      "epoch": 0.12348993288590604,
      "grad_norm": 2.2905312730653424,
      "learning_rate": 9.984476038137437e-06,
      "loss": 0.6687,
      "step": 115
    },
    {
      "epoch": 0.12885906040268458,
      "grad_norm": 2.190663676539029,
      "learning_rate": 9.97621019728372e-06,
      "loss": 0.6776,
      "step": 120
    },
    {
      "epoch": 0.1342281879194631,
      "grad_norm": 2.2515596629136825,
      "learning_rate": 9.966191788709716e-06,
      "loss": 0.6694,
      "step": 125
    },
    {
      "epoch": 0.1395973154362416,
      "grad_norm": 2.168375563832784,
      "learning_rate": 9.954424340791195e-06,
      "loss": 0.6752,
      "step": 130
    },
    {
      "epoch": 0.14496644295302014,
      "grad_norm": 2.088272443654766,
      "learning_rate": 9.940911997896774e-06,
      "loss": 0.6661,
      "step": 135
    },
    {
      "epoch": 0.15033557046979865,
      "grad_norm": 2.173874083902431,
      "learning_rate": 9.925659518928316e-06,
      "loss": 0.6563,
      "step": 140
    },
    {
      "epoch": 0.15570469798657718,
      "grad_norm": 2.0589658736370757,
      "learning_rate": 9.908672275644898e-06,
      "loss": 0.6607,
      "step": 145
    },
    {
      "epoch": 0.1610738255033557,
      "grad_norm": 2.147351094566772,
      "learning_rate": 9.889956250770933e-06,
      "loss": 0.6575,
      "step": 150
    },
    {
      "epoch": 0.16644295302013423,
      "grad_norm": 2.3349226051109584,
      "learning_rate": 9.86951803588912e-06,
      "loss": 0.6527,
      "step": 155
    },
    {
      "epoch": 0.17181208053691274,
      "grad_norm": 2.281471579219723,
      "learning_rate": 9.847364829118963e-06,
      "loss": 0.6544,
      "step": 160
    },
    {
      "epoch": 0.17718120805369128,
      "grad_norm": 2.1332428982031715,
      "learning_rate": 9.82350443258166e-06,
      "loss": 0.6479,
      "step": 165
    },
    {
      "epoch": 0.1825503355704698,
      "grad_norm": 2.0804446588419045,
      "learning_rate": 9.797945249652295e-06,
      "loss": 0.6439,
      "step": 170
    },
    {
      "epoch": 0.18791946308724833,
      "grad_norm": 2.0585417674642636,
      "learning_rate": 9.770696282000245e-06,
      "loss": 0.6321,
      "step": 175
    },
    {
      "epoch": 0.19328859060402684,
      "grad_norm": 2.1666332876300762,
      "learning_rate": 9.741767126418898e-06,
      "loss": 0.6336,
      "step": 180
    },
    {
      "epoch": 0.19865771812080538,
      "grad_norm": 2.0205631171866543,
      "learning_rate": 9.711167971445766e-06,
      "loss": 0.6306,
      "step": 185
    },
    {
      "epoch": 0.2040268456375839,
      "grad_norm": 2.4514209883366807,
      "learning_rate": 9.67890959377418e-06,
      "loss": 0.624,
      "step": 190
    },
    {
      "epoch": 0.20939597315436242,
      "grad_norm": 2.011077236989635,
      "learning_rate": 9.645003354457872e-06,
      "loss": 0.6176,
      "step": 195
    },
    {
      "epoch": 0.21476510067114093,
      "grad_norm": 2.157067702163037,
      "learning_rate": 9.60946119490972e-06,
      "loss": 0.6231,
      "step": 200
    },
    {
      "epoch": 0.22013422818791947,
      "grad_norm": 1.851876954712529,
      "learning_rate": 9.57229563269612e-06,
      "loss": 0.6203,
      "step": 205
    },
    {
      "epoch": 0.22550335570469798,
      "grad_norm": 1.9636046456639136,
      "learning_rate": 9.533519757128426e-06,
      "loss": 0.6186,
      "step": 210
    },
    {
      "epoch": 0.23087248322147652,
      "grad_norm": 2.374396960285375,
      "learning_rate": 9.49314722465304e-06,
      "loss": 0.6157,
      "step": 215
    },
    {
      "epoch": 0.23624161073825503,
      "grad_norm": 2.072084337879624,
      "learning_rate": 9.451192254041759e-06,
      "loss": 0.6157,
      "step": 220
    },
    {
      "epoch": 0.24161073825503357,
      "grad_norm": 2.0213326681347605,
      "learning_rate": 9.407669621384073e-06,
      "loss": 0.619,
      "step": 225
    },
    {
      "epoch": 0.24697986577181208,
      "grad_norm": 2.05541918839593,
      "learning_rate": 9.362594654883185e-06,
      "loss": 0.6043,
      "step": 230
    },
    {
      "epoch": 0.2523489932885906,
      "grad_norm": 1.9647313060735423,
      "learning_rate": 9.31598322945759e-06,
      "loss": 0.5877,
      "step": 235
    },
    {
      "epoch": 0.25771812080536916,
      "grad_norm": 2.321446787508396,
      "learning_rate": 9.267851761150092e-06,
      "loss": 0.6038,
      "step": 240
    },
    {
      "epoch": 0.26308724832214764,
      "grad_norm": 1.954968634006319,
      "learning_rate": 9.218217201346251e-06,
      "loss": 0.5973,
      "step": 245
    },
    {
      "epoch": 0.2684563758389262,
      "grad_norm": 1.9588200511779104,
      "learning_rate": 9.167097030804289e-06,
      "loss": 0.5952,
      "step": 250
    },
    {
      "epoch": 0.2738255033557047,
      "grad_norm": 1.978200194451611,
      "learning_rate": 9.114509253498554e-06,
      "loss": 0.5956,
      "step": 255
    },
    {
      "epoch": 0.2791946308724832,
      "grad_norm": 1.9082868769634187,
      "learning_rate": 9.060472390278717e-06,
      "loss": 0.5834,
      "step": 260
    },
    {
      "epoch": 0.28456375838926173,
      "grad_norm": 1.9947479259010255,
      "learning_rate": 9.005005472346923e-06,
      "loss": 0.5878,
      "step": 265
    },
    {
      "epoch": 0.28993288590604027,
      "grad_norm": 2.0101949394033443,
      "learning_rate": 8.948128034555212e-06,
      "loss": 0.5815,
      "step": 270
    },
    {
      "epoch": 0.2953020134228188,
      "grad_norm": 1.855173856877532,
      "learning_rate": 8.889860108525544e-06,
      "loss": 0.5749,
      "step": 275
    },
    {
      "epoch": 0.3006711409395973,
      "grad_norm": 1.8947595029633266,
      "learning_rate": 8.83022221559489e-06,
      "loss": 0.5597,
      "step": 280
    },
    {
      "epoch": 0.30604026845637583,
      "grad_norm": 2.0225835192845825,
      "learning_rate": 8.76923535958783e-06,
      "loss": 0.5683,
      "step": 285
    },
    {
      "epoch": 0.31140939597315437,
      "grad_norm": 2.047769165835403,
      "learning_rate": 8.706921019419237e-06,
      "loss": 0.5693,
      "step": 290
    },
    {
      "epoch": 0.3167785234899329,
      "grad_norm": 1.996841996415469,
      "learning_rate": 8.643301141529619e-06,
      "loss": 0.576,
      "step": 295
    },
    {
      "epoch": 0.3221476510067114,
      "grad_norm": 2.085317513003649,
      "learning_rate": 8.578398132155846e-06,
      "loss": 0.5586,
      "step": 300
    },
    {
      "epoch": 0.3275167785234899,
      "grad_norm": 2.2696870964241342,
      "learning_rate": 8.512234849439887e-06,
      "loss": 0.5487,
      "step": 305
    },
    {
      "epoch": 0.33288590604026846,
      "grad_norm": 2.295292164613049,
      "learning_rate": 8.444834595378434e-06,
      "loss": 0.5657,
      "step": 310
    },
    {
      "epoch": 0.338255033557047,
      "grad_norm": 1.9535177447525518,
      "learning_rate": 8.376221107616187e-06,
      "loss": 0.5492,
      "step": 315
    },
    {
      "epoch": 0.3436241610738255,
      "grad_norm": 1.935697674348923,
      "learning_rate": 8.306418551085707e-06,
      "loss": 0.5502,
      "step": 320
    },
    {
      "epoch": 0.348993288590604,
      "grad_norm": 1.912637650045647,
      "learning_rate": 8.23545150949679e-06,
      "loss": 0.5659,
      "step": 325
    },
    {
      "epoch": 0.35436241610738256,
      "grad_norm": 2.0813900852367437,
      "learning_rate": 8.163344976678342e-06,
      "loss": 0.5512,
      "step": 330
    },
    {
      "epoch": 0.3597315436241611,
      "grad_norm": 1.9178576647529397,
      "learning_rate": 8.090124347775837e-06,
      "loss": 0.5399,
      "step": 335
    },
    {
      "epoch": 0.3651006711409396,
      "grad_norm": 1.9921209746990098,
      "learning_rate": 8.0158154103074e-06,
      "loss": 0.5335,
      "step": 340
    },
    {
      "epoch": 0.3704697986577181,
      "grad_norm": 2.0251179740048686,
      "learning_rate": 7.940444335081733e-06,
      "loss": 0.5412,
      "step": 345
    },
    {
      "epoch": 0.37583892617449666,
      "grad_norm": 2.0237480393180536,
      "learning_rate": 7.864037666981037e-06,
      "loss": 0.5318,
      "step": 350
    },
    {
      "epoch": 0.3812080536912752,
      "grad_norm": 2.062903496053255,
      "learning_rate": 7.786622315612182e-06,
      "loss": 0.5391,
      "step": 355
    },
    {
      "epoch": 0.3865771812080537,
      "grad_norm": 1.931828289441635,
      "learning_rate": 7.708225545829446e-06,
      "loss": 0.525,
      "step": 360
    },
    {
      "epoch": 0.3919463087248322,
      "grad_norm": 2.004972595205748,
      "learning_rate": 7.6288749681321115e-06,
      "loss": 0.5173,
      "step": 365
    },
    {
      "epoch": 0.39731543624161075,
      "grad_norm": 2.0462367199517937,
      "learning_rate": 7.548598528940354e-06,
      "loss": 0.5154,
      "step": 370
    },
    {
      "epoch": 0.40268456375838924,
      "grad_norm": 2.3727598847724636,
      "learning_rate": 7.4674245007528135e-06,
      "loss": 0.5279,
      "step": 375
    },
    {
      "epoch": 0.4080536912751678,
      "grad_norm": 1.965518745162171,
      "learning_rate": 7.385381472189321e-06,
      "loss": 0.5255,
      "step": 380
    },
    {
      "epoch": 0.4134228187919463,
      "grad_norm": 2.0728393681212225,
      "learning_rate": 7.302498337922293e-06,
      "loss": 0.5152,
      "step": 385
    },
    {
      "epoch": 0.41879194630872485,
      "grad_norm": 2.0074609174878906,
      "learning_rate": 7.218804288500343e-06,
      "loss": 0.5224,
      "step": 390
    },
    {
      "epoch": 0.42416107382550333,
      "grad_norm": 1.9472600550525063,
      "learning_rate": 7.134328800067684e-06,
      "loss": 0.5209,
      "step": 395
    },
    {
      "epoch": 0.42953020134228187,
      "grad_norm": 1.9949043902713617,
      "learning_rate": 7.049101623982938e-06,
      "loss": 0.5082,
      "step": 400
    },
    {
      "epoch": 0.4348993288590604,
      "grad_norm": 1.834654212769176,
      "learning_rate": 6.963152776341044e-06,
      "loss": 0.5149,
      "step": 405
    },
    {
      "epoch": 0.44026845637583895,
      "grad_norm": 2.0502871429630822,
      "learning_rate": 6.876512527401897e-06,
      "loss": 0.5131,
      "step": 410
    },
    {
      "epoch": 0.44563758389261743,
      "grad_norm": 2.000365576779467,
      "learning_rate": 6.789211390929497e-06,
      "loss": 0.5016,
      "step": 415
    },
    {
      "epoch": 0.45100671140939597,
      "grad_norm": 2.0157623562806353,
      "learning_rate": 6.701280113445324e-06,
      "loss": 0.4958,
      "step": 420
    },
    {
      "epoch": 0.4563758389261745,
      "grad_norm": 1.970116584111867,
      "learning_rate": 6.6127496633997475e-06,
      "loss": 0.5003,
      "step": 425
    },
    {
      "epoch": 0.46174496644295304,
      "grad_norm": 1.9483867090820592,
      "learning_rate": 6.523651220265269e-06,
      "loss": 0.4942,
      "step": 430
    },
    {
      "epoch": 0.4671140939597315,
      "grad_norm": 2.099467823611523,
      "learning_rate": 6.434016163555452e-06,
      "loss": 0.4952,
      "step": 435
    },
    {
      "epoch": 0.47248322147651006,
      "grad_norm": 1.9558069283541033,
      "learning_rate": 6.343876061773385e-06,
      "loss": 0.4924,
      "step": 440
    },
    {
      "epoch": 0.4778523489932886,
      "grad_norm": 1.9847822609941113,
      "learning_rate": 6.2532626612936035e-06,
      "loss": 0.4823,
      "step": 445
    },
    {
      "epoch": 0.48322147651006714,
      "grad_norm": 2.0688837915005993,
      "learning_rate": 6.162207875181354e-06,
      "loss": 0.4913,
      "step": 450
    },
    {
      "epoch": 0.4885906040268456,
      "grad_norm": 1.9342878293544008,
      "learning_rate": 6.070743771953157e-06,
      "loss": 0.4769,
      "step": 455
    },
    {
      "epoch": 0.49395973154362416,
      "grad_norm": 2.043215000887027,
      "learning_rate": 5.978902564282616e-06,
      "loss": 0.4752,
      "step": 460
    },
    {
      "epoch": 0.4993288590604027,
      "grad_norm": 1.8676181620631764,
      "learning_rate": 5.886716597655472e-06,
      "loss": 0.4701,
      "step": 465
    },
    {
      "epoch": 0.5046979865771812,
      "grad_norm": 1.9595541482385936,
      "learning_rate": 5.7942183389778536e-06,
      "loss": 0.4696,
      "step": 470
    },
    {
      "epoch": 0.5100671140939598,
      "grad_norm": 1.825167080567174,
      "learning_rate": 5.701440365141799e-06,
      "loss": 0.4625,
      "step": 475
    },
    {
      "epoch": 0.5154362416107383,
      "grad_norm": 1.9786129201297613,
      "learning_rate": 5.608415351552014e-06,
      "loss": 0.4596,
      "step": 480
    },
    {
      "epoch": 0.5208053691275167,
      "grad_norm": 1.9920386733808053,
      "learning_rate": 5.515176060617945e-06,
      "loss": 0.4684,
      "step": 485
    },
    {
      "epoch": 0.5261744966442953,
      "grad_norm": 1.9116799488281817,
      "learning_rate": 5.421755330215223e-06,
      "loss": 0.4659,
      "step": 490
    },
    {
      "epoch": 0.5315436241610738,
      "grad_norm": 1.948129234992053,
      "learning_rate": 5.328186062120509e-06,
      "loss": 0.4436,
      "step": 495
    },
    {
      "epoch": 0.5369127516778524,
      "grad_norm": 2.078748185289108,
      "learning_rate": 5.23450121042383e-06,
      "loss": 0.4548,
      "step": 500
    },
    {
      "epoch": 0.5422818791946309,
      "grad_norm": 1.9444657978297817,
      "learning_rate": 5.140733769922525e-06,
      "loss": 0.4533,
      "step": 505
    },
    {
      "epoch": 0.5476510067114094,
      "grad_norm": 1.9186109778492548,
      "learning_rate": 5.0469167645008245e-06,
      "loss": 0.4484,
      "step": 510
    },
    {
      "epoch": 0.553020134228188,
      "grad_norm": 2.115910772434904,
      "learning_rate": 4.953083235499177e-06,
      "loss": 0.4557,
      "step": 515
    },
    {
      "epoch": 0.5583892617449664,
      "grad_norm": 1.9469832075906568,
      "learning_rate": 4.859266230077474e-06,
      "loss": 0.4439,
      "step": 520
    },
    {
      "epoch": 0.5637583892617449,
      "grad_norm": 1.90470271131317,
      "learning_rate": 4.7654987895761705e-06,
      "loss": 0.4431,
      "step": 525
    },
    {
      "epoch": 0.5691275167785235,
      "grad_norm": 1.941709380965833,
      "learning_rate": 4.671813937879494e-06,
      "loss": 0.4517,
      "step": 530
    },
    {
      "epoch": 0.574496644295302,
      "grad_norm": 1.9633282455666656,
      "learning_rate": 4.5782446697847775e-06,
      "loss": 0.4469,
      "step": 535
    },
    {
      "epoch": 0.5798657718120805,
      "grad_norm": 1.8845576495595482,
      "learning_rate": 4.484823939382056e-06,
      "loss": 0.4316,
      "step": 540
    },
    {
      "epoch": 0.5852348993288591,
      "grad_norm": 1.8811789086409214,
      "learning_rate": 4.391584648447989e-06,
      "loss": 0.4423,
      "step": 545
    },
    {
      "epoch": 0.5906040268456376,
      "grad_norm": 1.9555309537231733,
      "learning_rate": 4.298559634858202e-06,
      "loss": 0.4368,
      "step": 550
    },
    {
      "epoch": 0.5959731543624162,
      "grad_norm": 2.0555513727023578,
      "learning_rate": 4.205781661022146e-06,
      "loss": 0.4298,
      "step": 555
    },
    {
      "epoch": 0.6013422818791946,
      "grad_norm": 1.8777742161685087,
      "learning_rate": 4.1132834023445304e-06,
      "loss": 0.426,
      "step": 560
    },
    {
      "epoch": 0.6067114093959731,
      "grad_norm": 1.939266556920773,
      "learning_rate": 4.021097435717386e-06,
      "loss": 0.4216,
      "step": 565
    },
    {
      "epoch": 0.6120805369127517,
      "grad_norm": 1.9261147196325419,
      "learning_rate": 3.929256228046845e-06,
      "loss": 0.4285,
      "step": 570
    },
    {
      "epoch": 0.6174496644295302,
      "grad_norm": 1.805107172046572,
      "learning_rate": 3.837792124818647e-06,
      "loss": 0.416,
      "step": 575
    },
    {
      "epoch": 0.6228187919463087,
      "grad_norm": 1.8704231334410426,
      "learning_rate": 3.7467373387063973e-06,
      "loss": 0.4191,
      "step": 580
    },
    {
      "epoch": 0.6281879194630873,
      "grad_norm": 1.9935959039830586,
      "learning_rate": 3.656123938226618e-06,
      "loss": 0.4183,
      "step": 585
    },
    {
      "epoch": 0.6335570469798658,
      "grad_norm": 1.9248727434625008,
      "learning_rate": 3.5659838364445505e-06,
      "loss": 0.4239,
      "step": 590
    },
    {
      "epoch": 0.6389261744966444,
      "grad_norm": 1.9448194253706668,
      "learning_rate": 3.476348779734732e-06,
      "loss": 0.4118,
      "step": 595
    },
    {
      "epoch": 0.6442953020134228,
      "grad_norm": 1.8244567974798083,
      "learning_rate": 3.387250336600254e-06,
      "loss": 0.4108,
      "step": 600
    },
    {
      "epoch": 0.6496644295302013,
      "grad_norm": 1.8766359486725541,
      "learning_rate": 3.298719886554677e-06,
      "loss": 0.4094,
      "step": 605
    },
    {
      "epoch": 0.6550335570469799,
      "grad_norm": 1.9861783019472632,
      "learning_rate": 3.2107886090705035e-06,
      "loss": 0.4024,
      "step": 610
    },
    {
      "epoch": 0.6604026845637584,
      "grad_norm": 1.8918824877700715,
      "learning_rate": 3.1234874725981045e-06,
      "loss": 0.4086,
      "step": 615
    },
    {
      "epoch": 0.6657718120805369,
      "grad_norm": 2.0714978550264003,
      "learning_rate": 3.036847223658958e-06,
      "loss": 0.396,
      "step": 620
    },
    {
      "epoch": 0.6711409395973155,
      "grad_norm": 1.9560452224763145,
      "learning_rate": 2.950898376017064e-06,
      "loss": 0.4035,
      "step": 625
    },
    {
      "epoch": 0.676510067114094,
      "grad_norm": 2.043363405835207,
      "learning_rate": 2.865671199932318e-06,
      "loss": 0.4068,
      "step": 630
    },
    {
      "epoch": 0.6818791946308724,
      "grad_norm": 1.8816775523611597,
      "learning_rate": 2.781195711499658e-06,
      "loss": 0.393,
      "step": 635
    },
    {
      "epoch": 0.687248322147651,
      "grad_norm": 1.8488910820957973,
      "learning_rate": 2.697501662077707e-06,
      "loss": 0.4013,
      "step": 640
    },
    {
      "epoch": 0.6926174496644295,
      "grad_norm": 1.862219172494916,
      "learning_rate": 2.6146185278106807e-06,
      "loss": 0.3909,
      "step": 645
    },
    {
      "epoch": 0.697986577181208,
      "grad_norm": 2.012779150472371,
      "learning_rate": 2.5325754992471886e-06,
      "loss": 0.3933,
      "step": 650
    },
    {
      "epoch": 0.7033557046979866,
      "grad_norm": 1.925776837174688,
      "learning_rate": 2.4514014710596467e-06,
      "loss": 0.3794,
      "step": 655
    },
    {
      "epoch": 0.7087248322147651,
      "grad_norm": 1.8323121691067996,
      "learning_rate": 2.3711250318678906e-06,
      "loss": 0.3906,
      "step": 660
    },
    {
      "epoch": 0.7140939597315437,
      "grad_norm": 1.8449642618290978,
      "learning_rate": 2.2917744541705544e-06,
      "loss": 0.3804,
      "step": 665
    },
    {
      "epoch": 0.7194630872483222,
      "grad_norm": 1.8385577849219443,
      "learning_rate": 2.2133776843878185e-06,
      "loss": 0.3896,
      "step": 670
    },
    {
      "epoch": 0.7248322147651006,
      "grad_norm": 1.8863509359096609,
      "learning_rate": 2.1359623330189655e-06,
      "loss": 0.3906,
      "step": 675
    },
    {
      "epoch": 0.7302013422818792,
      "grad_norm": 1.9412331601540616,
      "learning_rate": 2.059555664918268e-06,
      "loss": 0.3872,
      "step": 680
    },
    {
      "epoch": 0.7355704697986577,
      "grad_norm": 1.795387975158009,
      "learning_rate": 1.9841845896926022e-06,
      "loss": 0.3798,
      "step": 685
    },
    {
      "epoch": 0.7409395973154362,
      "grad_norm": 1.843173352636606,
      "learning_rate": 1.9098756522241634e-06,
      "loss": 0.3788,
      "step": 690
    },
    {
      "epoch": 0.7463087248322148,
      "grad_norm": 1.912174039399666,
      "learning_rate": 1.8366550233216584e-06,
      "loss": 0.3695,
      "step": 695
    },
    {
      "epoch": 0.7516778523489933,
      "grad_norm": 1.8237677647091424,
      "learning_rate": 1.7645484905032129e-06,
      "loss": 0.3933,
      "step": 700
    },
    {
      "epoch": 0.7570469798657719,
      "grad_norm": 1.7822160523413584,
      "learning_rate": 1.6935814489142937e-06,
      "loss": 0.3781,
      "step": 705
    },
    {
      "epoch": 0.7624161073825504,
      "grad_norm": 1.8878271721301074,
      "learning_rate": 1.6237788923838149e-06,
      "loss": 0.3751,
      "step": 710
    },
    {
      "epoch": 0.7677852348993288,
      "grad_norm": 1.9082160841545055,
      "learning_rate": 1.555165404621567e-06,
      "loss": 0.3728,
      "step": 715
    },
    {
      "epoch": 0.7731543624161074,
      "grad_norm": 1.816281209572928,
      "learning_rate": 1.487765150560116e-06,
      "loss": 0.3777,
      "step": 720
    },
    {
      "epoch": 0.7785234899328859,
      "grad_norm": 1.91354765663735,
      "learning_rate": 1.4216018678441558e-06,
      "loss": 0.3699,
      "step": 725
    },
    {
      "epoch": 0.7838926174496644,
      "grad_norm": 1.9355520287879238,
      "learning_rate": 1.3566988584703817e-06,
      "loss": 0.371,
      "step": 730
    },
    {
      "epoch": 0.789261744966443,
      "grad_norm": 1.7280419120289419,
      "learning_rate": 1.293078980580766e-06,
      "loss": 0.3764,
      "step": 735
    },
    {
      "epoch": 0.7946308724832215,
      "grad_norm": 1.9044555736805637,
      "learning_rate": 1.2307646404121692e-06,
      "loss": 0.3664,
      "step": 740
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.891432724737264,
      "learning_rate": 1.1697777844051105e-06,
      "loss": 0.3664,
      "step": 745
    },
    {
      "epoch": 0.8053691275167785,
      "grad_norm": 1.6814136447042585,
      "learning_rate": 1.1101398914744565e-06,
      "loss": 0.3574,
      "step": 750
    },
    {
      "epoch": 0.810738255033557,
      "grad_norm": 1.8225153893838018,
      "learning_rate": 1.0518719654447896e-06,
      "loss": 0.3684,
      "step": 755
    },
    {
      "epoch": 0.8161073825503355,
      "grad_norm": 1.8275415575673986,
      "learning_rate": 9.949945276530782e-07,
      "loss": 0.3603,
      "step": 760
    },
    {
      "epoch": 0.8214765100671141,
      "grad_norm": 1.853253611406343,
      "learning_rate": 9.395276097212841e-07,
      "loss": 0.3651,
      "step": 765
    },
    {
      "epoch": 0.8268456375838926,
      "grad_norm": 1.835073747374209,
      "learning_rate": 8.854907465014479e-07,
      "loss": 0.3677,
      "step": 770
    },
    {
      "epoch": 0.8322147651006712,
      "grad_norm": 1.7695339228605491,
      "learning_rate": 8.329029691957124e-07,
      "loss": 0.3606,
      "step": 775
    },
    {
      "epoch": 0.8375838926174497,
      "grad_norm": 1.749975183558492,
      "learning_rate": 7.817827986537508e-07,
      "loss": 0.3511,
      "step": 780
    },
    {
      "epoch": 0.8429530201342282,
      "grad_norm": 1.9426041133261787,
      "learning_rate": 7.321482388499096e-07,
      "loss": 0.358,
      "step": 785
    },
    {
      "epoch": 0.8483221476510067,
      "grad_norm": 1.8171481735775137,
      "learning_rate": 6.840167705424106e-07,
      "loss": 0.3544,
      "step": 790
    },
    {
      "epoch": 0.8536912751677852,
      "grad_norm": 1.8412832647944588,
      "learning_rate": 6.374053451168166e-07,
      "loss": 0.3661,
      "step": 795
    },
    {
      "epoch": 0.8590604026845637,
      "grad_norm": 1.8350930759774366,
      "learning_rate": 5.92330378615929e-07,
      "loss": 0.3537,
      "step": 800
    },
    {
      "epoch": 0.8644295302013423,
      "grad_norm": 1.723224003576655,
      "learning_rate": 5.488077459582425e-07,
      "loss": 0.3553,
      "step": 805
    },
    {
      "epoch": 0.8697986577181208,
      "grad_norm": 1.8473917985800616,
      "learning_rate": 5.068527753469604e-07,
      "loss": 0.3603,
      "step": 810
    },
    {
      "epoch": 0.8751677852348994,
      "grad_norm": 1.8325217665657294,
      "learning_rate": 4.664802428715753e-07,
      "loss": 0.348,
      "step": 815
    },
    {
      "epoch": 0.8805369127516779,
      "grad_norm": 1.7346304200820148,
      "learning_rate": 4.2770436730388166e-07,
      "loss": 0.3498,
      "step": 820
    },
    {
      "epoch": 0.8859060402684564,
      "grad_norm": 1.7575151506469913,
      "learning_rate": 3.9053880509028086e-07,
      "loss": 0.3616,
      "step": 825
    },
    {
      "epoch": 0.8912751677852349,
      "grad_norm": 1.800873655206166,
      "learning_rate": 3.549966455421305e-07,
      "loss": 0.3507,
      "step": 830
    },
    {
      "epoch": 0.8966442953020134,
      "grad_norm": 1.710054226395714,
      "learning_rate": 3.2109040622582186e-07,
      "loss": 0.3527,
      "step": 835
    },
    {
      "epoch": 0.9020134228187919,
      "grad_norm": 1.6705083298206793,
      "learning_rate": 2.8883202855423676e-07,
      "loss": 0.3516,
      "step": 840
    },
    {
      "epoch": 0.9073825503355705,
      "grad_norm": 1.7474504216215188,
      "learning_rate": 2.582328735811029e-07,
      "loss": 0.3452,
      "step": 845
    },
    {
      "epoch": 0.912751677852349,
      "grad_norm": 1.7463913866112468,
      "learning_rate": 2.2930371799975593e-07,
      "loss": 0.3471,
      "step": 850
    },
    {
      "epoch": 0.9181208053691275,
      "grad_norm": 1.7297820385087201,
      "learning_rate": 2.0205475034770606e-07,
      "loss": 0.3468,
      "step": 855
    },
    {
      "epoch": 0.9234899328859061,
      "grad_norm": 1.7482101229826381,
      "learning_rate": 1.7649556741833995e-07,
      "loss": 0.3478,
      "step": 860
    },
    {
      "epoch": 0.9288590604026845,
      "grad_norm": 1.8150110385873126,
      "learning_rate": 1.5263517088103862e-07,
      "loss": 0.3586,
      "step": 865
    },
    {
      "epoch": 0.934228187919463,
      "grad_norm": 1.8153438746846808,
      "learning_rate": 1.304819641108801e-07,
      "loss": 0.3555,
      "step": 870
    },
    {
      "epoch": 0.9395973154362416,
      "grad_norm": 1.8087296470250676,
      "learning_rate": 1.1004374922906846e-07,
      "loss": 0.3457,
      "step": 875
    },
    {
      "epoch": 0.9449664429530201,
      "grad_norm": 1.7195355098862968,
      "learning_rate": 9.132772435510362e-08,
      "loss": 0.349,
      "step": 880
    },
    {
      "epoch": 0.9503355704697987,
      "grad_norm": 1.7647364179507874,
      "learning_rate": 7.434048107168523e-08,
      "loss": 0.3466,
      "step": 885
    },
    {
      "epoch": 0.9557046979865772,
      "grad_norm": 1.9088484806569315,
      "learning_rate": 5.908800210322696e-08,
      "loss": 0.3411,
      "step": 890
    },
    {
      "epoch": 0.9610738255033557,
      "grad_norm": 1.7271817479075642,
      "learning_rate": 4.55756592088058e-08,
      "loss": 0.3484,
      "step": 895
    },
    {
      "epoch": 0.9664429530201343,
      "grad_norm": 1.7372584442728163,
      "learning_rate": 3.3808211290284886e-08,
      "loss": 0.3433,
      "step": 900
    },
    {
      "epoch": 0.9718120805369127,
      "grad_norm": 1.808380468788094,
      "learning_rate": 2.378980271628195e-08,
      "loss": 0.3528,
      "step": 905
    },
    {
      "epoch": 0.9771812080536912,
      "grad_norm": 1.7605143851131846,
      "learning_rate": 1.552396186256411e-08,
      "loss": 0.3445,
      "step": 910
    },
    {
      "epoch": 0.9825503355704698,
      "grad_norm": 1.8047618073453124,
      "learning_rate": 9.013599869394096e-09,
      "loss": 0.3497,
      "step": 915
    },
    {
      "epoch": 0.9879194630872483,
      "grad_norm": 1.7837820069864043,
      "learning_rate": 4.261009616257638e-09,
      "loss": 0.3452,
      "step": 920
    },
    {
      "epoch": 0.9932885906040269,
      "grad_norm": 1.7898194618346237,
      "learning_rate": 1.2678649143349485e-09,
      "loss": 0.3497,
      "step": 925
    },
    {
      "epoch": 0.9986577181208054,
      "grad_norm": 1.8036764007244224,
      "learning_rate": 3.5219917003948003e-11,
      "loss": 0.346,
      "step": 930
    },
    {
      "epoch": 0.9997315436241611,
      "eval_loss": 0.2977491617202759,
      "eval_runtime": 1.3637,
      "eval_samples_per_second": 1.467,
      "eval_steps_per_second": 0.733,
      "step": 931
    },
    {
      "epoch": 0.9997315436241611,
      "step": 931,
      "total_flos": 194880114524160.0,
      "train_loss": 0.0,
      "train_runtime": 0.0085,
      "train_samples_per_second": 3497894.361,
      "train_steps_per_second": 109279.854
    }
  ],
  "logging_steps": 5,
  "max_steps": 931,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 194880114524160.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}