{ "best_metric": null, "best_model_checkpoint": null, "epoch": 0.9997315436241611, "eval_steps": 500, "global_step": 931, "is_hyper_param_search": false, "is_local_process_zero": true, "is_world_process_zero": true, "log_history": [ { "epoch": 0.0010738255033557046, "grad_norm": 23.23349641925915, "learning_rate": 1.0638297872340426e-07, "loss": 1.3224, "step": 1 }, { "epoch": 0.005369127516778523, "grad_norm": 20.864176480583073, "learning_rate": 5.319148936170213e-07, "loss": 1.3084, "step": 5 }, { "epoch": 0.010738255033557046, "grad_norm": 8.533873275523032, "learning_rate": 1.0638297872340427e-06, "loss": 1.1987, "step": 10 }, { "epoch": 0.016107382550335572, "grad_norm": 7.665296900301775, "learning_rate": 1.595744680851064e-06, "loss": 1.0267, "step": 15 }, { "epoch": 0.021476510067114093, "grad_norm": 2.9761601522815546, "learning_rate": 2.1276595744680853e-06, "loss": 0.9131, "step": 20 }, { "epoch": 0.026845637583892617, "grad_norm": 2.433875752280275, "learning_rate": 2.6595744680851065e-06, "loss": 0.8629, "step": 25 }, { "epoch": 0.032214765100671144, "grad_norm": 2.309359589452698, "learning_rate": 3.191489361702128e-06, "loss": 0.8432, "step": 30 }, { "epoch": 0.03758389261744966, "grad_norm": 2.3739666896282, "learning_rate": 3.723404255319149e-06, "loss": 0.8237, "step": 35 }, { "epoch": 0.042953020134228186, "grad_norm": 2.411414400789539, "learning_rate": 4.255319148936171e-06, "loss": 0.8004, "step": 40 }, { "epoch": 0.04832214765100671, "grad_norm": 2.2779093841935176, "learning_rate": 4.787234042553192e-06, "loss": 0.7798, "step": 45 }, { "epoch": 0.053691275167785234, "grad_norm": 2.390464966558602, "learning_rate": 5.319148936170213e-06, "loss": 0.777, "step": 50 }, { "epoch": 0.05906040268456376, "grad_norm": 2.30705290045996, "learning_rate": 5.851063829787235e-06, "loss": 0.7534, "step": 55 }, { "epoch": 0.06442953020134229, "grad_norm": 2.432088997553532, "learning_rate": 6.382978723404256e-06, "loss": 0.7363, "step": 60 }, { "epoch": 0.0697986577181208, "grad_norm": 2.231418575279909, "learning_rate": 6.914893617021278e-06, "loss": 0.7258, "step": 65 }, { "epoch": 0.07516778523489932, "grad_norm": 2.3360484244386015, "learning_rate": 7.446808510638298e-06, "loss": 0.7239, "step": 70 }, { "epoch": 0.08053691275167785, "grad_norm": 2.2583495209303805, "learning_rate": 7.97872340425532e-06, "loss": 0.7158, "step": 75 }, { "epoch": 0.08590604026845637, "grad_norm": 2.166632602977575, "learning_rate": 8.510638297872341e-06, "loss": 0.698, "step": 80 }, { "epoch": 0.0912751677852349, "grad_norm": 2.261852982800451, "learning_rate": 9.042553191489362e-06, "loss": 0.6978, "step": 85 }, { "epoch": 0.09664429530201342, "grad_norm": 2.2449184892395593, "learning_rate": 9.574468085106385e-06, "loss": 0.6914, "step": 90 }, { "epoch": 0.10201342281879194, "grad_norm": 2.6796428545722906, "learning_rate": 9.999964780082996e-06, "loss": 0.6925, "step": 95 }, { "epoch": 0.10738255033557047, "grad_norm": 2.445223053748917, "learning_rate": 9.998732135085665e-06, "loss": 0.6894, "step": 100 }, { "epoch": 0.11275167785234899, "grad_norm": 2.101448463042792, "learning_rate": 9.995738990383743e-06, "loss": 0.6842, "step": 105 }, { "epoch": 0.11812080536912752, "grad_norm": 2.375756679579466, "learning_rate": 9.990986400130607e-06, "loss": 0.6881, "step": 110 }, { "epoch": 0.12348993288590604, "grad_norm": 2.0584289730843675, "learning_rate": 9.984476038137437e-06, "loss": 0.6789, "step": 115 }, { "epoch": 0.12885906040268458, "grad_norm": 2.1097466944401946, "learning_rate": 
9.97621019728372e-06, "loss": 0.6794, "step": 120 }, { "epoch": 0.1342281879194631, "grad_norm": 2.1000573026325813, "learning_rate": 9.966191788709716e-06, "loss": 0.6622, "step": 125 }, { "epoch": 0.1395973154362416, "grad_norm": 2.21126026156604, "learning_rate": 9.954424340791195e-06, "loss": 0.6659, "step": 130 }, { "epoch": 0.14496644295302014, "grad_norm": 2.233783388990834, "learning_rate": 9.940911997896774e-06, "loss": 0.6708, "step": 135 }, { "epoch": 0.15033557046979865, "grad_norm": 2.3220703167361068, "learning_rate": 9.925659518928316e-06, "loss": 0.6546, "step": 140 }, { "epoch": 0.15570469798657718, "grad_norm": 2.1406068535723626, "learning_rate": 9.908672275644898e-06, "loss": 0.6606, "step": 145 }, { "epoch": 0.1610738255033557, "grad_norm": 1.9796338120208212, "learning_rate": 9.889956250770933e-06, "loss": 0.6543, "step": 150 }, { "epoch": 0.16644295302013423, "grad_norm": 2.038004062758435, "learning_rate": 9.86951803588912e-06, "loss": 0.639, "step": 155 }, { "epoch": 0.17181208053691274, "grad_norm": 1.9023679569925318, "learning_rate": 9.847364829118963e-06, "loss": 0.6394, "step": 160 }, { "epoch": 0.17718120805369128, "grad_norm": 1.908984775733616, "learning_rate": 9.82350443258166e-06, "loss": 0.6449, "step": 165 }, { "epoch": 0.1825503355704698, "grad_norm": 2.010057009992793, "learning_rate": 9.797945249652295e-06, "loss": 0.6417, "step": 170 }, { "epoch": 0.18791946308724833, "grad_norm": 2.2539350542085006, "learning_rate": 9.770696282000245e-06, "loss": 0.6333, "step": 175 }, { "epoch": 0.19328859060402684, "grad_norm": 1.9065312726735626, "learning_rate": 9.741767126418898e-06, "loss": 0.6441, "step": 180 }, { "epoch": 0.19865771812080538, "grad_norm": 2.0928470931750023, "learning_rate": 9.711167971445766e-06, "loss": 0.6213, "step": 185 }, { "epoch": 0.2040268456375839, "grad_norm": 2.006004562606479, "learning_rate": 9.67890959377418e-06, "loss": 0.6278, "step": 190 }, { "epoch": 0.20939597315436242, "grad_norm": 1.9301686303505856, "learning_rate": 9.645003354457872e-06, "loss": 0.6325, "step": 195 }, { "epoch": 0.21476510067114093, "grad_norm": 2.038658207178946, "learning_rate": 9.60946119490972e-06, "loss": 0.6361, "step": 200 }, { "epoch": 0.22013422818791947, "grad_norm": 2.2159213381686604, "learning_rate": 9.57229563269612e-06, "loss": 0.619, "step": 205 }, { "epoch": 0.22550335570469798, "grad_norm": 2.2179012384166685, "learning_rate": 9.533519757128426e-06, "loss": 0.6296, "step": 210 }, { "epoch": 0.23087248322147652, "grad_norm": 1.9118609526745118, "learning_rate": 9.49314722465304e-06, "loss": 0.6172, "step": 215 }, { "epoch": 0.23624161073825503, "grad_norm": 2.0780299928973753, "learning_rate": 9.451192254041759e-06, "loss": 0.6238, "step": 220 }, { "epoch": 0.24161073825503357, "grad_norm": 1.880745326989776, "learning_rate": 9.407669621384073e-06, "loss": 0.603, "step": 225 }, { "epoch": 0.24697986577181208, "grad_norm": 2.0325599687685325, "learning_rate": 9.362594654883185e-06, "loss": 0.6098, "step": 230 }, { "epoch": 0.2523489932885906, "grad_norm": 2.1114795284546006, "learning_rate": 9.31598322945759e-06, "loss": 0.5996, "step": 235 }, { "epoch": 0.25771812080536916, "grad_norm": 2.147915062806659, "learning_rate": 9.267851761150092e-06, "loss": 0.5922, "step": 240 }, { "epoch": 0.26308724832214764, "grad_norm": 1.8963761596238329, "learning_rate": 9.218217201346251e-06, "loss": 0.5977, "step": 245 }, { "epoch": 0.2684563758389262, "grad_norm": 2.0596529484621406, "learning_rate": 9.167097030804289e-06, "loss": 0.6019, "step": 
250 }, { "epoch": 0.2738255033557047, "grad_norm": 1.9613750119136377, "learning_rate": 9.114509253498554e-06, "loss": 0.5823, "step": 255 }, { "epoch": 0.2791946308724832, "grad_norm": 1.9493095780956693, "learning_rate": 9.060472390278717e-06, "loss": 0.5895, "step": 260 }, { "epoch": 0.28456375838926173, "grad_norm": 1.9649255622722053, "learning_rate": 9.005005472346923e-06, "loss": 0.5861, "step": 265 }, { "epoch": 0.28993288590604027, "grad_norm": 1.916731628798239, "learning_rate": 8.948128034555212e-06, "loss": 0.5741, "step": 270 }, { "epoch": 0.2953020134228188, "grad_norm": 1.9882471136788202, "learning_rate": 8.889860108525544e-06, "loss": 0.5891, "step": 275 }, { "epoch": 0.3006711409395973, "grad_norm": 1.8657052588450402, "learning_rate": 8.83022221559489e-06, "loss": 0.5662, "step": 280 }, { "epoch": 0.30604026845637583, "grad_norm": 1.8876272358030999, "learning_rate": 8.76923535958783e-06, "loss": 0.5611, "step": 285 }, { "epoch": 0.31140939597315437, "grad_norm": 1.9352681945969277, "learning_rate": 8.706921019419237e-06, "loss": 0.5703, "step": 290 }, { "epoch": 0.3167785234899329, "grad_norm": 2.061655573531995, "learning_rate": 8.643301141529619e-06, "loss": 0.5704, "step": 295 }, { "epoch": 0.3221476510067114, "grad_norm": 1.9955648734094908, "learning_rate": 8.578398132155846e-06, "loss": 0.5646, "step": 300 }, { "epoch": 0.3275167785234899, "grad_norm": 1.9172018423696509, "learning_rate": 8.512234849439887e-06, "loss": 0.5603, "step": 305 }, { "epoch": 0.33288590604026846, "grad_norm": 2.748943489124992, "learning_rate": 8.444834595378434e-06, "loss": 0.5635, "step": 310 }, { "epoch": 0.338255033557047, "grad_norm": 2.031805396620389, "learning_rate": 8.376221107616187e-06, "loss": 0.56, "step": 315 }, { "epoch": 0.3436241610738255, "grad_norm": 2.310478182840465, "learning_rate": 8.306418551085707e-06, "loss": 0.5518, "step": 320 }, { "epoch": 0.348993288590604, "grad_norm": 2.143453110103673, "learning_rate": 8.23545150949679e-06, "loss": 0.545, "step": 325 }, { "epoch": 0.35436241610738256, "grad_norm": 2.074828415385107, "learning_rate": 8.163344976678342e-06, "loss": 0.5417, "step": 330 }, { "epoch": 0.3597315436241611, "grad_norm": 1.95080697885721, "learning_rate": 8.090124347775837e-06, "loss": 0.5461, "step": 335 }, { "epoch": 0.3651006711409396, "grad_norm": 1.8940424474498196, "learning_rate": 8.0158154103074e-06, "loss": 0.5415, "step": 340 }, { "epoch": 0.3704697986577181, "grad_norm": 1.9899368306488876, "learning_rate": 7.940444335081733e-06, "loss": 0.5334, "step": 345 }, { "epoch": 0.37583892617449666, "grad_norm": 3.3190577946485074, "learning_rate": 7.864037666981037e-06, "loss": 0.5274, "step": 350 }, { "epoch": 0.3812080536912752, "grad_norm": 1.9132824928499255, "learning_rate": 7.786622315612182e-06, "loss": 0.5395, "step": 355 }, { "epoch": 0.3865771812080537, "grad_norm": 1.9703232913555617, "learning_rate": 7.708225545829446e-06, "loss": 0.5213, "step": 360 }, { "epoch": 0.3919463087248322, "grad_norm": 2.076133929106472, "learning_rate": 7.6288749681321115e-06, "loss": 0.5356, "step": 365 }, { "epoch": 0.39731543624161075, "grad_norm": 2.0858297493245637, "learning_rate": 7.548598528940354e-06, "loss": 0.5286, "step": 370 }, { "epoch": 0.40268456375838924, "grad_norm": 1.9498254037168414, "learning_rate": 7.4674245007528135e-06, "loss": 0.5217, "step": 375 }, { "epoch": 0.4080536912751678, "grad_norm": 1.9718043172163682, "learning_rate": 7.385381472189321e-06, "loss": 0.5185, "step": 380 }, { "epoch": 0.4134228187919463, "grad_norm": 
2.0714251046483136, "learning_rate": 7.302498337922293e-06, "loss": 0.5212, "step": 385 }, { "epoch": 0.41879194630872485, "grad_norm": 2.059741936941689, "learning_rate": 7.218804288500343e-06, "loss": 0.5068, "step": 390 }, { "epoch": 0.42416107382550333, "grad_norm": 2.0677503384082656, "learning_rate": 7.134328800067684e-06, "loss": 0.5128, "step": 395 }, { "epoch": 0.42953020134228187, "grad_norm": 2.1270598611660008, "learning_rate": 7.049101623982938e-06, "loss": 0.5056, "step": 400 }, { "epoch": 0.4348993288590604, "grad_norm": 1.7730274826372387, "learning_rate": 6.963152776341044e-06, "loss": 0.5007, "step": 405 }, { "epoch": 0.44026845637583895, "grad_norm": 2.092171850544149, "learning_rate": 6.876512527401897e-06, "loss": 0.4969, "step": 410 }, { "epoch": 0.44563758389261743, "grad_norm": 1.8511960894357482, "learning_rate": 6.789211390929497e-06, "loss": 0.5069, "step": 415 }, { "epoch": 0.45100671140939597, "grad_norm": 2.003279508110089, "learning_rate": 6.701280113445324e-06, "loss": 0.4868, "step": 420 }, { "epoch": 0.4563758389261745, "grad_norm": 1.9910852122088196, "learning_rate": 6.6127496633997475e-06, "loss": 0.4856, "step": 425 }, { "epoch": 0.46174496644295304, "grad_norm": 2.0142055270514425, "learning_rate": 6.523651220265269e-06, "loss": 0.4942, "step": 430 }, { "epoch": 0.4671140939597315, "grad_norm": 2.102485865135929, "learning_rate": 6.434016163555452e-06, "loss": 0.4848, "step": 435 }, { "epoch": 0.47248322147651006, "grad_norm": 2.1181467567869623, "learning_rate": 6.343876061773385e-06, "loss": 0.4886, "step": 440 }, { "epoch": 0.4778523489932886, "grad_norm": 2.294633696659754, "learning_rate": 6.2532626612936035e-06, "loss": 0.4862, "step": 445 }, { "epoch": 0.48322147651006714, "grad_norm": 1.9401139366264295, "learning_rate": 6.162207875181354e-06, "loss": 0.4834, "step": 450 }, { "epoch": 0.4885906040268456, "grad_norm": 1.8938897786171476, "learning_rate": 6.070743771953157e-06, "loss": 0.4769, "step": 455 }, { "epoch": 0.49395973154362416, "grad_norm": 2.035803586489251, "learning_rate": 5.978902564282616e-06, "loss": 0.4819, "step": 460 }, { "epoch": 0.4993288590604027, "grad_norm": 1.8943417586526035, "learning_rate": 5.886716597655472e-06, "loss": 0.4759, "step": 465 }, { "epoch": 0.5046979865771812, "grad_norm": 1.8388956694087875, "learning_rate": 5.7942183389778536e-06, "loss": 0.4683, "step": 470 }, { "epoch": 0.5100671140939598, "grad_norm": 1.9348760408299013, "learning_rate": 5.701440365141799e-06, "loss": 0.4644, "step": 475 }, { "epoch": 0.5154362416107383, "grad_norm": 2.0489951854475796, "learning_rate": 5.608415351552014e-06, "loss": 0.4662, "step": 480 }, { "epoch": 0.5208053691275167, "grad_norm": 1.9191064778476148, "learning_rate": 5.515176060617945e-06, "loss": 0.4629, "step": 485 }, { "epoch": 0.5261744966442953, "grad_norm": 2.1725377637259924, "learning_rate": 5.421755330215223e-06, "loss": 0.4562, "step": 490 }, { "epoch": 0.5315436241610738, "grad_norm": 1.9581424804999248, "learning_rate": 5.328186062120509e-06, "loss": 0.4635, "step": 495 }, { "epoch": 0.5369127516778524, "grad_norm": 1.8929231268679836, "learning_rate": 5.23450121042383e-06, "loss": 0.454, "step": 500 }, { "epoch": 0.5422818791946309, "grad_norm": 1.9802801188021257, "learning_rate": 5.140733769922525e-06, "loss": 0.4641, "step": 505 }, { "epoch": 0.5476510067114094, "grad_norm": 2.008695527313478, "learning_rate": 5.0469167645008245e-06, "loss": 0.4482, "step": 510 }, { "epoch": 0.553020134228188, "grad_norm": 2.010797136402869, "learning_rate": 
4.953083235499177e-06, "loss": 0.4407, "step": 515 }, { "epoch": 0.5583892617449664, "grad_norm": 1.9094101117542013, "learning_rate": 4.859266230077474e-06, "loss": 0.4479, "step": 520 }, { "epoch": 0.5637583892617449, "grad_norm": 1.9310237131735821, "learning_rate": 4.7654987895761705e-06, "loss": 0.4421, "step": 525 }, { "epoch": 0.5691275167785235, "grad_norm": 1.9040107482286928, "learning_rate": 4.671813937879494e-06, "loss": 0.446, "step": 530 }, { "epoch": 0.574496644295302, "grad_norm": 2.0403382751342387, "learning_rate": 4.5782446697847775e-06, "loss": 0.4469, "step": 535 }, { "epoch": 0.5798657718120805, "grad_norm": 1.827855233692127, "learning_rate": 4.484823939382056e-06, "loss": 0.4349, "step": 540 }, { "epoch": 0.5852348993288591, "grad_norm": 1.8653574780339666, "learning_rate": 4.391584648447989e-06, "loss": 0.4288, "step": 545 }, { "epoch": 0.5906040268456376, "grad_norm": 1.986995981845508, "learning_rate": 4.298559634858202e-06, "loss": 0.4372, "step": 550 }, { "epoch": 0.5959731543624162, "grad_norm": 1.9833632848096547, "learning_rate": 4.205781661022146e-06, "loss": 0.4332, "step": 555 }, { "epoch": 0.6013422818791946, "grad_norm": 1.9751140496991637, "learning_rate": 4.1132834023445304e-06, "loss": 0.4478, "step": 560 }, { "epoch": 0.6067114093959731, "grad_norm": 2.0032559169326456, "learning_rate": 4.021097435717386e-06, "loss": 0.4221, "step": 565 }, { "epoch": 0.6120805369127517, "grad_norm": 1.9079295664632885, "learning_rate": 3.929256228046845e-06, "loss": 0.4157, "step": 570 }, { "epoch": 0.6174496644295302, "grad_norm": 1.8781628903110001, "learning_rate": 3.837792124818647e-06, "loss": 0.4277, "step": 575 }, { "epoch": 0.6228187919463087, "grad_norm": 1.7915826382801951, "learning_rate": 3.7467373387063973e-06, "loss": 0.4241, "step": 580 }, { "epoch": 0.6281879194630873, "grad_norm": 1.8613980836008768, "learning_rate": 3.656123938226618e-06, "loss": 0.4153, "step": 585 }, { "epoch": 0.6335570469798658, "grad_norm": 1.8036271371193608, "learning_rate": 3.5659838364445505e-06, "loss": 0.4087, "step": 590 }, { "epoch": 0.6389261744966444, "grad_norm": 1.876894262488646, "learning_rate": 3.476348779734732e-06, "loss": 0.4171, "step": 595 }, { "epoch": 0.6442953020134228, "grad_norm": 2.005365914493537, "learning_rate": 3.387250336600254e-06, "loss": 0.4117, "step": 600 }, { "epoch": 0.6496644295302013, "grad_norm": 1.847827057920518, "learning_rate": 3.298719886554677e-06, "loss": 0.4129, "step": 605 }, { "epoch": 0.6550335570469799, "grad_norm": 1.8912137540309444, "learning_rate": 3.2107886090705035e-06, "loss": 0.4006, "step": 610 }, { "epoch": 0.6604026845637584, "grad_norm": 1.8594474550600166, "learning_rate": 3.1234874725981045e-06, "loss": 0.4043, "step": 615 }, { "epoch": 0.6657718120805369, "grad_norm": 1.8497041114366601, "learning_rate": 3.036847223658958e-06, "loss": 0.4033, "step": 620 }, { "epoch": 0.6711409395973155, "grad_norm": 1.960254189224025, "learning_rate": 2.950898376017064e-06, "loss": 0.4036, "step": 625 }, { "epoch": 0.676510067114094, "grad_norm": 2.10629670479599, "learning_rate": 2.865671199932318e-06, "loss": 0.4044, "step": 630 }, { "epoch": 0.6818791946308724, "grad_norm": 1.8824907032670186, "learning_rate": 2.781195711499658e-06, "loss": 0.4095, "step": 635 }, { "epoch": 0.687248322147651, "grad_norm": 1.9655088963405234, "learning_rate": 2.697501662077707e-06, "loss": 0.4021, "step": 640 }, { "epoch": 0.6926174496644295, "grad_norm": 1.889034517852432, "learning_rate": 2.6146185278106807e-06, "loss": 0.3825, "step": 
645 }, { "epoch": 0.697986577181208, "grad_norm": 1.781359751443117, "learning_rate": 2.5325754992471886e-06, "loss": 0.403, "step": 650 }, { "epoch": 0.7033557046979866, "grad_norm": 2.124217994937085, "learning_rate": 2.4514014710596467e-06, "loss": 0.3938, "step": 655 }, { "epoch": 0.7087248322147651, "grad_norm": 2.0049604807831636, "learning_rate": 2.3711250318678906e-06, "loss": 0.3943, "step": 660 }, { "epoch": 0.7140939597315437, "grad_norm": 1.8963570644553214, "learning_rate": 2.2917744541705544e-06, "loss": 0.3886, "step": 665 }, { "epoch": 0.7194630872483222, "grad_norm": 1.9231403525555266, "learning_rate": 2.2133776843878185e-06, "loss": 0.3918, "step": 670 }, { "epoch": 0.7248322147651006, "grad_norm": 1.9140311429947394, "learning_rate": 2.1359623330189655e-06, "loss": 0.3924, "step": 675 }, { "epoch": 0.7302013422818792, "grad_norm": 1.790471011384921, "learning_rate": 2.059555664918268e-06, "loss": 0.3861, "step": 680 }, { "epoch": 0.7355704697986577, "grad_norm": 1.8245278498502604, "learning_rate": 1.9841845896926022e-06, "loss": 0.3837, "step": 685 }, { "epoch": 0.7409395973154362, "grad_norm": 1.835597641802119, "learning_rate": 1.9098756522241634e-06, "loss": 0.3833, "step": 690 }, { "epoch": 0.7463087248322148, "grad_norm": 1.7733798731081618, "learning_rate": 1.8366550233216584e-06, "loss": 0.3859, "step": 695 }, { "epoch": 0.7516778523489933, "grad_norm": 1.8909772302753176, "learning_rate": 1.7645484905032129e-06, "loss": 0.3725, "step": 700 }, { "epoch": 0.7570469798657719, "grad_norm": 1.8395377457755753, "learning_rate": 1.6935814489142937e-06, "loss": 0.375, "step": 705 }, { "epoch": 0.7624161073825504, "grad_norm": 2.004189882277981, "learning_rate": 1.6237788923838149e-06, "loss": 0.3745, "step": 710 }, { "epoch": 0.7677852348993288, "grad_norm": 1.8493982881721263, "learning_rate": 1.555165404621567e-06, "loss": 0.3851, "step": 715 }, { "epoch": 0.7731543624161074, "grad_norm": 1.8731392963530693, "learning_rate": 1.487765150560116e-06, "loss": 0.3719, "step": 720 }, { "epoch": 0.7785234899328859, "grad_norm": 1.800997454509494, "learning_rate": 1.4216018678441558e-06, "loss": 0.3735, "step": 725 }, { "epoch": 0.7838926174496644, "grad_norm": 1.858224940166772, "learning_rate": 1.3566988584703817e-06, "loss": 0.3674, "step": 730 }, { "epoch": 0.789261744966443, "grad_norm": 1.8306556119241966, "learning_rate": 1.293078980580766e-06, "loss": 0.3731, "step": 735 }, { "epoch": 0.7946308724832215, "grad_norm": 1.7780036225720621, "learning_rate": 1.2307646404121692e-06, "loss": 0.362, "step": 740 }, { "epoch": 0.8, "grad_norm": 1.9057965494467726, "learning_rate": 1.1697777844051105e-06, "loss": 0.3711, "step": 745 }, { "epoch": 0.8053691275167785, "grad_norm": 1.7752487152814125, "learning_rate": 1.1101398914744565e-06, "loss": 0.3644, "step": 750 }, { "epoch": 0.810738255033557, "grad_norm": 1.8233485934046587, "learning_rate": 1.0518719654447896e-06, "loss": 0.3635, "step": 755 }, { "epoch": 0.8161073825503355, "grad_norm": 1.7956083349387602, "learning_rate": 9.949945276530782e-07, "loss": 0.361, "step": 760 }, { "epoch": 0.8214765100671141, "grad_norm": 1.7050898287128047, "learning_rate": 9.395276097212841e-07, "loss": 0.3632, "step": 765 }, { "epoch": 0.8268456375838926, "grad_norm": 1.8682808768340036, "learning_rate": 8.854907465014479e-07, "loss": 0.3679, "step": 770 }, { "epoch": 0.8322147651006712, "grad_norm": 1.7567008107111888, "learning_rate": 8.329029691957124e-07, "loss": 0.3663, "step": 775 }, { "epoch": 0.8375838926174497, "grad_norm": 
1.8090009300620273, "learning_rate": 7.817827986537508e-07, "loss": 0.3552, "step": 780 }, { "epoch": 0.8429530201342282, "grad_norm": 1.8320510025717955, "learning_rate": 7.321482388499096e-07, "loss": 0.3595, "step": 785 }, { "epoch": 0.8483221476510067, "grad_norm": 1.80583578265842, "learning_rate": 6.840167705424106e-07, "loss": 0.3652, "step": 790 }, { "epoch": 0.8536912751677852, "grad_norm": 1.8327102347329405, "learning_rate": 6.374053451168166e-07, "loss": 0.3561, "step": 795 }, { "epoch": 0.8590604026845637, "grad_norm": 1.8311020050096911, "learning_rate": 5.92330378615929e-07, "loss": 0.3585, "step": 800 }, { "epoch": 0.8644295302013423, "grad_norm": 1.8055339358917153, "learning_rate": 5.488077459582425e-07, "loss": 0.3593, "step": 805 }, { "epoch": 0.8697986577181208, "grad_norm": 1.8142078683309888, "learning_rate": 5.068527753469604e-07, "loss": 0.3556, "step": 810 }, { "epoch": 0.8751677852348994, "grad_norm": 1.8006022957772634, "learning_rate": 4.664802428715753e-07, "loss": 0.3501, "step": 815 }, { "epoch": 0.8805369127516779, "grad_norm": 1.8115511806620141, "learning_rate": 4.2770436730388166e-07, "loss": 0.3555, "step": 820 }, { "epoch": 0.8859060402684564, "grad_norm": 1.8145406688101844, "learning_rate": 3.9053880509028086e-07, "loss": 0.345, "step": 825 }, { "epoch": 0.8912751677852349, "grad_norm": 1.7552883220570528, "learning_rate": 3.549966455421305e-07, "loss": 0.3484, "step": 830 }, { "epoch": 0.8966442953020134, "grad_norm": 1.7075911431534658, "learning_rate": 3.2109040622582186e-07, "loss": 0.3588, "step": 835 }, { "epoch": 0.9020134228187919, "grad_norm": 1.6934879805875171, "learning_rate": 2.8883202855423676e-07, "loss": 0.3583, "step": 840 }, { "epoch": 0.9073825503355705, "grad_norm": 1.77775983103448, "learning_rate": 2.582328735811029e-07, "loss": 0.351, "step": 845 }, { "epoch": 0.912751677852349, "grad_norm": 1.8618113993543914, "learning_rate": 2.2930371799975593e-07, "loss": 0.3491, "step": 850 }, { "epoch": 0.9181208053691275, "grad_norm": 1.858661836026022, "learning_rate": 2.0205475034770606e-07, "loss": 0.3458, "step": 855 }, { "epoch": 0.9234899328859061, "grad_norm": 1.7009249535049955, "learning_rate": 1.7649556741833995e-07, "loss": 0.3456, "step": 860 }, { "epoch": 0.9288590604026845, "grad_norm": 1.792030192988469, "learning_rate": 1.5263517088103862e-07, "loss": 0.3575, "step": 865 }, { "epoch": 0.934228187919463, "grad_norm": 1.9256857866336536, "learning_rate": 1.304819641108801e-07, "loss": 0.3571, "step": 870 }, { "epoch": 0.9395973154362416, "grad_norm": 1.84021155443979, "learning_rate": 1.1004374922906846e-07, "loss": 0.3478, "step": 875 }, { "epoch": 0.9449664429530201, "grad_norm": 1.706774577248585, "learning_rate": 9.132772435510362e-08, "loss": 0.3459, "step": 880 }, { "epoch": 0.9503355704697987, "grad_norm": 1.737896821789404, "learning_rate": 7.434048107168523e-08, "loss": 0.3487, "step": 885 }, { "epoch": 0.9557046979865772, "grad_norm": 1.754018893680602, "learning_rate": 5.908800210322696e-08, "loss": 0.3533, "step": 890 }, { "epoch": 0.9610738255033557, "grad_norm": 1.6854396215038576, "learning_rate": 4.55756592088058e-08, "loss": 0.3493, "step": 895 }, { "epoch": 0.9664429530201343, "grad_norm": 1.7570339706613218, "learning_rate": 3.3808211290284886e-08, "loss": 0.3389, "step": 900 }, { "epoch": 0.9718120805369127, "grad_norm": 1.83552844908017, "learning_rate": 2.378980271628195e-08, "loss": 0.3455, "step": 905 }, { "epoch": 0.9771812080536912, "grad_norm": 1.6940562777678623, "learning_rate": 
1.552396186256411e-08, "loss": 0.3478, "step": 910 }, { "epoch": 0.9825503355704698, "grad_norm": 1.7576783825666717, "learning_rate": 9.013599869394096e-09, "loss": 0.3393, "step": 915 }, { "epoch": 0.9879194630872483, "grad_norm": 1.7480069996022527, "learning_rate": 4.261009616257638e-09, "loss": 0.3499, "step": 920 }, { "epoch": 0.9932885906040269, "grad_norm": 1.6906969685299382, "learning_rate": 1.2678649143349485e-09, "loss": 0.3528, "step": 925 }, { "epoch": 0.9986577181208054, "grad_norm": 1.77167724060171, "learning_rate": 3.5219917003948003e-11, "loss": 0.3509, "step": 930 }, { "epoch": 0.9997315436241611, "eval_loss": 0.3463418781757355, "eval_runtime": 1.1928, "eval_samples_per_second": 1.677, "eval_steps_per_second": 0.838, "step": 931 }, { "epoch": 0.9997315436241611, "step": 931, "total_flos": 194880114524160.0, "train_loss": 0.5085661182086014, "train_runtime": 21123.8288, "train_samples_per_second": 1.41, "train_steps_per_second": 0.044 } ], "logging_steps": 5, "max_steps": 931, "num_input_tokens_seen": 0, "num_train_epochs": 1, "save_steps": 100, "stateful_callbacks": { "TrainerControl": { "args": { "should_epoch_stop": false, "should_evaluate": false, "should_log": false, "should_save": true, "should_training_stop": true }, "attributes": {} } }, "total_flos": 194880114524160.0, "train_batch_size": 2, "trial_name": null, "trial_params": null }
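The file above appears to be a Hugging Face Trainer trainer_state.json for a one-epoch run (931 steps, logging every 5 steps). A minimal sketch for inspecting it, assuming the file is saved locally as "trainer_state.json" and matplotlib is available; the path and output filename are illustrative, not part of the original checkpoint:

import json
import matplotlib.pyplot as plt

# Hypothetical path; point this at wherever the trainer_state.json above is stored.
with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only the periodic training logs (entries carrying a "loss" key);
# the trailing entries hold eval metrics and run-level totals instead.
logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in logs]
losses = [entry["loss"] for entry in logs]
lrs = [entry["learning_rate"] for entry in logs]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, label="train loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

# Learning rate on a secondary axis to show the warmup + decay schedule.
ax_lr = ax_loss.twinx()
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
plt.savefig("training_curves.png")  # illustrative output name

Run against this file, the plot would show the loss falling from roughly 1.32 at step 1 to about 0.35 by step 930, with the learning rate warming up to ~1e-5 around step 95 and then decaying toward zero.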