{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 100,
  "global_step": 976,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0010245901639344263,
      "grad_norm": 15.452402536806751,
      "learning_rate": 4.999987048807813e-06,
      "loss": 0.572,
      "step": 1
    },
    {
      "epoch": 0.0020491803278688526,
      "grad_norm": 7.305550200033585,
      "learning_rate": 4.999948195365436e-06,
      "loss": 0.5132,
      "step": 2
    },
    {
      "epoch": 0.0030737704918032786,
      "grad_norm": 3.1876193511198307,
      "learning_rate": 4.999883440075429e-06,
      "loss": 0.3837,
      "step": 3
    },
    {
      "epoch": 0.004098360655737705,
      "grad_norm": 2.5201160768865507,
      "learning_rate": 4.99979278360872e-06,
      "loss": 0.3567,
      "step": 4
    },
    {
      "epoch": 0.005122950819672131,
      "grad_norm": 3.417645371813442,
      "learning_rate": 4.999676226904594e-06,
      "loss": 0.3265,
      "step": 5
    },
    {
      "epoch": 0.006147540983606557,
      "grad_norm": 3.125678061228638,
      "learning_rate": 4.99953377117069e-06,
      "loss": 0.3237,
      "step": 6
    },
    {
      "epoch": 0.007172131147540984,
      "grad_norm": 2.5821298124203076,
      "learning_rate": 4.999365417882986e-06,
      "loss": 0.2999,
      "step": 7
    },
    {
      "epoch": 0.00819672131147541,
      "grad_norm": 1.8082505051013165,
      "learning_rate": 4.999171168785783e-06,
      "loss": 0.2807,
      "step": 8
    },
    {
      "epoch": 0.009221311475409836,
      "grad_norm": 1.9177759039519724,
      "learning_rate": 4.9989510258916864e-06,
      "loss": 0.2704,
      "step": 9
    },
    {
      "epoch": 0.010245901639344262,
      "grad_norm": 1.8001052736974854,
      "learning_rate": 4.998704991481587e-06,
      "loss": 0.2518,
      "step": 10
    },
    {
      "epoch": 0.011270491803278689,
      "grad_norm": 1.4570000596353188,
      "learning_rate": 4.998433068104634e-06,
      "loss": 0.26,
      "step": 11
    },
    {
      "epoch": 0.012295081967213115,
      "grad_norm": 1.414693092753735,
      "learning_rate": 4.9981352585782154e-06,
      "loss": 0.2662,
      "step": 12
    },
    {
      "epoch": 0.01331967213114754,
      "grad_norm": 1.4736139838753664,
      "learning_rate": 4.997811565987921e-06,
      "loss": 0.2594,
      "step": 13
    },
    {
      "epoch": 0.014344262295081968,
      "grad_norm": 1.3561908873975805,
      "learning_rate": 4.997461993687514e-06,
      "loss": 0.2694,
      "step": 14
    },
    {
      "epoch": 0.015368852459016393,
      "grad_norm": 1.3179132329910017,
      "learning_rate": 4.997086545298899e-06,
      "loss": 0.2505,
      "step": 15
    },
    {
      "epoch": 0.01639344262295082,
      "grad_norm": 1.429289822640037,
      "learning_rate": 4.996685224712077e-06,
      "loss": 0.2544,
      "step": 16
    },
    {
      "epoch": 0.017418032786885244,
      "grad_norm": 1.5772811824871815,
      "learning_rate": 4.996258036085113e-06,
      "loss": 0.2478,
      "step": 17
    },
    {
      "epoch": 0.018442622950819672,
      "grad_norm": 1.50117344559958,
      "learning_rate": 4.995804983844088e-06,
      "loss": 0.233,
      "step": 18
    },
    {
      "epoch": 0.0194672131147541,
      "grad_norm": 1.1938213540249831,
      "learning_rate": 4.995326072683057e-06,
      "loss": 0.2353,
      "step": 19
    },
    {
      "epoch": 0.020491803278688523,
      "grad_norm": 1.2002393039838568,
      "learning_rate": 4.994821307563995e-06,
      "loss": 0.2246,
      "step": 20
    },
    {
      "epoch": 0.02151639344262295,
      "grad_norm": 1.6411451173591138,
      "learning_rate": 4.99429069371675e-06,
      "loss": 0.2316,
      "step": 21
    },
    {
      "epoch": 0.022540983606557378,
      "grad_norm": 1.3522742877642988,
      "learning_rate": 4.9937342366389875e-06,
      "loss": 0.2449,
      "step": 22
    },
    {
      "epoch": 0.0235655737704918,
      "grad_norm": 1.0773538576470656,
      "learning_rate": 4.993151942096134e-06,
      "loss": 0.2075,
      "step": 23
    },
    {
      "epoch": 0.02459016393442623,
      "grad_norm": 1.0992817872294744,
      "learning_rate": 4.992543816121317e-06,
      "loss": 0.2237,
      "step": 24
    },
    {
      "epoch": 0.025614754098360656,
      "grad_norm": 1.3824716757057172,
      "learning_rate": 4.991909865015301e-06,
      "loss": 0.2208,
      "step": 25
    },
    {
      "epoch": 0.02663934426229508,
      "grad_norm": 1.2484473731205439,
      "learning_rate": 4.991250095346423e-06,
      "loss": 0.2112,
      "step": 26
    },
    {
      "epoch": 0.027663934426229508,
      "grad_norm": 1.1450821381867806,
      "learning_rate": 4.990564513950527e-06,
      "loss": 0.2286,
      "step": 27
    },
    {
      "epoch": 0.028688524590163935,
      "grad_norm": 1.1716786254384786,
      "learning_rate": 4.98985312793089e-06,
      "loss": 0.2198,
      "step": 28
    },
    {
      "epoch": 0.02971311475409836,
      "grad_norm": 1.3431518970277194,
      "learning_rate": 4.989115944658151e-06,
      "loss": 0.2158,
      "step": 29
    },
    {
      "epoch": 0.030737704918032786,
      "grad_norm": 1.2238656237307888,
      "learning_rate": 4.988352971770229e-06,
      "loss": 0.2232,
      "step": 30
    },
    {
      "epoch": 0.031762295081967214,
      "grad_norm": 1.0783463258426353,
      "learning_rate": 4.987564217172254e-06,
      "loss": 0.2216,
      "step": 31
    },
    {
      "epoch": 0.03278688524590164,
      "grad_norm": 1.0799097690608335,
      "learning_rate": 4.9867496890364734e-06,
      "loss": 0.2373,
      "step": 32
    },
    {
      "epoch": 0.03381147540983607,
      "grad_norm": 1.19743516455158,
      "learning_rate": 4.985909395802176e-06,
      "loss": 0.2294,
      "step": 33
    },
    {
      "epoch": 0.03483606557377049,
      "grad_norm": 1.1410028916577852,
      "learning_rate": 4.985043346175602e-06,
      "loss": 0.2126,
      "step": 34
    },
    {
      "epoch": 0.035860655737704916,
      "grad_norm": 1.1362506360036861,
      "learning_rate": 4.984151549129851e-06,
      "loss": 0.2222,
      "step": 35
    },
    {
      "epoch": 0.036885245901639344,
      "grad_norm": 1.0459790774942477,
      "learning_rate": 4.983234013904791e-06,
      "loss": 0.2182,
      "step": 36
    },
    {
      "epoch": 0.03790983606557377,
      "grad_norm": 1.1106982889401027,
      "learning_rate": 4.982290750006962e-06,
      "loss": 0.2301,
      "step": 37
    },
    {
      "epoch": 0.0389344262295082,
      "grad_norm": 1.1627562476561535,
      "learning_rate": 4.981321767209477e-06,
      "loss": 0.2155,
      "step": 38
    },
    {
      "epoch": 0.039959016393442626,
      "grad_norm": 1.1225146567693527,
      "learning_rate": 4.980327075551923e-06,
      "loss": 0.2187,
      "step": 39
    },
    {
      "epoch": 0.040983606557377046,
      "grad_norm": 1.174221204360651,
      "learning_rate": 4.9793066853402535e-06,
      "loss": 0.2245,
      "step": 40
    },
    {
      "epoch": 0.042008196721311473,
      "grad_norm": 1.1240891815014928,
      "learning_rate": 4.978260607146685e-06,
      "loss": 0.2084,
      "step": 41
    },
    {
      "epoch": 0.0430327868852459,
      "grad_norm": 1.1951700379279537,
      "learning_rate": 4.9771888518095855e-06,
      "loss": 0.2035,
      "step": 42
    },
    {
      "epoch": 0.04405737704918033,
      "grad_norm": 1.0931737313409504,
      "learning_rate": 4.976091430433362e-06,
      "loss": 0.2139,
      "step": 43
    },
    {
      "epoch": 0.045081967213114756,
      "grad_norm": 1.2453519244340612,
      "learning_rate": 4.974968354388346e-06,
      "loss": 0.2201,
      "step": 44
    },
    {
      "epoch": 0.04610655737704918,
      "grad_norm": 0.8884787353630516,
      "learning_rate": 4.973819635310677e-06,
      "loss": 0.1884,
      "step": 45
    },
    {
      "epoch": 0.0471311475409836,
      "grad_norm": 1.169272297511335,
      "learning_rate": 4.9726452851021804e-06,
      "loss": 0.1989,
      "step": 46
    },
    {
      "epoch": 0.04815573770491803,
      "grad_norm": 1.0792733015654319,
      "learning_rate": 4.971445315930244e-06,
      "loss": 0.2002,
      "step": 47
    },
    {
      "epoch": 0.04918032786885246,
      "grad_norm": 1.2022839401915029,
      "learning_rate": 4.970219740227693e-06,
      "loss": 0.2129,
      "step": 48
    },
    {
      "epoch": 0.050204918032786885,
      "grad_norm": 1.1887069906568342,
      "learning_rate": 4.9689685706926615e-06,
      "loss": 0.2217,
      "step": 49
    },
    {
      "epoch": 0.05122950819672131,
      "grad_norm": 1.1452750318331866,
      "learning_rate": 4.967691820288457e-06,
      "loss": 0.2217,
      "step": 50
    },
    {
      "epoch": 0.05225409836065574,
      "grad_norm": 1.0326944246472174,
      "learning_rate": 4.966389502243434e-06,
      "loss": 0.2119,
      "step": 51
    },
    {
      "epoch": 0.05327868852459016,
      "grad_norm": 0.9962075871597776,
      "learning_rate": 4.965061630050848e-06,
      "loss": 0.1922,
      "step": 52
    },
    {
      "epoch": 0.05430327868852459,
      "grad_norm": 1.3386946871994219,
      "learning_rate": 4.963708217468721e-06,
      "loss": 0.2091,
      "step": 53
    },
    {
      "epoch": 0.055327868852459015,
      "grad_norm": 1.2441294595987402,
      "learning_rate": 4.9623292785197e-06,
      "loss": 0.206,
      "step": 54
    },
    {
      "epoch": 0.05635245901639344,
      "grad_norm": 1.1541743884324098,
      "learning_rate": 4.960924827490906e-06,
      "loss": 0.1994,
      "step": 55
    },
    {
      "epoch": 0.05737704918032787,
      "grad_norm": 1.1973647959483156,
      "learning_rate": 4.959494878933792e-06,
      "loss": 0.2262,
      "step": 56
    },
    {
      "epoch": 0.0584016393442623,
      "grad_norm": 1.2288504734436183,
      "learning_rate": 4.958039447663987e-06,
      "loss": 0.2184,
      "step": 57
    },
    {
      "epoch": 0.05942622950819672,
      "grad_norm": 1.160483366357783,
      "learning_rate": 4.95655854876115e-06,
      "loss": 0.1899,
      "step": 58
    },
    {
      "epoch": 0.060450819672131145,
      "grad_norm": 1.0683648175825389,
      "learning_rate": 4.955052197568805e-06,
      "loss": 0.2051,
      "step": 59
    },
    {
      "epoch": 0.06147540983606557,
      "grad_norm": 1.195806402238826,
      "learning_rate": 4.953520409694186e-06,
      "loss": 0.2054,
      "step": 60
    },
    {
      "epoch": 0.0625,
      "grad_norm": 1.0156806556559643,
      "learning_rate": 4.9519632010080765e-06,
      "loss": 0.1936,
      "step": 61
    },
    {
      "epoch": 0.06352459016393443,
      "grad_norm": 1.3441776346206482,
      "learning_rate": 4.950380587644645e-06,
      "loss": 0.2196,
      "step": 62
    },
    {
      "epoch": 0.06454918032786885,
      "grad_norm": 1.084686351831147,
      "learning_rate": 4.948772586001273e-06,
      "loss": 0.2098,
      "step": 63
    },
    {
      "epoch": 0.06557377049180328,
      "grad_norm": 1.1032413296502586,
      "learning_rate": 4.947139212738395e-06,
      "loss": 0.1931,
      "step": 64
    },
    {
      "epoch": 0.06659836065573771,
      "grad_norm": 1.0035885981893748,
      "learning_rate": 4.945480484779313e-06,
      "loss": 0.2017,
      "step": 65
    },
    {
      "epoch": 0.06762295081967214,
      "grad_norm": 1.2095163423577484,
      "learning_rate": 4.94379641931003e-06,
      "loss": 0.2193,
      "step": 66
    },
    {
      "epoch": 0.06864754098360656,
      "grad_norm": 1.0950527630785583,
      "learning_rate": 4.9420870337790725e-06,
      "loss": 0.1846,
      "step": 67
    },
    {
      "epoch": 0.06967213114754098,
      "grad_norm": 1.0541546000124233,
      "learning_rate": 4.940352345897304e-06,
      "loss": 0.2078,
      "step": 68
    },
    {
      "epoch": 0.0706967213114754,
      "grad_norm": 1.122765591728912,
      "learning_rate": 4.938592373637745e-06,
      "loss": 0.2071,
      "step": 69
    },
    {
      "epoch": 0.07172131147540983,
      "grad_norm": 1.0811076273958207,
      "learning_rate": 4.936807135235389e-06,
      "loss": 0.2238,
      "step": 70
    },
    {
      "epoch": 0.07274590163934426,
      "grad_norm": 1.0061844726090332,
      "learning_rate": 4.934996649187005e-06,
      "loss": 0.1941,
      "step": 71
    },
    {
      "epoch": 0.07377049180327869,
      "grad_norm": 1.1001201509097391,
      "learning_rate": 4.933160934250957e-06,
      "loss": 0.2092,
      "step": 72
    },
    {
      "epoch": 0.07479508196721311,
      "grad_norm": 1.2006616102754104,
      "learning_rate": 4.931300009447002e-06,
      "loss": 0.2198,
      "step": 73
    },
    {
      "epoch": 0.07581967213114754,
      "grad_norm": 1.1151274288680306,
      "learning_rate": 4.929413894056098e-06,
      "loss": 0.2255,
      "step": 74
    },
    {
      "epoch": 0.07684426229508197,
      "grad_norm": 1.1868703237777518,
      "learning_rate": 4.927502607620196e-06,
      "loss": 0.2201,
      "step": 75
    },
    {
      "epoch": 0.0778688524590164,
      "grad_norm": 0.982395247911581,
      "learning_rate": 4.925566169942048e-06,
      "loss": 0.1895,
      "step": 76
    },
    {
      "epoch": 0.07889344262295082,
      "grad_norm": 0.9947800557739062,
      "learning_rate": 4.923604601084996e-06,
      "loss": 0.1933,
      "step": 77
    },
    {
      "epoch": 0.07991803278688525,
      "grad_norm": 1.0995989933919512,
      "learning_rate": 4.921617921372764e-06,
      "loss": 0.1966,
      "step": 78
    },
    {
      "epoch": 0.08094262295081968,
      "grad_norm": 1.3211277515512607,
      "learning_rate": 4.919606151389247e-06,
      "loss": 0.2182,
      "step": 79
    },
    {
      "epoch": 0.08196721311475409,
      "grad_norm": 1.0157338708228065,
      "learning_rate": 4.917569311978301e-06,
      "loss": 0.2025,
      "step": 80
    },
    {
      "epoch": 0.08299180327868852,
      "grad_norm": 1.0742916342578412,
      "learning_rate": 4.915507424243527e-06,
      "loss": 0.1961,
      "step": 81
    },
    {
      "epoch": 0.08401639344262295,
      "grad_norm": 1.1936601035040675,
      "learning_rate": 4.913420509548047e-06,
      "loss": 0.2159,
      "step": 82
    },
    {
      "epoch": 0.08504098360655737,
      "grad_norm": 1.0036147929803145,
      "learning_rate": 4.9113085895142875e-06,
      "loss": 0.1787,
      "step": 83
    },
    {
      "epoch": 0.0860655737704918,
      "grad_norm": 1.0809384675136005,
      "learning_rate": 4.9091716860237545e-06,
      "loss": 0.2072,
      "step": 84
    },
    {
      "epoch": 0.08709016393442623,
      "grad_norm": 1.0728489352248434,
      "learning_rate": 4.907009821216806e-06,
      "loss": 0.2137,
      "step": 85
    },
    {
      "epoch": 0.08811475409836066,
      "grad_norm": 1.0273705737399745,
      "learning_rate": 4.904823017492425e-06,
      "loss": 0.1974,
      "step": 86
    },
    {
      "epoch": 0.08913934426229508,
      "grad_norm": 1.1028018958723416,
      "learning_rate": 4.902611297507982e-06,
      "loss": 0.212,
      "step": 87
    },
    {
      "epoch": 0.09016393442622951,
      "grad_norm": 1.1368034208018793,
      "learning_rate": 4.900374684179005e-06,
      "loss": 0.2046,
      "step": 88
    },
    {
      "epoch": 0.09118852459016394,
      "grad_norm": 1.1233189102553156,
      "learning_rate": 4.898113200678942e-06,
      "loss": 0.1953,
      "step": 89
    },
    {
      "epoch": 0.09221311475409837,
      "grad_norm": 1.1272314904164307,
      "learning_rate": 4.89582687043892e-06,
      "loss": 0.199,
      "step": 90
    },
    {
      "epoch": 0.0932377049180328,
      "grad_norm": 1.0406991529998404,
      "learning_rate": 4.893515717147499e-06,
      "loss": 0.2051,
      "step": 91
    },
    {
      "epoch": 0.0942622950819672,
      "grad_norm": 1.0471968189914205,
      "learning_rate": 4.891179764750434e-06,
      "loss": 0.1934,
      "step": 92
    },
    {
      "epoch": 0.09528688524590163,
      "grad_norm": 1.171646384414015,
      "learning_rate": 4.888819037450416e-06,
      "loss": 0.2056,
      "step": 93
    },
    {
      "epoch": 0.09631147540983606,
      "grad_norm": 1.0420294729475101,
      "learning_rate": 4.8864335597068335e-06,
      "loss": 0.1892,
      "step": 94
    },
    {
      "epoch": 0.09733606557377049,
      "grad_norm": 1.1204241455306612,
      "learning_rate": 4.884023356235512e-06,
      "loss": 0.21,
      "step": 95
    },
    {
      "epoch": 0.09836065573770492,
      "grad_norm": 1.080751188012676,
      "learning_rate": 4.881588452008457e-06,
      "loss": 0.1944,
      "step": 96
    },
    {
      "epoch": 0.09938524590163934,
      "grad_norm": 1.0957650608641825,
      "learning_rate": 4.879128872253598e-06,
      "loss": 0.2138,
      "step": 97
    },
    {
      "epoch": 0.10040983606557377,
      "grad_norm": 1.0118154939421664,
      "learning_rate": 4.876644642454529e-06,
      "loss": 0.1912,
      "step": 98
    },
    {
      "epoch": 0.1014344262295082,
      "grad_norm": 1.0444211645539274,
      "learning_rate": 4.874135788350238e-06,
      "loss": 0.2092,
      "step": 99
    },
    {
      "epoch": 0.10245901639344263,
      "grad_norm": 0.9398873407702204,
      "learning_rate": 4.871602335934847e-06,
      "loss": 0.1754,
      "step": 100
    },
    {
      "epoch": 0.10245901639344263,
      "eval_loss": 0.1825956255197525,
      "eval_runtime": 2.4347,
      "eval_samples_per_second": 3.286,
      "eval_steps_per_second": 0.821,
      "step": 100
    },
    {
      "epoch": 0.10348360655737705,
      "grad_norm": 1.2140506291563484,
      "learning_rate": 4.869044311457341e-06,
      "loss": 0.2113,
      "step": 101
    },
    {
      "epoch": 0.10450819672131148,
      "grad_norm": 1.1174741265841863,
      "learning_rate": 4.86646174142129e-06,
      "loss": 0.2153,
      "step": 102
    },
    {
      "epoch": 0.10553278688524591,
      "grad_norm": 1.1415320048533881,
      "learning_rate": 4.863854652584585e-06,
      "loss": 0.1943,
      "step": 103
    },
    {
      "epoch": 0.10655737704918032,
      "grad_norm": 0.962811845814654,
      "learning_rate": 4.8612230719591535e-06,
      "loss": 0.1822,
      "step": 104
    },
    {
      "epoch": 0.10758196721311475,
      "grad_norm": 1.0188242433623849,
      "learning_rate": 4.858567026810679e-06,
      "loss": 0.1889,
      "step": 105
    },
    {
      "epoch": 0.10860655737704918,
      "grad_norm": 1.0448408213708065,
      "learning_rate": 4.855886544658322e-06,
      "loss": 0.2061,
      "step": 106
    },
    {
      "epoch": 0.1096311475409836,
      "grad_norm": 1.1086516146994518,
      "learning_rate": 4.853181653274437e-06,
      "loss": 0.1964,
      "step": 107
    },
    {
      "epoch": 0.11065573770491803,
      "grad_norm": 0.9915932093693616,
      "learning_rate": 4.850452380684275e-06,
      "loss": 0.205,
      "step": 108
    },
    {
      "epoch": 0.11168032786885246,
      "grad_norm": 0.9668199616021121,
      "learning_rate": 4.847698755165705e-06,
      "loss": 0.2029,
      "step": 109
    },
    {
      "epoch": 0.11270491803278689,
      "grad_norm": 1.0036065413677484,
      "learning_rate": 4.844920805248914e-06,
      "loss": 0.1992,
      "step": 110
    },
    {
      "epoch": 0.11372950819672131,
      "grad_norm": 1.0655873949566872,
      "learning_rate": 4.842118559716111e-06,
      "loss": 0.1801,
      "step": 111
    },
    {
      "epoch": 0.11475409836065574,
      "grad_norm": 1.2744949735721105,
      "learning_rate": 4.839292047601234e-06,
      "loss": 0.2023,
      "step": 112
    },
    {
      "epoch": 0.11577868852459017,
      "grad_norm": 1.0219453389387125,
      "learning_rate": 4.836441298189644e-06,
      "loss": 0.1979,
      "step": 113
    },
    {
      "epoch": 0.1168032786885246,
      "grad_norm": 0.9685822828312404,
      "learning_rate": 4.833566341017823e-06,
      "loss": 0.1902,
      "step": 114
    },
    {
      "epoch": 0.11782786885245902,
      "grad_norm": 0.8996528787540452,
      "learning_rate": 4.83066720587307e-06,
      "loss": 0.178,
      "step": 115
    },
    {
      "epoch": 0.11885245901639344,
      "grad_norm": 1.0365707661954302,
      "learning_rate": 4.827743922793189e-06,
      "loss": 0.1988,
      "step": 116
    },
    {
      "epoch": 0.11987704918032786,
      "grad_norm": 1.0743525706572028,
      "learning_rate": 4.824796522066182e-06,
      "loss": 0.2143,
      "step": 117
    },
    {
      "epoch": 0.12090163934426229,
      "grad_norm": 1.1150280419702638,
      "learning_rate": 4.8218250342299314e-06,
      "loss": 0.2223,
      "step": 118
    },
    {
      "epoch": 0.12192622950819672,
      "grad_norm": 1.136629095946617,
      "learning_rate": 4.8188294900718855e-06,
      "loss": 0.2087,
      "step": 119
    },
    {
      "epoch": 0.12295081967213115,
      "grad_norm": 1.0842106367486168,
      "learning_rate": 4.815809920628738e-06,
      "loss": 0.2009,
      "step": 120
    },
    {
      "epoch": 0.12397540983606557,
      "grad_norm": 1.1872653525342982,
      "learning_rate": 4.812766357186108e-06,
      "loss": 0.2196,
      "step": 121
    },
    {
      "epoch": 0.125,
      "grad_norm": 1.050343998525333,
      "learning_rate": 4.809698831278217e-06,
      "loss": 0.2131,
      "step": 122
    },
    {
      "epoch": 0.1260245901639344,
      "grad_norm": 1.1215757529117214,
      "learning_rate": 4.806607374687558e-06,
      "loss": 0.1963,
      "step": 123
    },
    {
      "epoch": 0.12704918032786885,
      "grad_norm": 0.9694160984295132,
      "learning_rate": 4.803492019444571e-06,
      "loss": 0.1805,
      "step": 124
    },
    {
      "epoch": 0.12807377049180327,
      "grad_norm": 1.068077465119998,
      "learning_rate": 4.800352797827305e-06,
      "loss": 0.1863,
      "step": 125
    },
    {
      "epoch": 0.1290983606557377,
      "grad_norm": 1.2737379628540055,
      "learning_rate": 4.7971897423610925e-06,
      "loss": 0.196,
      "step": 126
    },
    {
      "epoch": 0.13012295081967212,
      "grad_norm": 1.080792943197194,
      "learning_rate": 4.794002885818204e-06,
      "loss": 0.1861,
      "step": 127
    },
    {
      "epoch": 0.13114754098360656,
      "grad_norm": 3.0321323156743527,
      "learning_rate": 4.790792261217513e-06,
      "loss": 0.204,
      "step": 128
    },
    {
      "epoch": 0.13217213114754098,
      "grad_norm": 1.0252271852601382,
      "learning_rate": 4.787557901824151e-06,
      "loss": 0.1832,
      "step": 129
    },
    {
      "epoch": 0.13319672131147542,
      "grad_norm": 1.0357508100754704,
      "learning_rate": 4.784299841149168e-06,
      "loss": 0.2089,
      "step": 130
    },
    {
      "epoch": 0.13422131147540983,
      "grad_norm": 0.9886092711517581,
      "learning_rate": 4.781018112949179e-06,
      "loss": 0.197,
      "step": 131
    },
    {
      "epoch": 0.13524590163934427,
      "grad_norm": 1.0804842761852165,
      "learning_rate": 4.777712751226019e-06,
      "loss": 0.1863,
      "step": 132
    },
    {
      "epoch": 0.1362704918032787,
      "grad_norm": 1.0975335533210058,
      "learning_rate": 4.774383790226387e-06,
      "loss": 0.1908,
      "step": 133
    },
    {
      "epoch": 0.13729508196721313,
      "grad_norm": 0.989866346019197,
      "learning_rate": 4.771031264441494e-06,
      "loss": 0.189,
      "step": 134
    },
    {
      "epoch": 0.13831967213114754,
      "grad_norm": 1.0585783263932196,
      "learning_rate": 4.767655208606705e-06,
      "loss": 0.2061,
      "step": 135
    },
    {
      "epoch": 0.13934426229508196,
      "grad_norm": 1.304236303348888,
      "learning_rate": 4.764255657701179e-06,
      "loss": 0.2231,
      "step": 136
    },
    {
      "epoch": 0.1403688524590164,
      "grad_norm": 1.293728370223881,
      "learning_rate": 4.760832646947504e-06,
      "loss": 0.2106,
      "step": 137
    },
    {
      "epoch": 0.1413934426229508,
      "grad_norm": 1.0060122261276196,
      "learning_rate": 4.757386211811338e-06,
      "loss": 0.1903,
      "step": 138
    },
    {
      "epoch": 0.14241803278688525,
      "grad_norm": 1.0410243272588056,
      "learning_rate": 4.753916388001034e-06,
      "loss": 0.2095,
      "step": 139
    },
    {
      "epoch": 0.14344262295081966,
      "grad_norm": 1.0361509775825113,
      "learning_rate": 4.750423211467278e-06,
      "loss": 0.1919,
      "step": 140
    },
    {
      "epoch": 0.1444672131147541,
      "grad_norm": 0.9386233936167377,
      "learning_rate": 4.746906718402709e-06,
      "loss": 0.1783,
      "step": 141
    },
    {
      "epoch": 0.14549180327868852,
      "grad_norm": 0.9972033241854218,
      "learning_rate": 4.74336694524155e-06,
      "loss": 0.1977,
      "step": 142
    },
    {
      "epoch": 0.14651639344262296,
      "grad_norm": 0.9595522468597522,
      "learning_rate": 4.739803928659227e-06,
      "loss": 0.1938,
      "step": 143
    },
    {
      "epoch": 0.14754098360655737,
      "grad_norm": 1.0580312526719955,
      "learning_rate": 4.736217705571989e-06,
      "loss": 0.1978,
      "step": 144
    },
    {
      "epoch": 0.14856557377049182,
      "grad_norm": 1.012902858560667,
      "learning_rate": 4.732608313136528e-06,
      "loss": 0.1984,
      "step": 145
    },
    {
      "epoch": 0.14959016393442623,
      "grad_norm": 1.0504040497710303,
      "learning_rate": 4.7289757887495935e-06,
      "loss": 0.1863,
      "step": 146
    },
    {
      "epoch": 0.15061475409836064,
      "grad_norm": 1.1127899538473016,
      "learning_rate": 4.725320170047601e-06,
      "loss": 0.1941,
      "step": 147
    },
    {
      "epoch": 0.15163934426229508,
      "grad_norm": 1.042632204743908,
      "learning_rate": 4.721641494906247e-06,
      "loss": 0.1821,
      "step": 148
    },
    {
      "epoch": 0.1526639344262295,
      "grad_norm": 1.0574154503620987,
      "learning_rate": 4.717939801440115e-06,
      "loss": 0.2014,
      "step": 149
    },
    {
      "epoch": 0.15368852459016394,
      "grad_norm": 1.0301525804758105,
      "learning_rate": 4.714215128002279e-06,
      "loss": 0.1941,
      "step": 150
    },
    {
      "epoch": 0.15471311475409835,
      "grad_norm": 1.0394986265794217,
      "learning_rate": 4.71046751318391e-06,
      "loss": 0.1993,
      "step": 151
    },
    {
      "epoch": 0.1557377049180328,
      "grad_norm": 0.8962122948145125,
      "learning_rate": 4.706696995813869e-06,
      "loss": 0.1901,
      "step": 152
    },
    {
      "epoch": 0.1567622950819672,
      "grad_norm": 1.0833285139330349,
      "learning_rate": 4.702903614958314e-06,
      "loss": 0.1923,
      "step": 153
    },
    {
      "epoch": 0.15778688524590165,
      "grad_norm": 1.0343462150287084,
      "learning_rate": 4.699087409920289e-06,
      "loss": 0.1795,
      "step": 154
    },
    {
      "epoch": 0.15881147540983606,
      "grad_norm": 1.136314244429114,
      "learning_rate": 4.695248420239317e-06,
      "loss": 0.1973,
      "step": 155
    },
    {
      "epoch": 0.1598360655737705,
      "grad_norm": 1.139557058919171,
      "learning_rate": 4.691386685690993e-06,
      "loss": 0.1919,
      "step": 156
    },
    {
      "epoch": 0.16086065573770492,
      "grad_norm": 0.9976803186383131,
      "learning_rate": 4.687502246286569e-06,
      "loss": 0.1908,
      "step": 157
    },
    {
      "epoch": 0.16188524590163936,
      "grad_norm": 1.1627286774170864,
      "learning_rate": 4.683595142272544e-06,
      "loss": 0.1994,
      "step": 158
    },
    {
      "epoch": 0.16290983606557377,
      "grad_norm": 1.1078694483749172,
      "learning_rate": 4.6796654141302385e-06,
      "loss": 0.2104,
      "step": 159
    },
    {
      "epoch": 0.16393442622950818,
      "grad_norm": 1.0286235617671269,
      "learning_rate": 4.675713102575389e-06,
      "loss": 0.2076,
      "step": 160
    },
    {
      "epoch": 0.16495901639344263,
      "grad_norm": 1.0393063836299663,
      "learning_rate": 4.671738248557708e-06,
      "loss": 0.1867,
      "step": 161
    },
    {
      "epoch": 0.16598360655737704,
      "grad_norm": 1.1301615477590607,
      "learning_rate": 4.667740893260477e-06,
      "loss": 0.2153,
      "step": 162
    },
    {
      "epoch": 0.16700819672131148,
      "grad_norm": 1.080136670756515,
      "learning_rate": 4.663721078100109e-06,
      "loss": 0.2051,
      "step": 163
    },
    {
      "epoch": 0.1680327868852459,
      "grad_norm": 1.1651116434629818,
      "learning_rate": 4.659678844725722e-06,
      "loss": 0.2035,
      "step": 164
    },
    {
      "epoch": 0.16905737704918034,
      "grad_norm": 1.2348264333740544,
      "learning_rate": 4.655614235018709e-06,
      "loss": 0.196,
      "step": 165
    },
    {
      "epoch": 0.17008196721311475,
      "grad_norm": 1.1935234453434926,
      "learning_rate": 4.651527291092305e-06,
      "loss": 0.1788,
      "step": 166
    },
    {
      "epoch": 0.1711065573770492,
      "grad_norm": 0.9877242013202426,
      "learning_rate": 4.647418055291144e-06,
      "loss": 0.1995,
      "step": 167
    },
    {
      "epoch": 0.1721311475409836,
      "grad_norm": 0.9765098237094999,
      "learning_rate": 4.643286570190832e-06,
      "loss": 0.1893,
      "step": 168
    },
    {
      "epoch": 0.17315573770491804,
      "grad_norm": 0.978172719805908,
      "learning_rate": 4.639132878597491e-06,
      "loss": 0.1778,
      "step": 169
    },
    {
      "epoch": 0.17418032786885246,
      "grad_norm": 1.0014966917764883,
      "learning_rate": 4.63495702354733e-06,
      "loss": 0.1957,
      "step": 170
    },
    {
      "epoch": 0.17520491803278687,
      "grad_norm": 1.0769286108149019,
      "learning_rate": 4.630759048306189e-06,
      "loss": 0.2004,
      "step": 171
    },
    {
      "epoch": 0.1762295081967213,
      "grad_norm": 0.9617066806915309,
      "learning_rate": 4.626538996369096e-06,
      "loss": 0.1909,
      "step": 172
    },
    {
      "epoch": 0.17725409836065573,
      "grad_norm": 1.0467338156518378,
      "learning_rate": 4.622296911459814e-06,
      "loss": 0.1842,
      "step": 173
    },
    {
      "epoch": 0.17827868852459017,
      "grad_norm": 0.9953993367792644,
      "learning_rate": 4.6180328375303876e-06,
      "loss": 0.1984,
      "step": 174
    },
    {
      "epoch": 0.17930327868852458,
      "grad_norm": 0.9693517701190274,
      "learning_rate": 4.61374681876069e-06,
      "loss": 0.188,
      "step": 175
    },
    {
      "epoch": 0.18032786885245902,
      "grad_norm": 0.9293298500399155,
      "learning_rate": 4.609438899557964e-06,
      "loss": 0.1825,
      "step": 176
    },
    {
      "epoch": 0.18135245901639344,
      "grad_norm": 1.0846028512563157,
      "learning_rate": 4.6051091245563615e-06,
      "loss": 0.196,
      "step": 177
    },
    {
      "epoch": 0.18237704918032788,
      "grad_norm": 0.9753380204138613,
      "learning_rate": 4.600757538616479e-06,
      "loss": 0.1816,
      "step": 178
    },
    {
      "epoch": 0.1834016393442623,
      "grad_norm": 1.026010381404164,
      "learning_rate": 4.5963841868249e-06,
      "loss": 0.2007,
      "step": 179
    },
    {
      "epoch": 0.18442622950819673,
      "grad_norm": 1.0949898495874768,
      "learning_rate": 4.591989114493718e-06,
      "loss": 0.1965,
      "step": 180
    },
    {
      "epoch": 0.18545081967213115,
      "grad_norm": 0.9799294135340412,
      "learning_rate": 4.587572367160075e-06,
      "loss": 0.1888,
      "step": 181
    },
    {
      "epoch": 0.1864754098360656,
      "grad_norm": 1.1177515485982465,
      "learning_rate": 4.583133990585684e-06,
      "loss": 0.1959,
      "step": 182
    },
    {
      "epoch": 0.1875,
      "grad_norm": 1.0400255433532866,
      "learning_rate": 4.578674030756364e-06,
      "loss": 0.1781,
      "step": 183
    },
    {
      "epoch": 0.1885245901639344,
      "grad_norm": 1.033220628354036,
      "learning_rate": 4.574192533881547e-06,
      "loss": 0.1863,
      "step": 184
    },
    {
      "epoch": 0.18954918032786885,
      "grad_norm": 1.0541794495564254,
      "learning_rate": 4.56968954639382e-06,
      "loss": 0.1877,
      "step": 185
    },
    {
      "epoch": 0.19057377049180327,
      "grad_norm": 1.0600340643516728,
      "learning_rate": 4.565165114948423e-06,
      "loss": 0.2037,
      "step": 186
    },
    {
      "epoch": 0.1915983606557377,
      "grad_norm": 1.0638072195378159,
      "learning_rate": 4.560619286422785e-06,
      "loss": 0.2037,
      "step": 187
    },
    {
      "epoch": 0.19262295081967212,
      "grad_norm": 1.030677274531005,
      "learning_rate": 4.556052107916023e-06,
      "loss": 0.1806,
      "step": 188
    },
    {
      "epoch": 0.19364754098360656,
      "grad_norm": 1.1370106988366322,
      "learning_rate": 4.551463626748463e-06,
      "loss": 0.2039,
      "step": 189
    },
    {
      "epoch": 0.19467213114754098,
      "grad_norm": 1.0422993393235285,
      "learning_rate": 4.546853890461147e-06,
      "loss": 0.1972,
      "step": 190
    },
    {
      "epoch": 0.19569672131147542,
      "grad_norm": 0.9023870422062361,
      "learning_rate": 4.542222946815338e-06,
      "loss": 0.1819,
      "step": 191
    },
    {
      "epoch": 0.19672131147540983,
      "grad_norm": 0.9489943372721543,
      "learning_rate": 4.537570843792028e-06,
      "loss": 0.1781,
      "step": 192
    },
    {
      "epoch": 0.19774590163934427,
      "grad_norm": 1.1214319233321943,
      "learning_rate": 4.532897629591445e-06,
      "loss": 0.1958,
      "step": 193
    },
    {
      "epoch": 0.1987704918032787,
      "grad_norm": 0.9747289794717103,
      "learning_rate": 4.528203352632542e-06,
      "loss": 0.2029,
      "step": 194
    },
    {
      "epoch": 0.19979508196721313,
      "grad_norm": 0.9516145745683492,
      "learning_rate": 4.523488061552506e-06,
      "loss": 0.1866,
      "step": 195
    },
    {
      "epoch": 0.20081967213114754,
      "grad_norm": 0.986244434369134,
      "learning_rate": 4.518751805206251e-06,
      "loss": 0.1903,
      "step": 196
    },
    {
      "epoch": 0.20184426229508196,
      "grad_norm": 0.9682844913761744,
      "learning_rate": 4.513994632665908e-06,
      "loss": 0.1882,
      "step": 197
    },
    {
      "epoch": 0.2028688524590164,
      "grad_norm": 0.9625539731715987,
      "learning_rate": 4.509216593220324e-06,
      "loss": 0.1879,
      "step": 198
    },
    {
      "epoch": 0.2038934426229508,
      "grad_norm": 1.0526429577245582,
      "learning_rate": 4.504417736374542e-06,
      "loss": 0.203,
      "step": 199
    },
    {
      "epoch": 0.20491803278688525,
      "grad_norm": 1.1229903492671047,
      "learning_rate": 4.499598111849299e-06,
      "loss": 0.1914,
      "step": 200
    },
    {
      "epoch": 0.20491803278688525,
      "eval_loss": 0.17589983344078064,
      "eval_runtime": 2.4244,
      "eval_samples_per_second": 3.3,
      "eval_steps_per_second": 0.825,
      "step": 200
    },
    {
      "epoch": 0.20594262295081966,
      "grad_norm": 0.9962660878913085,
      "learning_rate": 4.4947577695805e-06,
      "loss": 0.2081,
      "step": 201
    },
    {
      "epoch": 0.2069672131147541,
      "grad_norm": 1.025972645585872,
      "learning_rate": 4.489896759718706e-06,
      "loss": 0.186,
      "step": 202
    },
    {
      "epoch": 0.20799180327868852,
      "grad_norm": 0.9330092441565672,
      "learning_rate": 4.485015132628618e-06,
      "loss": 0.1827,
      "step": 203
    },
    {
      "epoch": 0.20901639344262296,
      "grad_norm": 1.064826887131764,
      "learning_rate": 4.4801129388885475e-06,
      "loss": 0.199,
      "step": 204
    },
    {
      "epoch": 0.21004098360655737,
      "grad_norm": 1.083619684687526,
      "learning_rate": 4.475190229289898e-06,
      "loss": 0.21,
      "step": 205
    },
    {
      "epoch": 0.21106557377049182,
      "grad_norm": 1.1745677658099956,
      "learning_rate": 4.470247054836633e-06,
      "loss": 0.1983,
      "step": 206
    },
    {
      "epoch": 0.21209016393442623,
      "grad_norm": 1.0058562059782192,
      "learning_rate": 4.465283466744757e-06,
      "loss": 0.1815,
      "step": 207
    },
    {
      "epoch": 0.21311475409836064,
      "grad_norm": 1.0392677757793716,
      "learning_rate": 4.460299516441777e-06,
      "loss": 0.1885,
      "step": 208
    },
    {
      "epoch": 0.21413934426229508,
      "grad_norm": 1.0328030204187646,
      "learning_rate": 4.455295255566169e-06,
      "loss": 0.1932,
      "step": 209
    },
    {
      "epoch": 0.2151639344262295,
      "grad_norm": 1.0408439004343766,
      "learning_rate": 4.4502707359668515e-06,
      "loss": 0.2194,
      "step": 210
    },
    {
      "epoch": 0.21618852459016394,
      "grad_norm": 1.1097459764593374,
      "learning_rate": 4.4452260097026376e-06,
      "loss": 0.1907,
      "step": 211
    },
    {
      "epoch": 0.21721311475409835,
      "grad_norm": 0.9728079245391277,
      "learning_rate": 4.440161129041704e-06,
      "loss": 0.1819,
      "step": 212
    },
    {
      "epoch": 0.2182377049180328,
      "grad_norm": 1.0305429058602011,
      "learning_rate": 4.435076146461044e-06,
      "loss": 0.1857,
      "step": 213
    },
    {
      "epoch": 0.2192622950819672,
      "grad_norm": 1.1023877998769755,
      "learning_rate": 4.429971114645928e-06,
      "loss": 0.1978,
      "step": 214
    },
    {
      "epoch": 0.22028688524590165,
      "grad_norm": 1.0721732011653349,
      "learning_rate": 4.424846086489353e-06,
      "loss": 0.2047,
      "step": 215
    },
    {
      "epoch": 0.22131147540983606,
      "grad_norm": 1.1228522717315337,
      "learning_rate": 4.4197011150915e-06,
      "loss": 0.2025,
      "step": 216
    },
    {
      "epoch": 0.2223360655737705,
      "grad_norm": 1.0741739831840134,
      "learning_rate": 4.41453625375918e-06,
      "loss": 0.1863,
      "step": 217
    },
    {
      "epoch": 0.22336065573770492,
      "grad_norm": 0.947580960236787,
      "learning_rate": 4.409351556005281e-06,
      "loss": 0.177,
      "step": 218
    },
    {
      "epoch": 0.22438524590163936,
      "grad_norm": 1.1061826152374985,
      "learning_rate": 4.404147075548218e-06,
      "loss": 0.1826,
      "step": 219
    },
    {
      "epoch": 0.22540983606557377,
      "grad_norm": 1.029903264802071,
      "learning_rate": 4.398922866311371e-06,
      "loss": 0.1937,
      "step": 220
    },
    {
      "epoch": 0.22643442622950818,
      "grad_norm": 1.1119447378384344,
      "learning_rate": 4.393678982422532e-06,
      "loss": 0.2027,
      "step": 221
    },
    {
      "epoch": 0.22745901639344263,
      "grad_norm": 0.9903248700525732,
      "learning_rate": 4.388415478213337e-06,
      "loss": 0.1875,
      "step": 222
    },
    {
      "epoch": 0.22848360655737704,
      "grad_norm": 1.0921889287784077,
      "learning_rate": 4.383132408218712e-06,
      "loss": 0.1893,
      "step": 223
    },
    {
      "epoch": 0.22950819672131148,
      "grad_norm": 1.064794200409981,
      "learning_rate": 4.3778298271762995e-06,
      "loss": 0.1978,
      "step": 224
    },
    {
      "epoch": 0.2305327868852459,
      "grad_norm": 0.9635986326414954,
      "learning_rate": 4.372507790025899e-06,
      "loss": 0.187,
      "step": 225
    },
    {
      "epoch": 0.23155737704918034,
      "grad_norm": 1.0550218795957191,
      "learning_rate": 4.367166351908886e-06,
      "loss": 0.2022,
      "step": 226
    },
    {
      "epoch": 0.23258196721311475,
      "grad_norm": 0.924031143914617,
      "learning_rate": 4.3618055681676585e-06,
      "loss": 0.166,
      "step": 227
    },
    {
      "epoch": 0.2336065573770492,
      "grad_norm": 1.1032802650437714,
      "learning_rate": 4.356425494345047e-06,
      "loss": 0.2023,
      "step": 228
    },
    {
      "epoch": 0.2346311475409836,
      "grad_norm": 0.9809443791157177,
      "learning_rate": 4.351026186183748e-06,
      "loss": 0.1899,
      "step": 229
    },
    {
      "epoch": 0.23565573770491804,
      "grad_norm": 1.0926728890364168,
      "learning_rate": 4.345607699625744e-06,
      "loss": 0.1991,
      "step": 230
    },
    {
      "epoch": 0.23668032786885246,
      "grad_norm": 1.1554933893534298,
      "learning_rate": 4.340170090811723e-06,
      "loss": 0.2091,
      "step": 231
    },
    {
      "epoch": 0.23770491803278687,
      "grad_norm": 0.9932814442103823,
      "learning_rate": 4.334713416080498e-06,
      "loss": 0.1842,
      "step": 232
    },
    {
      "epoch": 0.2387295081967213,
      "grad_norm": 1.058790787440086,
      "learning_rate": 4.329237731968424e-06,
      "loss": 0.1928,
      "step": 233
    },
    {
      "epoch": 0.23975409836065573,
      "grad_norm": 0.8765605485867847,
      "learning_rate": 4.323743095208812e-06,
      "loss": 0.1796,
      "step": 234
    },
    {
      "epoch": 0.24077868852459017,
      "grad_norm": 1.0456097781001747,
      "learning_rate": 4.318229562731338e-06,
      "loss": 0.2017,
      "step": 235
    },
    {
      "epoch": 0.24180327868852458,
      "grad_norm": 0.8913095490656313,
      "learning_rate": 4.312697191661457e-06,
      "loss": 0.1695,
      "step": 236
    },
    {
      "epoch": 0.24282786885245902,
      "grad_norm": 0.9740515278829909,
      "learning_rate": 4.3071460393198105e-06,
      "loss": 0.1866,
      "step": 237
    },
    {
      "epoch": 0.24385245901639344,
      "grad_norm": 0.9403675748972575,
      "learning_rate": 4.301576163221631e-06,
      "loss": 0.189,
      "step": 238
    },
    {
      "epoch": 0.24487704918032788,
      "grad_norm": 0.9913908866491705,
      "learning_rate": 4.2959876210761465e-06,
      "loss": 0.2019,
      "step": 239
    },
    {
      "epoch": 0.2459016393442623,
      "grad_norm": 0.9434499604168924,
      "learning_rate": 4.290380470785984e-06,
      "loss": 0.2032,
      "step": 240
    },
    {
      "epoch": 0.24692622950819673,
      "grad_norm": 1.008826178781846,
      "learning_rate": 4.284754770446568e-06,
      "loss": 0.1922,
      "step": 241
    },
    {
      "epoch": 0.24795081967213115,
      "grad_norm": 1.0189518338521255,
      "learning_rate": 4.27911057834552e-06,
      "loss": 0.1932,
      "step": 242
    },
    {
      "epoch": 0.2489754098360656,
      "grad_norm": 0.9021897994901373,
      "learning_rate": 4.273447952962054e-06,
      "loss": 0.1923,
      "step": 243
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.0256006331945873,
      "learning_rate": 4.267766952966369e-06,
      "loss": 0.1856,
      "step": 244
    },
    {
      "epoch": 0.2510245901639344,
      "grad_norm": 1.1268552545297648,
      "learning_rate": 4.262067637219043e-06,
      "loss": 0.1959,
      "step": 245
    },
    {
      "epoch": 0.2520491803278688,
      "grad_norm": 1.0772186119795295,
      "learning_rate": 4.256350064770424e-06,
      "loss": 0.2072,
      "step": 246
    },
    {
      "epoch": 0.2530737704918033,
      "grad_norm": 1.0430348207735265,
      "learning_rate": 4.250614294860013e-06,
      "loss": 0.173,
      "step": 247
    },
    {
      "epoch": 0.2540983606557377,
      "grad_norm": 0.8894745639181894,
      "learning_rate": 4.2448603869158585e-06,
      "loss": 0.1736,
      "step": 248
    },
    {
      "epoch": 0.2551229508196721,
      "grad_norm": 1.0674176916476532,
      "learning_rate": 4.239088400553936e-06,
      "loss": 0.1841,
      "step": 249
    },
    {
      "epoch": 0.25614754098360654,
      "grad_norm": 0.972804641006653,
      "learning_rate": 4.233298395577527e-06,
      "loss": 0.1994,
      "step": 250
    },
    {
      "epoch": 0.257172131147541,
      "grad_norm": 1.1201275183585193,
      "learning_rate": 4.227490431976606e-06,
      "loss": 0.2004,
      "step": 251
    },
    {
      "epoch": 0.2581967213114754,
      "grad_norm": 1.0480381810540813,
      "learning_rate": 4.221664569927217e-06,
      "loss": 0.1781,
      "step": 252
    },
    {
      "epoch": 0.25922131147540983,
      "grad_norm": 0.9910167796730522,
      "learning_rate": 4.215820869790844e-06,
      "loss": 0.2005,
      "step": 253
    },
    {
      "epoch": 0.26024590163934425,
      "grad_norm": 1.077369659370248,
      "learning_rate": 4.209959392113796e-06,
      "loss": 0.1962,
      "step": 254
    },
    {
      "epoch": 0.2612704918032787,
      "grad_norm": 1.1008285129387563,
      "learning_rate": 4.204080197626572e-06,
      "loss": 0.1934,
      "step": 255
    },
    {
      "epoch": 0.26229508196721313,
      "grad_norm": 1.0393962604451654,
      "learning_rate": 4.198183347243233e-06,
      "loss": 0.1882,
      "step": 256
    },
    {
      "epoch": 0.26331967213114754,
      "grad_norm": 1.1528469271149604,
      "learning_rate": 4.192268902060774e-06,
      "loss": 0.1974,
      "step": 257
    },
    {
      "epoch": 0.26434426229508196,
      "grad_norm": 1.156974290486497,
      "learning_rate": 4.186336923358488e-06,
      "loss": 0.194,
      "step": 258
    },
    {
      "epoch": 0.26536885245901637,
      "grad_norm": 1.0480680754180007,
      "learning_rate": 4.180387472597333e-06,
      "loss": 0.1742,
      "step": 259
    },
    {
      "epoch": 0.26639344262295084,
      "grad_norm": 1.0821380883725997,
      "learning_rate": 4.1744206114192895e-06,
      "loss": 0.1782,
      "step": 260
    },
    {
      "epoch": 0.26741803278688525,
      "grad_norm": 0.9370730450722256,
      "learning_rate": 4.168436401646735e-06,
      "loss": 0.1752,
      "step": 261
    },
    {
      "epoch": 0.26844262295081966,
      "grad_norm": 0.9909139173210327,
      "learning_rate": 4.162434905281787e-06,
      "loss": 0.1816,
      "step": 262
    },
    {
      "epoch": 0.2694672131147541,
      "grad_norm": 0.9250155997034635,
      "learning_rate": 4.156416184505673e-06,
      "loss": 0.19,
      "step": 263
    },
    {
      "epoch": 0.27049180327868855,
      "grad_norm": 1.0151859224916395,
      "learning_rate": 4.15038030167808e-06,
      "loss": 0.1882,
      "step": 264
    },
    {
      "epoch": 0.27151639344262296,
      "grad_norm": 1.034078628297854,
      "learning_rate": 4.144327319336511e-06,
      "loss": 0.1768,
      "step": 265
    },
    {
      "epoch": 0.2725409836065574,
      "grad_norm": 0.9608936018845129,
      "learning_rate": 4.138257300195636e-06,
      "loss": 0.181,
      "step": 266
    },
    {
      "epoch": 0.2735655737704918,
      "grad_norm": 0.9165074571500542,
      "learning_rate": 4.132170307146643e-06,
      "loss": 0.2078,
      "step": 267
    },
    {
      "epoch": 0.27459016393442626,
      "grad_norm": 0.9192138954571367,
      "learning_rate": 4.126066403256585e-06,
      "loss": 0.1873,
      "step": 268
    },
    {
      "epoch": 0.27561475409836067,
      "grad_norm": 1.0821945232422363,
      "learning_rate": 4.1199456517677285e-06,
      "loss": 0.2062,
      "step": 269
    },
    {
      "epoch": 0.2766393442622951,
      "grad_norm": 0.9458127545082142,
      "learning_rate": 4.113808116096897e-06,
      "loss": 0.1833,
      "step": 270
    },
    {
      "epoch": 0.2776639344262295,
      "grad_norm": 0.9089856213021142,
      "learning_rate": 4.107653859834811e-06,
      "loss": 0.1826,
      "step": 271
    },
    {
      "epoch": 0.2786885245901639,
      "grad_norm": 0.8911833670011362,
      "learning_rate": 4.101482946745438e-06,
      "loss": 0.1762,
      "step": 272
    },
    {
      "epoch": 0.2797131147540984,
      "grad_norm": 0.9918650522090428,
      "learning_rate": 4.095295440765322e-06,
      "loss": 0.2124,
      "step": 273
    },
    {
      "epoch": 0.2807377049180328,
      "grad_norm": 1.0755582607150664,
      "learning_rate": 4.089091406002926e-06,
      "loss": 0.2033,
      "step": 274
    },
    {
      "epoch": 0.2817622950819672,
      "grad_norm": 0.9064054009332444,
      "learning_rate": 4.082870906737969e-06,
      "loss": 0.1941,
      "step": 275
    },
    {
      "epoch": 0.2827868852459016,
      "grad_norm": 0.9186901228732742,
      "learning_rate": 4.076634007420754e-06,
      "loss": 0.1745,
      "step": 276
    },
    {
      "epoch": 0.2838114754098361,
      "grad_norm": 0.9495257726025252,
      "learning_rate": 4.070380772671506e-06,
      "loss": 0.1978,
      "step": 277
    },
    {
      "epoch": 0.2848360655737705,
      "grad_norm": 0.9159995303066419,
      "learning_rate": 4.064111267279703e-06,
      "loss": 0.1887,
      "step": 278
    },
    {
      "epoch": 0.2858606557377049,
      "grad_norm": 0.881407709417217,
      "learning_rate": 4.0578255562034e-06,
      "loss": 0.1955,
      "step": 279
    },
    {
      "epoch": 0.28688524590163933,
      "grad_norm": 1.1730286446698017,
      "learning_rate": 4.051523704568557e-06,
      "loss": 0.1947,
      "step": 280
    },
    {
      "epoch": 0.28790983606557374,
      "grad_norm": 1.0420753893729318,
      "learning_rate": 4.04520577766837e-06,
      "loss": 0.1955,
      "step": 281
    },
    {
      "epoch": 0.2889344262295082,
      "grad_norm": 0.8991763142976178,
      "learning_rate": 4.038871840962585e-06,
      "loss": 0.1766,
      "step": 282
    },
    {
      "epoch": 0.2899590163934426,
      "grad_norm": 0.8413128677546347,
      "learning_rate": 4.032521960076829e-06,
      "loss": 0.1803,
      "step": 283
    },
    {
      "epoch": 0.29098360655737704,
      "grad_norm": 0.8952367037240482,
      "learning_rate": 4.026156200801924e-06,
      "loss": 0.177,
      "step": 284
    },
    {
      "epoch": 0.29200819672131145,
      "grad_norm": 0.8880426780549132,
      "learning_rate": 4.019774629093206e-06,
      "loss": 0.1802,
      "step": 285
    },
    {
      "epoch": 0.2930327868852459,
      "grad_norm": 1.0404416061774513,
      "learning_rate": 4.0133773110698454e-06,
      "loss": 0.19,
      "step": 286
    },
    {
      "epoch": 0.29405737704918034,
      "grad_norm": 0.929725468084995,
      "learning_rate": 4.006964313014158e-06,
      "loss": 0.1903,
      "step": 287
    },
    {
      "epoch": 0.29508196721311475,
      "grad_norm": 0.9611017409197256,
      "learning_rate": 4.0005357013709215e-06,
      "loss": 0.1781,
      "step": 288
    },
    {
      "epoch": 0.29610655737704916,
      "grad_norm": 0.9025510784249413,
      "learning_rate": 3.994091542746681e-06,
      "loss": 0.1706,
      "step": 289
    },
    {
      "epoch": 0.29713114754098363,
      "grad_norm": 0.9120277358061315,
      "learning_rate": 3.987631903909068e-06,
      "loss": 0.1871,
      "step": 290
    },
    {
      "epoch": 0.29815573770491804,
      "grad_norm": 0.9066230289701531,
      "learning_rate": 3.981156851786102e-06,
      "loss": 0.1917,
      "step": 291
    },
    {
      "epoch": 0.29918032786885246,
      "grad_norm": 0.9602809551789313,
      "learning_rate": 3.9746664534654975e-06,
      "loss": 0.1902,
      "step": 292
    },
    {
      "epoch": 0.30020491803278687,
      "grad_norm": 1.0981120184491406,
      "learning_rate": 3.968160776193971e-06,
      "loss": 0.198,
      "step": 293
    },
    {
      "epoch": 0.3012295081967213,
      "grad_norm": 0.9692313384837454,
      "learning_rate": 3.961639887376546e-06,
      "loss": 0.1788,
      "step": 294
    },
    {
      "epoch": 0.30225409836065575,
      "grad_norm": 0.9094508203682742,
      "learning_rate": 3.955103854575847e-06,
      "loss": 0.1804,
      "step": 295
    },
    {
      "epoch": 0.30327868852459017,
      "grad_norm": 0.9721688074247665,
      "learning_rate": 3.9485527455114095e-06,
      "loss": 0.1818,
      "step": 296
    },
    {
      "epoch": 0.3043032786885246,
      "grad_norm": 0.9780075130319453,
      "learning_rate": 3.941986628058971e-06,
      "loss": 0.1935,
      "step": 297
    },
    {
      "epoch": 0.305327868852459,
      "grad_norm": 0.962518419495144,
      "learning_rate": 3.9354055702497715e-06,
      "loss": 0.1733,
      "step": 298
    },
    {
      "epoch": 0.30635245901639346,
      "grad_norm": 1.0493654596885167,
      "learning_rate": 3.928809640269845e-06,
      "loss": 0.1917,
      "step": 299
    },
    {
      "epoch": 0.3073770491803279,
      "grad_norm": 1.0906560014882594,
      "learning_rate": 3.922198906459318e-06,
      "loss": 0.1891,
      "step": 300
    },
    {
      "epoch": 0.3073770491803279,
      "eval_loss": 0.1708558350801468,
      "eval_runtime": 2.4306,
      "eval_samples_per_second": 3.291,
      "eval_steps_per_second": 0.823,
      "step": 300
    },
    {
      "epoch": 0.3084016393442623,
      "grad_norm": 0.974237829150796,
      "learning_rate": 3.9155734373116975e-06,
      "loss": 0.1944,
      "step": 301
    },
    {
      "epoch": 0.3094262295081967,
      "grad_norm": 0.9804315215019299,
      "learning_rate": 3.908933301473163e-06,
      "loss": 0.1894,
      "step": 302
    },
    {
      "epoch": 0.3104508196721312,
      "grad_norm": 0.9144952952876062,
      "learning_rate": 3.902278567741855e-06,
      "loss": 0.1779,
      "step": 303
    },
    {
      "epoch": 0.3114754098360656,
      "grad_norm": 0.9345650715397396,
      "learning_rate": 3.895609305067162e-06,
      "loss": 0.1765,
      "step": 304
    },
    {
      "epoch": 0.3125,
      "grad_norm": 1.0346618111518961,
      "learning_rate": 3.888925582549006e-06,
      "loss": 0.1996,
      "step": 305
    },
    {
      "epoch": 0.3135245901639344,
      "grad_norm": 1.0075667335652363,
      "learning_rate": 3.882227469437126e-06,
      "loss": 0.1818,
      "step": 306
    },
    {
      "epoch": 0.3145491803278688,
      "grad_norm": 0.9391331503895501,
      "learning_rate": 3.8755150351303635e-06,
      "loss": 0.1945,
      "step": 307
    },
    {
      "epoch": 0.3155737704918033,
      "grad_norm": 1.0296899638930477,
      "learning_rate": 3.868788349175939e-06,
      "loss": 0.1801,
      "step": 308
    },
    {
      "epoch": 0.3165983606557377,
      "grad_norm": 1.0256649895615564,
      "learning_rate": 3.862047481268735e-06,
      "loss": 0.1788,
      "step": 309
    },
    {
      "epoch": 0.3176229508196721,
      "grad_norm": 1.1151712901913624,
      "learning_rate": 3.855292501250573e-06,
      "loss": 0.18,
      "step": 310
    },
    {
      "epoch": 0.31864754098360654,
      "grad_norm": 1.1292576648326975,
      "learning_rate": 3.8485234791094864e-06,
      "loss": 0.1722,
      "step": 311
    },
    {
      "epoch": 0.319672131147541,
      "grad_norm": 0.962373873109883,
      "learning_rate": 3.841740484979002e-06,
      "loss": 0.173,
      "step": 312
    },
    {
      "epoch": 0.3206967213114754,
      "grad_norm": 1.0518110179416476,
      "learning_rate": 3.834943589137407e-06,
      "loss": 0.1973,
      "step": 313
    },
    {
      "epoch": 0.32172131147540983,
      "grad_norm": 1.021786990369202,
      "learning_rate": 3.828132862007027e-06,
      "loss": 0.1715,
      "step": 314
    },
    {
      "epoch": 0.32274590163934425,
      "grad_norm": 0.9712131120570924,
      "learning_rate": 3.821308374153489e-06,
      "loss": 0.186,
      "step": 315
    },
    {
      "epoch": 0.3237704918032787,
      "grad_norm": 1.1490601644073053,
      "learning_rate": 3.8144701962849973e-06,
      "loss": 0.1826,
      "step": 316
    },
    {
      "epoch": 0.32479508196721313,
      "grad_norm": 0.9777055750622519,
      "learning_rate": 3.807618399251596e-06,
      "loss": 0.1906,
      "step": 317
    },
    {
      "epoch": 0.32581967213114754,
      "grad_norm": 0.9485271315706834,
      "learning_rate": 3.800753054044437e-06,
      "loss": 0.1777,
      "step": 318
    },
    {
      "epoch": 0.32684426229508196,
      "grad_norm": 0.9962403866878229,
      "learning_rate": 3.7938742317950438e-06,
      "loss": 0.1784,
      "step": 319
    },
    {
      "epoch": 0.32786885245901637,
      "grad_norm": 1.0811447321311103,
      "learning_rate": 3.7869820037745773e-06,
      "loss": 0.1976,
      "step": 320
    },
    {
      "epoch": 0.32889344262295084,
      "grad_norm": 0.9360128978468536,
      "learning_rate": 3.780076441393092e-06,
      "loss": 0.1658,
      "step": 321
    },
    {
      "epoch": 0.32991803278688525,
      "grad_norm": 0.9180508979091283,
      "learning_rate": 3.7731576161988005e-06,
      "loss": 0.1734,
      "step": 322
    },
    {
      "epoch": 0.33094262295081966,
      "grad_norm": 1.0396928541275214,
      "learning_rate": 3.766225599877331e-06,
      "loss": 0.1864,
      "step": 323
    },
    {
      "epoch": 0.3319672131147541,
      "grad_norm": 0.9153012515327466,
      "learning_rate": 3.7592804642509844e-06,
      "loss": 0.1779,
      "step": 324
    },
    {
      "epoch": 0.33299180327868855,
      "grad_norm": 0.997218981988434,
      "learning_rate": 3.7523222812779893e-06,
      "loss": 0.2125,
      "step": 325
    },
    {
      "epoch": 0.33401639344262296,
      "grad_norm": 0.9830184064611096,
      "learning_rate": 3.7453511230517563e-06,
      "loss": 0.1781,
      "step": 326
    },
    {
      "epoch": 0.3350409836065574,
      "grad_norm": 0.9339666778299544,
      "learning_rate": 3.7383670618001356e-06,
      "loss": 0.1797,
      "step": 327
    },
    {
      "epoch": 0.3360655737704918,
      "grad_norm": 0.9366700766934186,
      "learning_rate": 3.7313701698846616e-06,
      "loss": 0.1724,
      "step": 328
    },
    {
      "epoch": 0.33709016393442626,
      "grad_norm": 1.0361227800682657,
      "learning_rate": 3.724360519799808e-06,
      "loss": 0.1856,
      "step": 329
    },
    {
      "epoch": 0.33811475409836067,
      "grad_norm": 0.8308720399417647,
      "learning_rate": 3.7173381841722344e-06,
      "loss": 0.1723,
      "step": 330
    },
    {
      "epoch": 0.3391393442622951,
      "grad_norm": 1.0714188618129004,
      "learning_rate": 3.710303235760038e-06,
      "loss": 0.2,
      "step": 331
    },
    {
      "epoch": 0.3401639344262295,
      "grad_norm": 0.9177473010576371,
      "learning_rate": 3.703255747451991e-06,
      "loss": 0.1793,
      "step": 332
    },
    {
      "epoch": 0.3411885245901639,
      "grad_norm": 1.0661315727692644,
      "learning_rate": 3.6961957922667953e-06,
      "loss": 0.2004,
      "step": 333
    },
    {
      "epoch": 0.3422131147540984,
      "grad_norm": 0.983555980406845,
      "learning_rate": 3.689123443352319e-06,
      "loss": 0.1911,
      "step": 334
    },
    {
      "epoch": 0.3432377049180328,
      "grad_norm": 0.9736213389868449,
      "learning_rate": 3.682038773984844e-06,
      "loss": 0.1863,
      "step": 335
    },
    {
      "epoch": 0.3442622950819672,
      "grad_norm": 0.9830806895802345,
      "learning_rate": 3.6749418575683005e-06,
      "loss": 0.1797,
      "step": 336
    },
    {
      "epoch": 0.3452868852459016,
| "grad_norm": 1.0008462821181234, | |
| "learning_rate": 3.6678327676335114e-06, | |
| "loss": 0.2036, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.3463114754098361, | |
| "grad_norm": 0.8990507779640605, | |
| "learning_rate": 3.660711577837429e-06, | |
| "loss": 0.1873, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.3473360655737705, | |
| "grad_norm": 0.9344739940443912, | |
| "learning_rate": 3.653578361962371e-06, | |
| "loss": 0.1919, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.3483606557377049, | |
| "grad_norm": 0.8825408990053957, | |
| "learning_rate": 3.6464331939152576e-06, | |
| "loss": 0.1961, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.34938524590163933, | |
| "grad_norm": 0.9218334297665106, | |
| "learning_rate": 3.639276147726845e-06, | |
| "loss": 0.1854, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.35040983606557374, | |
| "grad_norm": 0.9324690906174032, | |
| "learning_rate": 3.6321072975509564e-06, | |
| "loss": 0.1905, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.3514344262295082, | |
| "grad_norm": 0.9539768060948641, | |
| "learning_rate": 3.6249267176637177e-06, | |
| "loss": 0.1902, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.3524590163934426, | |
| "grad_norm": 0.9685089280030936, | |
| "learning_rate": 3.6177344824627854e-06, | |
| "loss": 0.1914, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.35348360655737704, | |
| "grad_norm": 0.849621188864458, | |
| "learning_rate": 3.6105306664665746e-06, | |
| "loss": 0.1857, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.35450819672131145, | |
| "grad_norm": 0.969070419851645, | |
| "learning_rate": 3.6033153443134903e-06, | |
| "loss": 0.1939, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.3555327868852459, | |
| "grad_norm": 0.8555359157425206, | |
| "learning_rate": 3.5960885907611528e-06, | |
| "loss": 0.1751, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.35655737704918034, | |
| "grad_norm": 0.9625957055500438, | |
| "learning_rate": 3.5888504806856194e-06, | |
| "loss": 0.1982, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.35758196721311475, | |
| "grad_norm": 1.021555885929761, | |
| "learning_rate": 3.5816010890806153e-06, | |
| "loss": 0.2036, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.35860655737704916, | |
| "grad_norm": 0.8740783715036657, | |
| "learning_rate": 3.574340491056751e-06, | |
| "loss": 0.1886, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.35963114754098363, | |
| "grad_norm": 0.9391133885347263, | |
| "learning_rate": 3.5670687618407456e-06, | |
| "loss": 0.1864, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.36065573770491804, | |
| "grad_norm": 0.9747926125448839, | |
| "learning_rate": 3.5597859767746524e-06, | |
| "loss": 0.1961, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.36168032786885246, | |
| "grad_norm": 0.9890521400485744, | |
| "learning_rate": 3.552492211315067e-06, | |
| "loss": 0.2135, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.36270491803278687, | |
| "grad_norm": 0.8967167368386569, | |
| "learning_rate": 3.5451875410323587e-06, | |
| "loss": 0.1915, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.3637295081967213, | |
| "grad_norm": 0.9004311832535973, | |
| "learning_rate": 3.5378720416098783e-06, | |
| "loss": 0.1881, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.36475409836065575, | |
| "grad_norm": 0.9908535155617944, | |
| "learning_rate": 3.5305457888431747e-06, | |
| "loss": 0.1981, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.36577868852459017, | |
| "grad_norm": 0.9853818617819529, | |
| "learning_rate": 3.5232088586392165e-06, | |
| "loss": 0.1883, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.3668032786885246, | |
| "grad_norm": 1.0463375721325034, | |
| "learning_rate": 3.515861327015596e-06, | |
| "loss": 0.1994, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.367827868852459, | |
| "grad_norm": 0.9052272364482308, | |
| "learning_rate": 3.5085032700997506e-06, | |
| "loss": 0.1716, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.36885245901639346, | |
| "grad_norm": 1.016301006687888, | |
| "learning_rate": 3.501134764128167e-06, | |
| "loss": 0.1921, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.3698770491803279, | |
| "grad_norm": 0.9861707260118079, | |
| "learning_rate": 3.4937558854455934e-06, | |
| "loss": 0.1844, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.3709016393442623, | |
| "grad_norm": 0.9326523265709225, | |
| "learning_rate": 3.4863667105042526e-06, | |
| "loss": 0.1901, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.3719262295081967, | |
| "grad_norm": 0.8440868881004626, | |
| "learning_rate": 3.4789673158630438e-06, | |
| "loss": 0.1763, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.3729508196721312, | |
| "grad_norm": 0.8984441896914397, | |
| "learning_rate": 3.4715577781867516e-06, | |
| "loss": 0.1778, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.3739754098360656, | |
| "grad_norm": 1.0300522515206934, | |
| "learning_rate": 3.464138174245254e-06, | |
| "loss": 0.1991, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.375, | |
| "grad_norm": 1.1170963945459262, | |
| "learning_rate": 3.4567085809127247e-06, | |
| "loss": 0.1954, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.3760245901639344, | |
| "grad_norm": 0.9820703395867142, | |
| "learning_rate": 3.449269075166836e-06, | |
| "loss": 0.175, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.3770491803278688, | |
| "grad_norm": 1.005545038069355, | |
| "learning_rate": 3.441819734087963e-06, | |
| "loss": 0.1985, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.3780737704918033, | |
| "grad_norm": 1.1060850824585713, | |
| "learning_rate": 3.4343606348583843e-06, | |
| "loss": 0.1838, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.3790983606557377, | |
| "grad_norm": 0.9013438288791463, | |
| "learning_rate": 3.4268918547614814e-06, | |
| "loss": 0.1775, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.3801229508196721, | |
| "grad_norm": 0.9451163507725908, | |
| "learning_rate": 3.4194134711809403e-06, | |
| "loss": 0.1868, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.38114754098360654, | |
| "grad_norm": 0.8353752064473315, | |
| "learning_rate": 3.411925561599947e-06, | |
| "loss": 0.1724, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.382172131147541, | |
| "grad_norm": 1.0263103249835293, | |
| "learning_rate": 3.404428203600386e-06, | |
| "loss": 0.1855, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.3831967213114754, | |
| "grad_norm": 1.0665328862663697, | |
| "learning_rate": 3.396921474862037e-06, | |
| "loss": 0.1925, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.38422131147540983, | |
| "grad_norm": 0.8858165355966975, | |
| "learning_rate": 3.38940545316177e-06, | |
| "loss": 0.1865, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.38524590163934425, | |
| "grad_norm": 1.0181271259818823, | |
| "learning_rate": 3.3818802163727377e-06, | |
| "loss": 0.1846, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.3862704918032787, | |
| "grad_norm": 0.9774590408391829, | |
| "learning_rate": 3.3743458424635694e-06, | |
| "loss": 0.1881, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.38729508196721313, | |
| "grad_norm": 1.0512978913270727, | |
| "learning_rate": 3.3668024094975665e-06, | |
| "loss": 0.1755, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.38831967213114754, | |
| "grad_norm": 0.9157505647818972, | |
| "learning_rate": 3.359249995631888e-06, | |
| "loss": 0.1879, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.38934426229508196, | |
| "grad_norm": 0.8806244639722144, | |
| "learning_rate": 3.3516886791167446e-06, | |
| "loss": 0.1639, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.39036885245901637, | |
| "grad_norm": 0.9677680121953749, | |
| "learning_rate": 3.3441185382945883e-06, | |
| "loss": 0.182, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.39139344262295084, | |
| "grad_norm": 1.0300069172612671, | |
| "learning_rate": 3.3365396515992954e-06, | |
| "loss": 0.1814, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.39241803278688525, | |
| "grad_norm": 1.0851050006111067, | |
| "learning_rate": 3.3289520975553635e-06, | |
| "loss": 0.1988, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.39344262295081966, | |
| "grad_norm": 1.093539734983261, | |
| "learning_rate": 3.3213559547770873e-06, | |
| "loss": 0.1935, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.3944672131147541, | |
| "grad_norm": 0.9269699858489512, | |
| "learning_rate": 3.3137513019677514e-06, | |
| "loss": 0.186, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.39549180327868855, | |
| "grad_norm": 0.8369124013439341, | |
| "learning_rate": 3.306138217918811e-06, | |
| "loss": 0.1591, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.39651639344262296, | |
| "grad_norm": 1.0032049530349945, | |
| "learning_rate": 3.298516781509079e-06, | |
| "loss": 0.2112, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.3975409836065574, | |
| "grad_norm": 1.044381465568065, | |
| "learning_rate": 3.290887071703905e-06, | |
| "loss": 0.1948, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.3985655737704918, | |
| "grad_norm": 1.015067153208399, | |
| "learning_rate": 3.2832491675543597e-06, | |
| "loss": 0.1813, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.39959016393442626, | |
| "grad_norm": 0.9349214923134407, | |
| "learning_rate": 3.2756031481964134e-06, | |
| "loss": 0.1888, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.40061475409836067, | |
| "grad_norm": 1.0231077159903734, | |
| "learning_rate": 3.2679490928501207e-06, | |
| "loss": 0.1896, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.4016393442622951, | |
| "grad_norm": 0.9371460083202204, | |
| "learning_rate": 3.2602870808187955e-06, | |
| "loss": 0.1962, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.4026639344262295, | |
| "grad_norm": 1.0342221744795552, | |
| "learning_rate": 3.2526171914881876e-06, | |
| "loss": 0.1792, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.4036885245901639, | |
| "grad_norm": 1.0406608389115342, | |
| "learning_rate": 3.2449395043256683e-06, | |
| "loss": 0.1829, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.4047131147540984, | |
| "grad_norm": 0.8245556978018224, | |
| "learning_rate": 3.2372540988793973e-06, | |
| "loss": 0.1711, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.4057377049180328, | |
| "grad_norm": 0.7995196427971495, | |
| "learning_rate": 3.2295610547775054e-06, | |
| "loss": 0.1716, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.4067622950819672, | |
| "grad_norm": 0.9761037593410186, | |
| "learning_rate": 3.221860451727268e-06, | |
| "loss": 0.1808, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.4077868852459016, | |
| "grad_norm": 0.9880493962436403, | |
| "learning_rate": 3.214152369514275e-06, | |
| "loss": 0.1933, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.4088114754098361, | |
| "grad_norm": 0.943261780103261, | |
| "learning_rate": 3.206436888001612e-06, | |
| "loss": 0.1562, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.4098360655737705, | |
| "grad_norm": 1.005725376840667, | |
| "learning_rate": 3.198714087129024e-06, | |
| "loss": 0.1999, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.4098360655737705, | |
| "eval_loss": 0.16810214519500732, | |
| "eval_runtime": 2.434, | |
| "eval_samples_per_second": 3.287, | |
| "eval_steps_per_second": 0.822, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.4108606557377049, | |
| "grad_norm": 0.9526095132564871, | |
| "learning_rate": 3.190984046912095e-06, | |
| "loss": 0.2012, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.41188524590163933, | |
| "grad_norm": 0.9023202592271459, | |
| "learning_rate": 3.1832468474414148e-06, | |
| "loss": 0.1868, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.41290983606557374, | |
| "grad_norm": 0.9435387554046812, | |
| "learning_rate": 3.175502568881747e-06, | |
| "loss": 0.1746, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4139344262295082, | |
| "grad_norm": 0.9804553538550842, | |
| "learning_rate": 3.1677512914712044e-06, | |
| "loss": 0.1921, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.4149590163934426, | |
| "grad_norm": 0.8890905522969267, | |
| "learning_rate": 3.1599930955204156e-06, | |
| "loss": 0.1947, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.41598360655737704, | |
| "grad_norm": 0.9683135604251915, | |
| "learning_rate": 3.1522280614116886e-06, | |
| "loss": 0.193, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.41700819672131145, | |
| "grad_norm": 0.9988605665171968, | |
| "learning_rate": 3.1444562695981817e-06, | |
| "loss": 0.2026, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.4180327868852459, | |
| "grad_norm": 0.8687368469302651, | |
| "learning_rate": 3.1366778006030717e-06, | |
| "loss": 0.192, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.41905737704918034, | |
| "grad_norm": 1.0857187112486426, | |
| "learning_rate": 3.1288927350187163e-06, | |
| "loss": 0.1909, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.42008196721311475, | |
| "grad_norm": 0.9710600193646357, | |
| "learning_rate": 3.12110115350582e-06, | |
| "loss": 0.1809, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.42110655737704916, | |
| "grad_norm": 0.9456713520233195, | |
| "learning_rate": 3.1133031367925974e-06, | |
| "loss": 0.1706, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.42213114754098363, | |
| "grad_norm": 0.9547249882772915, | |
| "learning_rate": 3.1054987656739395e-06, | |
| "loss": 0.186, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.42315573770491804, | |
| "grad_norm": 1.0195756446825028, | |
| "learning_rate": 3.0976881210105752e-06, | |
| "loss": 0.1948, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.42418032786885246, | |
| "grad_norm": 1.0419299237523008, | |
| "learning_rate": 3.089871283728232e-06, | |
| "loss": 0.1868, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.42520491803278687, | |
| "grad_norm": 0.9512433125376193, | |
| "learning_rate": 3.082048334816799e-06, | |
| "loss": 0.1737, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.4262295081967213, | |
| "grad_norm": 0.9483952515172039, | |
| "learning_rate": 3.0742193553294896e-06, | |
| "loss": 0.1971, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.42725409836065575, | |
| "grad_norm": 0.9595885090552172, | |
| "learning_rate": 3.066384426381996e-06, | |
| "loss": 0.1751, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.42827868852459017, | |
| "grad_norm": 0.7936179170600358, | |
| "learning_rate": 3.058543629151657e-06, | |
| "loss": 0.1697, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.4293032786885246, | |
| "grad_norm": 0.858005912225604, | |
| "learning_rate": 3.0506970448766077e-06, | |
| "loss": 0.1815, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.430327868852459, | |
| "grad_norm": 0.9430715833001673, | |
| "learning_rate": 3.0428447548549466e-06, | |
| "loss": 0.1825, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.43135245901639346, | |
| "grad_norm": 0.9257776681911731, | |
| "learning_rate": 3.034986840443887e-06, | |
| "loss": 0.1725, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.4323770491803279, | |
| "grad_norm": 0.9377881456280102, | |
| "learning_rate": 3.0271233830589162e-06, | |
| "loss": 0.1844, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.4334016393442623, | |
| "grad_norm": 1.0013014700093705, | |
| "learning_rate": 3.019254464172953e-06, | |
| "loss": 0.198, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.4344262295081967, | |
| "grad_norm": 0.865648036887426, | |
| "learning_rate": 3.011380165315503e-06, | |
| "loss": 0.183, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.4354508196721312, | |
| "grad_norm": 0.9258424362853888, | |
| "learning_rate": 3.0035005680718094e-06, | |
| "loss": 0.1776, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.4364754098360656, | |
| "grad_norm": 0.9001292801411326, | |
| "learning_rate": 2.9956157540820186e-06, | |
| "loss": 0.1812, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.4375, | |
| "grad_norm": 0.9775053601756468, | |
| "learning_rate": 2.9877258050403214e-06, | |
| "loss": 0.1783, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.4385245901639344, | |
| "grad_norm": 0.871708642795832, | |
| "learning_rate": 2.9798308026941147e-06, | |
| "loss": 0.1805, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.4395491803278688, | |
| "grad_norm": 0.9276040613374253, | |
| "learning_rate": 2.9719308288431543e-06, | |
| "loss": 0.186, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.4405737704918033, | |
| "grad_norm": 0.8530532618006621, | |
| "learning_rate": 2.964025965338702e-06, | |
| "loss": 0.1826, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.4415983606557377, | |
| "grad_norm": 1.0352417625547834, | |
| "learning_rate": 2.956116294082685e-06, | |
| "loss": 0.1822, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.4426229508196721, | |
| "grad_norm": 0.8860058832420755, | |
| "learning_rate": 2.9482018970268395e-06, | |
| "loss": 0.1883, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.44364754098360654, | |
| "grad_norm": 0.8202340203564753, | |
| "learning_rate": 2.940282856171869e-06, | |
| "loss": 0.1777, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.444672131147541, | |
| "grad_norm": 0.884758289776382, | |
| "learning_rate": 2.932359253566588e-06, | |
| "loss": 0.1677, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.4456967213114754, | |
| "grad_norm": 0.8982103787621459, | |
| "learning_rate": 2.9244311713070774e-06, | |
| "loss": 0.1614, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.44672131147540983, | |
| "grad_norm": 1.002182566210533, | |
| "learning_rate": 2.91649869153583e-06, | |
| "loss": 0.2016, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.44774590163934425, | |
| "grad_norm": 1.1280758657132388, | |
| "learning_rate": 2.908561896440904e-06, | |
| "loss": 0.1913, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.4487704918032787, | |
| "grad_norm": 0.9361092677253728, | |
| "learning_rate": 2.900620868255064e-06, | |
| "loss": 0.1878, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.44979508196721313, | |
| "grad_norm": 1.0616014053031155, | |
| "learning_rate": 2.8926756892549375e-06, | |
| "loss": 0.1906, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.45081967213114754, | |
| "grad_norm": 0.996118790544859, | |
| "learning_rate": 2.884726441760155e-06, | |
| "loss": 0.1734, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.45184426229508196, | |
| "grad_norm": 1.033940124594917, | |
| "learning_rate": 2.876773208132503e-06, | |
| "loss": 0.1797, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.45286885245901637, | |
| "grad_norm": 0.9619937576387713, | |
| "learning_rate": 2.8688160707750678e-06, | |
| "loss": 0.1837, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.45389344262295084, | |
| "grad_norm": 0.8193219900925219, | |
| "learning_rate": 2.8608551121313797e-06, | |
| "loss": 0.1658, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.45491803278688525, | |
| "grad_norm": 0.984671344727508, | |
| "learning_rate": 2.8528904146845652e-06, | |
| "loss": 0.1909, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.45594262295081966, | |
| "grad_norm": 0.8938287151182799, | |
| "learning_rate": 2.8449220609564857e-06, | |
| "loss": 0.1767, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.4569672131147541, | |
| "grad_norm": 0.8783794269209247, | |
| "learning_rate": 2.836950133506885e-06, | |
| "loss": 0.1833, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.45799180327868855, | |
| "grad_norm": 0.8388401917820947, | |
| "learning_rate": 2.828974714932535e-06, | |
| "loss": 0.1623, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.45901639344262296, | |
| "grad_norm": 0.8879468318408993, | |
| "learning_rate": 2.820995887866378e-06, | |
| "loss": 0.1663, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.4600409836065574, | |
| "grad_norm": 0.8040026306441832, | |
| "learning_rate": 2.8130137349766727e-06, | |
| "loss": 0.1734, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.4610655737704918, | |
| "grad_norm": 0.8895818696352096, | |
| "learning_rate": 2.805028338966137e-06, | |
| "loss": 0.187, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.46209016393442626, | |
| "grad_norm": 0.9082433138393691, | |
| "learning_rate": 2.7970397825710876e-06, | |
| "loss": 0.1746, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.46311475409836067, | |
| "grad_norm": 0.9648849248370039, | |
| "learning_rate": 2.7890481485605898e-06, | |
| "loss": 0.186, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.4641393442622951, | |
| "grad_norm": 0.9581034738728476, | |
| "learning_rate": 2.7810535197355935e-06, | |
| "loss": 0.1797, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.4651639344262295, | |
| "grad_norm": 0.924307135544629, | |
| "learning_rate": 2.7730559789280774e-06, | |
| "loss": 0.1799, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.4661885245901639, | |
| "grad_norm": 0.9355898672825648, | |
| "learning_rate": 2.7650556090001925e-06, | |
| "loss": 0.1811, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.4672131147540984, | |
| "grad_norm": 1.0053628640492882, | |
| "learning_rate": 2.757052492843401e-06, | |
| "loss": 0.1803, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.4682377049180328, | |
| "grad_norm": 1.021231657170419, | |
| "learning_rate": 2.7490467133776206e-06, | |
| "loss": 0.1771, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.4692622950819672, | |
| "grad_norm": 0.9550452571706001, | |
| "learning_rate": 2.7410383535503616e-06, | |
| "loss": 0.1747, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.4702868852459016, | |
| "grad_norm": 1.0623765912935619, | |
| "learning_rate": 2.733027496335868e-06, | |
| "loss": 0.1835, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.4713114754098361, | |
| "grad_norm": 1.1803097574873964, | |
| "learning_rate": 2.7250142247342637e-06, | |
| "loss": 0.1957, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.4723360655737705, | |
| "grad_norm": 1.0049781044714168, | |
| "learning_rate": 2.716998621770683e-06, | |
| "loss": 0.1833, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.4733606557377049, | |
| "grad_norm": 0.8159977604444156, | |
| "learning_rate": 2.7089807704944184e-06, | |
| "loss": 0.1559, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.47438524590163933, | |
| "grad_norm": 0.8582641646416767, | |
| "learning_rate": 2.7009607539780565e-06, | |
| "loss": 0.179, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.47540983606557374, | |
| "grad_norm": 1.0012733851677595, | |
| "learning_rate": 2.6929386553166165e-06, | |
| "loss": 0.1902, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.4764344262295082, | |
| "grad_norm": 0.9554376597557084, | |
| "learning_rate": 2.684914557626692e-06, | |
| "loss": 0.1727, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.4774590163934426, | |
| "grad_norm": 0.935579898170339, | |
| "learning_rate": 2.6768885440455887e-06, | |
| "loss": 0.1874, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.47848360655737704, | |
| "grad_norm": 1.10214638116436, | |
| "learning_rate": 2.6688606977304613e-06, | |
| "loss": 0.1992, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.47950819672131145, | |
| "grad_norm": 0.8453525495551559, | |
| "learning_rate": 2.6608311018574545e-06, | |
| "loss": 0.1696, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.4805327868852459, | |
| "grad_norm": 1.0000368998173677, | |
| "learning_rate": 2.6527998396208398e-06, | |
| "loss": 0.1977, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.48155737704918034, | |
| "grad_norm": 0.8851921410246413, | |
| "learning_rate": 2.6447669942321535e-06, | |
| "loss": 0.17, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.48258196721311475, | |
| "grad_norm": 0.9233005885883367, | |
| "learning_rate": 2.636732648919336e-06, | |
| "loss": 0.1772, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.48360655737704916, | |
| "grad_norm": 0.9059605354667876, | |
| "learning_rate": 2.6286968869258666e-06, | |
| "loss": 0.1828, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.48463114754098363, | |
| "grad_norm": 0.9189723653425443, | |
| "learning_rate": 2.6206597915099038e-06, | |
| "loss": 0.1614, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.48565573770491804, | |
| "grad_norm": 0.8162208190912336, | |
| "learning_rate": 2.6126214459434223e-06, | |
| "loss": 0.1797, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.48668032786885246, | |
| "grad_norm": 0.8699061340655311, | |
| "learning_rate": 2.6045819335113475e-06, | |
| "loss": 0.1816, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.48770491803278687, | |
| "grad_norm": 0.9323694284213979, | |
| "learning_rate": 2.5965413375106965e-06, | |
| "loss": 0.1937, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.4887295081967213, | |
| "grad_norm": 0.8825773586836678, | |
| "learning_rate": 2.588499741249713e-06, | |
| "loss": 0.1704, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.48975409836065575, | |
| "grad_norm": 0.8569206477182163, | |
| "learning_rate": 2.5804572280470027e-06, | |
| "loss": 0.1889, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.49077868852459017, | |
| "grad_norm": 1.0357459294345959, | |
| "learning_rate": 2.572413881230675e-06, | |
| "loss": 0.1713, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.4918032786885246, | |
| "grad_norm": 1.1056807788746599, | |
| "learning_rate": 2.5643697841374722e-06, | |
| "loss": 0.1841, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.492827868852459, | |
| "grad_norm": 1.1124299375974238, | |
| "learning_rate": 2.5563250201119126e-06, | |
| "loss": 0.1839, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.49385245901639346, | |
| "grad_norm": 0.9084669173441584, | |
| "learning_rate": 2.5482796725054247e-06, | |
| "loss": 0.1846, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.4948770491803279, | |
| "grad_norm": 0.920531935865003, | |
| "learning_rate": 2.540233824675484e-06, | |
| "loss": 0.1827, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.4959016393442623, | |
| "grad_norm": 0.9246311670662104, | |
| "learning_rate": 2.5321875599847456e-06, | |
| "loss": 0.1793, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.4969262295081967, | |
| "grad_norm": 0.9789211624771333, | |
| "learning_rate": 2.5241409618001877e-06, | |
| "loss": 0.1817, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.4979508196721312, | |
| "grad_norm": 0.9779342764635619, | |
| "learning_rate": 2.51609411349224e-06, | |
| "loss": 0.1718, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.4989754098360656, | |
| "grad_norm": 0.9817015095605027, | |
| "learning_rate": 2.5080470984339277e-06, | |
| "loss": 0.18, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 0.9721428985868776, | |
| "learning_rate": 2.5e-06, | |
| "loss": 0.1839, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.5010245901639344, | |
| "grad_norm": 0.9237798994826446, | |
| "learning_rate": 2.4919529015660727e-06, | |
| "loss": 0.1868, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.5020491803278688, | |
| "grad_norm": 0.8627499236993943, | |
| "learning_rate": 2.4839058865077607e-06, | |
| "loss": 0.1737, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.5030737704918032, | |
| "grad_norm": 0.9735089251880433, | |
| "learning_rate": 2.475859038199814e-06, | |
| "loss": 0.1772, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 0.5040983606557377, | |
| "grad_norm": 1.043930622355012, | |
| "learning_rate": 2.467812440015255e-06, | |
| "loss": 0.1857, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 0.5051229508196722, | |
| "grad_norm": 1.0654258398150707, | |
| "learning_rate": 2.4597661753245165e-06, | |
| "loss": 0.2065, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 0.5061475409836066, | |
| "grad_norm": 1.024382782872247, | |
| "learning_rate": 2.451720327494575e-06, | |
| "loss": 0.1908, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 0.507172131147541, | |
| "grad_norm": 1.0340460353249987, | |
| "learning_rate": 2.4436749798880878e-06, | |
| "loss": 0.1982, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.5081967213114754, | |
| "grad_norm": 0.9058871877464277, | |
| "learning_rate": 2.435630215862529e-06, | |
| "loss": 0.1659, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 0.5092213114754098, | |
| "grad_norm": 0.9426607593548226, | |
| "learning_rate": 2.4275861187693262e-06, | |
| "loss": 0.1774, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 0.5102459016393442, | |
| "grad_norm": 0.8709460502620212, | |
| "learning_rate": 2.4195427719529977e-06, | |
| "loss": 0.1795, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 0.5112704918032787, | |
| "grad_norm": 1.177696156477345, | |
| "learning_rate": 2.4115002587502882e-06, | |
| "loss": 0.1873, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 0.5122950819672131, | |
| "grad_norm": 1.0161372304609024, | |
| "learning_rate": 2.403458662489304e-06, | |
| "loss": 0.1822, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.5122950819672131, | |
| "eval_loss": 0.16566705703735352, | |
| "eval_runtime": 2.4227, | |
| "eval_samples_per_second": 3.302, | |
| "eval_steps_per_second": 0.826, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.5133196721311475, | |
| "grad_norm": 0.9451496411270415, | |
| "learning_rate": 2.395418066488654e-06, | |
| "loss": 0.1701, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 0.514344262295082, | |
| "grad_norm": 0.9730967807474432, | |
| "learning_rate": 2.387378554056578e-06, | |
| "loss": 0.1889, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 0.5153688524590164, | |
| "grad_norm": 0.9430921961090616, | |
| "learning_rate": 2.3793402084900966e-06, | |
| "loss": 0.1811, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 0.5163934426229508, | |
| "grad_norm": 0.924302334463627, | |
| "learning_rate": 2.371303113074134e-06, | |
| "loss": 0.1818, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 0.5174180327868853, | |
| "grad_norm": 1.0448642929685288, | |
| "learning_rate": 2.3632673510806644e-06, | |
| "loss": 0.2021, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.5184426229508197, | |
| "grad_norm": 1.0148016478474498, | |
| "learning_rate": 2.3552330057678473e-06, | |
| "loss": 0.1612, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 0.5194672131147541, | |
| "grad_norm": 1.0386403226107632, | |
| "learning_rate": 2.347200160379161e-06, | |
| "loss": 0.1773, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 0.5204918032786885, | |
| "grad_norm": 0.877535009049574, | |
| "learning_rate": 2.3391688981425464e-06, | |
| "loss": 0.1738, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 0.5215163934426229, | |
| "grad_norm": 0.9244572124364351, | |
| "learning_rate": 2.33113930226954e-06, | |
| "loss": 0.1793, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 0.5225409836065574, | |
| "grad_norm": 0.9005690931259356, | |
| "learning_rate": 2.3231114559544117e-06, | |
| "loss": 0.1711, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.5235655737704918, | |
| "grad_norm": 1.0645216464577514, | |
| "learning_rate": 2.3150854423733086e-06, | |
| "loss": 0.1772, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 0.5245901639344263, | |
| "grad_norm": 0.9701503703682157, | |
| "learning_rate": 2.3070613446833843e-06, | |
| "loss": 0.1703, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 0.5256147540983607, | |
| "grad_norm": 1.0057771421845738, | |
| "learning_rate": 2.2990392460219443e-06, | |
| "loss": 0.1784, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 0.5266393442622951, | |
| "grad_norm": 0.9043879622116481, | |
| "learning_rate": 2.2910192295055825e-06, | |
| "loss": 0.1786, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 0.5276639344262295, | |
| "grad_norm": 0.9349576592752755, | |
| "learning_rate": 2.2830013782293178e-06, | |
| "loss": 0.1626, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.5286885245901639, | |
| "grad_norm": 1.0581233390679867, | |
| "learning_rate": 2.274985775265737e-06, | |
| "loss": 0.1695, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 0.5297131147540983, | |
| "grad_norm": 1.0207892581506677, | |
| "learning_rate": 2.266972503664133e-06, | |
| "loss": 0.1862, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 0.5307377049180327, | |
| "grad_norm": 0.9663467310372069, | |
| "learning_rate": 2.25896164644964e-06, | |
| "loss": 0.1567, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 0.5317622950819673, | |
| "grad_norm": 1.005885464137267, | |
| "learning_rate": 2.2509532866223798e-06, | |
| "loss": 0.1791, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 0.5327868852459017, | |
| "grad_norm": 1.0718244401274244, | |
| "learning_rate": 2.242947507156599e-06, | |
| "loss": 0.1924, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.5338114754098361, | |
| "grad_norm": 1.07932031397857, | |
| "learning_rate": 2.2349443909998083e-06, | |
| "loss": 0.1892, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 0.5348360655737705, | |
| "grad_norm": 0.8636791735498007, | |
| "learning_rate": 2.2269440210719234e-06, | |
| "loss": 0.1777, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 0.5358606557377049, | |
| "grad_norm": 0.9014517991138811, | |
| "learning_rate": 2.2189464802644074e-06, | |
| "loss": 0.1794, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 0.5368852459016393, | |
| "grad_norm": 1.0133982545357838, | |
| "learning_rate": 2.210951851439411e-06, | |
| "loss": 0.1709, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 0.5379098360655737, | |
| "grad_norm": 0.8965794936311501, | |
| "learning_rate": 2.202960217428913e-06, | |
| "loss": 0.1922, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.5389344262295082, | |
| "grad_norm": 0.9375750081565379, | |
| "learning_rate": 2.194971661033864e-06, | |
| "loss": 0.1861, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 0.5399590163934426, | |
| "grad_norm": 0.8564721106689944, | |
| "learning_rate": 2.1869862650233286e-06, | |
| "loss": 0.1692, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 0.5409836065573771, | |
| "grad_norm": 0.9374103901639406, | |
| "learning_rate": 2.1790041121336223e-06, | |
| "loss": 0.1763, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 0.5420081967213115, | |
| "grad_norm": 0.902819779537131, | |
| "learning_rate": 2.1710252850674664e-06, | |
| "loss": 0.1775, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 0.5430327868852459, | |
| "grad_norm": 0.9752430497362763, | |
| "learning_rate": 2.1630498664931156e-06, | |
| "loss": 0.2009, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.5440573770491803, | |
| "grad_norm": 0.9832519127839873, | |
| "learning_rate": 2.1550779390435147e-06, | |
| "loss": 0.1731, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 0.5450819672131147, | |
| "grad_norm": 0.8976405259546838, | |
| "learning_rate": 2.147109585315435e-06, | |
| "loss": 0.1727, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 0.5461065573770492, | |
| "grad_norm": 0.8995899251015329, | |
| "learning_rate": 2.1391448878686207e-06, | |
| "loss": 0.184, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 0.5471311475409836, | |
| "grad_norm": 0.9474376635515027, | |
| "learning_rate": 2.1311839292249335e-06, | |
| "loss": 0.1817, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 0.548155737704918, | |
| "grad_norm": 1.0526465344644493, | |
| "learning_rate": 2.1232267918674983e-06, | |
| "loss": 0.1797, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.5491803278688525, | |
| "grad_norm": 0.9389334939961796, | |
| "learning_rate": 2.1152735582398453e-06, | |
| "loss": 0.1874, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 0.5502049180327869, | |
| "grad_norm": 0.9286391238290078, | |
| "learning_rate": 2.1073243107450637e-06, | |
| "loss": 0.1778, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 0.5512295081967213, | |
| "grad_norm": 0.9423792786835667, | |
| "learning_rate": 2.0993791317449362e-06, | |
| "loss": 0.1798, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 0.5522540983606558, | |
| "grad_norm": 0.8276634176264499, | |
| "learning_rate": 2.0914381035590962e-06, | |
| "loss": 0.1696, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 0.5532786885245902, | |
| "grad_norm": 0.9331411425509982, | |
| "learning_rate": 2.0835013084641704e-06, | |
| "loss": 0.1835, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.5543032786885246, | |
| "grad_norm": 0.9968625008851902, | |
| "learning_rate": 2.075568828692924e-06, | |
| "loss": 0.1854, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 0.555327868852459, | |
| "grad_norm": 0.9706145474150928, | |
| "learning_rate": 2.067640746433413e-06, | |
| "loss": 0.1854, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 0.5563524590163934, | |
| "grad_norm": 1.0040664260213645, | |
| "learning_rate": 2.0597171438281327e-06, | |
| "loss": 0.1881, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 0.5573770491803278, | |
| "grad_norm": 0.9164859655161115, | |
| "learning_rate": 2.0517981029731613e-06, | |
| "loss": 0.1864, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 0.5584016393442623, | |
| "grad_norm": 0.8087007912604435, | |
| "learning_rate": 2.043883705917316e-06, | |
| "loss": 0.1761, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.5594262295081968, | |
| "grad_norm": 0.9309720011319698, | |
| "learning_rate": 2.0359740346612982e-06, | |
| "loss": 0.1648, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 0.5604508196721312, | |
| "grad_norm": 1.0038867242848444, | |
| "learning_rate": 2.028069171156846e-06, | |
| "loss": 0.1786, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 0.5614754098360656, | |
| "grad_norm": 1.1082299229379549, | |
| "learning_rate": 2.020169197305886e-06, | |
| "loss": 0.1782, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 0.5625, | |
| "grad_norm": 1.0492620007922442, | |
| "learning_rate": 2.01227419495968e-06, | |
| "loss": 0.1778, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 0.5635245901639344, | |
| "grad_norm": 0.85862249874578, | |
| "learning_rate": 2.0043842459179823e-06, | |
| "loss": 0.1776, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.5645491803278688, | |
| "grad_norm": 0.9609917290946641, | |
| "learning_rate": 1.996499431928191e-06, | |
| "loss": 0.1906, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 0.5655737704918032, | |
| "grad_norm": 0.9638723808970817, | |
| "learning_rate": 1.988619834684499e-06, | |
| "loss": 0.1764, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 0.5665983606557377, | |
| "grad_norm": 0.9680309470280125, | |
| "learning_rate": 1.9807455358270477e-06, | |
| "loss": 0.1889, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 0.5676229508196722, | |
| "grad_norm": 0.7774348030375438, | |
| "learning_rate": 1.972876616941084e-06, | |
| "loss": 0.1702, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 0.5686475409836066, | |
| "grad_norm": 0.9230593520946012, | |
| "learning_rate": 1.9650131595561134e-06, | |
| "loss": 0.18, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.569672131147541, | |
| "grad_norm": 0.9703053185220839, | |
| "learning_rate": 1.9571552451450542e-06, | |
| "loss": 0.1753, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 0.5706967213114754, | |
| "grad_norm": 0.8239319690167616, | |
| "learning_rate": 1.949302955123393e-06, | |
| "loss": 0.1562, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 0.5717213114754098, | |
| "grad_norm": 0.8126544559671446, | |
| "learning_rate": 1.941456370848344e-06, | |
| "loss": 0.1656, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 0.5727459016393442, | |
| "grad_norm": 0.8988839292715595, | |
| "learning_rate": 1.933615573618005e-06, | |
| "loss": 0.18, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 0.5737704918032787, | |
| "grad_norm": 0.9621543171516884, | |
| "learning_rate": 1.9257806446705116e-06, | |
| "loss": 0.1887, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.5747950819672131, | |
| "grad_norm": 0.8991360686328593, | |
| "learning_rate": 1.9179516651832014e-06, | |
| "loss": 0.1727, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 0.5758196721311475, | |
| "grad_norm": 0.9280759430716244, | |
| "learning_rate": 1.9101287162717694e-06, | |
| "loss": 0.1578, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 0.576844262295082, | |
| "grad_norm": 0.8115046343882056, | |
| "learning_rate": 1.9023118789894246e-06, | |
| "loss": 0.1708, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 0.5778688524590164, | |
| "grad_norm": 0.8279219158492381, | |
| "learning_rate": 1.8945012343260605e-06, | |
| "loss": 0.1743, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 0.5788934426229508, | |
| "grad_norm": 1.0334682801699855, | |
| "learning_rate": 1.8866968632074028e-06, | |
| "loss": 0.176, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.5799180327868853, | |
| "grad_norm": 1.0159442958438203, | |
| "learning_rate": 1.8788988464941804e-06, | |
| "loss": 0.1894, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 0.5809426229508197, | |
| "grad_norm": 1.0074138818577671, | |
| "learning_rate": 1.871107264981284e-06, | |
| "loss": 0.191, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 0.5819672131147541, | |
| "grad_norm": 0.941991790602351, | |
| "learning_rate": 1.8633221993969285e-06, | |
| "loss": 0.1782, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 0.5829918032786885, | |
| "grad_norm": 0.8928456569011954, | |
| "learning_rate": 1.8555437304018188e-06, | |
| "loss": 0.1711, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 0.5840163934426229, | |
| "grad_norm": 0.804584322595907, | |
| "learning_rate": 1.847771938588313e-06, | |
| "loss": 0.159, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.5850409836065574, | |
| "grad_norm": 0.8113508407782561, | |
| "learning_rate": 1.8400069044795844e-06, | |
| "loss": 0.1793, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 0.5860655737704918, | |
| "grad_norm": 0.9311394253680938, | |
| "learning_rate": 1.8322487085287953e-06, | |
| "loss": 0.1702, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 0.5870901639344263, | |
| "grad_norm": 0.892654395238897, | |
| "learning_rate": 1.824497431118254e-06, | |
| "loss": 0.1736, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 0.5881147540983607, | |
| "grad_norm": 1.0310569889641534, | |
| "learning_rate": 1.8167531525585863e-06, | |
| "loss": 0.1768, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 0.5891393442622951, | |
| "grad_norm": 0.8624017378857061, | |
| "learning_rate": 1.8090159530879058e-06, | |
| "loss": 0.1679, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.5901639344262295, | |
| "grad_norm": 1.012490437849977, | |
| "learning_rate": 1.8012859128709766e-06, | |
| "loss": 0.2042, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 0.5911885245901639, | |
| "grad_norm": 0.9675988730825642, | |
| "learning_rate": 1.793563111998389e-06, | |
| "loss": 0.1744, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 0.5922131147540983, | |
| "grad_norm": 0.9891917842286704, | |
| "learning_rate": 1.7858476304857259e-06, | |
| "loss": 0.1963, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 0.5932377049180327, | |
| "grad_norm": 0.9998287081903463, | |
| "learning_rate": 1.7781395482727335e-06, | |
| "loss": 0.2012, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 0.5942622950819673, | |
| "grad_norm": 0.8600549144098295, | |
| "learning_rate": 1.7704389452224945e-06, | |
| "loss": 0.1767, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.5952868852459017, | |
| "grad_norm": 0.8334960678413419, | |
| "learning_rate": 1.7627459011206033e-06, | |
| "loss": 0.1676, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 0.5963114754098361, | |
| "grad_norm": 0.8935616230539094, | |
| "learning_rate": 1.7550604956743328e-06, | |
| "loss": 0.1841, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 0.5973360655737705, | |
| "grad_norm": 0.7776403885258395, | |
| "learning_rate": 1.7473828085118128e-06, | |
| "loss": 0.1557, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 0.5983606557377049, | |
| "grad_norm": 0.8488628288731226, | |
| "learning_rate": 1.7397129191812058e-06, | |
| "loss": 0.1679, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 0.5993852459016393, | |
| "grad_norm": 0.79834721581537, | |
| "learning_rate": 1.7320509071498797e-06, | |
| "loss": 0.1677, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.6004098360655737, | |
| "grad_norm": 0.9473726586965617, | |
| "learning_rate": 1.7243968518035874e-06, | |
| "loss": 0.1887, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 0.6014344262295082, | |
| "grad_norm": 0.9896869054673711, | |
| "learning_rate": 1.7167508324456416e-06, | |
| "loss": 0.1871, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 0.6024590163934426, | |
| "grad_norm": 1.195141051808304, | |
| "learning_rate": 1.7091129282960966e-06, | |
| "loss": 0.1909, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 0.6034836065573771, | |
| "grad_norm": 0.7855704968125037, | |
| "learning_rate": 1.7014832184909213e-06, | |
| "loss": 0.1634, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 0.6045081967213115, | |
| "grad_norm": 0.9615930579642529, | |
| "learning_rate": 1.6938617820811899e-06, | |
| "loss": 0.1872, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.6055327868852459, | |
| "grad_norm": 0.9055528908053315, | |
| "learning_rate": 1.6862486980322496e-06, | |
| "loss": 0.1781, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 0.6065573770491803, | |
| "grad_norm": 0.9272936732355035, | |
| "learning_rate": 1.6786440452229134e-06, | |
| "loss": 0.1777, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 0.6075819672131147, | |
| "grad_norm": 0.8761377437810832, | |
| "learning_rate": 1.6710479024446375e-06, | |
| "loss": 0.1933, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 0.6086065573770492, | |
| "grad_norm": 0.9058388798522707, | |
| "learning_rate": 1.663460348400705e-06, | |
| "loss": 0.1776, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 0.6096311475409836, | |
| "grad_norm": 0.879770260654968, | |
| "learning_rate": 1.6558814617054131e-06, | |
| "loss": 0.1897, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.610655737704918, | |
| "grad_norm": 0.9246529958294375, | |
| "learning_rate": 1.6483113208832562e-06, | |
| "loss": 0.1646, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 0.6116803278688525, | |
| "grad_norm": 0.8907371357883067, | |
| "learning_rate": 1.6407500043681124e-06, | |
| "loss": 0.1791, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 0.6127049180327869, | |
| "grad_norm": 0.9619582849368665, | |
| "learning_rate": 1.6331975905024341e-06, | |
| "loss": 0.1823, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 0.6137295081967213, | |
| "grad_norm": 1.0930414825085903, | |
| "learning_rate": 1.625654157536431e-06, | |
| "loss": 0.1694, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 0.6147540983606558, | |
| "grad_norm": 0.9283373429450119, | |
| "learning_rate": 1.618119783627263e-06, | |
| "loss": 0.1815, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.6147540983606558, | |
| "eval_loss": 0.16305969655513763, | |
| "eval_runtime": 2.4343, | |
| "eval_samples_per_second": 3.286, | |
| "eval_steps_per_second": 0.822, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.6157786885245902, | |
| "grad_norm": 0.9097473897087295, | |
| "learning_rate": 1.6105945468382308e-06, | |
| "loss": 0.1952, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 0.6168032786885246, | |
| "grad_norm": 0.856909730367002, | |
| "learning_rate": 1.6030785251379635e-06, | |
| "loss": 0.1844, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 0.617827868852459, | |
| "grad_norm": 0.869667682336708, | |
| "learning_rate": 1.5955717963996149e-06, | |
| "loss": 0.1807, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 0.6188524590163934, | |
| "grad_norm": 0.8368467750795624, | |
| "learning_rate": 1.5880744384000544e-06, | |
| "loss": 0.1697, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 0.6198770491803278, | |
| "grad_norm": 0.9508966964045688, | |
| "learning_rate": 1.5805865288190607e-06, | |
| "loss": 0.1829, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.6209016393442623, | |
| "grad_norm": 0.8215737206117173, | |
| "learning_rate": 1.5731081452385188e-06, | |
| "loss": 0.1709, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 0.6219262295081968, | |
| "grad_norm": 0.8148999964638836, | |
| "learning_rate": 1.5656393651416163e-06, | |
| "loss": 0.1723, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 0.6229508196721312, | |
| "grad_norm": 0.9366039655507001, | |
| "learning_rate": 1.558180265912037e-06, | |
| "loss": 0.1671, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 0.6239754098360656, | |
| "grad_norm": 0.9426220272943876, | |
| "learning_rate": 1.5507309248331649e-06, | |
| "loss": 0.1944, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 0.625, | |
| "grad_norm": 0.9795285676961993, | |
| "learning_rate": 1.5432914190872757e-06, | |
| "loss": 0.1778, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.6260245901639344, | |
| "grad_norm": 0.7929663615269116, | |
| "learning_rate": 1.5358618257547465e-06, | |
| "loss": 0.1669, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 0.6270491803278688, | |
| "grad_norm": 0.830499815972745, | |
| "learning_rate": 1.5284422218132495e-06, | |
| "loss": 0.1775, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 0.6280737704918032, | |
| "grad_norm": 0.98277964305133, | |
| "learning_rate": 1.5210326841369577e-06, | |
| "loss": 0.1868, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 0.6290983606557377, | |
| "grad_norm": 0.9149700203615054, | |
| "learning_rate": 1.5136332894957484e-06, | |
| "loss": 0.1945, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 0.6301229508196722, | |
| "grad_norm": 0.982431973331764, | |
| "learning_rate": 1.5062441145544066e-06, | |
| "loss": 0.1827, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.6311475409836066, | |
| "grad_norm": 0.8612211416224701, | |
| "learning_rate": 1.4988652358718336e-06, | |
| "loss": 0.1742, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 0.632172131147541, | |
| "grad_norm": 0.950045950980689, | |
| "learning_rate": 1.49149672990025e-06, | |
| "loss": 0.1932, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 0.6331967213114754, | |
| "grad_norm": 0.8889159352294493, | |
| "learning_rate": 1.4841386729844043e-06, | |
| "loss": 0.1832, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 0.6342213114754098, | |
| "grad_norm": 0.8137207716017777, | |
| "learning_rate": 1.4767911413607843e-06, | |
| "loss": 0.1588, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 0.6352459016393442, | |
| "grad_norm": 0.9618098881106849, | |
| "learning_rate": 1.4694542111568261e-06, | |
| "loss": 0.1941, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.6362704918032787, | |
| "grad_norm": 0.8904413275358246, | |
| "learning_rate": 1.462127958390123e-06, | |
| "loss": 0.1778, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 0.6372950819672131, | |
| "grad_norm": 0.8287392813137765, | |
| "learning_rate": 1.4548124589676417e-06, | |
| "loss": 0.1711, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 0.6383196721311475, | |
| "grad_norm": 0.8802611594605634, | |
| "learning_rate": 1.447507788684933e-06, | |
| "loss": 0.1893, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 0.639344262295082, | |
| "grad_norm": 0.8895516234505085, | |
| "learning_rate": 1.4402140232253486e-06, | |
| "loss": 0.1749, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 0.6403688524590164, | |
| "grad_norm": 0.8753884794940977, | |
| "learning_rate": 1.4329312381592544e-06, | |
| "loss": 0.161, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.6413934426229508, | |
| "grad_norm": 0.9057408033520777, | |
| "learning_rate": 1.4256595089432502e-06, | |
| "loss": 0.1758, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 0.6424180327868853, | |
| "grad_norm": 0.9403525056772438, | |
| "learning_rate": 1.4183989109193851e-06, | |
| "loss": 0.1864, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 0.6434426229508197, | |
| "grad_norm": 0.8437799400052396, | |
| "learning_rate": 1.411149519314381e-06, | |
| "loss": 0.1789, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 0.6444672131147541, | |
| "grad_norm": 0.8842877428855487, | |
| "learning_rate": 1.4039114092388487e-06, | |
| "loss": 0.1843, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 0.6454918032786885, | |
| "grad_norm": 0.7737086343332978, | |
| "learning_rate": 1.3966846556865105e-06, | |
| "loss": 0.1678, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.6465163934426229, | |
| "grad_norm": 0.9257633482999843, | |
| "learning_rate": 1.3894693335334264e-06, | |
| "loss": 0.1766, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 0.6475409836065574, | |
| "grad_norm": 0.8772319407484099, | |
| "learning_rate": 1.3822655175372148e-06, | |
| "loss": 0.1751, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 0.6485655737704918, | |
| "grad_norm": 0.882895498381621, | |
| "learning_rate": 1.3750732823362823e-06, | |
| "loss": 0.1783, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 0.6495901639344263, | |
| "grad_norm": 0.9440880256394637, | |
| "learning_rate": 1.3678927024490446e-06, | |
| "loss": 0.1808, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 0.6506147540983607, | |
| "grad_norm": 0.8941564656302972, | |
| "learning_rate": 1.360723852273156e-06, | |
| "loss": 0.1693, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.6516393442622951, | |
| "grad_norm": 0.8346406796780561, | |
| "learning_rate": 1.3535668060847428e-06, | |
| "loss": 0.1547, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 0.6526639344262295, | |
| "grad_norm": 0.9872743628910813, | |
| "learning_rate": 1.3464216380376293e-06, | |
| "loss": 0.1818, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 0.6536885245901639, | |
| "grad_norm": 0.8631492704444791, | |
| "learning_rate": 1.3392884221625718e-06, | |
| "loss": 0.1809, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 0.6547131147540983, | |
| "grad_norm": 0.9081316513965528, | |
| "learning_rate": 1.3321672323664892e-06, | |
| "loss": 0.1818, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 0.6557377049180327, | |
| "grad_norm": 0.9124865738028407, | |
| "learning_rate": 1.3250581424317012e-06, | |
| "loss": 0.1797, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.6567622950819673, | |
| "grad_norm": 1.1216056284827836, | |
| "learning_rate": 1.3179612260151565e-06, | |
| "loss": 0.185, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 0.6577868852459017, | |
| "grad_norm": 0.8965230801229277, | |
| "learning_rate": 1.3108765566476805e-06, | |
| "loss": 0.1785, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 0.6588114754098361, | |
| "grad_norm": 0.9095275871993023, | |
| "learning_rate": 1.303804207733205e-06, | |
| "loss": 0.1777, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 0.6598360655737705, | |
| "grad_norm": 0.8816668832976083, | |
| "learning_rate": 1.2967442525480092e-06, | |
| "loss": 0.1856, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 0.6608606557377049, | |
| "grad_norm": 0.8830622817915152, | |
| "learning_rate": 1.2896967642399632e-06, | |
| "loss": 0.1926, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.6618852459016393, | |
| "grad_norm": 0.8797068199992459, | |
| "learning_rate": 1.282661815827766e-06, | |
| "loss": 0.1687, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 0.6629098360655737, | |
| "grad_norm": 0.9691572857301871, | |
| "learning_rate": 1.275639480200193e-06, | |
| "loss": 0.1919, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 0.6639344262295082, | |
| "grad_norm": 0.8956590523664938, | |
| "learning_rate": 1.2686298301153394e-06, | |
| "loss": 0.1766, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 0.6649590163934426, | |
| "grad_norm": 0.9449619488255843, | |
| "learning_rate": 1.2616329381998654e-06, | |
| "loss": 0.1849, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 0.6659836065573771, | |
| "grad_norm": 0.8391692251323416, | |
| "learning_rate": 1.2546488769482444e-06, | |
| "loss": 0.1608, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.6670081967213115, | |
| "grad_norm": 0.8610841181738311, | |
| "learning_rate": 1.2476777187220117e-06, | |
| "loss": 0.1722, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 0.6680327868852459, | |
| "grad_norm": 0.8687012620001858, | |
| "learning_rate": 1.2407195357490163e-06, | |
| "loss": 0.1721, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 0.6690573770491803, | |
| "grad_norm": 0.8962176284980125, | |
| "learning_rate": 1.2337744001226693e-06, | |
| "loss": 0.1855, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 0.6700819672131147, | |
| "grad_norm": 0.8537639569558746, | |
| "learning_rate": 1.2268423838011997e-06, | |
| "loss": 0.1744, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 0.6711065573770492, | |
| "grad_norm": 0.8784248814379644, | |
| "learning_rate": 1.2199235586069083e-06, | |
| "loss": 0.1653, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.6721311475409836, | |
| "grad_norm": 0.872617656159597, | |
| "learning_rate": 1.213017996225424e-06, | |
| "loss": 0.1656, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 0.673155737704918, | |
| "grad_norm": 0.9078493035536171, | |
| "learning_rate": 1.206125768204957e-06, | |
| "loss": 0.1723, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 0.6741803278688525, | |
| "grad_norm": 0.9810182910060384, | |
| "learning_rate": 1.1992469459555635e-06, | |
| "loss": 0.1845, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 0.6752049180327869, | |
| "grad_norm": 0.8944763115101391, | |
| "learning_rate": 1.1923816007484044e-06, | |
| "loss": 0.1756, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 0.6762295081967213, | |
| "grad_norm": 0.8681594229399818, | |
| "learning_rate": 1.1855298037150022e-06, | |
| "loss": 0.185, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.6772540983606558, | |
| "grad_norm": 0.8822745122080423, | |
| "learning_rate": 1.1786916258465114e-06, | |
| "loss": 0.1799, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 0.6782786885245902, | |
| "grad_norm": 0.8514766627297857, | |
| "learning_rate": 1.1718671379929736e-06, | |
| "loss": 0.1762, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 0.6793032786885246, | |
| "grad_norm": 0.9596304132372674, | |
| "learning_rate": 1.1650564108625933e-06, | |
| "loss": 0.1874, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 0.680327868852459, | |
| "grad_norm": 0.8433274844215597, | |
| "learning_rate": 1.158259515020999e-06, | |
| "loss": 0.1698, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 0.6813524590163934, | |
| "grad_norm": 0.8499815595439452, | |
| "learning_rate": 1.1514765208905146e-06, | |
| "loss": 0.1836, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.6823770491803278, | |
| "grad_norm": 0.9013196425252755, | |
| "learning_rate": 1.144707498749428e-06, | |
| "loss": 0.1638, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 0.6834016393442623, | |
| "grad_norm": 0.9284561025002607, | |
| "learning_rate": 1.137952518731265e-06, | |
| "loss": 0.1777, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 0.6844262295081968, | |
| "grad_norm": 0.8633106451847434, | |
| "learning_rate": 1.1312116508240612e-06, | |
| "loss": 0.1673, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 0.6854508196721312, | |
| "grad_norm": 1.0315624806440022, | |
| "learning_rate": 1.1244849648696365e-06, | |
| "loss": 0.1841, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 0.6864754098360656, | |
| "grad_norm": 0.8526642948788298, | |
| "learning_rate": 1.117772530562874e-06, | |
| "loss": 0.1692, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.6875, | |
| "grad_norm": 0.9113560463996885, | |
| "learning_rate": 1.1110744174509952e-06, | |
| "loss": 0.1894, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 0.6885245901639344, | |
| "grad_norm": 0.928814220877697, | |
| "learning_rate": 1.1043906949328387e-06, | |
| "loss": 0.1604, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 0.6895491803278688, | |
| "grad_norm": 0.8167375399582568, | |
| "learning_rate": 1.0977214322581457e-06, | |
| "loss": 0.1599, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 0.6905737704918032, | |
| "grad_norm": 0.7622941249626587, | |
| "learning_rate": 1.0910666985268375e-06, | |
| "loss": 0.1556, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 0.6915983606557377, | |
| "grad_norm": 0.8569733864749541, | |
| "learning_rate": 1.0844265626883034e-06, | |
| "loss": 0.1788, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.6926229508196722, | |
| "grad_norm": 0.8395365175127185, | |
| "learning_rate": 1.0778010935406826e-06, | |
| "loss": 0.1721, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 0.6936475409836066, | |
| "grad_norm": 1.1130088908062106, | |
| "learning_rate": 1.071190359730156e-06, | |
| "loss": 0.1896, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 0.694672131147541, | |
| "grad_norm": 0.9430245792619598, | |
| "learning_rate": 1.0645944297502295e-06, | |
| "loss": 0.1852, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 0.6956967213114754, | |
| "grad_norm": 0.9339986974793401, | |
| "learning_rate": 1.0580133719410292e-06, | |
| "loss": 0.1727, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 0.6967213114754098, | |
| "grad_norm": 0.9547204803402818, | |
| "learning_rate": 1.051447254488591e-06, | |
| "loss": 0.1775, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.6977459016393442, | |
| "grad_norm": 0.9782457613190049, | |
| "learning_rate": 1.0448961454241535e-06, | |
| "loss": 0.1861, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 0.6987704918032787, | |
| "grad_norm": 0.8844574130716941, | |
| "learning_rate": 1.0383601126234557e-06, | |
| "loss": 0.1764, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 0.6997950819672131, | |
| "grad_norm": 0.874666835242442, | |
| "learning_rate": 1.0318392238060299e-06, | |
| "loss": 0.1737, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 0.7008196721311475, | |
| "grad_norm": 0.834655698935124, | |
| "learning_rate": 1.0253335465345037e-06, | |
| "loss": 0.1617, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 0.701844262295082, | |
| "grad_norm": 0.9544303664483919, | |
| "learning_rate": 1.0188431482138984e-06, | |
| "loss": 0.1783, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.7028688524590164, | |
| "grad_norm": 0.9042437156934395, | |
| "learning_rate": 1.0123680960909319e-06, | |
| "loss": 0.179, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 0.7038934426229508, | |
| "grad_norm": 0.8916993978866337, | |
| "learning_rate": 1.0059084572533199e-06, | |
| "loss": 0.1712, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 0.7049180327868853, | |
| "grad_norm": 0.9539109119183387, | |
| "learning_rate": 9.994642986290797e-07, | |
| "loss": 0.1667, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 0.7059426229508197, | |
| "grad_norm": 0.8930383055580376, | |
| "learning_rate": 9.930356869858423e-07, | |
| "loss": 0.1806, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 0.7069672131147541, | |
| "grad_norm": 0.8599902881552571, | |
| "learning_rate": 9.866226889301552e-07, | |
| "loss": 0.1796, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.7079918032786885, | |
| "grad_norm": 0.8748204361284472, | |
| "learning_rate": 9.80225370906795e-07, | |
| "loss": 0.1837, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 0.7090163934426229, | |
| "grad_norm": 0.8944626878488349, | |
| "learning_rate": 9.73843799198077e-07, | |
| "loss": 0.1665, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 0.7100409836065574, | |
| "grad_norm": 0.9912927236792473, | |
| "learning_rate": 9.674780399231712e-07, | |
| "loss": 0.1751, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 0.7110655737704918, | |
| "grad_norm": 0.8522772968569764, | |
| "learning_rate": 9.61128159037415e-07, | |
| "loss": 0.1635, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 0.7120901639344263, | |
| "grad_norm": 0.7659248749856847, | |
| "learning_rate": 9.547942223316305e-07, | |
| "loss": 0.1624, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.7131147540983607, | |
| "grad_norm": 0.8734205934375859, | |
| "learning_rate": 9.48476295431443e-07, | |
| "loss": 0.166, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 0.7141393442622951, | |
| "grad_norm": 0.8259155042466008, | |
| "learning_rate": 9.421744437966007e-07, | |
| "loss": 0.1727, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 0.7151639344262295, | |
| "grad_norm": 0.9410108216971232, | |
| "learning_rate": 9.358887327202981e-07, | |
| "loss": 0.1862, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 0.7161885245901639, | |
| "grad_norm": 1.0608297585231523, | |
| "learning_rate": 9.296192273284949e-07, | |
| "loss": 0.1886, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 0.7172131147540983, | |
| "grad_norm": 1.0432306492922792, | |
| "learning_rate": 9.233659925792476e-07, | |
| "loss": 0.1823, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.7172131147540983, | |
| "eval_loss": 0.16157369315624237, | |
| "eval_runtime": 2.4286, | |
| "eval_samples_per_second": 3.294, | |
| "eval_steps_per_second": 0.824, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.7182377049180327, | |
| "grad_norm": 0.8674676112491568, | |
| "learning_rate": 9.171290932620324e-07, | |
| "loss": 0.1832, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 0.7192622950819673, | |
| "grad_norm": 0.9239427141087063, | |
| "learning_rate": 9.109085939970733e-07, | |
| "loss": 0.1809, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 0.7202868852459017, | |
| "grad_norm": 0.8505185364810185, | |
| "learning_rate": 9.047045592346784e-07, | |
| "loss": 0.1696, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 0.7213114754098361, | |
| "grad_norm": 0.8579458941719454, | |
| "learning_rate": 8.985170532545623e-07, | |
| "loss": 0.1682, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 0.7223360655737705, | |
| "grad_norm": 0.8717212775928092, | |
| "learning_rate": 8.923461401651892e-07, | |
| "loss": 0.1852, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.7233606557377049, | |
| "grad_norm": 0.9045540981887317, | |
| "learning_rate": 8.861918839031042e-07, | |
| "loss": 0.1773, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 0.7243852459016393, | |
| "grad_norm": 0.8748793484784375, | |
| "learning_rate": 8.800543482322718e-07, | |
| "loss": 0.1831, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 0.7254098360655737, | |
| "grad_norm": 0.8517918217950547, | |
| "learning_rate": 8.739335967434151e-07, | |
| "loss": 0.1692, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 0.7264344262295082, | |
| "grad_norm": 1.0386496884010104, | |
| "learning_rate": 8.678296928533581e-07, | |
| "loss": 0.1967, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 0.7274590163934426, | |
| "grad_norm": 0.7795921315423667, | |
| "learning_rate": 8.617426998043652e-07, | |
| "loss": 0.1656, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.7284836065573771, | |
| "grad_norm": 0.9071735024261443, | |
| "learning_rate": 8.556726806634894e-07, | |
| "loss": 0.1736, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 0.7295081967213115, | |
| "grad_norm": 1.0028464790927805, | |
| "learning_rate": 8.496196983219205e-07, | |
| "loss": 0.1772, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 0.7305327868852459, | |
| "grad_norm": 0.7736961431653031, | |
| "learning_rate": 8.435838154943274e-07, | |
| "loss": 0.1628, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 0.7315573770491803, | |
| "grad_norm": 0.9750400994845899, | |
| "learning_rate": 8.375650947182137e-07, | |
| "loss": 0.1864, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 0.7325819672131147, | |
| "grad_norm": 0.7933059372186685, | |
| "learning_rate": 8.315635983532658e-07, | |
| "loss": 0.1765, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.7336065573770492, | |
| "grad_norm": 0.9061495534154601, | |
| "learning_rate": 8.255793885807104e-07, | |
| "loss": 0.1772, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 0.7346311475409836, | |
| "grad_norm": 1.0410624587803188, | |
| "learning_rate": 8.196125274026684e-07, | |
| "loss": 0.1918, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 0.735655737704918, | |
| "grad_norm": 0.8578985738857444, | |
| "learning_rate": 8.136630766415121e-07, | |
| "loss": 0.1816, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 0.7366803278688525, | |
| "grad_norm": 0.915135144786817, | |
| "learning_rate": 8.077310979392261e-07, | |
| "loss": 0.1845, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 0.7377049180327869, | |
| "grad_norm": 0.7720336948436679, | |
| "learning_rate": 8.018166527567672e-07, | |
| "loss": 0.1684, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.7387295081967213, | |
| "grad_norm": 0.9485520037973624, | |
| "learning_rate": 7.959198023734286e-07, | |
| "loss": 0.1785, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 0.7397540983606558, | |
| "grad_norm": 0.8288429385023102, | |
| "learning_rate": 7.900406078862042e-07, | |
| "loss": 0.1589, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 0.7407786885245902, | |
| "grad_norm": 0.8737413203133675, | |
| "learning_rate": 7.841791302091562e-07, | |
| "loss": 0.1838, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 0.7418032786885246, | |
| "grad_norm": 0.8148311534840044, | |
| "learning_rate": 7.783354300727835e-07, | |
| "loss": 0.184, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 0.742827868852459, | |
| "grad_norm": 0.9043365593049241, | |
| "learning_rate": 7.725095680233941e-07, | |
| "loss": 0.1817, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.7438524590163934, | |
| "grad_norm": 0.8180612208062575, | |
| "learning_rate": 7.667016044224735e-07, | |
| "loss": 0.165, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 0.7448770491803278, | |
| "grad_norm": 0.9445429998654891, | |
| "learning_rate": 7.609115994460647e-07, | |
| "loss": 0.1958, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 0.7459016393442623, | |
| "grad_norm": 0.7878590519403152, | |
| "learning_rate": 7.551396130841406e-07, | |
| "loss": 0.1734, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 0.7469262295081968, | |
| "grad_norm": 0.7827655888342286, | |
| "learning_rate": 7.493857051399878e-07, | |
| "loss": 0.1473, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 0.7479508196721312, | |
| "grad_norm": 0.9078215301174877, | |
| "learning_rate": 7.436499352295775e-07, | |
| "loss": 0.1806, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.7489754098360656, | |
| "grad_norm": 0.83324949303953, | |
| "learning_rate": 7.379323627809576e-07, | |
| "loss": 0.1879, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "grad_norm": 0.8053183814952587, | |
| "learning_rate": 7.322330470336314e-07, | |
| "loss": 0.1817, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 0.7510245901639344, | |
| "grad_norm": 0.9138549819755927, | |
| "learning_rate": 7.265520470379461e-07, | |
| "loss": 0.1948, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 0.7520491803278688, | |
| "grad_norm": 0.9593470188556289, | |
| "learning_rate": 7.208894216544798e-07, | |
| "loss": 0.1848, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 0.7530737704918032, | |
| "grad_norm": 0.8933341129460916, | |
| "learning_rate": 7.152452295534324e-07, | |
| "loss": 0.1639, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.7540983606557377, | |
| "grad_norm": 0.8906399698287478, | |
| "learning_rate": 7.096195292140173e-07, | |
| "loss": 0.1768, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 0.7551229508196722, | |
| "grad_norm": 0.9555430084136501, | |
| "learning_rate": 7.040123789238542e-07, | |
| "loss": 0.1802, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 0.7561475409836066, | |
| "grad_norm": 1.0718660293280495, | |
| "learning_rate": 6.984238367783696e-07, | |
| "loss": 0.1676, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 0.757172131147541, | |
| "grad_norm": 0.8278322633310757, | |
| "learning_rate": 6.928539606801895e-07, | |
| "loss": 0.172, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 0.7581967213114754, | |
| "grad_norm": 0.8635871798781105, | |
| "learning_rate": 6.873028083385436e-07, | |
| "loss": 0.1753, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.7592213114754098, | |
| "grad_norm": 0.9307514064537442, | |
| "learning_rate": 6.817704372686626e-07, | |
| "loss": 0.1828, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 0.7602459016393442, | |
| "grad_norm": 0.8803324461272687, | |
| "learning_rate": 6.762569047911885e-07, | |
| "loss": 0.1758, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 0.7612704918032787, | |
| "grad_norm": 0.8244105148594775, | |
| "learning_rate": 6.707622680315759e-07, | |
| "loss": 0.1664, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 0.7622950819672131, | |
| "grad_norm": 0.8556179505279251, | |
| "learning_rate": 6.652865839195025e-07, | |
| "loss": 0.1777, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 0.7633196721311475, | |
| "grad_norm": 0.8098394712716966, | |
| "learning_rate": 6.598299091882778e-07, | |
| "loss": 0.1661, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.764344262295082, | |
| "grad_norm": 0.8331314757879953, | |
| "learning_rate": 6.543923003742567e-07, | |
| "loss": 0.1675, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 0.7653688524590164, | |
| "grad_norm": 0.8227872951016376, | |
| "learning_rate": 6.489738138162524e-07, | |
| "loss": 0.1656, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 0.7663934426229508, | |
| "grad_norm": 0.9099040665585931, | |
| "learning_rate": 6.435745056549533e-07, | |
| "loss": 0.187, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 0.7674180327868853, | |
| "grad_norm": 0.9158613102533624, | |
| "learning_rate": 6.381944318323419e-07, | |
| "loss": 0.1821, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 0.7684426229508197, | |
| "grad_norm": 0.9282338779045948, | |
| "learning_rate": 6.328336480911143e-07, | |
| "loss": 0.1898, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.7694672131147541, | |
| "grad_norm": 0.7809191524430478, | |
| "learning_rate": 6.274922099741032e-07, | |
| "loss": 0.1747, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 0.7704918032786885, | |
| "grad_norm": 0.8816391545215968, | |
| "learning_rate": 6.221701728237008e-07, | |
| "loss": 0.1638, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 0.7715163934426229, | |
| "grad_norm": 0.9069804116021649, | |
| "learning_rate": 6.168675917812886e-07, | |
| "loss": 0.1929, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 0.7725409836065574, | |
| "grad_norm": 0.9388801090378771, | |
| "learning_rate": 6.115845217866625e-07, | |
| "loss": 0.1776, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 0.7735655737704918, | |
| "grad_norm": 0.766027616954334, | |
| "learning_rate": 6.063210175774683e-07, | |
| "loss": 0.1599, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.7745901639344263, | |
| "grad_norm": 0.8994678729277007, | |
| "learning_rate": 6.010771336886292e-07, | |
| "loss": 0.1882, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 0.7756147540983607, | |
| "grad_norm": 1.104523292354414, | |
| "learning_rate": 5.958529244517827e-07, | |
| "loss": 0.188, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 0.7766393442622951, | |
| "grad_norm": 0.7960860880207639, | |
| "learning_rate": 5.906484439947194e-07, | |
| "loss": 0.1752, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 0.7776639344262295, | |
| "grad_norm": 0.909706846863123, | |
| "learning_rate": 5.854637462408205e-07, | |
| "loss": 0.1784, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 0.7786885245901639, | |
| "grad_norm": 0.8572663798151267, | |
| "learning_rate": 5.802988849085001e-07, | |
| "loss": 0.1677, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.7797131147540983, | |
| "grad_norm": 0.7831676061987799, | |
| "learning_rate": 5.751539135106471e-07, | |
| "loss": 0.16, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 0.7807377049180327, | |
| "grad_norm": 0.9421409735863443, | |
| "learning_rate": 5.700288853540733e-07, | |
| "loss": 0.1645, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 0.7817622950819673, | |
| "grad_norm": 0.9312561795360044, | |
| "learning_rate": 5.649238535389562e-07, | |
| "loss": 0.1716, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 0.7827868852459017, | |
| "grad_norm": 0.8200610260201368, | |
| "learning_rate": 5.598388709582963e-07, | |
| "loss": 0.1767, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 0.7838114754098361, | |
| "grad_norm": 0.8571398989546529, | |
| "learning_rate": 5.547739902973625e-07, | |
| "loss": 0.1745, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.7848360655737705, | |
| "grad_norm": 0.8905064809374793, | |
| "learning_rate": 5.497292640331489e-07, | |
| "loss": 0.1715, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 0.7858606557377049, | |
| "grad_norm": 0.84603287074026, | |
| "learning_rate": 5.447047444338315e-07, | |
| "loss": 0.1911, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 0.7868852459016393, | |
| "grad_norm": 0.9780582428627603, | |
| "learning_rate": 5.397004835582242e-07, | |
| "loss": 0.1705, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 0.7879098360655737, | |
| "grad_norm": 0.7985134330828078, | |
| "learning_rate": 5.347165332552437e-07, | |
| "loss": 0.1596, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 0.7889344262295082, | |
| "grad_norm": 0.8571708759121525, | |
| "learning_rate": 5.297529451633679e-07, | |
| "loss": 0.1907, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.7899590163934426, | |
| "grad_norm": 0.9419392116531166, | |
| "learning_rate": 5.248097707101035e-07, | |
| "loss": 0.1644, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 0.7909836065573771, | |
| "grad_norm": 0.8889228975851249, | |
| "learning_rate": 5.198870611114529e-07, | |
| "loss": 0.178, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 0.7920081967213115, | |
| "grad_norm": 0.9739988922194254, | |
| "learning_rate": 5.149848673713822e-07, | |
| "loss": 0.1722, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 0.7930327868852459, | |
| "grad_norm": 0.9845363897922256, | |
| "learning_rate": 5.101032402812941e-07, | |
| "loss": 0.1889, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 0.7940573770491803, | |
| "grad_norm": 0.8863530203595568, | |
| "learning_rate": 5.052422304195013e-07, | |
| "loss": 0.1716, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.7950819672131147, | |
| "grad_norm": 0.8454402139174103, | |
| "learning_rate": 5.004018881507016e-07, | |
| "loss": 0.1725, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 0.7961065573770492, | |
| "grad_norm": 0.9079375979904382, | |
| "learning_rate": 4.955822636254578e-07, | |
| "loss": 0.1694, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 0.7971311475409836, | |
| "grad_norm": 0.7912467614174121, | |
| "learning_rate": 4.907834067796774e-07, | |
| "loss": 0.1883, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 0.798155737704918, | |
| "grad_norm": 0.8736315278822245, | |
| "learning_rate": 4.860053673340928e-07, | |
| "loss": 0.1766, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 0.7991803278688525, | |
| "grad_norm": 0.8350962766872071, | |
| "learning_rate": 4.812481947937498e-07, | |
| "loss": 0.1807, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.8002049180327869, | |
| "grad_norm": 0.8589823969250466, | |
| "learning_rate": 4.7651193844749424e-07, | |
| "loss": 0.1743, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 0.8012295081967213, | |
| "grad_norm": 0.8796510106130906, | |
| "learning_rate": 4.7179664736745845e-07, | |
| "loss": 0.1736, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 0.8022540983606558, | |
| "grad_norm": 0.8921047560422314, | |
| "learning_rate": 4.671023704085559e-07, | |
| "loss": 0.1843, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 0.8032786885245902, | |
| "grad_norm": 0.8941424804504828, | |
| "learning_rate": 4.624291562079719e-07, | |
| "loss": 0.1778, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 0.8043032786885246, | |
| "grad_norm": 0.9232168798124224, | |
| "learning_rate": 4.5777705318466315e-07, | |
| "loss": 0.1792, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.805327868852459, | |
| "grad_norm": 0.9083324553734493, | |
| "learning_rate": 4.5314610953885396e-07, | |
| "loss": 0.1695, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 0.8063524590163934, | |
| "grad_norm": 0.9393585369942817, | |
| "learning_rate": 4.4853637325153717e-07, | |
| "loss": 0.181, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 0.8073770491803278, | |
| "grad_norm": 0.8464516973356035, | |
| "learning_rate": 4.439478920839771e-07, | |
| "loss": 0.1646, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 0.8084016393442623, | |
| "grad_norm": 0.9021074755055684, | |
| "learning_rate": 4.3938071357721547e-07, | |
| "loss": 0.193, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 0.8094262295081968, | |
| "grad_norm": 0.9363041372304467, | |
| "learning_rate": 4.3483488505157713e-07, | |
| "loss": 0.1679, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.8104508196721312, | |
| "grad_norm": 0.9990594152091536, | |
| "learning_rate": 4.3031045360618114e-07, | |
| "loss": 0.1931, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 0.8114754098360656, | |
| "grad_norm": 0.9254143463250761, | |
| "learning_rate": 4.2580746611845273e-07, | |
| "loss": 0.19, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 0.8125, | |
| "grad_norm": 0.8373159253275513, | |
| "learning_rate": 4.2132596924363666e-07, | |
| "loss": 0.17, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 0.8135245901639344, | |
| "grad_norm": 1.0396337954786299, | |
| "learning_rate": 4.168660094143159e-07, | |
| "loss": 0.1733, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 0.8145491803278688, | |
| "grad_norm": 0.7919076029426301, | |
| "learning_rate": 4.1242763283992627e-07, | |
| "loss": 0.1578, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.8155737704918032, | |
| "grad_norm": 0.8625373994303565, | |
| "learning_rate": 4.0801088550628307e-07, | |
| "loss": 0.18, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 0.8165983606557377, | |
| "grad_norm": 0.8463313462166929, | |
| "learning_rate": 4.0361581317510084e-07, | |
| "loss": 0.169, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 0.8176229508196722, | |
| "grad_norm": 0.9757764168062646, | |
| "learning_rate": 3.9924246138352106e-07, | |
| "loss": 0.1777, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 0.8186475409836066, | |
| "grad_norm": 0.917111984183459, | |
| "learning_rate": 3.9489087544363923e-07, | |
| "loss": 0.1748, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 0.819672131147541, | |
| "grad_norm": 0.904344369555692, | |
| "learning_rate": 3.9056110044203594e-07, | |
| "loss": 0.1693, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.819672131147541, | |
| "eval_loss": 0.16032838821411133, | |
| "eval_runtime": 2.4338, | |
| "eval_samples_per_second": 3.287, | |
| "eval_steps_per_second": 0.822, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.8206967213114754, | |
| "grad_norm": 0.841719890870559, | |
| "learning_rate": 3.8625318123931e-07, | |
| "loss": 0.1684, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 0.8217213114754098, | |
| "grad_norm": 0.8069861952716675, | |
| "learning_rate": 3.819671624696128e-07, | |
| "loss": 0.1574, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 0.8227459016393442, | |
| "grad_norm": 0.8755474950349729, | |
| "learning_rate": 3.7770308854018667e-07, | |
| "loss": 0.1666, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 0.8237704918032787, | |
| "grad_norm": 0.950032710955299, | |
| "learning_rate": 3.734610036309047e-07, | |
| "loss": 0.1883, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 0.8247950819672131, | |
| "grad_norm": 0.9097280481478887, | |
| "learning_rate": 3.6924095169381167e-07, | |
| "loss": 0.1715, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.8258196721311475, | |
| "grad_norm": 0.9617644609661792, | |
| "learning_rate": 3.650429764526711e-07, | |
| "loss": 0.1865, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 0.826844262295082, | |
| "grad_norm": 0.8997145966650997, | |
| "learning_rate": 3.6086712140250943e-07, | |
| "loss": 0.1699, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 0.8278688524590164, | |
| "grad_norm": 0.8718221111724076, | |
| "learning_rate": 3.56713429809169e-07, | |
| "loss": 0.1626, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 0.8288934426229508, | |
| "grad_norm": 0.8538517303467508, | |
| "learning_rate": 3.525819447088563e-07, | |
| "loss": 0.1818, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 0.8299180327868853, | |
| "grad_norm": 0.8363207788697908, | |
| "learning_rate": 3.4847270890769615e-07, | |
| "loss": 0.1654, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.8309426229508197, | |
| "grad_norm": 0.9489546178361499, | |
| "learning_rate": 3.443857649812915e-07, | |
| "loss": 0.1801, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 0.8319672131147541, | |
| "grad_norm": 0.9158711673222496, | |
| "learning_rate": 3.403211552742788e-07, | |
| "loss": 0.1905, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 0.8329918032786885, | |
| "grad_norm": 0.9536049470751078, | |
| "learning_rate": 3.362789218998919e-07, | |
| "loss": 0.1808, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 0.8340163934426229, | |
| "grad_norm": 0.9173275936587199, | |
| "learning_rate": 3.3225910673952337e-07, | |
| "loss": 0.1983, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 0.8350409836065574, | |
| "grad_norm": 0.8860707148167601, | |
| "learning_rate": 3.282617514422923e-07, | |
| "loss": 0.173, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.8360655737704918, | |
| "grad_norm": 0.9198115990841506, | |
| "learning_rate": 3.2428689742461187e-07, | |
| "loss": 0.1771, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 0.8370901639344263, | |
| "grad_norm": 0.8734530693721761, | |
| "learning_rate": 3.2033458586976124e-07, | |
| "loss": 0.1809, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 0.8381147540983607, | |
| "grad_norm": 1.0047782397720795, | |
| "learning_rate": 3.164048577274573e-07, | |
| "loss": 0.1798, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 0.8391393442622951, | |
| "grad_norm": 0.8294783569213399, | |
| "learning_rate": 3.124977537134313e-07, | |
| "loss": 0.1607, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 0.8401639344262295, | |
| "grad_norm": 0.8870940707908037, | |
| "learning_rate": 3.086133143090081e-07, | |
| "loss": 0.1834, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.8411885245901639, | |
| "grad_norm": 0.8482897896686905, | |
| "learning_rate": 3.047515797606837e-07, | |
| "loss": 0.1809, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 0.8422131147540983, | |
| "grad_norm": 0.9160715578216597, | |
| "learning_rate": 3.009125900797116e-07, | |
| "loss": 0.1781, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 0.8432377049180327, | |
| "grad_norm": 0.822129731152101, | |
| "learning_rate": 2.9709638504168635e-07, | |
| "loss": 0.1716, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 0.8442622950819673, | |
| "grad_norm": 0.8468658427315924, | |
| "learning_rate": 2.933030041861312e-07, | |
| "loss": 0.168, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 0.8452868852459017, | |
| "grad_norm": 0.8838111227648381, | |
| "learning_rate": 2.8953248681609146e-07, | |
| "loss": 0.1773, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.8463114754098361, | |
| "grad_norm": 0.8398520146975642, | |
| "learning_rate": 2.857848719977216e-07, | |
| "loss": 0.1627, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 0.8473360655737705, | |
| "grad_norm": 0.9569466718710614, | |
| "learning_rate": 2.8206019855988587e-07, | |
| "loss": 0.1656, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 0.8483606557377049, | |
| "grad_norm": 0.8743588826286536, | |
| "learning_rate": 2.783585050937537e-07, | |
| "loss": 0.1689, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 0.8493852459016393, | |
| "grad_norm": 0.9249556753245101, | |
| "learning_rate": 2.746798299523995e-07, | |
| "loss": 0.1788, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 0.8504098360655737, | |
| "grad_norm": 0.8722683857620943, | |
| "learning_rate": 2.710242112504069e-07, | |
| "loss": 0.178, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.8514344262295082, | |
| "grad_norm": 0.8639210526213216, | |
| "learning_rate": 2.673916868634721e-07, | |
| "loss": 0.1794, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 0.8524590163934426, | |
| "grad_norm": 0.8897055621889665, | |
| "learning_rate": 2.6378229442801163e-07, | |
| "loss": 0.1838, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 0.8534836065573771, | |
| "grad_norm": 0.8251992662223759, | |
| "learning_rate": 2.601960713407734e-07, | |
| "loss": 0.1704, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 0.8545081967213115, | |
| "grad_norm": 0.7826397497408839, | |
| "learning_rate": 2.566330547584497e-07, | |
| "loss": 0.1632, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 0.8555327868852459, | |
| "grad_norm": 0.9181410213146293, | |
| "learning_rate": 2.5309328159729057e-07, | |
| "loss": 0.1771, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.8565573770491803, | |
| "grad_norm": 0.8818986014115952, | |
| "learning_rate": 2.4957678853272246e-07, | |
| "loss": 0.177, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 0.8575819672131147, | |
| "grad_norm": 0.9982853256072317, | |
| "learning_rate": 2.4608361199896627e-07, | |
| "loss": 0.1799, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 0.8586065573770492, | |
| "grad_norm": 0.8058719369046026, | |
| "learning_rate": 2.4261378818866256e-07, | |
| "loss": 0.1466, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 0.8596311475409836, | |
| "grad_norm": 0.8435952503633929, | |
| "learning_rate": 2.39167353052496e-07, | |
| "loss": 0.176, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 0.860655737704918, | |
| "grad_norm": 0.9404024830246595, | |
| "learning_rate": 2.357443422988215e-07, | |
| "loss": 0.1852, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.8616803278688525, | |
| "grad_norm": 0.7920752942005287, | |
| "learning_rate": 2.3234479139329496e-07, | |
| "loss": 0.1553, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 0.8627049180327869, | |
| "grad_norm": 0.8994862830984595, | |
| "learning_rate": 2.28968735558506e-07, | |
| "loss": 0.166, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 0.8637295081967213, | |
| "grad_norm": 0.9035122592619887, | |
| "learning_rate": 2.2561620977361337e-07, | |
| "loss": 0.1692, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 0.8647540983606558, | |
| "grad_norm": 0.8782944427781477, | |
| "learning_rate": 2.2228724877398134e-07, | |
| "loss": 0.1816, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 0.8657786885245902, | |
| "grad_norm": 0.8889244415577865, | |
| "learning_rate": 2.189818870508209e-07, | |
| "loss": 0.1684, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.8668032786885246, | |
| "grad_norm": 0.8575802293919446, | |
| "learning_rate": 2.1570015885083228e-07, | |
| "loss": 0.1697, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 0.867827868852459, | |
| "grad_norm": 0.9279685730735587, | |
| "learning_rate": 2.1244209817584987e-07, | |
| "loss": 0.1849, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 0.8688524590163934, | |
| "grad_norm": 0.9303621515606999, | |
| "learning_rate": 2.092077387824884e-07, | |
| "loss": 0.1848, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 0.8698770491803278, | |
| "grad_norm": 0.8425528933325437, | |
| "learning_rate": 2.0599711418179707e-07, | |
| "loss": 0.1746, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 0.8709016393442623, | |
| "grad_norm": 0.8745626443076188, | |
| "learning_rate": 2.0281025763890767e-07, | |
| "loss": 0.18, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.8719262295081968, | |
| "grad_norm": 0.8424052034424253, | |
| "learning_rate": 1.9964720217269557e-07, | |
| "loss": 0.1797, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 0.8729508196721312, | |
| "grad_norm": 0.8364327013858791, | |
| "learning_rate": 1.9650798055543014e-07, | |
| "loss": 0.1654, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 0.8739754098360656, | |
| "grad_norm": 0.7828762225134993, | |
| "learning_rate": 1.9339262531244214e-07, | |
| "loss": 0.1729, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 0.875, | |
| "grad_norm": 0.8897812912811075, | |
| "learning_rate": 1.9030116872178317e-07, | |
| "loss": 0.1695, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 0.8760245901639344, | |
| "grad_norm": 0.8798807527360308, | |
| "learning_rate": 1.8723364281389211e-07, | |
| "loss": 0.162, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.8770491803278688, | |
| "grad_norm": 0.8713780467453701, | |
| "learning_rate": 1.8419007937126254e-07, | |
| "loss": 0.1841, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 0.8780737704918032, | |
| "grad_norm": 0.7974558160445019, | |
| "learning_rate": 1.8117050992811496e-07, | |
| "loss": 0.1617, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 0.8790983606557377, | |
| "grad_norm": 0.9179566520383018, | |
| "learning_rate": 1.781749657700693e-07, | |
| "loss": 0.1604, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 0.8801229508196722, | |
| "grad_norm": 0.8407344863769272, | |
| "learning_rate": 1.7520347793381824e-07, | |
| "loss": 0.1703, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 0.8811475409836066, | |
| "grad_norm": 0.8830212689827281, | |
| "learning_rate": 1.7225607720681132e-07, | |
| "loss": 0.1838, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.882172131147541, | |
| "grad_norm": 0.7713495320941431, | |
| "learning_rate": 1.693327941269307e-07, | |
| "loss": 0.1619, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 0.8831967213114754, | |
| "grad_norm": 0.9593572336197926, | |
| "learning_rate": 1.6643365898217774e-07, | |
| "loss": 0.1744, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 0.8842213114754098, | |
| "grad_norm": 0.7270225002056623, | |
| "learning_rate": 1.6355870181035654e-07, | |
| "loss": 0.1628, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 0.8852459016393442, | |
| "grad_norm": 0.8222888761619991, | |
| "learning_rate": 1.607079523987662e-07, | |
| "loss": 0.1761, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 0.8862704918032787, | |
| "grad_norm": 0.8413661619768493, | |
| "learning_rate": 1.578814402838888e-07, | |
| "loss": 0.1736, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.8872950819672131, | |
| "grad_norm": 0.7742499111063967, | |
| "learning_rate": 1.5507919475108656e-07, | |
| "loss": 0.1716, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 0.8883196721311475, | |
| "grad_norm": 0.9985212949005117, | |
| "learning_rate": 1.5230124483429505e-07, | |
| "loss": 0.2031, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 0.889344262295082, | |
| "grad_norm": 0.8777360343429678, | |
| "learning_rate": 1.4954761931572526e-07, | |
| "loss": 0.1692, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 0.8903688524590164, | |
| "grad_norm": 0.8370947720945473, | |
| "learning_rate": 1.4681834672556379e-07, | |
| "loss": 0.1743, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 0.8913934426229508, | |
| "grad_norm": 0.8751996685810851, | |
| "learning_rate": 1.4411345534167758e-07, | |
| "loss": 0.1766, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.8924180327868853, | |
| "grad_norm": 0.9362961298266388, | |
| "learning_rate": 1.4143297318932158e-07, | |
| "loss": 0.1822, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 0.8934426229508197, | |
| "grad_norm": 0.8132916873195382, | |
| "learning_rate": 1.3877692804084687e-07, | |
| "loss": 0.1702, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 0.8944672131147541, | |
| "grad_norm": 0.919634614414977, | |
| "learning_rate": 1.3614534741541507e-07, | |
| "loss": 0.1858, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 0.8954918032786885, | |
| "grad_norm": 1.0397342000036898, | |
| "learning_rate": 1.3353825857871038e-07, | |
| "loss": 0.1796, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 0.8965163934426229, | |
| "grad_norm": 0.797036127395251, | |
| "learning_rate": 1.309556885426602e-07, | |
| "loss": 0.1654, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.8975409836065574, | |
| "grad_norm": 0.7201854535988077, | |
| "learning_rate": 1.2839766406515296e-07, | |
| "loss": 0.1503, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 0.8985655737704918, | |
| "grad_norm": 0.811163993144131, | |
| "learning_rate": 1.2586421164976243e-07, | |
| "loss": 0.154, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 0.8995901639344263, | |
| "grad_norm": 0.8544485574393694, | |
| "learning_rate": 1.2335535754547183e-07, | |
| "loss": 0.1692, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 0.9006147540983607, | |
| "grad_norm": 0.8485941480536551, | |
| "learning_rate": 1.2087112774640224e-07, | |
| "loss": 0.1706, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 0.9016393442622951, | |
| "grad_norm": 0.9716170720719544, | |
| "learning_rate": 1.1841154799154376e-07, | |
| "loss": 0.1831, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.9026639344262295, | |
| "grad_norm": 0.8163293311717974, | |
| "learning_rate": 1.1597664376448852e-07, | |
| "loss": 0.182, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 0.9036885245901639, | |
| "grad_norm": 0.7708856048639566, | |
| "learning_rate": 1.1356644029316661e-07, | |
| "loss": 0.1802, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 0.9047131147540983, | |
| "grad_norm": 0.8015117979367216, | |
| "learning_rate": 1.1118096254958466e-07, | |
| "loss": 0.1579, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 0.9057377049180327, | |
| "grad_norm": 0.7869302477703908, | |
| "learning_rate": 1.0882023524956764e-07, | |
| "loss": 0.1761, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 0.9067622950819673, | |
| "grad_norm": 0.8508107153427299, | |
| "learning_rate": 1.0648428285250117e-07, | |
| "loss": 0.1746, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.9077868852459017, | |
| "grad_norm": 0.7992233072823974, | |
| "learning_rate": 1.0417312956108067e-07, | |
| "loss": 0.1598, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 0.9088114754098361, | |
| "grad_norm": 0.7827917112508029, | |
| "learning_rate": 1.0188679932105817e-07, | |
| "loss": 0.1716, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 0.9098360655737705, | |
| "grad_norm": 0.8197452412676381, | |
| "learning_rate": 9.96253158209956e-08, | |
| "loss": 0.166, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 0.9108606557377049, | |
| "grad_norm": 0.8290960866345685, | |
| "learning_rate": 9.738870249201921e-08, | |
| "loss": 0.1696, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 0.9118852459016393, | |
| "grad_norm": 1.0010083478434122, | |
| "learning_rate": 9.517698250757574e-08, | |
| "loss": 0.1754, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.9129098360655737, | |
| "grad_norm": 0.7712987279364241, | |
| "learning_rate": 9.299017878319383e-08, | |
| "loss": 0.1621, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 0.9139344262295082, | |
| "grad_norm": 0.8369305372418803, | |
| "learning_rate": 9.082831397624586e-08, | |
| "loss": 0.1865, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 0.9149590163934426, | |
| "grad_norm": 1.0052739142857956, | |
| "learning_rate": 8.869141048571311e-08, | |
| "loss": 0.189, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 0.9159836065573771, | |
| "grad_norm": 0.8759858745081842, | |
| "learning_rate": 8.657949045195374e-08, | |
| "loss": 0.186, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 0.9170081967213115, | |
| "grad_norm": 0.7930929048213841, | |
| "learning_rate": 8.449257575647351e-08, | |
| "loss": 0.1758, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.9180327868852459, | |
| "grad_norm": 0.9066728421836677, | |
| "learning_rate": 8.243068802169906e-08, | |
| "loss": 0.1823, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 0.9190573770491803, | |
| "grad_norm": 0.879547520022859, | |
| "learning_rate": 8.039384861075417e-08, | |
| "loss": 0.1679, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 0.9200819672131147, | |
| "grad_norm": 0.746565324150959, | |
| "learning_rate": 7.838207862723712e-08, | |
| "loss": 0.1708, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 0.9211065573770492, | |
| "grad_norm": 0.9051359371558754, | |
| "learning_rate": 7.63953989150043e-08, | |
| "loss": 0.1684, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 0.9221311475409836, | |
| "grad_norm": 0.8977264335257594, | |
| "learning_rate": 7.443383005795224e-08, | |
| "loss": 0.1789, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.9221311475409836, | |
| "eval_loss": 0.15962126851081848, | |
| "eval_runtime": 2.4315, | |
| "eval_samples_per_second": 3.29, | |
| "eval_steps_per_second": 0.823, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.923155737704918, | |
| "grad_norm": 0.8943196342382268, | |
| "learning_rate": 7.249739237980474e-08, | |
| "loss": 0.181, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 0.9241803278688525, | |
| "grad_norm": 0.78649470970722, | |
| "learning_rate": 7.058610594390308e-08, | |
| "loss": 0.1665, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 0.9252049180327869, | |
| "grad_norm": 0.744757085949217, | |
| "learning_rate": 6.869999055299809e-08, | |
| "loss": 0.1523, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 0.9262295081967213, | |
| "grad_norm": 0.8213777195831243, | |
| "learning_rate": 6.683906574904364e-08, | |
| "loss": 0.1682, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 0.9272540983606558, | |
| "grad_norm": 0.9403750780233009, | |
| "learning_rate": 6.500335081299603e-08, | |
| "loss": 0.1897, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.9282786885245902, | |
| "grad_norm": 0.9763008546642387, | |
| "learning_rate": 6.319286476461239e-08, | |
| "loss": 0.2006, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 0.9293032786885246, | |
| "grad_norm": 0.906354129784996, | |
| "learning_rate": 6.140762636225484e-08, | |
| "loss": 0.1775, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 0.930327868852459, | |
| "grad_norm": 0.8327944984202678, | |
| "learning_rate": 5.964765410269635e-08, | |
| "loss": 0.1663, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 0.9313524590163934, | |
| "grad_norm": 0.8575399588295677, | |
| "learning_rate": 5.791296622092768e-08, | |
| "loss": 0.1667, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 0.9323770491803278, | |
| "grad_norm": 0.8222340530823159, | |
| "learning_rate": 5.6203580689970225e-08, | |
| "loss": 0.1689, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.9334016393442623, | |
| "grad_norm": 0.8172745956072737, | |
| "learning_rate": 5.451951522068788e-08, | |
| "loss": 0.1521, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 0.9344262295081968, | |
| "grad_norm": 0.8738017093364044, | |
| "learning_rate": 5.2860787261605485e-08, | |
| "loss": 0.1737, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 0.9354508196721312, | |
| "grad_norm": 0.8964817588085776, | |
| "learning_rate": 5.1227413998726214e-08, | |
| "loss": 0.1757, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 0.9364754098360656, | |
| "grad_norm": 0.9402699094593511, | |
| "learning_rate": 4.9619412355355615e-08, | |
| "loss": 0.1706, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 0.9375, | |
| "grad_norm": 0.8491161258379792, | |
| "learning_rate": 4.8036798991923925e-08, | |
| "loss": 0.1718, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.9385245901639344, | |
| "grad_norm": 0.9087263303107859, | |
| "learning_rate": 4.647959030581517e-08, | |
| "loss": 0.1746, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 0.9395491803278688, | |
| "grad_norm": 0.8811024408290967, | |
| "learning_rate": 4.494780243119612e-08, | |
| "loss": 0.1807, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 0.9405737704918032, | |
| "grad_norm": 0.8226737996734362, | |
| "learning_rate": 4.3441451238850354e-08, | |
| "loss": 0.1751, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 0.9415983606557377, | |
| "grad_norm": 0.8816127284945342, | |
| "learning_rate": 4.19605523360131e-08, | |
| "loss": 0.1849, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 0.9426229508196722, | |
| "grad_norm": 0.8667982766413325, | |
| "learning_rate": 4.050512106620913e-08, | |
| "loss": 0.1716, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.9436475409836066, | |
| "grad_norm": 0.8835372132599139, | |
| "learning_rate": 3.907517250909487e-08, | |
| "loss": 0.1757, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 0.944672131147541, | |
| "grad_norm": 0.9851426930224088, | |
| "learning_rate": 3.767072148030071e-08, | |
| "loss": 0.1894, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 0.9456967213114754, | |
| "grad_norm": 0.7990585190735112, | |
| "learning_rate": 3.6291782531278905e-08, | |
| "loss": 0.1702, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 0.9467213114754098, | |
| "grad_norm": 0.8808541906651229, | |
| "learning_rate": 3.4938369949152616e-08, | |
| "loss": 0.1819, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 0.9477459016393442, | |
| "grad_norm": 0.8989754380010129, | |
| "learning_rate": 3.361049775656627e-08, | |
| "loss": 0.1865, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.9487704918032787, | |
| "grad_norm": 0.8223617196049038, | |
| "learning_rate": 3.2308179711543206e-08, | |
| "loss": 0.1776, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 0.9497950819672131, | |
| "grad_norm": 0.899541269928143, | |
| "learning_rate": 3.1031429307339376e-08, | |
| "loss": 0.169, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 0.9508196721311475, | |
| "grad_norm": 0.9773950095981714, | |
| "learning_rate": 2.9780259772307362e-08, | |
| "loss": 0.1784, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 0.951844262295082, | |
| "grad_norm": 0.9381758984680233, | |
| "learning_rate": 2.85546840697562e-08, | |
| "loss": 0.183, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 0.9528688524590164, | |
| "grad_norm": 0.8001247728121608, | |
| "learning_rate": 2.73547148978201e-08, | |
| "loss": 0.1709, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.9538934426229508, | |
| "grad_norm": 0.9006959143385694, | |
| "learning_rate": 2.6180364689323556e-08, | |
| "loss": 0.1908, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 0.9549180327868853, | |
| "grad_norm": 0.9041413179426013, | |
| "learning_rate": 2.5031645611654497e-08, | |
| "loss": 0.1914, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 0.9559426229508197, | |
| "grad_norm": 0.812638408788933, | |
| "learning_rate": 2.3908569566638563e-08, | |
| "loss": 0.1792, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 0.9569672131147541, | |
| "grad_norm": 0.8262511792058742, | |
| "learning_rate": 2.2811148190414468e-08, | |
| "loss": 0.1615, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 0.9579918032786885, | |
| "grad_norm": 0.8000254685307941, | |
| "learning_rate": 2.1739392853314666e-08, | |
| "loss": 0.1768, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.9590163934426229, | |
| "grad_norm": 0.8631361210863329, | |
| "learning_rate": 2.0693314659746276e-08, | |
| "loss": 0.1809, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 0.9600409836065574, | |
| "grad_norm": 0.7989977016554337, | |
| "learning_rate": 1.9672924448077278e-08, | |
| "loss": 0.1756, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 0.9610655737704918, | |
| "grad_norm": 0.7863888340988617, | |
| "learning_rate": 1.8678232790523553e-08, | |
| "loss": 0.1632, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 0.9620901639344263, | |
| "grad_norm": 0.814063561946324, | |
| "learning_rate": 1.7709249993038968e-08, | |
| "loss": 0.1662, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 0.9631147540983607, | |
| "grad_norm": 0.7963412614637703, | |
| "learning_rate": 1.6765986095209906e-08, | |
| "loss": 0.1675, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.9641393442622951, | |
| "grad_norm": 0.790291267551256, | |
| "learning_rate": 1.5848450870149802e-08, | |
| "loss": 0.1696, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 0.9651639344262295, | |
| "grad_norm": 0.8375698555062183, | |
| "learning_rate": 1.4956653824398647e-08, | |
| "loss": 0.1578, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 0.9661885245901639, | |
| "grad_norm": 0.921239217083913, | |
| "learning_rate": 1.4090604197824487e-08, | |
| "loss": 0.1836, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 0.9672131147540983, | |
| "grad_norm": 0.8162048925209585, | |
| "learning_rate": 1.3250310963527358e-08, | |
| "loss": 0.1689, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 0.9682377049180327, | |
| "grad_norm": 0.8007263369768026, | |
| "learning_rate": 1.2435782827746879e-08, | |
| "loss": 0.1703, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.9692622950819673, | |
| "grad_norm": 0.8286387271547603, | |
| "learning_rate": 1.1647028229770651e-08, | |
| "loss": 0.1702, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 0.9702868852459017, | |
| "grad_norm": 1.014322428834256, | |
| "learning_rate": 1.088405534184961e-08, | |
| "loss": 0.194, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 0.9713114754098361, | |
| "grad_norm": 0.7803863889651891, | |
| "learning_rate": 1.0146872069109748e-08, | |
| "loss": 0.167, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 0.9723360655737705, | |
| "grad_norm": 0.8208627212198034, | |
| "learning_rate": 9.43548604947303e-09, | |
| "loss": 0.1763, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 0.9733606557377049, | |
| "grad_norm": 0.8540165605829128, | |
| "learning_rate": 8.749904653577446e-09, | |
| "loss": 0.1646, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.9743852459016393, | |
| "grad_norm": 0.8486237603290785, | |
| "learning_rate": 8.090134984699849e-09, | |
| "loss": 0.1627, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 0.9754098360655737, | |
| "grad_norm": 0.7784163406280571, | |
| "learning_rate": 7.456183878683243e-09, | |
| "loss": 0.1699, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 0.9764344262295082, | |
| "grad_norm": 0.8679489126101304, | |
| "learning_rate": 6.848057903866001e-09, | |
| "loss": 0.1822, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 0.9774590163934426, | |
| "grad_norm": 0.8636438831130339, | |
| "learning_rate": 6.265763361013033e-09, | |
| "loss": 0.1808, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 0.9784836065573771, | |
| "grad_norm": 0.8767797180127741, | |
| "learning_rate": 5.7093062832508376e-09, | |
| "loss": 0.1627, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.9795081967213115, | |
| "grad_norm": 0.8112375807081016, | |
| "learning_rate": 5.178692436005883e-09, | |
| "loss": 0.1831, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 0.9805327868852459, | |
| "grad_norm": 0.7306012512582455, | |
| "learning_rate": 4.673927316943549e-09, | |
| "loss": 0.1553, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 0.9815573770491803, | |
| "grad_norm": 0.8292637382094212, | |
| "learning_rate": 4.195016155912057e-09, | |
| "loss": 0.1566, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 0.9825819672131147, | |
| "grad_norm": 0.9067630459701223, | |
| "learning_rate": 3.741963914887792e-09, | |
| "loss": 0.1792, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 0.9836065573770492, | |
| "grad_norm": 0.8237897665250666, | |
| "learning_rate": 3.3147752879236773e-09, | |
| "loss": 0.174, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.9846311475409836, | |
| "grad_norm": 0.9171523826038648, | |
| "learning_rate": 2.9134547011017144e-09, | |
| "loss": 0.1878, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 0.985655737704918, | |
| "grad_norm": 0.914274547888741, | |
| "learning_rate": 2.5380063124857968e-09, | |
| "loss": 0.1823, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 0.9866803278688525, | |
| "grad_norm": 0.9109115550056215, | |
| "learning_rate": 2.1884340120795212e-09, | |
| "loss": 0.1817, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 0.9877049180327869, | |
| "grad_norm": 0.8919111435547045, | |
| "learning_rate": 1.8647414217848325e-09, | |
| "loss": 0.1865, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 0.9887295081967213, | |
| "grad_norm": 0.8644772360277703, | |
| "learning_rate": 1.5669318953662193e-09, | |
| "loss": 0.1682, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.9897540983606558, | |
| "grad_norm": 0.7715238565433128, | |
| "learning_rate": 1.2950085184140759e-09, | |
| "loss": 0.1641, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 0.9907786885245902, | |
| "grad_norm": 0.8905421909942479, | |
| "learning_rate": 1.0489741083138937e-09, | |
| "loss": 0.1704, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 0.9918032786885246, | |
| "grad_norm": 0.7923871290694215, | |
| "learning_rate": 8.28831214217396e-10, | |
| "loss": 0.171, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 0.992827868852459, | |
| "grad_norm": 0.7996711063333748, | |
| "learning_rate": 6.345821170142264e-10, | |
| "loss": 0.1606, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 0.9938524590163934, | |
| "grad_norm": 0.7848807781475003, | |
| "learning_rate": 4.662288293105776e-10, | |
| "loss": 0.1663, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.9948770491803278, | |
| "grad_norm": 0.8547672912543149, | |
| "learning_rate": 3.2377309540698733e-10, | |
| "loss": 0.1795, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 0.9959016393442623, | |
| "grad_norm": 0.9393791790278402, | |
| "learning_rate": 2.0721639128085113e-10, | |
| "loss": 0.1829, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 0.9969262295081968, | |
| "grad_norm": 0.7832597559006569, | |
| "learning_rate": 1.165599245708804e-10, | |
| "loss": 0.157, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 0.9979508196721312, | |
| "grad_norm": 0.8400649322975604, | |
| "learning_rate": 5.1804634564334154e-11, | |
| "loss": 0.1664, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 0.9989754098360656, | |
| "grad_norm": 0.8344881341503189, | |
| "learning_rate": 1.2951192187860006e-11, | |
| "loss": 0.1819, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.9530044470093486, | |
| "learning_rate": 0.0, | |
| "loss": 0.17, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 976, | |
| "total_flos": 187284013056000.0, | |
| "train_loss": 0.18639373101416182, | |
| "train_runtime": 8890.7999, | |
| "train_samples_per_second": 0.878, | |
| "train_steps_per_second": 0.11 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 976, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 95000000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 187284013056000.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
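
The state above appears to be the `trainer_state.json` that Hugging Face's `Trainer` writes at the end of a run: `logging_steps: 1` per-step records in `log_history`, a final summary record with `train_loss` and `train_runtime`, and the terminal `TrainerControl` flags. Below is a minimal sanity-check sketch, assuming the file is saved under its default name `trainer_state.json`. The base learning rate of `5e-06` and the zero-warmup cosine schedule are inferred from the logged values, which start just under `5e-06` and decay to exactly `0.0` at `max_steps = 976`; they are not stated anywhere in the file itself.

```python
import json
import math

# Load the dumped trainer state (default filename used by Trainer.save_state()).
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final summary record does not,
# so this filter keeps exactly the 976 logged optimizer steps.
steps = [e for e in state["log_history"] if "loss" in e]
summary = state["log_history"][-1]          # final record with train_loss etc.
max_steps = state["max_steps"]              # 976
base_lr = 5e-06                             # inferred from the earliest logged rates

# Check the last few logged learning rates against a zero-warmup cosine decay:
#   lr(step) = base_lr * 0.5 * (1 + cos(pi * step / max_steps))
for entry in steps[-3:]:
    s = entry["step"]
    expected = base_lr * 0.5 * (1 + math.cos(math.pi * s / max_steps))
    print(f"step {s}: logged lr = {entry['learning_rate']:.4e}, "
          f"cosine lr = {expected:.4e}")

# The summary's train_loss is the average loss over all steps; with
# logging_steps = 1 it should match the mean of the logged (rounded) losses.
mean_loss = sum(e["loss"] for e in steps) / len(steps)
print(f"mean of logged losses: {mean_loss:.5f} "
      f"(reported train_loss: {summary['train_loss']:.5f})")
```

Running this against the state above should print learning rates agreeing with the cosine formula to the displayed precision (e.g. step 975 gives `1.2951e-11`, step 976 gives `0.0`) and a mean loss within rounding error of the reported `train_loss` of `0.18639`, which is one quick way to confirm the log was not truncated or reordered.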