{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.999259807549963,
  "eval_steps": 100,
  "global_step": 675,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.007401924500370096,
      "grad_norm": 2.674959598727342,
      "learning_rate": 1.4705882352941177e-06,
      "loss": 1.0787,
      "step": 5
    },
    {
      "epoch": 0.014803849000740192,
      "grad_norm": 2.2849930335009767,
      "learning_rate": 2.9411764705882355e-06,
      "loss": 1.0901,
      "step": 10
    },
    {
      "epoch": 0.02220577350111029,
      "grad_norm": 1.4169102513042604,
      "learning_rate": 4.411764705882353e-06,
      "loss": 1.0619,
      "step": 15
    },
    {
      "epoch": 0.029607698001480384,
      "grad_norm": 1.3754280634869183,
      "learning_rate": 5.882352941176471e-06,
      "loss": 1.0087,
      "step": 20
    },
    {
      "epoch": 0.037009622501850484,
      "grad_norm": 1.1141154652271672,
      "learning_rate": 7.352941176470589e-06,
      "loss": 0.9684,
      "step": 25
    },
    {
      "epoch": 0.04441154700222058,
      "grad_norm": 0.9534792105007532,
      "learning_rate": 8.823529411764707e-06,
      "loss": 0.9217,
      "step": 30
    },
    {
      "epoch": 0.05181347150259067,
      "grad_norm": 0.7606504248144849,
      "learning_rate": 1.0294117647058823e-05,
      "loss": 0.8859,
      "step": 35
    },
    {
      "epoch": 0.05921539600296077,
      "grad_norm": 0.68529790584477,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 0.8631,
      "step": 40
    },
    {
      "epoch": 0.06661732050333087,
      "grad_norm": 0.7482947060538522,
      "learning_rate": 1.323529411764706e-05,
      "loss": 0.8485,
      "step": 45
    },
    {
      "epoch": 0.07401924500370097,
      "grad_norm": 0.814176151345203,
      "learning_rate": 1.4705882352941179e-05,
      "loss": 0.861,
      "step": 50
    },
    {
      "epoch": 0.08142116950407106,
      "grad_norm": 0.6269959293106316,
      "learning_rate": 1.6176470588235296e-05,
      "loss": 0.8545,
      "step": 55
    },
    {
      "epoch": 0.08882309400444116,
      "grad_norm": 0.7352759272340602,
      "learning_rate": 1.7647058823529414e-05,
      "loss": 0.8293,
      "step": 60
    },
    {
      "epoch": 0.09622501850481126,
      "grad_norm": 0.6918084490038217,
      "learning_rate": 1.911764705882353e-05,
      "loss": 0.8259,
      "step": 65
    },
    {
      "epoch": 0.10362694300518134,
      "grad_norm": 0.7718482933587625,
      "learning_rate": 1.9999464266898485e-05,
      "loss": 0.8211,
      "step": 70
    },
    {
      "epoch": 0.11102886750555144,
      "grad_norm": 0.9788064725128405,
      "learning_rate": 1.9993437928712977e-05,
      "loss": 0.8164,
      "step": 75
    },
    {
      "epoch": 0.11843079200592153,
      "grad_norm": 0.837998184141708,
      "learning_rate": 1.998071963486563e-05,
      "loss": 0.8062,
      "step": 80
    },
    {
      "epoch": 0.12583271650629163,
      "grad_norm": 0.6629895054599079,
      "learning_rate": 1.9961317901970953e-05,
      "loss": 0.7945,
      "step": 85
    },
    {
      "epoch": 0.13323464100666174,
      "grad_norm": 0.7557912436308644,
      "learning_rate": 1.993524572210807e-05,
      "loss": 0.7947,
      "step": 90
    },
    {
      "epoch": 0.14063656550703182,
      "grad_norm": 0.797960903546945,
      "learning_rate": 1.990252055412077e-05,
      "loss": 0.7906,
      "step": 95
    },
    {
      "epoch": 0.14803849000740193,
      "grad_norm": 0.7886433512486097,
      "learning_rate": 1.9863164311926433e-05,
      "loss": 0.8171,
      "step": 100
    },
    {
      "epoch": 0.14803849000740193,
      "eval_loss": 0.8197493553161621,
      "eval_runtime": 7.2314,
      "eval_samples_per_second": 17.701,
      "eval_steps_per_second": 2.213,
      "step": 100
    },
    {
      "epoch": 0.15544041450777202,
      "grad_norm": 0.7194704926083675,
      "learning_rate": 1.981720334984174e-05,
      "loss": 0.792,
      "step": 105
    },
    {
      "epoch": 0.16284233900814213,
      "grad_norm": 0.7153212599875873,
      "learning_rate": 1.9764668444934853e-05,
      "loss": 0.7859,
      "step": 110
    },
    {
      "epoch": 0.1702442635085122,
      "grad_norm": 0.7209996155683963,
      "learning_rate": 1.970559477641606e-05,
      "loss": 0.7631,
      "step": 115
    },
    {
      "epoch": 0.17764618800888232,
      "grad_norm": 0.7333599419087882,
      "learning_rate": 1.9640021902080523e-05,
      "loss": 0.793,
      "step": 120
    },
    {
      "epoch": 0.1850481125092524,
      "grad_norm": 0.6289153412562695,
      "learning_rate": 1.9567993731818988e-05,
      "loss": 0.7916,
      "step": 125
    },
    {
      "epoch": 0.19245003700962252,
      "grad_norm": 0.7521612039190329,
      "learning_rate": 1.9489558498214197e-05,
      "loss": 0.7843,
      "step": 130
    },
    {
      "epoch": 0.1998519615099926,
      "grad_norm": 0.6568997079081034,
      "learning_rate": 1.9404768724242667e-05,
      "loss": 0.7703,
      "step": 135
    },
    {
      "epoch": 0.20725388601036268,
      "grad_norm": 0.7360339897713676,
      "learning_rate": 1.931368118810346e-05,
      "loss": 0.7947,
      "step": 140
    },
    {
      "epoch": 0.2146558105107328,
      "grad_norm": 0.7557749076828488,
      "learning_rate": 1.92163568851975e-05,
      "loss": 0.7757,
      "step": 145
    },
    {
      "epoch": 0.22205773501110287,
      "grad_norm": 0.8218802431960855,
      "learning_rate": 1.911286098728296e-05,
      "loss": 0.772,
      "step": 150
    },
    {
      "epoch": 0.22945965951147299,
      "grad_norm": 0.7041654789385663,
      "learning_rate": 1.900326279883392e-05,
      "loss": 0.8017,
      "step": 155
    },
    {
      "epoch": 0.23686158401184307,
      "grad_norm": 0.7107270494061172,
      "learning_rate": 1.8887635710631716e-05,
      "loss": 0.8045,
      "step": 160
    },
    {
      "epoch": 0.24426350851221318,
      "grad_norm": 0.7042955521495632,
      "learning_rate": 1.8766057150619865e-05,
      "loss": 0.7775,
      "step": 165
    },
    {
      "epoch": 0.25166543301258326,
      "grad_norm": 0.7141479489682149,
      "learning_rate": 1.8638608532055635e-05,
      "loss": 0.7947,
      "step": 170
    },
    {
      "epoch": 0.25906735751295334,
      "grad_norm": 0.6682818577909502,
      "learning_rate": 1.8505375198992856e-05,
      "loss": 0.7831,
      "step": 175
    },
    {
      "epoch": 0.2664692820133235,
      "grad_norm": 0.7193249750447441,
      "learning_rate": 1.836644636913258e-05,
      "loss": 0.7542,
      "step": 180
    },
    {
      "epoch": 0.27387120651369357,
      "grad_norm": 0.7847188441908851,
      "learning_rate": 1.8221915074079764e-05,
      "loss": 0.7778,
      "step": 185
    },
    {
      "epoch": 0.28127313101406365,
      "grad_norm": 0.8828987676403609,
      "learning_rate": 1.8071878097046064e-05,
      "loss": 0.7564,
      "step": 190
    },
    {
      "epoch": 0.28867505551443373,
      "grad_norm": 0.6213320600286455,
      "learning_rate": 1.7916435908040413e-05,
      "loss": 0.7723,
      "step": 195
    },
    {
      "epoch": 0.29607698001480387,
      "grad_norm": 0.6595102663479724,
      "learning_rate": 1.7755692596590778e-05,
      "loss": 0.7747,
      "step": 200
    },
    {
      "epoch": 0.29607698001480387,
      "eval_loss": 0.7899559736251831,
      "eval_runtime": 7.2104,
      "eval_samples_per_second": 17.752,
      "eval_steps_per_second": 2.219,
      "step": 200
    },
    {
      "epoch": 0.30347890451517395,
      "grad_norm": 0.706296557877635,
      "learning_rate": 1.7589755802042188e-05,
      "loss": 0.773,
      "step": 205
    },
    {
      "epoch": 0.31088082901554404,
      "grad_norm": 0.6937734870068385,
      "learning_rate": 1.7418736641477636e-05,
      "loss": 0.7563,
      "step": 210
    },
    {
      "epoch": 0.3182827535159141,
      "grad_norm": 0.6327617109092492,
      "learning_rate": 1.7242749635310222e-05,
      "loss": 0.758,
      "step": 215
    },
    {
      "epoch": 0.32568467801628426,
      "grad_norm": 0.6635934294750666,
      "learning_rate": 1.7061912630596252e-05,
      "loss": 0.7605,
      "step": 220
    },
    {
      "epoch": 0.33308660251665434,
      "grad_norm": 0.7228160092157478,
      "learning_rate": 1.6876346722120747e-05,
      "loss": 0.7754,
      "step": 225
    },
    {
      "epoch": 0.3404885270170244,
      "grad_norm": 0.6952975644500169,
      "learning_rate": 1.6686176171308125e-05,
      "loss": 0.7977,
      "step": 230
    },
    {
      "epoch": 0.3478904515173945,
      "grad_norm": 0.6717058633626165,
      "learning_rate": 1.6491528323012412e-05,
      "loss": 0.7594,
      "step": 235
    },
    {
      "epoch": 0.35529237601776464,
      "grad_norm": 0.6596693045521963,
      "learning_rate": 1.6292533520242663e-05,
      "loss": 0.7623,
      "step": 240
    },
    {
      "epoch": 0.3626943005181347,
      "grad_norm": 0.6470637566854384,
      "learning_rate": 1.6089325016880737e-05,
      "loss": 0.7526,
      "step": 245
    },
    {
      "epoch": 0.3700962250185048,
      "grad_norm": 0.6877498215548267,
      "learning_rate": 1.588203888844982e-05,
      "loss": 0.7681,
      "step": 250
    },
    {
      "epoch": 0.3774981495188749,
      "grad_norm": 0.6358323626672553,
      "learning_rate": 1.5670813940993504e-05,
      "loss": 0.741,
      "step": 255
    },
    {
      "epoch": 0.38490007401924503,
      "grad_norm": 0.600848318475503,
      "learning_rate": 1.5455791618126407e-05,
      "loss": 0.7334,
      "step": 260
    },
    {
      "epoch": 0.3923019985196151,
      "grad_norm": 0.6314609013122284,
      "learning_rate": 1.5237115906318565e-05,
      "loss": 0.7572,
      "step": 265
    },
    {
      "epoch": 0.3997039230199852,
      "grad_norm": 0.6546980627619242,
      "learning_rate": 1.5014933238477069e-05,
      "loss": 0.7378,
      "step": 270
    },
    {
      "epoch": 0.4071058475203553,
      "grad_norm": 0.6975545683818176,
      "learning_rate": 1.4789392395889468e-05,
      "loss": 0.7632,
      "step": 275
    },
    {
      "epoch": 0.41450777202072536,
      "grad_norm": 0.6503686028697638,
      "learning_rate": 1.4560644408594602e-05,
      "loss": 0.744,
      "step": 280
    },
    {
      "epoch": 0.4219096965210955,
      "grad_norm": 0.6602116319307001,
      "learning_rate": 1.432884245424761e-05,
      "loss": 0.7556,
      "step": 285
    },
    {
      "epoch": 0.4293116210214656,
      "grad_norm": 0.6672698383287922,
      "learning_rate": 1.4094141755546816e-05,
      "loss": 0.7831,
      "step": 290
    },
    {
      "epoch": 0.43671354552183567,
      "grad_norm": 0.6305307497798698,
      "learning_rate": 1.3856699476291176e-05,
      "loss": 0.7426,
      "step": 295
    },
    {
      "epoch": 0.44411547002220575,
      "grad_norm": 0.6859073767100461,
      "learning_rate": 1.3616674616137902e-05,
      "loss": 0.7645,
      "step": 300
    },
    {
      "epoch": 0.44411547002220575,
      "eval_loss": 0.7755689024925232,
      "eval_runtime": 7.2066,
      "eval_samples_per_second": 17.762,
      "eval_steps_per_second": 2.22,
      "step": 300
    },
    {
      "epoch": 0.4515173945225759,
      "grad_norm": 0.6626556377379951,
      "learning_rate": 1.3374227904130724e-05,
      "loss": 0.7549,
      "step": 305
    },
    {
      "epoch": 0.45891931902294597,
      "grad_norm": 0.6499839877597537,
      "learning_rate": 1.3129521691070108e-05,
      "loss": 0.7328,
      "step": 310
    },
    {
      "epoch": 0.46632124352331605,
      "grad_norm": 0.722140222976433,
      "learning_rate": 1.2882719840797473e-05,
      "loss": 0.7514,
      "step": 315
    },
    {
      "epoch": 0.47372316802368614,
      "grad_norm": 0.6900675221213151,
      "learning_rate": 1.2633987620466229e-05,
      "loss": 0.7353,
      "step": 320
    },
    {
      "epoch": 0.4811250925240563,
      "grad_norm": 0.6297341225224966,
      "learning_rate": 1.2383491589873122e-05,
      "loss": 0.7407,
      "step": 325
    },
    {
      "epoch": 0.48852701702442636,
      "grad_norm": 0.6139804357167142,
      "learning_rate": 1.213139948992394e-05,
      "loss": 0.7497,
      "step": 330
    },
    {
      "epoch": 0.49592894152479644,
      "grad_norm": 0.7120439739230976,
      "learning_rate": 1.187788013030837e-05,
      "loss": 0.7468,
      "step": 335
    },
    {
      "epoch": 0.5033308660251665,
      "grad_norm": 0.6179256601206382,
      "learning_rate": 1.1623103276459086e-05,
      "loss": 0.7507,
      "step": 340
    },
    {
      "epoch": 0.5107327905255367,
      "grad_norm": 0.6483835976434715,
      "learning_rate": 1.1367239535870913e-05,
      "loss": 0.7425,
      "step": 345
    },
    {
      "epoch": 0.5181347150259067,
      "grad_norm": 0.6928461738197682,
      "learning_rate": 1.1110460243856051e-05,
      "loss": 0.7302,
      "step": 350
    },
    {
      "epoch": 0.5255366395262768,
      "grad_norm": 0.6706880141545486,
      "learning_rate": 1.085293734881197e-05,
      "loss": 0.7468,
      "step": 355
    },
    {
      "epoch": 0.532938564026647,
      "grad_norm": 0.6042342171269331,
      "learning_rate": 1.0594843297078736e-05,
      "loss": 0.766,
      "step": 360
    },
    {
      "epoch": 0.540340488527017,
      "grad_norm": 0.693508088289296,
      "learning_rate": 1.0336350917462925e-05,
      "loss": 0.7558,
      "step": 365
    },
    {
      "epoch": 0.5477424130273871,
      "grad_norm": 0.6083705213800933,
      "learning_rate": 1.0077633305505402e-05,
      "loss": 0.7433,
      "step": 370
    },
    {
      "epoch": 0.5551443375277573,
      "grad_norm": 0.6396792431151416,
      "learning_rate": 9.818863707570476e-06,
      "loss": 0.7608,
      "step": 375
    },
    {
      "epoch": 0.5625462620281273,
      "grad_norm": 0.6663076065375303,
      "learning_rate": 9.560215404834094e-06,
      "loss": 0.7515,
      "step": 380
    },
    {
      "epoch": 0.5699481865284974,
      "grad_norm": 0.641428537274285,
      "learning_rate": 9.30186159724869e-06,
      "loss": 0.7146,
      "step": 385
    },
    {
      "epoch": 0.5773501110288675,
      "grad_norm": 0.6138036144437788,
      "learning_rate": 9.043975287562443e-06,
      "loss": 0.747,
      "step": 390
    },
    {
      "epoch": 0.5847520355292376,
      "grad_norm": 0.6807331921757377,
      "learning_rate": 8.786729165470584e-06,
      "loss": 0.7253,
      "step": 395
    },
    {
      "epoch": 0.5921539600296077,
      "grad_norm": 0.6952002905984943,
      "learning_rate": 8.530295491976338e-06,
      "loss": 0.7307,
      "step": 400
    },
    {
      "epoch": 0.5921539600296077,
      "eval_loss": 0.7637839317321777,
      "eval_runtime": 7.2078,
      "eval_samples_per_second": 17.759,
      "eval_steps_per_second": 2.22,
      "step": 400
    },
    {
      "epoch": 0.5995558845299778,
      "grad_norm": 0.5939454843322792,
      "learning_rate": 8.274845984038916e-06,
      "loss": 0.7174,
      "step": 405
    },
    {
      "epoch": 0.6069578090303479,
      "grad_norm": 0.6621271866381216,
      "learning_rate": 8.020551699585843e-06,
      "loss": 0.7469,
      "step": 410
    },
    {
      "epoch": 0.6143597335307179,
      "grad_norm": 0.6106430449913639,
      "learning_rate": 7.76758292296659e-06,
      "loss": 0.7264,
      "step": 415
    },
    {
      "epoch": 0.6217616580310881,
      "grad_norm": 0.6584389038177016,
      "learning_rate": 7.5161090509242005e-06,
      "loss": 0.7418,
      "step": 420
    },
    {
      "epoch": 0.6291635825314582,
      "grad_norm": 0.6508063180682058,
      "learning_rate": 7.2662984791613186e-06,
      "loss": 0.7345,
      "step": 425
    },
    {
      "epoch": 0.6365655070318282,
      "grad_norm": 0.654746417724555,
      "learning_rate": 7.01831848957653e-06,
      "loss": 0.7488,
      "step": 430
    },
    {
      "epoch": 0.6439674315321984,
      "grad_norm": 0.6038759794228741,
      "learning_rate": 6.772335138246548e-06,
      "loss": 0.747,
      "step": 435
    },
    {
      "epoch": 0.6513693560325685,
      "grad_norm": 0.6254763438931118,
      "learning_rate": 6.528513144229256e-06,
      "loss": 0.7427,
      "step": 440
    },
    {
      "epoch": 0.6587712805329385,
      "grad_norm": 0.6195437763354315,
      "learning_rate": 6.287015779262064e-06,
      "loss": 0.7489,
      "step": 445
    },
    {
      "epoch": 0.6661732050333087,
      "grad_norm": 0.6629664159964251,
      "learning_rate": 6.048004758429451e-06,
      "loss": 0.7274,
      "step": 450
    },
    {
      "epoch": 0.6735751295336787,
      "grad_norm": 0.6058164232925908,
      "learning_rate": 5.811640131872867e-06,
      "loss": 0.7496,
      "step": 455
    },
    {
      "epoch": 0.6809770540340488,
      "grad_norm": 0.6082658380867586,
      "learning_rate": 5.578080177615575e-06,
      "loss": 0.7201,
      "step": 460
    },
    {
      "epoch": 0.688378978534419,
      "grad_norm": 0.6242205120975641,
      "learning_rate": 5.347481295574141e-06,
      "loss": 0.7172,
      "step": 465
    },
    {
      "epoch": 0.695780903034789,
      "grad_norm": 0.6109755979913201,
      "learning_rate": 5.119997902827584e-06,
      "loss": 0.7286,
      "step": 470
    },
    {
      "epoch": 0.7031828275351591,
      "grad_norm": 0.6087033956225949,
      "learning_rate": 4.8957823302142916e-06,
      "loss": 0.7354,
      "step": 475
    },
    {
      "epoch": 0.7105847520355293,
      "grad_norm": 0.5865522874345606,
      "learning_rate": 4.674984720325961e-06,
      "loss": 0.7212,
      "step": 480
    },
    {
      "epoch": 0.7179866765358993,
      "grad_norm": 0.5900008473027598,
      "learning_rate": 4.457752926966888e-06,
      "loss": 0.715,
      "step": 485
    },
    {
      "epoch": 0.7253886010362695,
      "grad_norm": 0.5840665816418219,
      "learning_rate": 4.244232416145839e-06,
      "loss": 0.7337,
      "step": 490
    },
    {
      "epoch": 0.7327905255366395,
      "grad_norm": 0.5914947024608387,
      "learning_rate": 4.0345661686669745e-06,
      "loss": 0.7271,
      "step": 495
    },
    {
      "epoch": 0.7401924500370096,
      "grad_norm": 0.6196202183056477,
      "learning_rate": 3.828894584384867e-06,
      "loss": 0.7355,
      "step": 500
    },
    {
      "epoch": 0.7401924500370096,
      "eval_loss": 0.7563655972480774,
      "eval_runtime": 7.2181,
      "eval_samples_per_second": 17.733,
      "eval_steps_per_second": 2.217,
      "step": 500
    },
    {
      "epoch": 0.7475943745373798,
      "grad_norm": 0.5586852736192075,
      "learning_rate": 3.62735538818787e-06,
      "loss": 0.7197,
      "step": 505
    },
    {
      "epoch": 0.7549962990377498,
      "grad_norm": 0.6337625854919152,
      "learning_rate": 3.4300835377726904e-06,
      "loss": 0.7233,
      "step": 510
    },
    {
      "epoch": 0.7623982235381199,
      "grad_norm": 0.6205123290247885,
      "learning_rate": 3.2372111332720045e-06,
      "loss": 0.7587,
      "step": 515
    },
    {
      "epoch": 0.7698001480384901,
      "grad_norm": 0.6153129450053498,
      "learning_rate": 3.048867328795588e-06,
      "loss": 0.7156,
      "step": 520
    },
    {
      "epoch": 0.7772020725388601,
      "grad_norm": 0.6026709629344417,
      "learning_rate": 2.865178245944218e-06,
      "loss": 0.7144,
      "step": 525
    },
    {
      "epoch": 0.7846039970392302,
      "grad_norm": 0.5724937932245526,
      "learning_rate": 2.686266889354211e-06,
      "loss": 0.7375,
      "step": 530
    },
    {
      "epoch": 0.7920059215396003,
      "grad_norm": 0.5925644676097567,
      "learning_rate": 2.5122530643292274e-06,
      "loss": 0.7429,
      "step": 535
    },
    {
      "epoch": 0.7994078460399704,
      "grad_norm": 0.6326300634198754,
      "learning_rate": 2.3432532966144526e-06,
      "loss": 0.7323,
      "step": 540
    },
    {
      "epoch": 0.8068097705403405,
      "grad_norm": 0.5849976467168821,
      "learning_rate": 2.1793807543668857e-06,
      "loss": 0.7338,
      "step": 545
    },
    {
      "epoch": 0.8142116950407106,
      "grad_norm": 0.5500210584534766,
      "learning_rate": 2.0207451723739633e-06,
      "loss": 0.7257,
      "step": 550
    },
    {
      "epoch": 0.8216136195410807,
      "grad_norm": 0.5676680461413595,
      "learning_rate": 1.8674527785713247e-06,
      "loss": 0.7325,
      "step": 555
    },
    {
      "epoch": 0.8290155440414507,
      "grad_norm": 0.6239434546631168,
      "learning_rate": 1.7196062229088606e-06,
      "loss": 0.6996,
      "step": 560
    },
    {
      "epoch": 0.8364174685418209,
      "grad_norm": 0.6254391500900318,
      "learning_rate": 1.577304508612717e-06,
      "loss": 0.7298,
      "step": 565
    },
    {
      "epoch": 0.843819393042191,
      "grad_norm": 0.5238189690989516,
      "learning_rate": 1.4406429258892762e-06,
      "loss": 0.7503,
      "step": 570
    },
    {
      "epoch": 0.851221317542561,
      "grad_norm": 0.6133649327761147,
      "learning_rate": 1.3097129881154936e-06,
      "loss": 0.7199,
      "step": 575
    },
    {
      "epoch": 0.8586232420429312,
      "grad_norm": 0.5832243304649319,
      "learning_rate": 1.1846023705583442e-06,
      "loss": 0.7164,
      "step": 580
    },
    {
      "epoch": 0.8660251665433013,
      "grad_norm": 0.581421408776636,
      "learning_rate": 1.065394851664394e-06,
      "loss": 0.7345,
      "step": 585
    },
    {
      "epoch": 0.8734270910436713,
      "grad_norm": 0.5486795664712047,
      "learning_rate": 9.521702569588199e-07,
      "loss": 0.7537,
      "step": 590
    },
    {
      "epoch": 0.8808290155440415,
      "grad_norm": 0.5762089839170463,
      "learning_rate": 8.450044055914497e-07,
      "loss": 0.7221,
      "step": 595
    },
    {
      "epoch": 0.8882309400444115,
      "grad_norm": 0.5637562364478066,
      "learning_rate": 7.439690595656013e-07,
      "loss": 0.7445,
      "step": 600
    },
    {
      "epoch": 0.8882309400444115,
      "eval_loss": 0.7531630992889404,
      "eval_runtime": 6.3295,
      "eval_samples_per_second": 20.223,
      "eval_steps_per_second": 2.528,
      "step": 600
    },
    {
      "epoch": 0.8956328645447816,
      "grad_norm": 0.6333025954598674,
      "learning_rate": 6.491318756837417e-07,
      "loss": 0.7298,
      "step": 605
    },
    {
      "epoch": 0.9030347890451518,
      "grad_norm": 0.5151052584290405,
      "learning_rate": 5.605563602421149e-07,
      "loss": 0.7058,
      "step": 610
    },
    {
      "epoch": 0.9104367135455218,
      "grad_norm": 0.558083301486103,
      "learning_rate": 4.783018265047179e-07,
      "loss": 0.7557,
      "step": 615
    },
    {
      "epoch": 0.9178386380458919,
      "grad_norm": 0.5838054593517799,
      "learning_rate": 4.024233549850509e-07,
      "loss": 0.7436,
      "step": 620
    },
    {
      "epoch": 0.9252405625462621,
      "grad_norm": 0.5532527164872905,
      "learning_rate": 3.329717565622825e-07,
      "loss": 0.7404,
      "step": 625
    },
    {
      "epoch": 0.9326424870466321,
      "grad_norm": 0.5531839239223881,
      "learning_rate": 2.6999353845651113e-07,
      "loss": 0.724,
      "step": 630
    },
    {
      "epoch": 0.9400444115470022,
      "grad_norm": 0.5908996580668381,
      "learning_rate": 2.1353087308590314e-07,
      "loss": 0.7391,
      "step": 635
    },
    {
      "epoch": 0.9474463360473723,
      "grad_norm": 0.5583503930295213,
      "learning_rate": 1.6362156982656085e-07,
      "loss": 0.7292,
      "step": 640
    },
    {
      "epoch": 0.9548482605477424,
      "grad_norm": 0.5273929642155748,
      "learning_rate": 1.2029904969404482e-07,
      "loss": 0.7127,
      "step": 645
    },
    {
      "epoch": 0.9622501850481125,
      "grad_norm": 0.6043817732068986,
      "learning_rate": 8.359232296349163e-08,
      "loss": 0.7163,
      "step": 650
    },
    {
      "epoch": 0.9696521095484826,
      "grad_norm": 0.5680817655810946,
      "learning_rate": 5.3525969743324356e-08,
      "loss": 0.7322,
      "step": 655
    },
    {
      "epoch": 0.9770540340488527,
      "grad_norm": 0.5484921768212552,
      "learning_rate": 3.012012351554017e-08,
      "loss": 0.7064,
      "step": 660
    },
    {
      "epoch": 0.9844559585492227,
      "grad_norm": 0.5841736663763849,
      "learning_rate": 1.3390457653639221e-08,
      "loss": 0.7353,
      "step": 665
    },
    {
      "epoch": 0.9918578830495929,
      "grad_norm": 0.6439690756031937,
      "learning_rate": 3.3481749271768726e-09,
      "loss": 0.7463,
      "step": 670
    },
    {
      "epoch": 0.999259807549963,
      "grad_norm": 0.5777335037865771,
      "learning_rate": 0.0,
      "loss": 0.7158,
      "step": 675
    },
    {
      "epoch": 0.999259807549963,
      "step": 675,
      "total_flos": 76888336760832.0,
      "train_loss": 0.7675936229140671,
      "train_runtime": 4627.4844,
      "train_samples_per_second": 4.67,
      "train_steps_per_second": 0.146
    }
  ],
  "logging_steps": 5,
  "max_steps": 675,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": false,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 76888336760832.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}