{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9990999099909991,
  "eval_steps": 500,
  "global_step": 555,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0018001800180018, "grad_norm": 28.85651397705078, "learning_rate": 5.000000000000001e-07, "loss": 14.6669, "step": 1 },
    { "epoch": 0.0036003600360036, "grad_norm": 20.232847213745117, "learning_rate": 1.0000000000000002e-06, "loss": 12.6879, "step": 2 },
    { "epoch": 0.0054005400540054005, "grad_norm": 20.170488357543945, "learning_rate": 1.5e-06, "loss": 9.9005, "step": 3 },
    { "epoch": 0.0072007200720072, "grad_norm": 18.635835647583008, "learning_rate": 2.0000000000000003e-06, "loss": 9.5553, "step": 4 },
    { "epoch": 0.009000900090009001, "grad_norm": 17.15140724182129, "learning_rate": 2.5e-06, "loss": 8.7361, "step": 5 },
    { "epoch": 0.010801080108010801, "grad_norm": 14.188899993896484, "learning_rate": 3e-06, "loss": 8.5991, "step": 6 },
    { "epoch": 0.012601260126012601, "grad_norm": 16.08758544921875, "learning_rate": 3.5000000000000004e-06, "loss": 8.1545, "step": 7 },
    { "epoch": 0.0144014401440144, "grad_norm": 15.893816947937012, "learning_rate": 4.000000000000001e-06, "loss": 7.9389, "step": 8 },
    { "epoch": 0.016201620162016202, "grad_norm": 13.540562629699707, "learning_rate": 4.5e-06, "loss": 7.9457, "step": 9 },
    { "epoch": 0.018001800180018002, "grad_norm": 12.593961715698242, "learning_rate": 5e-06, "loss": 7.9617, "step": 10 },
    { "epoch": 0.019801980198019802, "grad_norm": 14.274964332580566, "learning_rate": 5.500000000000001e-06, "loss": 7.7485, "step": 11 },
    { "epoch": 0.021602160216021602, "grad_norm": 13.785284996032715, "learning_rate": 6e-06, "loss": 8.1615, "step": 12 },
    { "epoch": 0.023402340234023402, "grad_norm": 12.692747116088867, "learning_rate": 6.5000000000000004e-06, "loss": 7.9709, "step": 13 },
    { "epoch": 0.025202520252025202, "grad_norm": 11.758572578430176, "learning_rate": 7.000000000000001e-06, "loss": 7.5982, "step": 14 },
    { "epoch": 0.027002700270027002, "grad_norm": 11.451689720153809, "learning_rate": 7.5e-06, "loss": 7.8241, "step": 15 },
    { "epoch": 0.0288028802880288, "grad_norm": 13.09874153137207, "learning_rate": 8.000000000000001e-06, "loss": 7.679, "step": 16 },
    { "epoch": 0.0306030603060306, "grad_norm": 11.725943565368652, "learning_rate": 8.500000000000002e-06, "loss": 7.8199, "step": 17 },
    { "epoch": 0.032403240324032405, "grad_norm": 12.393065452575684, "learning_rate": 9e-06, "loss": 7.8977, "step": 18 },
    { "epoch": 0.034203420342034205, "grad_norm": 11.96495246887207, "learning_rate": 9.5e-06, "loss": 7.4002, "step": 19 },
    { "epoch": 0.036003600360036005, "grad_norm": 10.148777961730957, "learning_rate": 1e-05, "loss": 7.6521, "step": 20 },
    { "epoch": 0.037803780378037805, "grad_norm": 9.769454956054688, "learning_rate": 1.05e-05, "loss": 7.5508, "step": 21 },
    { "epoch": 0.039603960396039604, "grad_norm": 8.934900283813477, "learning_rate": 1.1000000000000001e-05, "loss": 7.263, "step": 22 },
    { "epoch": 0.041404140414041404, "grad_norm": 8.807928085327148, "learning_rate": 1.1500000000000002e-05, "loss": 7.4388, "step": 23 },
    { "epoch": 0.043204320432043204, "grad_norm": 10.690240859985352, "learning_rate": 1.2e-05, "loss": 6.5005, "step": 24 },
    { "epoch": 0.045004500450045004, "grad_norm": 8.784053802490234, "learning_rate": 1.25e-05, "loss": 6.7989, "step": 25 },
    { "epoch": 0.046804680468046804, "grad_norm": 8.401625633239746, "learning_rate": 1.3000000000000001e-05, "loss": 6.8476, "step": 26 },
    { "epoch": 0.048604860486048604, "grad_norm": 7.974181175231934, "learning_rate": 1.3500000000000001e-05, "loss": 6.8067, "step": 27 },
    { "epoch": 0.050405040504050404, "grad_norm": 6.8001909255981445, "learning_rate": 1.4000000000000001e-05, "loss": 6.8886, "step": 28 },
    { "epoch": 0.052205220522052204, "grad_norm": 7.849862098693848, "learning_rate": 1.45e-05, "loss": 6.7455, "step": 29 },
    { "epoch": 0.054005400540054004, "grad_norm": 7.1335625648498535, "learning_rate": 1.5e-05, "loss": 6.1861, "step": 30 },
    { "epoch": 0.0558055805580558, "grad_norm": 6.963135719299316, "learning_rate": 1.55e-05, "loss": 6.2953, "step": 31 },
    { "epoch": 0.0576057605760576, "grad_norm": 6.415379524230957, "learning_rate": 1.6000000000000003e-05, "loss": 6.4054, "step": 32 },
    { "epoch": 0.0594059405940594, "grad_norm": 6.006982326507568, "learning_rate": 1.65e-05, "loss": 6.6365, "step": 33 },
    { "epoch": 0.0612061206120612, "grad_norm": 5.94774866104126, "learning_rate": 1.7000000000000003e-05, "loss": 6.3694, "step": 34 },
    { "epoch": 0.063006300630063, "grad_norm": 5.748636722564697, "learning_rate": 1.75e-05, "loss": 6.0299, "step": 35 },
    { "epoch": 0.06480648064806481, "grad_norm": 5.941126346588135, "learning_rate": 1.8e-05, "loss": 6.1121, "step": 36 },
    { "epoch": 0.0666066606660666, "grad_norm": 5.799063682556152, "learning_rate": 1.85e-05, "loss": 6.1198, "step": 37 },
    { "epoch": 0.06840684068406841, "grad_norm": 6.232558250427246, "learning_rate": 1.9e-05, "loss": 5.8275, "step": 38 },
    { "epoch": 0.0702070207020702, "grad_norm": 5.436257362365723, "learning_rate": 1.9500000000000003e-05, "loss": 6.6235, "step": 39 },
    { "epoch": 0.07200720072007201, "grad_norm": 5.369800090789795, "learning_rate": 2e-05, "loss": 6.3276, "step": 40 },
    { "epoch": 0.0738073807380738, "grad_norm": 5.262800693511963, "learning_rate": 2.05e-05, "loss": 5.5985, "step": 41 },
    { "epoch": 0.07560756075607561, "grad_norm": 5.9995903968811035, "learning_rate": 2.1e-05, "loss": 7.043, "step": 42 },
    { "epoch": 0.0774077407740774, "grad_norm": 5.5932183265686035, "learning_rate": 2.15e-05, "loss": 5.9072, "step": 43 },
    { "epoch": 0.07920792079207921, "grad_norm": 5.312334060668945, "learning_rate": 2.2000000000000003e-05, "loss": 7.0407, "step": 44 },
    { "epoch": 0.081008100810081, "grad_norm": 5.7829155921936035, "learning_rate": 2.25e-05, "loss": 6.006, "step": 45 },
    { "epoch": 0.08280828082808281, "grad_norm": 5.04926872253418, "learning_rate": 2.3000000000000003e-05, "loss": 6.7709, "step": 46 },
    { "epoch": 0.0846084608460846, "grad_norm": 5.008914470672607, "learning_rate": 2.35e-05, "loss": 7.0494, "step": 47 },
    { "epoch": 0.08640864086408641, "grad_norm": 4.904027938842773, "learning_rate": 2.4e-05, "loss": 6.2418, "step": 48 },
    { "epoch": 0.08820882088208822, "grad_norm": 5.208195686340332, "learning_rate": 2.45e-05, "loss": 6.1476, "step": 49 },
    { "epoch": 0.09000900090009001, "grad_norm": 5.478446006774902, "learning_rate": 2.5e-05, "loss": 6.1826, "step": 50 },
    { "epoch": 0.09180918091809182, "grad_norm": 18.321847915649414, "learning_rate": 2.5500000000000003e-05, "loss": 11.8766, "step": 51 },
    { "epoch": 0.09360936093609361, "grad_norm": 22.275039672851562, "learning_rate": 2.6000000000000002e-05, "loss": 9.0741, "step": 52 },
    { "epoch": 0.09540954095409541, "grad_norm": 18.914152145385742, "learning_rate": 2.6500000000000004e-05, "loss": 6.9466, "step": 53 },
    { "epoch": 0.09720972097209721, "grad_norm": 10.226181030273438, "learning_rate": 2.7000000000000002e-05, "loss": 5.7314, "step": 54 },
    { "epoch": 0.09900990099009901, "grad_norm": 7.28556489944458, "learning_rate": 2.7500000000000004e-05, "loss": 4.9557, "step": 55 },
    { "epoch": 0.10081008100810081, "grad_norm": 6.028027057647705, "learning_rate": 2.8000000000000003e-05, "loss": 5.4561, "step": 56 },
    { "epoch": 0.10261026102610261, "grad_norm": 6.356996536254883, "learning_rate": 2.8499999999999998e-05, "loss": 5.1208, "step": 57 },
    { "epoch": 0.10441044104410441, "grad_norm": 5.628063201904297, "learning_rate": 2.9e-05, "loss": 5.31, "step": 58 },
    { "epoch": 0.10621062106210621, "grad_norm": 5.183619022369385, "learning_rate": 2.95e-05, "loss": 4.8533, "step": 59 },
    { "epoch": 0.10801080108010801, "grad_norm": 5.032276630401611, "learning_rate": 3e-05, "loss": 4.5937, "step": 60 },
    { "epoch": 0.10981098109810981, "grad_norm": 4.725021839141846, "learning_rate": 3.05e-05, "loss": 4.8969, "step": 61 },
    { "epoch": 0.1116111611161116, "grad_norm": 4.868443965911865, "learning_rate": 3.1e-05, "loss": 5.03, "step": 62 },
    { "epoch": 0.11341134113411341, "grad_norm": 4.929288864135742, "learning_rate": 3.15e-05, "loss": 4.9148, "step": 63 },
    { "epoch": 0.1152115211521152, "grad_norm": 5.14013671875, "learning_rate": 3.2000000000000005e-05, "loss": 5.2505, "step": 64 },
    { "epoch": 0.11701170117011701, "grad_norm": 5.411927223205566, "learning_rate": 3.2500000000000004e-05, "loss": 4.91, "step": 65 },
    { "epoch": 0.1188118811881188, "grad_norm": 4.524367809295654, "learning_rate": 3.3e-05, "loss": 5.1703, "step": 66 },
    { "epoch": 0.12061206120612061, "grad_norm": 4.979662895202637, "learning_rate": 3.35e-05, "loss": 5.1811, "step": 67 },
    { "epoch": 0.1224122412241224, "grad_norm": 4.6250457763671875, "learning_rate": 3.4000000000000007e-05, "loss": 4.8259, "step": 68 },
    { "epoch": 0.12421242124212421, "grad_norm": 4.706791400909424, "learning_rate": 3.45e-05, "loss": 4.4952, "step": 69 },
    { "epoch": 0.126012601260126, "grad_norm": 4.518699645996094, "learning_rate": 3.5e-05, "loss": 4.7958, "step": 70 },
    { "epoch": 0.1278127812781278, "grad_norm": 3.9459586143493652, "learning_rate": 3.55e-05, "loss": 5.0545, "step": 71 },
    { "epoch": 0.12961296129612962, "grad_norm": 3.954882860183716, "learning_rate": 3.6e-05, "loss": 4.9021, "step": 72 },
    { "epoch": 0.13141314131413143, "grad_norm": 4.027074813842773, "learning_rate": 3.65e-05, "loss": 4.6918, "step": 73 },
    { "epoch": 0.1332133213321332, "grad_norm": 4.441482067108154, "learning_rate": 3.7e-05, "loss": 4.9625, "step": 74 },
    { "epoch": 0.135013501350135, "grad_norm": 4.631096839904785, "learning_rate": 3.7500000000000003e-05, "loss": 4.7489, "step": 75 },
    { "epoch": 0.13681368136813682, "grad_norm": 4.464879989624023, "learning_rate": 3.8e-05, "loss": 4.4667, "step": 76 },
    { "epoch": 0.13861386138613863, "grad_norm": 4.17475700378418, "learning_rate": 3.85e-05, "loss": 5.517, "step": 77 },
    { "epoch": 0.1404140414041404, "grad_norm": 4.135541915893555, "learning_rate": 3.9000000000000006e-05, "loss": 5.1446, "step": 78 },
    { "epoch": 0.1422142214221422, "grad_norm": 4.521326541900635, "learning_rate": 3.9500000000000005e-05, "loss": 5.0185, "step": 79 },
    { "epoch": 0.14401440144014402, "grad_norm": 4.14456844329834, "learning_rate": 4e-05, "loss": 4.5989, "step": 80 },
    { "epoch": 0.14581458145814583, "grad_norm": 4.918880462646484, "learning_rate": 4.05e-05, "loss": 5.1453, "step": 81 },
    { "epoch": 0.1476147614761476, "grad_norm": 3.9074041843414307, "learning_rate": 4.1e-05, "loss": 4.5343, "step": 82 },
    { "epoch": 0.1494149414941494, "grad_norm": 4.088869094848633, "learning_rate": 4.15e-05, "loss": 5.2312, "step": 83 },
    { "epoch": 0.15121512151215122, "grad_norm": 4.230950355529785, "learning_rate": 4.2e-05, "loss": 5.2262, "step": 84 },
    { "epoch": 0.15301530153015303, "grad_norm": 3.9280920028686523, "learning_rate": 4.25e-05, "loss": 5.0867, "step": 85 },
    { "epoch": 0.1548154815481548, "grad_norm": 4.369322776794434, "learning_rate": 4.3e-05, "loss": 5.6706, "step": 86 },
    { "epoch": 0.1566156615661566, "grad_norm": 4.20168924331665, "learning_rate": 4.35e-05, "loss": 5.2653, "step": 87 },
    { "epoch": 0.15841584158415842, "grad_norm": 4.246135711669922, "learning_rate": 4.4000000000000006e-05, "loss": 5.3541, "step": 88 },
    { "epoch": 0.16021602160216022, "grad_norm": 4.878535270690918, "learning_rate": 4.4500000000000004e-05, "loss": 5.2614, "step": 89 },
    { "epoch": 0.162016201620162, "grad_norm": 4.4837799072265625, "learning_rate": 4.5e-05, "loss": 5.7189, "step": 90 },
    { "epoch": 0.1638163816381638, "grad_norm": 4.485711574554443, "learning_rate": 4.55e-05, "loss": 6.1362, "step": 91 },
    { "epoch": 0.16561656165616562, "grad_norm": 4.198153495788574, "learning_rate": 4.600000000000001e-05, "loss": 5.5297, "step": 92 },
    { "epoch": 0.16741674167416742, "grad_norm": 5.264278411865234, "learning_rate": 4.6500000000000005e-05, "loss": 5.475, "step": 93 },
    { "epoch": 0.1692169216921692, "grad_norm": 4.983798503875732, "learning_rate": 4.7e-05, "loss": 5.8982, "step": 94 },
    { "epoch": 0.171017101710171, "grad_norm": 4.64500617980957, "learning_rate": 4.75e-05, "loss": 5.4172, "step": 95 },
    { "epoch": 0.17281728172817282, "grad_norm": 4.246720314025879, "learning_rate": 4.8e-05, "loss": 6.1043, "step": 96 },
    { "epoch": 0.17461746174617462, "grad_norm": 4.144988536834717, "learning_rate": 4.85e-05, "loss": 6.1926, "step": 97 },
    { "epoch": 0.17641764176417643, "grad_norm": 4.693668842315674, "learning_rate": 4.9e-05, "loss": 6.3127, "step": 98 },
    { "epoch": 0.1782178217821782, "grad_norm": 4.599040985107422, "learning_rate": 4.9500000000000004e-05, "loss": 5.5763, "step": 99 },
    { "epoch": 0.18001800180018002, "grad_norm": 4.811699390411377, "learning_rate": 5e-05, "loss": 6.2599, "step": 100 },
    { "epoch": 0.18181818181818182, "grad_norm": 23.242341995239258, "learning_rate": 5.05e-05, "loss": 10.2299, "step": 101 },
    { "epoch": 0.18361836183618363, "grad_norm": 24.24691390991211, "learning_rate": 5.1000000000000006e-05, "loss": 6.5731, "step": 102 },
    { "epoch": 0.1854185418541854, "grad_norm": 13.8417387008667, "learning_rate": 5.1500000000000005e-05, "loss": 4.5894, "step": 103 },
    { "epoch": 0.18721872187218722, "grad_norm": 7.606777667999268, "learning_rate": 5.2000000000000004e-05, "loss": 4.422, "step": 104 },
    { "epoch": 0.18901890189018902, "grad_norm": 7.0894598960876465, "learning_rate": 5.25e-05, "loss": 4.4822, "step": 105 },
    { "epoch": 0.19081908190819083, "grad_norm": 4.703729152679443, "learning_rate": 5.300000000000001e-05, "loss": 4.0663, "step": 106 },
    { "epoch": 0.1926192619261926, "grad_norm": 3.9182114601135254, "learning_rate": 5.3500000000000006e-05, "loss": 3.7222, "step": 107 },
    { "epoch": 0.19441944194419442, "grad_norm": 4.467214584350586, "learning_rate": 5.4000000000000005e-05, "loss": 4.4871, "step": 108 },
    { "epoch": 0.19621962196219622, "grad_norm": 4.333056926727295, "learning_rate": 5.45e-05, "loss": 4.1119, "step": 109 },
    { "epoch": 0.19801980198019803, "grad_norm": 4.1896467208862305, "learning_rate": 5.500000000000001e-05, "loss": 3.8633, "step": 110 },
    { "epoch": 0.1998199819981998, "grad_norm": 4.409817218780518, "learning_rate": 5.550000000000001e-05, "loss": 4.1133, "step": 111 },
    { "epoch": 0.20162016201620162, "grad_norm": 4.587196350097656, "learning_rate": 5.6000000000000006e-05, "loss": 4.085, "step": 112 },
    { "epoch": 0.20342034203420342, "grad_norm": 4.2752556800842285, "learning_rate": 5.65e-05, "loss": 4.3178, "step": 113 },
    { "epoch": 0.20522052205220523, "grad_norm": 4.687012672424316, "learning_rate": 5.6999999999999996e-05, "loss": 4.4426, "step": 114 },
    { "epoch": 0.207020702070207, "grad_norm": 4.1435017585754395, "learning_rate": 5.7499999999999995e-05, "loss": 4.268, "step": 115 },
    { "epoch": 0.20882088208820881, "grad_norm": 4.161345481872559, "learning_rate": 5.8e-05, "loss": 4.4851, "step": 116 },
    { "epoch": 0.21062106210621062, "grad_norm": 3.927691698074341, "learning_rate": 5.85e-05, "loss": 4.0147, "step": 117 },
    { "epoch": 0.21242124212421243, "grad_norm": 4.2265944480896, "learning_rate": 5.9e-05, "loss": 4.3646, "step": 118 },
    { "epoch": 0.21422142214221424, "grad_norm": 4.2369704246521, "learning_rate": 5.95e-05, "loss": 4.3241, "step": 119 },
    { "epoch": 0.21602160216021601, "grad_norm": 4.384726047515869, "learning_rate": 6e-05, "loss": 4.4394, "step": 120 },
    { "epoch": 0.21782178217821782, "grad_norm": 4.316718578338623, "learning_rate": 6.05e-05, "loss": 4.0843, "step": 121 },
    { "epoch": 0.21962196219621963, "grad_norm": 3.7887487411499023, "learning_rate": 6.1e-05, "loss": 4.1607, "step": 122 },
    { "epoch": 0.22142214221422143, "grad_norm": 4.181461334228516, "learning_rate": 6.15e-05, "loss": 4.2074, "step": 123 },
    { "epoch": 0.2232223222322232, "grad_norm": 3.9978582859039307, "learning_rate": 6.2e-05, "loss": 4.4763, "step": 124 },
    { "epoch": 0.22502250225022502, "grad_norm": 4.156275749206543, "learning_rate": 6.25e-05, "loss": 4.2295, "step": 125 },
    { "epoch": 0.22682268226822683, "grad_norm": 3.867684841156006, "learning_rate": 6.3e-05, "loss": 4.4481, "step": 126 },
    { "epoch": 0.22862286228622863, "grad_norm": 4.037459373474121, "learning_rate": 6.35e-05, "loss": 4.157, "step": 127 },
    { "epoch": 0.2304230423042304, "grad_norm": 4.304506778717041, "learning_rate": 6.400000000000001e-05, "loss": 4.5553, "step": 128 },
    { "epoch": 0.23222322232223222, "grad_norm": 4.406405925750732, "learning_rate": 6.450000000000001e-05, "loss": 4.7441, "step": 129 },
    { "epoch": 0.23402340234023403, "grad_norm": 4.19051456451416, "learning_rate": 6.500000000000001e-05, "loss": 4.7459, "step": 130 },
    { "epoch": 0.23582358235823583, "grad_norm": 4.138574600219727, "learning_rate": 6.55e-05, "loss": 4.6568, "step": 131 },
    { "epoch": 0.2376237623762376, "grad_norm": 4.434947967529297, "learning_rate": 6.6e-05, "loss": 4.8297, "step": 132 },
    { "epoch": 0.23942394239423942, "grad_norm": 3.5757968425750732, "learning_rate": 6.65e-05, "loss": 4.2438, "step": 133 },
    { "epoch": 0.24122412241224123, "grad_norm": 4.133842945098877, "learning_rate": 6.7e-05, "loss": 4.5298, "step": 134 },
    { "epoch": 0.24302430243024303, "grad_norm": 4.089065074920654, "learning_rate": 6.750000000000001e-05, "loss": 5.4232, "step": 135 },
    { "epoch": 0.2448244824482448, "grad_norm": 4.211568832397461, "learning_rate": 6.800000000000001e-05, "loss": 5.3946, "step": 136 },
    { "epoch": 0.24662466246624662, "grad_norm": 4.204323768615723, "learning_rate": 6.850000000000001e-05, "loss": 4.9143, "step": 137 },
    { "epoch": 0.24842484248424843, "grad_norm": 4.3052873611450195, "learning_rate": 6.9e-05, "loss": 5.5809, "step": 138 },
    { "epoch": 0.2502250225022502, "grad_norm": 3.8890812397003174, "learning_rate": 6.95e-05, "loss": 5.0493, "step": 139 },
    { "epoch": 0.252025202520252, "grad_norm": 4.273584842681885, "learning_rate": 7e-05, "loss": 5.45, "step": 140 },
    { "epoch": 0.2538253825382538, "grad_norm": 4.8961358070373535, "learning_rate": 7.05e-05, "loss": 5.5538, "step": 141 },
    { "epoch": 0.2556255625562556, "grad_norm": 4.320993423461914, "learning_rate": 7.1e-05, "loss": 5.3233, "step": 142 },
    { "epoch": 0.25742574257425743, "grad_norm": 3.967057704925537, "learning_rate": 7.15e-05, "loss": 5.1942, "step": 143 },
    { "epoch": 0.25922592259225924, "grad_norm": 4.173534870147705, "learning_rate": 7.2e-05, "loss": 5.1138, "step": 144 },
    { "epoch": 0.26102610261026105, "grad_norm": 4.54023551940918, "learning_rate": 7.25e-05, "loss": 5.6945, "step": 145 },
    { "epoch": 0.26282628262826285, "grad_norm": 4.46029806137085, "learning_rate": 7.3e-05, "loss": 5.5752, "step": 146 },
    { "epoch": 0.2646264626462646, "grad_norm": 4.0620574951171875, "learning_rate": 7.35e-05, "loss": 5.2059, "step": 147 },
    { "epoch": 0.2664266426642664, "grad_norm": 4.501194477081299, "learning_rate": 7.4e-05, "loss": 5.6742, "step": 148 },
    { "epoch": 0.2682268226822682, "grad_norm": 4.779675483703613, "learning_rate": 7.450000000000001e-05, "loss": 6.3616, "step": 149 },
    { "epoch": 0.27002700270027, "grad_norm": 5.094762802124023, "learning_rate": 7.500000000000001e-05, "loss": 5.8473, "step": 150 },
    { "epoch": 0.27182718271827183, "grad_norm": 30.64379119873047, "learning_rate": 7.55e-05, "loss": 6.9989, "step": 151 },
    { "epoch": 0.27362736273627364, "grad_norm": 11.478928565979004, "learning_rate": 7.6e-05, "loss": 3.2922, "step": 152 },
    { "epoch": 0.27542754275427545, "grad_norm": 5.4645466804504395, "learning_rate": 7.65e-05, "loss": 3.4407, "step": 153 },
    { "epoch": 0.27722772277227725, "grad_norm": 5.904770374298096, "learning_rate": 7.7e-05, "loss": 3.5843, "step": 154 },
    { "epoch": 0.279027902790279, "grad_norm": 4.119198322296143, "learning_rate": 7.75e-05, "loss": 3.1653, "step": 155 },
    { "epoch": 0.2808280828082808, "grad_norm": 4.040031909942627, "learning_rate": 7.800000000000001e-05, "loss": 3.4589, "step": 156 },
    { "epoch": 0.2826282628262826, "grad_norm": 4.188577175140381, "learning_rate": 7.850000000000001e-05, "loss": 3.7084, "step": 157 },
    { "epoch": 0.2844284428442844, "grad_norm": 4.088983058929443, "learning_rate": 7.900000000000001e-05, "loss": 3.8008, "step": 158 },
    { "epoch": 0.28622862286228623, "grad_norm": 3.6383821964263916, "learning_rate": 7.950000000000001e-05, "loss": 3.9436, "step": 159 },
    { "epoch": 0.28802880288028804, "grad_norm": 4.231886863708496, "learning_rate": 8e-05, "loss": 4.5073, "step": 160 },
    { "epoch": 0.28982898289828984, "grad_norm": 4.518820762634277, "learning_rate": 8.05e-05, "loss": 3.5422, "step": 161 },
    { "epoch": 0.29162916291629165, "grad_norm": 4.026054382324219, "learning_rate": 8.1e-05, "loss": 3.7513, "step": 162 },
    { "epoch": 0.2934293429342934, "grad_norm": 3.7655909061431885, "learning_rate": 8.15e-05, "loss": 4.2454, "step": 163 },
    { "epoch": 0.2952295229522952, "grad_norm": 3.6118693351745605, "learning_rate": 8.2e-05, "loss": 3.7299, "step": 164 },
    { "epoch": 0.297029702970297, "grad_norm": 4.284143447875977, "learning_rate": 8.25e-05, "loss": 4.07, "step": 165 },
    { "epoch": 0.2988298829882988, "grad_norm": 4.056666374206543, "learning_rate": 8.3e-05, "loss": 4.1954, "step": 166 },
    { "epoch": 0.30063006300630063, "grad_norm": 4.1431565284729, "learning_rate": 8.35e-05, "loss": 4.252, "step": 167 },
    { "epoch": 0.30243024302430244, "grad_norm": 4.241552829742432, "learning_rate": 8.4e-05, "loss": 4.1166, "step": 168 },
    { "epoch": 0.30423042304230424, "grad_norm": 4.314858913421631, "learning_rate": 8.450000000000001e-05, "loss": 4.0205, "step": 169 },
    { "epoch": 0.30603060306030605, "grad_norm": 4.8378400802612305, "learning_rate": 8.5e-05, "loss": 4.4915, "step": 170 },
    { "epoch": 0.30783078307830786, "grad_norm": 3.758328437805176, "learning_rate": 8.55e-05, "loss": 4.2785, "step": 171 },
    { "epoch": 0.3096309630963096, "grad_norm": 4.4342360496521, "learning_rate": 8.6e-05, "loss": 4.2285, "step": 172 },
    { "epoch": 0.3114311431143114, "grad_norm": 4.5404052734375, "learning_rate": 8.65e-05, "loss": 4.3089, "step": 173 },
    { "epoch": 0.3132313231323132, "grad_norm": 3.7609307765960693, "learning_rate": 8.7e-05, "loss": 4.3388, "step": 174 },
    { "epoch": 0.31503150315031503, "grad_norm": 4.27951717376709, "learning_rate": 8.75e-05, "loss": 4.3054, "step": 175 },
    { "epoch": 0.31683168316831684, "grad_norm": 4.713607311248779, "learning_rate": 8.800000000000001e-05, "loss": 4.5114, "step": 176 },
    { "epoch": 0.31863186318631864, "grad_norm": 3.725269079208374, "learning_rate": 8.850000000000001e-05, "loss": 4.4626, "step": 177 },
    { "epoch": 0.32043204320432045, "grad_norm": 4.011518478393555, "learning_rate": 8.900000000000001e-05, "loss": 4.4429, "step": 178 },
    { "epoch": 0.32223222322232226, "grad_norm": 4.707291603088379, "learning_rate": 8.950000000000001e-05, "loss": 4.3184, "step": 179 },
    { "epoch": 0.324032403240324, "grad_norm": 4.8839192390441895, "learning_rate": 9e-05, "loss": 4.9505, "step": 180 },
    { "epoch": 0.3258325832583258, "grad_norm": 4.054986953735352, "learning_rate": 9.05e-05, "loss": 4.754, "step": 181 },
    { "epoch": 0.3276327632763276, "grad_norm": 3.74249529838562, "learning_rate": 9.1e-05, "loss": 4.5569, "step": 182 },
    { "epoch": 0.32943294329432943, "grad_norm": 4.3617939949035645, "learning_rate": 9.15e-05, "loss": 4.1692, "step": 183 },
    { "epoch": 0.33123312331233123, "grad_norm": 4.2513346672058105, "learning_rate": 9.200000000000001e-05, "loss": 4.6149, "step": 184 },
    { "epoch": 0.33303330333033304, "grad_norm": 4.553803443908691, "learning_rate": 9.250000000000001e-05, "loss": 4.5845, "step": 185 },
    { "epoch": 0.33483348334833485, "grad_norm": 3.855337619781494, "learning_rate": 9.300000000000001e-05, "loss": 4.5672, "step": 186 },
    { "epoch": 0.33663366336633666, "grad_norm": 4.010441303253174, "learning_rate": 9.350000000000001e-05, "loss": 5.1355, "step": 187 },
    { "epoch": 0.3384338433843384, "grad_norm": 4.344321250915527, "learning_rate": 9.4e-05, "loss": 5.2234, "step": 188 },
    { "epoch": 0.3402340234023402, "grad_norm": 3.883000373840332, "learning_rate": 9.449999999999999e-05, "loss": 4.6074, "step": 189 },
    { "epoch": 0.342034203420342, "grad_norm": 4.245122909545898, "learning_rate": 9.5e-05, "loss": 4.7656, "step": 190 },
    { "epoch": 0.3438343834383438, "grad_norm": 4.1966023445129395, "learning_rate": 9.55e-05, "loss": 5.1034, "step": 191 },
    { "epoch": 0.34563456345634563, "grad_norm": 4.888998031616211, "learning_rate": 9.6e-05, "loss": 4.543, "step": 192 },
    { "epoch": 0.34743474347434744, "grad_norm": 4.0577311515808105, "learning_rate": 9.65e-05, "loss": 5.0657, "step": 193 },
    { "epoch": 0.34923492349234925, "grad_norm": 4.154134750366211, "learning_rate": 9.7e-05, "loss": 5.1474, "step": 194 },
    { "epoch": 0.35103510351035105, "grad_norm": 4.5482282638549805, "learning_rate": 9.75e-05, "loss": 5.611, "step": 195 },
    { "epoch": 0.35283528352835286, "grad_norm": 4.7754292488098145, "learning_rate": 9.8e-05, "loss": 5.8798, "step": 196 },
    { "epoch": 0.3546354635463546, "grad_norm": 4.595111846923828, "learning_rate": 9.850000000000001e-05, "loss": 5.5665, "step": 197 },
    { "epoch": 0.3564356435643564, "grad_norm": 4.434641361236572, "learning_rate": 9.900000000000001e-05, "loss": 5.017, "step": 198 },
    { "epoch": 0.3582358235823582, "grad_norm": 4.628114223480225, "learning_rate": 9.95e-05, "loss": 5.4085, "step": 199 },
    { "epoch": 0.36003600360036003, "grad_norm": 4.574793815612793, "learning_rate": 0.0001, "loss": 4.868, "step": 200 },
    { "epoch": 0.36183618361836184, "grad_norm": 48.827030181884766, "learning_rate": 9.999804214640151e-05, "loss": 5.6442, "step": 201 },
    { "epoch": 0.36363636363636365, "grad_norm": 7.539086818695068, "learning_rate": 9.999216873893363e-05, "loss": 2.9833, "step": 202 },
    { "epoch": 0.36543654365436545, "grad_norm": 4.620749473571777, "learning_rate": 9.998238023756727e-05, "loss": 2.9572, "step": 203 },
    { "epoch": 0.36723672367236726, "grad_norm": 3.8858468532562256, "learning_rate": 9.996867740888052e-05, "loss": 3.4909, "step": 204 },
    { "epoch": 0.369036903690369, "grad_norm": 4.157265663146973, "learning_rate": 9.995106132599869e-05, "loss": 3.794, "step": 205 },
    { "epoch": 0.3708370837083708, "grad_norm": 3.5816516876220703, "learning_rate": 9.99295333685102e-05, "loss": 3.6478, "step": 206 },
    { "epoch": 0.3726372637263726, "grad_norm": 4.691586494445801, "learning_rate": 9.990409522235866e-05, "loss": 3.6508, "step": 207 },
    { "epoch": 0.37443744374437443, "grad_norm": 4.206489562988281, "learning_rate": 9.987474887971067e-05, "loss": 4.0346, "step": 208 },
    { "epoch": 0.37623762376237624, "grad_norm": 4.957794189453125, "learning_rate": 9.984149663879993e-05, "loss": 3.9599, "step": 209 },
    { "epoch": 0.37803780378037805, "grad_norm": 3.705470561981201, "learning_rate": 9.980434110374724e-05, "loss": 3.7593, "step": 210 },
    { "epoch": 0.37983798379837985, "grad_norm": 3.760009288787842, "learning_rate": 9.976328518435653e-05, "loss": 3.6834, "step": 211 },
    { "epoch": 0.38163816381638166, "grad_norm": 4.849155902862549, "learning_rate": 9.971833209588696e-05, "loss": 3.7644, "step": 212 },
    { "epoch": 0.38343834383438347, "grad_norm": 4.008197784423828, "learning_rate": 9.966948535880118e-05, "loss": 3.9239, "step": 213 },
    { "epoch": 0.3852385238523852, "grad_norm": 4.340366840362549, "learning_rate": 9.961674879848957e-05, "loss": 3.8541, "step": 214 },
    { "epoch": 0.387038703870387, "grad_norm": 4.3433332443237305, "learning_rate": 9.956012654497074e-05, "loss": 3.6372, "step": 215 },
    { "epoch": 0.38883888388838883, "grad_norm": 3.838965654373169, "learning_rate": 9.949962303256796e-05, "loss": 4.1607, "step": 216 },
    { "epoch": 0.39063906390639064, "grad_norm": 3.958833932876587, "learning_rate": 9.943524299956207e-05, "loss": 4.0357, "step": 217 },
    { "epoch": 0.39243924392439244, "grad_norm": 4.2294840812683105, "learning_rate": 9.936699148782019e-05, "loss": 4.1384, "step": 218 },
    { "epoch": 0.39423942394239425, "grad_norm": 4.255922317504883, "learning_rate": 9.929487384240103e-05, "loss": 4.3947, "step": 219 },
    { "epoch": 0.39603960396039606, "grad_norm": 4.967748641967773, "learning_rate": 9.921889571113628e-05, "loss": 4.2571, "step": 220 },
    { "epoch": 0.39783978397839787, "grad_norm": 5.7947845458984375, "learning_rate": 9.913906304418825e-05, "loss": 4.3985, "step": 221 },
    { "epoch": 0.3996399639963996, "grad_norm": 4.166469097137451, "learning_rate": 9.90553820935839e-05, "loss": 4.3324, "step": 222 },
    { "epoch": 0.4014401440144014, "grad_norm": 3.7093265056610107, "learning_rate": 9.896785941272523e-05, "loss": 4.4313, "step": 223 },
    { "epoch": 0.40324032403240323, "grad_norm": 4.43189001083374, "learning_rate": 9.887650185587612e-05, "loss": 4.723, "step": 224 },
    { "epoch": 0.40504050405040504, "grad_norm": 4.146303653717041, "learning_rate": 9.878131657762535e-05, "loss": 4.555, "step": 225 },
    { "epoch": 0.40684068406840684, "grad_norm": 4.075449466705322, "learning_rate": 9.868231103232655e-05, "loss": 4.4108, "step": 226 },
    { "epoch": 0.40864086408640865, "grad_norm": 4.104311943054199, "learning_rate": 9.857949297351422e-05, "loss": 4.867, "step": 227 },
    { "epoch": 0.41044104410441046, "grad_norm": 3.903787612915039, "learning_rate": 9.847287045329666e-05, "loss": 4.3154, "step": 228 },
    { "epoch": 0.41224122412241226, "grad_norm": 4.177591800689697, "learning_rate": 9.83624518217252e-05, "loss": 4.3139, "step": 229 },
    { "epoch": 0.414041404140414, "grad_norm": 3.577852725982666, "learning_rate": 9.824824572614051e-05, "loss": 4.6006, "step": 230 },
    { "epoch": 0.4158415841584158, "grad_norm": 4.238375186920166, "learning_rate": 9.813026111049515e-05, "loss": 4.1235, "step": 231 },
    { "epoch": 0.41764176417641763, "grad_norm": 4.316523551940918, "learning_rate": 9.800850721465334e-05, "loss": 4.9625, "step": 232 },
    { "epoch": 0.41944194419441944, "grad_norm": 3.8222544193267822, "learning_rate": 9.788299357366716e-05, "loss": 4.843, "step": 233 },
    { "epoch": 0.42124212421242124, "grad_norm": 4.11749792098999, "learning_rate": 9.775373001702998e-05, "loss": 4.5882, "step": 234 },
    { "epoch": 0.42304230423042305, "grad_norm": 3.6795084476470947, "learning_rate": 9.762072666790658e-05, "loss": 5.2389, "step": 235 },
    { "epoch": 0.42484248424842486, "grad_norm": 3.996441602706909, "learning_rate": 9.748399394234038e-05, "loss": 4.6672, "step": 236 },
    { "epoch": 0.42664266426642666, "grad_norm": 4.023096561431885, "learning_rate": 9.734354254843773e-05, "loss": 5.3549, "step": 237 },
    { "epoch": 0.42844284428442847, "grad_norm": 4.174993515014648, "learning_rate": 9.71993834855293e-05, "loss": 5.1587, "step": 238 },
    { "epoch": 0.4302430243024302, "grad_norm": 4.372593879699707, "learning_rate": 9.705152804330872e-05, "loss": 5.6517, "step": 239 },
    { "epoch": 0.43204320432043203, "grad_norm": 4.535490989685059, "learning_rate": 9.689998780094837e-05, "loss": 5.4977, "step": 240 },
    { "epoch": 0.43384338433843384, "grad_norm": 4.093436241149902, "learning_rate": 9.67447746261926e-05, "loss": 4.3227, "step": 241 },
    { "epoch": 0.43564356435643564, "grad_norm": 4.311260223388672, "learning_rate": 9.658590067442829e-05, "loss": 5.137, "step": 242 },
    { "epoch": 0.43744374437443745, "grad_norm": 4.544931411743164, "learning_rate": 9.6423378387733e-05, "loss": 5.3618, "step": 243 },
    { "epoch": 0.43924392439243926, "grad_norm": 4.6238298416137695, "learning_rate": 9.625722049390046e-05, "loss": 5.7622, "step": 244 },
    { "epoch": 0.44104410441044106, "grad_norm": 4.168641567230225, "learning_rate": 9.608744000544392e-05, "loss": 5.2432, "step": 245 },
    { "epoch": 0.44284428442844287, "grad_norm": 4.634570598602295, "learning_rate": 9.591405021857695e-05, "loss": 5.8623, "step": 246 },
    { "epoch": 0.4446444644464446, "grad_norm": 4.366397380828857, "learning_rate": 9.573706471217232e-05, "loss": 5.4993, "step": 247 },
    { "epoch": 0.4464446444644464, "grad_norm": 4.067739963531494, "learning_rate": 9.55564973466984e-05, "loss": 5.6379, "step": 248 },
    { "epoch": 0.44824482448244823, "grad_norm": 3.8812038898468018, "learning_rate": 9.537236226313389e-05, "loss": 5.4358, "step": 249 },
    { "epoch": 0.45004500450045004, "grad_norm": 5.2032318115234375, "learning_rate": 9.51846738818602e-05, "loss": 5.2139, "step": 250 },
    { "epoch": 0.45184518451845185, "grad_norm": 64.75902557373047, "learning_rate": 9.499344690153225e-05, "loss": 6.1023, "step": 251 },
    { "epoch": 0.45364536453645365, "grad_norm": 35.30427932739258, "learning_rate": 9.479869629792728e-05, "loss": 3.4732, "step": 252 },
    { "epoch": 0.45544554455445546, "grad_norm": 4.7051496505737305, "learning_rate": 9.460043732277213e-05, "loss": 2.73, "step": 253 },
    { "epoch": 0.45724572457245727, "grad_norm": 3.9896416664123535, "learning_rate": 9.439868550254869e-05, "loss": 3.2622, "step": 254 },
    { "epoch": 0.459045904590459, "grad_norm": 3.751338005065918, "learning_rate": 9.419345663727805e-05, "loss": 3.0939, "step": 255 },
    { "epoch": 0.4608460846084608, "grad_norm": 3.8281116485595703, "learning_rate": 9.398476679928313e-05, "loss": 3.9917, "step": 256 },
    { "epoch": 0.46264626462646263, "grad_norm": 3.47261905670166, "learning_rate": 9.377263233192992e-05, "loss": 3.471, "step": 257 },
    { "epoch": 0.46444644464446444, "grad_norm": 3.8381025791168213, "learning_rate": 9.355706984834763e-05, "loss": 3.7771, "step": 258 },
    { "epoch": 0.46624662466246625, "grad_norm": 4.145185470581055, "learning_rate": 9.333809623012763e-05, "loss": 3.7653, "step": 259 },
    { "epoch": 0.46804680468046805, "grad_norm": 3.826444387435913, "learning_rate": 9.311572862600139e-05, "loss": 3.7425, "step": 260 },
    { "epoch": 0.46984698469846986, "grad_norm": 3.433979034423828, "learning_rate": 9.288998445049746e-05, "loss": 3.4126, "step": 261 },
    { "epoch": 0.47164716471647167, "grad_norm": 4.036341667175293, "learning_rate": 9.266088138257768e-05, "loss": 3.6221, "step": 262 },
    { "epoch": 0.4734473447344735, "grad_norm": 3.754890203475952, "learning_rate": 9.24284373642527e-05, "loss": 4.0088, "step": 263 },
    { "epoch": 0.4752475247524752, "grad_norm": 3.838566780090332, "learning_rate": 9.219267059917682e-05, "loss": 3.6828, "step": 264 },
    { "epoch": 0.47704770477047703, "grad_norm": 3.6993019580841064, "learning_rate": 9.195359955122244e-05, "loss": 3.7106, "step": 265 },
    { "epoch": 0.47884788478847884, "grad_norm": 3.721072196960449, "learning_rate": 9.171124294303399e-05, "loss": 3.9501, "step": 266 },
    { "epoch": 0.48064806480648065, "grad_norm": 3.40067982673645, "learning_rate": 9.14656197545618e-05, "loss": 3.6647, "step": 267 },
    { "epoch": 0.48244824482448245, "grad_norm": 4.225182056427002, "learning_rate": 9.121674922157558e-05, "loss": 4.2479, "step": 268 },
    { "epoch": 0.48424842484248426, "grad_norm": 3.574556589126587, "learning_rate": 9.096465083415808e-05, "loss": 4.3452, "step": 269 },
    { "epoch": 0.48604860486048607, "grad_norm": 3.840705394744873, "learning_rate": 9.070934433517873e-05, "loss": 4.7066, "step": 270 },
    { "epoch": 0.4878487848784879, "grad_norm": 3.4904778003692627, "learning_rate": 9.045084971874738e-05, "loss": 3.7437, "step": 271 },
    { "epoch": 0.4896489648964896, "grad_norm": 3.8224945068359375, "learning_rate": 9.018918722864867e-05, "loss": 4.1212, "step": 272 },
    { "epoch": 0.49144914491449143, "grad_norm": 4.084190368652344, "learning_rate": 8.992437735675651e-05, "loss": 3.883, "step": 273 },
    { "epoch": 0.49324932493249324, "grad_norm": 4.186155796051025, "learning_rate": 8.965644084142932e-05, "loss": 4.6328, "step": 274 },
    { "epoch": 0.49504950495049505, "grad_norm": 4.2307047843933105, "learning_rate": 8.938539866588592e-05, "loss": 3.9238, "step": 275 },
    { "epoch": 0.49684968496849685, "grad_norm": 4.097965717315674, "learning_rate": 8.911127205656226e-05, "loss": 4.4123, "step": 276 },
    { "epoch": 0.49864986498649866, "grad_norm": 3.495042562484741, "learning_rate": 8.88340824814491e-05, "loss": 3.8635, "step": 277 },
    { "epoch": 0.5004500450045004, "grad_norm": 3.689110517501831, "learning_rate": 8.855385164841072e-05, "loss": 4.6177, "step": 278 },
    { "epoch": 0.5022502250225023, "grad_norm": 3.6329872608184814, "learning_rate": 8.82706015034849e-05, "loss": 4.2534, "step": 279 },
    { "epoch": 0.504050405040504, "grad_norm": 3.8967461585998535, "learning_rate": 8.798435422916425e-05, "loss": 3.7907, "step": 280 },
    { "epoch": 0.5058505850585059, "grad_norm": 4.086053848266602, "learning_rate": 8.769513224265904e-05, "loss": 4.5879, "step": 281 },
    { "epoch": 0.5076507650765076, "grad_norm": 4.173221111297607, "learning_rate": 8.740295819414155e-05, "loss": 4.1702, "step": 282 },
    { "epoch": 0.5094509450945095, "grad_norm": 3.921698570251465, "learning_rate": 8.710785496497225e-05, "loss": 4.2377, "step": 283 },
    { "epoch": 0.5112511251125113, "grad_norm": 3.830862045288086, "learning_rate": 8.680984566590793e-05, "loss": 4.3651, "step": 284 },
    { "epoch": 0.513051305130513, "grad_norm": 3.77140212059021, "learning_rate": 8.650895363529173e-05, "loss": 4.4687, "step": 285 },
    { "epoch": 0.5148514851485149, "grad_norm": 3.5552985668182373, "learning_rate": 8.620520243722541e-05, "loss": 4.8082, "step": 286 },
    { "epoch": 0.5166516651665166, "grad_norm": 3.6477136611938477, "learning_rate": 8.589861585972407e-05, "loss": 5.2562, "step": 287 },
    { "epoch": 0.5184518451845185, "grad_norm": 4.382421493530273, "learning_rate": 8.558921791285304e-05, "loss": 5.0799, "step": 288 },
    { "epoch": 0.5202520252025202, "grad_norm": 4.065561294555664, "learning_rate": 8.527703282684766e-05, "loss": 4.9109, "step": 289 },
    { "epoch": 0.5220522052205221, "grad_norm": 3.5327258110046387, "learning_rate": 8.496208505021571e-05, "loss": 4.2811, "step": 290 },
    { "epoch": 0.5238523852385238, "grad_norm": 4.304015636444092, "learning_rate": 8.464439924782271e-05, "loss": 5.0544, "step": 291 },
    { "epoch": 0.5256525652565257, "grad_norm": 4.300441265106201, "learning_rate": 8.432400029896028e-05, "loss": 5.1385, "step": 292 },
    { "epoch": 0.5274527452745275, "grad_norm": 3.970590114593506, "learning_rate": 8.400091329539784e-05, "loss": 5.5669, "step": 293 },
    { "epoch": 0.5292529252925292, "grad_norm": 4.322851657867432, "learning_rate": 8.367516353941751e-05, "loss": 5.1609, "step": 294 },
    { "epoch": 0.5310531053105311, "grad_norm": 4.039634704589844, "learning_rate": 8.334677654183254e-05, "loss": 5.3988, "step": 295 },
    { "epoch": 0.5328532853285328, "grad_norm": 4.090119361877441, "learning_rate": 8.301577801998954e-05, "loss": 5.3261, "step": 296 },
    { "epoch": 0.5346534653465347, "grad_norm": 4.731489181518555, "learning_rate": 8.26821938957544e-05, "loss": 5.4674, "step": 297 },
    { "epoch": 0.5364536453645364, "grad_norm": 4.765017032623291, "learning_rate": 8.234605029348223e-05, "loss": 5.5113, "step": 298 },
    { "epoch": 0.5382538253825383, "grad_norm": 4.620230197906494, "learning_rate": 8.200737353797149e-05, "loss": 6.6506, "step": 299 },
    { "epoch": 0.54005400540054, "grad_norm": 4.256903648376465, "learning_rate": 8.166619015240236e-05, "loss": 5.401, "step": 300 },
    { "epoch": 0.5418541854185418, "grad_norm": 72.8096694946289, "learning_rate": 8.132252685625958e-05, "loss": 5.5679, "step": 301 },
    { "epoch": 0.5436543654365437, "grad_norm": 12.82461166381836, "learning_rate": 8.097641056324004e-05, "loss": 2.3965, "step": 302 },
    { "epoch": 0.5454545454545454, "grad_norm": 3.8120839595794678, "learning_rate": 8.062786837914492e-05, "loss": 2.5916, "step": 303 },
    { "epoch": 0.5472547254725473, "grad_norm": 4.205955505371094, "learning_rate": 8.027692759975695e-05, "loss": 3.1019, "step": 304 },
    { "epoch": 0.549054905490549, "grad_norm": 3.6653831005096436, "learning_rate": 7.992361570870288e-05, "loss": 3.0688, "step": 305 },
    { "epoch": 0.5508550855085509, "grad_norm": 3.9019699096679688, "learning_rate": 7.956796037530099e-05, "loss": 3.4956, "step": 306 },
    { "epoch": 0.5526552655265526, "grad_norm": 4.44380521774292, "learning_rate": 7.920998945239427e-05, "loss": 3.8163, "step": 307 },
    { "epoch": 0.5544554455445545, "grad_norm": 4.306615352630615, "learning_rate": 7.884973097416908e-05, "loss": 3.8254, "step": 308 },
    { "epoch": 0.5562556255625563, "grad_norm": 3.5584664344787598, "learning_rate": 7.848721315395974e-05, "loss": 3.9131, "step": 309 },
    { "epoch": 0.558055805580558, "grad_norm": 3.8743574619293213, "learning_rate": 7.812246438203904e-05, "loss": 3.7078, "step": 310 },
    { "epoch": 0.5598559855985599, "grad_norm": 3.7516815662384033, "learning_rate": 7.775551322339476e-05, "loss": 3.8186, "step": 311 },
    { "epoch": 0.5616561656165616, "grad_norm": 3.739532232284546, "learning_rate": 7.738638841549277e-05, "loss": 3.7782, "step": 312 },
    { "epoch": 0.5634563456345635, "grad_norm": 3.378246784210205, "learning_rate": 7.701511886602643e-05, "loss": 3.703, "step": 313 },
    { "epoch": 0.5652565256525652, "grad_norm": 3.18391752243042, "learning_rate": 7.664173365065265e-05, "loss": 3.7862, "step": 314 },
    { "epoch": 0.5670567056705671, "grad_norm": 3.660829782485962, "learning_rate": 7.626626201071494e-05, "loss": 4.0942, "step": 315 },
    { "epoch": 0.5688568856885688, "grad_norm": 3.49433970451355, "learning_rate": 7.588873335095337e-05, "loss": 4.4192, "step": 316 },
    { "epoch": 0.5706570657065707, "grad_norm": 3.631074905395508, "learning_rate": 7.550917723720172e-05, "loss": 4.0662, "step": 317 },
    { "epoch": 0.5724572457245725, "grad_norm": 3.1131577491760254, "learning_rate": 7.512762339407214e-05, "loss": 3.5052, "step": 318 },
    { "epoch": 0.5742574257425742, "grad_norm": 3.5523805618286133, "learning_rate": 7.474410170262718e-05, "loss": 4.2908, "step": 319 },
    { "epoch": 0.5760576057605761, "grad_norm": 3.9423253536224365, "learning_rate": 7.435864219803983e-05, "loss": 4.3511, "step": 320 },
    { "epoch": 0.5778577857785778, "grad_norm": 3.6397900581359863, "learning_rate": 7.39712750672412e-05, "loss": 4.0423, "step": 321 },
    { "epoch": 0.5796579657965797, "grad_norm": 3.341845750808716, "learning_rate": 7.358203064655653e-05, "loss": 4.0845, "step": 322 },
    { "epoch": 0.5814581458145814, "grad_norm": 3.8859944343566895, "learning_rate": 7.31909394193294e-05, "loss": 3.6363, "step": 323 },
    { "epoch": 0.5832583258325833, "grad_norm": 3.2730302810668945, "learning_rate": 7.27980320135345e-05, "loss": 3.9988, "step": 324 },
    { "epoch": 0.585058505850585, "grad_norm": 3.4280858039855957, "learning_rate": 7.240333919937893e-05, "loss": 3.9773, "step": 325 },
    { "epoch": 0.5868586858685868, "grad_norm": 3.675854444503784, "learning_rate": 7.200689188689256e-05, "loss": 4.4474, "step": 326 },
    { "epoch": 0.5886588658865887, "grad_norm": 3.922302007675171, "learning_rate": 7.16087211235073e-05, "loss": 4.1046, "step": 327 },
    { "epoch": 0.5904590459045904, "grad_norm": 4.065755367279053, "learning_rate": 7.120885809162561e-05, "loss": 4.6008, "step": 328 },
    { "epoch": 0.5922592259225923, "grad_norm": 4.271775722503662, "learning_rate": 7.080733410617853e-05, "loss": 4.2479, "step": 329 },
    { "epoch": 0.594059405940594, "grad_norm": 3.5132906436920166, "learning_rate": 7.040418061217325e-05, "loss": 4.0792, "step": 330 },
    { "epoch": 0.5958595859585959, "grad_norm": 3.62770676612854, "learning_rate": 6.999942918223053e-05, "loss": 4.6398, "step": 331 },
    { "epoch": 0.5976597659765976, "grad_norm": 3.656203508377075, "learning_rate": 6.959311151411212e-05, "loss": 4.2742, "step": 332 },
    { "epoch": 0.5994599459945995, "grad_norm": 4.29753303527832, "learning_rate": 6.918525942823835e-05, "loss": 4.6216, "step": 333 },
    { "epoch": 0.6012601260126013, "grad_norm": 3.8375144004821777, "learning_rate": 6.87759048651962e-05, "loss": 4.6474, "step": 334 },
    { "epoch": 0.603060306030603, "grad_norm": 3.9374406337738037, "learning_rate": 6.836507988323784e-05, "loss": 4.8221, "step": 335 },
    { "epoch": 0.6048604860486049, "grad_norm": 4.202131748199463, "learning_rate": 6.795281665577003e-05, "loss": 4.4779, "step": 336 },
    { "epoch": 0.6066606660666066, "grad_norm": 4.333240985870361, "learning_rate": 6.753914746883451e-05, "loss": 5.2416, "step": 337 },
    { "epoch": 0.6084608460846085, "grad_norm": 4.519970417022705, "learning_rate": 6.712410471857955e-05, "loss": 5.6601, "step": 338 },
    { "epoch": 0.6102610261026102, "grad_norm": 4.426661968231201, "learning_rate": 6.67077209087228e-05, "loss": 4.9072, "step": 339 },
    { "epoch": 0.6120612061206121, "grad_norm": 3.4359755516052246, "learning_rate": 6.629002864800589e-05, "loss": 4.8438, "step": 340 },
    { "epoch": 0.6138613861386139, "grad_norm": 3.7532505989074707, "learning_rate": 6.587106064764064e-05, "loss": 5.4166, "step": 341 },
    { "epoch": 0.6156615661566157, "grad_norm": 3.9281110763549805, "learning_rate": 6.545084971874738e-05, "loss": 4.7921, "step": 342 },
    { "epoch": 0.6174617461746175, "grad_norm": 3.9203011989593506, "learning_rate": 6.502942876978524e-05, "loss": 4.6832, "step": 343 },
    { "epoch": 0.6192619261926192, "grad_norm": 3.9752700328826904, "learning_rate": 6.460683080397508e-05, "loss": 5.1418, "step": 344 },
    { "epoch": 0.6210621062106211, "grad_norm": 3.637481451034546, "learning_rate": 6.418308891671484e-05, "loss": 4.8411, "step": 345 },
    { "epoch": 0.6228622862286228, "grad_norm": 3.9811997413635254, "learning_rate": 6.375823629298765e-05, "loss": 4.9951, "step": 346 },
    { "epoch": 0.6246624662466247, "grad_norm": 4.1592559814453125, "learning_rate": 6.333230620476307e-05, "loss": 5.9485, "step": 347 },
    { "epoch": 0.6264626462646264, "grad_norm": 3.6163744926452637, "learning_rate": 6.29053320083913e-05, "loss": 5.3969, "step": 348 },
    { "epoch": 0.6282628262826283, "grad_norm": 4.366085052490234, "learning_rate": 6.247734714199105e-05, "loss": 5.3296, "step": 349 },
    { "epoch": 0.6300630063006301, "grad_norm": 4.482414722442627, "learning_rate": 6.204838512283072e-05, "loss": 6.0348, "step": 350 },
    { "epoch": 0.6318631863186318, "grad_norm": 38.78593826293945, "learning_rate": 6.161847954470365e-05, "loss": 4.4123, "step": 351 },
    { "epoch": 0.6336633663366337, "grad_norm": 4.5870041847229, "learning_rate": 6.118766407529713e-05, "loss": 2.4915, "step": 352 },
    { "epoch": 0.6354635463546354, "grad_norm": 3.5220015048980713, "learning_rate": 6.075597245355589e-05, "loss": 2.8723, "step": 353 },
    { "epoch": 0.6372637263726373, "grad_norm": 3.2156431674957275, "learning_rate": 6.03234384870397e-05, "loss": 3.0837, "step": 354 },
    { "epoch": 0.639063906390639, "grad_norm": 3.223064422607422, "learning_rate": 5.989009604927587e-05, "loss": 2.9846, "step": 355 },
    { "epoch": 0.6408640864086409, "grad_norm": 3.5234880447387695, "learning_rate": 5.9455979077106463e-05, "loss": 3.4383, "step": 356 },
    { "epoch": 0.6426642664266426, "grad_norm": 3.648319721221924, "learning_rate": 5.9021121568030514e-05, "loss": 3.2179, "step": 357 },
    { "epoch": 0.6444644464446445, "grad_norm": 3.348188877105713, "learning_rate": 5.8585557577541595e-05, "loss": 3.2722, "step": 358 },
    { "epoch": 0.6462646264626463, "grad_norm": 3.6280741691589355, "learning_rate": 5.814932121646074e-05, "loss": 3.6418, "step": 359 },
    { "epoch": 0.648064806480648, "grad_norm": 3.3993427753448486, "learning_rate": 5.771244664826512e-05, "loss": 3.6959, "step": 360 },
    { "epoch": 0.6498649864986499, "grad_norm": 3.4230000972747803, "learning_rate": 5.727496808641256e-05, "loss": 3.5139, "step": 361 },
    { "epoch": 0.6516651665166516, "grad_norm": 3.6757190227508545, "learning_rate": 5.6836919791662136e-05, "loss": 4.1188, "step": 362 },
    { "epoch": 0.6534653465346535, "grad_norm": 3.232292413711548, "learning_rate": 5.639833606939102e-05, "loss": 3.7585, "step": 363 },
    { "epoch": 0.6552655265526552, "grad_norm": 3.3726987838745117, "learning_rate": 5.595925126690801e-05, "loss": 3.8637, "step": 364 },
    { "epoch": 0.6570657065706571, "grad_norm": 3.570510149002075, "learning_rate": 5.55196997707635e-05, "loss": 3.6172, "step": 365 },
    { "epoch": 0.6588658865886589, "grad_norm": 3.3099026679992676, "learning_rate": 5.507971600405663e-05, "loss": 4.0602, "step": 366 },
    { "epoch": 0.6606660666066607, "grad_norm": 3.730677843093872, "learning_rate": 5.463933442373945e-05, "loss": 4.0647, "step": 367 },
    { "epoch": 0.6624662466246625, "grad_norm": 3.8183600902557373, "learning_rate": 5.419858951791842e-05, "loss": 3.9943, "step": 368 },
    { "epoch": 0.6642664266426642, "grad_norm": 3.7817885875701904, "learning_rate": 5.3757515803153546e-05, "loss": 3.6044, "step": 369 },
    { "epoch": 0.6660666066606661, "grad_norm": 3.696652412414551, "learning_rate": 5.3316147821755205e-05, "loss": 3.8305, "step": 370 },
    { "epoch": 0.6678667866786678, "grad_norm": 3.8938591480255127, "learning_rate": 5.2874520139079006e-05, "loss": 4.1762, "step": 371 },
    { "epoch": 0.6696669666966697, "grad_norm": 3.8813014030456543, "learning_rate": 5.2432667340818906e-05, "loss": 4.0786, "step": 372 },
    { "epoch": 0.6714671467146714, "grad_norm": 3.655909299850464, "learning_rate": 5.1990624030298506e-05, "loss": 3.9051, "step": 373 },
    { "epoch": 0.6732673267326733, "grad_norm": 3.8451969623565674, "learning_rate": 5.154842482576127e-05, "loss": 4.1839, "step": 374 },
    { "epoch": 0.6750675067506751, "grad_norm": 4.180874824523926, "learning_rate": 5.110610435765934e-05, "loss": 4.0388, "step": 375 },
    { "epoch": 0.6768676867686768, "grad_norm": 3.5825304985046387, "learning_rate": 5.0663697265941545e-05, "loss": 4.1734, "step": 376 },
    { "epoch": 0.6786678667866787, "grad_norm": 3.780687093734741, "learning_rate": 5.022123819734053e-05, "loss": 4.3508, "step": 377 },
    { "epoch": 0.6804680468046804, "grad_norm": 3.889195203781128, "learning_rate": 4.977876180265948e-05, "loss": 4.1945, "step": 378 },
    { "epoch": 0.6822682268226823, "grad_norm": 3.726132869720459, "learning_rate": 4.933630273405847e-05, "loss": 3.8745, "step": 379 },
    { "epoch": 0.684068406840684, "grad_norm": 3.7415261268615723, "learning_rate": 4.889389564234066e-05, "loss": 4.2891, "step": 380 },
    { "epoch": 0.6858685868586859, "grad_norm": 3.6238245964050293, "learning_rate": 4.845157517423874e-05, "loss": 4.3567, "step": 381 },
    { "epoch": 0.6876687668766877, "grad_norm": 3.964704990386963,
|
"learning_rate": 4.8009375969701505e-05, |
|
"loss": 4.5154, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.6894689468946895, |
|
"grad_norm": 3.337338447570801, |
|
"learning_rate": 4.7567332659181106e-05, |
|
"loss": 4.5835, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.6912691269126913, |
|
"grad_norm": 3.447648286819458, |
|
"learning_rate": 4.7125479860921e-05, |
|
"loss": 4.2734, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.693069306930693, |
|
"grad_norm": 4.408335208892822, |
|
"learning_rate": 4.668385217824482e-05, |
|
"loss": 4.8601, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.6948694869486949, |
|
"grad_norm": 3.683608293533325, |
|
"learning_rate": 4.6242484196846465e-05, |
|
"loss": 4.9472, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.6966696669666966, |
|
"grad_norm": 4.226807117462158, |
|
"learning_rate": 4.5801410482081584e-05, |
|
"loss": 4.6788, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.6984698469846985, |
|
"grad_norm": 3.806995391845703, |
|
"learning_rate": 4.536066557626056e-05, |
|
"loss": 4.9015, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.7002700270027002, |
|
"grad_norm": 3.541186571121216, |
|
"learning_rate": 4.4920283995943383e-05, |
|
"loss": 5.1853, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.7020702070207021, |
|
"grad_norm": 3.971658706665039, |
|
"learning_rate": 4.448030022923652e-05, |
|
"loss": 4.8798, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.7038703870387039, |
|
"grad_norm": 3.6937930583953857, |
|
"learning_rate": 4.404074873309201e-05, |
|
"loss": 4.9251, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.7056705670567057, |
|
"grad_norm": 4.2643723487854, |
|
"learning_rate": 4.3601663930608995e-05, |
|
"loss": 4.8249, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.7074707470747075, |
|
"grad_norm": 3.8684353828430176, |
|
"learning_rate": 4.316308020833788e-05, |
|
"loss": 5.6238, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.7092709270927092, |
|
"grad_norm": 3.8425145149230957, |
|
"learning_rate": 4.272503191358743e-05, |
|
"loss": 4.9036, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.7110711071107111, |
|
"grad_norm": 3.797442674636841, |
|
"learning_rate": 4.228755335173488e-05, |
|
"loss": 5.0452, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.7128712871287128, |
|
"grad_norm": 4.2270355224609375, |
|
"learning_rate": 4.1850678783539265e-05, |
|
"loss": 4.7575, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.7146714671467147, |
|
"grad_norm": 4.077787399291992, |
|
"learning_rate": 4.141444242245841e-05, |
|
"loss": 5.9227, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.7164716471647165, |
|
"grad_norm": 3.7013025283813477, |
|
"learning_rate": 4.0978878431969484e-05, |
|
"loss": 4.7173, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.7182718271827183, |
|
"grad_norm": 4.04628849029541, |
|
"learning_rate": 4.054402092289354e-05, |
|
"loss": 5.4247, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.7200720072007201, |
|
"grad_norm": 4.189887523651123, |
|
"learning_rate": 4.0109903950724134e-05, |
|
"loss": 5.3008, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.7218721872187218, |
|
"grad_norm": 20.025436401367188, |
|
"learning_rate": 3.967656151296031e-05, |
|
"loss": 3.8243, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.7236723672367237, |
|
"grad_norm": 4.733638286590576, |
|
"learning_rate": 3.924402754644412e-05, |
|
"loss": 2.4717, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.7254725472547254, |
|
"grad_norm": 3.5588278770446777, |
|
"learning_rate": 3.881233592470287e-05, |
|
"loss": 2.7574, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.7272727272727273, |
|
"grad_norm": 3.0511527061462402, |
|
"learning_rate": 3.8381520455296364e-05, |
|
"loss": 2.6524, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.729072907290729, |
|
"grad_norm": 3.7463011741638184, |
|
"learning_rate": 3.7951614877169284e-05, |
|
"loss": 3.2022, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.7308730873087309, |
|
"grad_norm": 3.974733829498291, |
|
"learning_rate": 3.7522652858008964e-05, |
|
"loss": 3.7213, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.7326732673267327, |
|
"grad_norm": 3.5178704261779785, |
|
"learning_rate": 3.7094667991608704e-05, |
|
"loss": 3.7185, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.7344734473447345, |
|
"grad_norm": 3.9185893535614014, |
|
"learning_rate": 3.6667693795236946e-05, |
|
"loss": 4.0272, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.7362736273627363, |
|
"grad_norm": 3.0775508880615234, |
|
"learning_rate": 3.624176370701236e-05, |
|
"loss": 3.0848, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.738073807380738, |
|
"grad_norm": 3.445986270904541, |
|
"learning_rate": 3.581691108328517e-05, |
|
"loss": 3.857, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.7398739873987399, |
|
"grad_norm": 3.1240127086639404, |
|
"learning_rate": 3.5393169196024925e-05, |
|
"loss": 3.0593, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.7416741674167416, |
|
"grad_norm": 3.4222195148468018, |
|
"learning_rate": 3.4970571230214774e-05, |
|
"loss": 3.551, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.7434743474347435, |
|
"grad_norm": 3.3344242572784424, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 3.6521, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.7452745274527453, |
|
"grad_norm": 3.6145176887512207, |
|
"learning_rate": 3.412893935235936e-05, |
|
"loss": 3.7669, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.7470747074707471, |
|
"grad_norm": 3.2903146743774414, |
|
"learning_rate": 3.370997135199413e-05, |
|
"loss": 3.333, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.7488748874887489, |
|
"grad_norm": 3.5195930004119873, |
|
"learning_rate": 3.329227909127722e-05, |
|
"loss": 4.2719, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.7506750675067507, |
|
"grad_norm": 3.5990970134735107, |
|
"learning_rate": 3.287589528142048e-05, |
|
"loss": 4.1684, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.7524752475247525, |
|
"grad_norm": 3.387673854827881, |
|
"learning_rate": 3.24608525311655e-05, |
|
"loss": 3.8335, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.7542754275427542, |
|
"grad_norm": 3.6439712047576904, |
|
"learning_rate": 3.204718334422998e-05, |
|
"loss": 4.3415, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.7560756075607561, |
|
"grad_norm": 3.3546173572540283, |
|
"learning_rate": 3.1634920116762176e-05, |
|
"loss": 3.8877, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.7578757875787578, |
|
"grad_norm": 3.6753296852111816, |
|
"learning_rate": 3.122409513480381e-05, |
|
"loss": 4.1525, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.7596759675967597, |
|
"grad_norm": 3.644063711166382, |
|
"learning_rate": 3.081474057176164e-05, |
|
"loss": 3.8885, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.7614761476147615, |
|
"grad_norm": 4.089780807495117, |
|
"learning_rate": 3.040688848588788e-05, |
|
"loss": 4.0093, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.7632763276327633, |
|
"grad_norm": 3.1043710708618164, |
|
"learning_rate": 3.000057081776947e-05, |
|
"loss": 4.0178, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.7650765076507651, |
|
"grad_norm": 3.322037935256958, |
|
"learning_rate": 2.959581938782675e-05, |
|
"loss": 4.1183, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.7668766876687669, |
|
"grad_norm": 3.9151599407196045, |
|
"learning_rate": 2.9192665893821475e-05, |
|
"loss": 4.2036, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.7686768676867687, |
|
"grad_norm": 3.6815481185913086, |
|
"learning_rate": 2.8791141908374397e-05, |
|
"loss": 4.7545, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.7704770477047704, |
|
"grad_norm": 4.053499698638916, |
|
"learning_rate": 2.839127887649271e-05, |
|
"loss": 4.0245, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.7722772277227723, |
|
"grad_norm": 3.4955430030822754, |
|
"learning_rate": 2.7993108113107447e-05, |
|
"loss": 3.901, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.774077407740774, |
|
"grad_norm": 3.2984628677368164, |
|
"learning_rate": 2.7596660800621078e-05, |
|
"loss": 3.9142, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.7758775877587759, |
|
"grad_norm": 3.7901782989501953, |
|
"learning_rate": 2.7201967986465515e-05, |
|
"loss": 3.669, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.7776777677767777, |
|
"grad_norm": 3.3658523559570312, |
|
"learning_rate": 2.6809060580670607e-05, |
|
"loss": 4.3362, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.7794779477947795, |
|
"grad_norm": 3.361583948135376, |
|
"learning_rate": 2.6417969353443483e-05, |
|
"loss": 4.4502, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.7812781278127813, |
|
"grad_norm": 3.271467924118042, |
|
"learning_rate": 2.6028724932758813e-05, |
|
"loss": 4.1924, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.783078307830783, |
|
"grad_norm": 3.368018627166748, |
|
"learning_rate": 2.5641357801960187e-05, |
|
"loss": 4.2437, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.7848784878487849, |
|
"grad_norm": 4.052670955657959, |
|
"learning_rate": 2.525589829737284e-05, |
|
"loss": 4.7636, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.7866786678667866, |
|
"grad_norm": 3.8365402221679688, |
|
"learning_rate": 2.487237660592789e-05, |
|
"loss": 4.259, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.7884788478847885, |
|
"grad_norm": 3.631394386291504, |
|
"learning_rate": 2.44908227627983e-05, |
|
"loss": 4.7527, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.7902790279027903, |
|
"grad_norm": 3.6414878368377686, |
|
"learning_rate": 2.4111266649046647e-05, |
|
"loss": 4.5161, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.7920792079207921, |
|
"grad_norm": 3.9235427379608154, |
|
"learning_rate": 2.373373798928507e-05, |
|
"loss": 4.7452, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.7938793879387939, |
|
"grad_norm": 3.502267599105835, |
|
"learning_rate": 2.335826634934737e-05, |
|
"loss": 4.2153, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.7956795679567957, |
|
"grad_norm": 3.719147205352783, |
|
"learning_rate": 2.298488113397359e-05, |
|
"loss": 4.7187, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.7974797479747975, |
|
"grad_norm": 4.110799789428711, |
|
"learning_rate": 2.2613611584507228e-05, |
|
"loss": 4.898, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.7992799279927992, |
|
"grad_norm": 3.9746625423431396, |
|
"learning_rate": 2.2244486776605243e-05, |
|
"loss": 5.2031, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.8010801080108011, |
|
"grad_norm": 3.9334423542022705, |
|
"learning_rate": 2.187753561796097e-05, |
|
"loss": 5.5832, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.8028802880288028, |
|
"grad_norm": 3.596407413482666, |
|
"learning_rate": 2.1512786846040255e-05, |
|
"loss": 5.0518, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.8046804680468047, |
|
"grad_norm": 3.7824950218200684, |
|
"learning_rate": 2.1150269025830925e-05, |
|
"loss": 5.0857, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.8064806480648065, |
|
"grad_norm": 3.96897292137146, |
|
"learning_rate": 2.0790010547605743e-05, |
|
"loss": 5.4674, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.8082808280828083, |
|
"grad_norm": 3.868983030319214, |
|
"learning_rate": 2.0432039624699016e-05, |
|
"loss": 5.3334, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.8100810081008101, |
|
"grad_norm": 4.298758029937744, |
|
"learning_rate": 2.0076384291297134e-05, |
|
"loss": 5.33, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.8118811881188119, |
|
"grad_norm": 14.23192024230957, |
|
"learning_rate": 1.972307240024307e-05, |
|
"loss": 3.0892, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.8136813681368137, |
|
"grad_norm": 4.406993865966797, |
|
"learning_rate": 1.9372131620855095e-05, |
|
"loss": 2.5208, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.8154815481548154, |
|
"grad_norm": 5.394495487213135, |
|
"learning_rate": 1.9023589436759954e-05, |
|
"loss": 2.8551, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.8172817281728173, |
|
"grad_norm": 3.0106537342071533, |
|
"learning_rate": 1.867747314374041e-05, |
|
"loss": 3.0363, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.819081908190819, |
|
"grad_norm": 2.969430685043335, |
|
"learning_rate": 1.8333809847597642e-05, |
|
"loss": 2.9957, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.8208820882088209, |
|
"grad_norm": 3.101269245147705, |
|
"learning_rate": 1.7992626462028513e-05, |
|
"loss": 3.0854, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.8226822682268227, |
|
"grad_norm": 3.125643730163574, |
|
"learning_rate": 1.765394970651777e-05, |
|
"loss": 3.2697, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.8244824482448245, |
|
"grad_norm": 3.0227460861206055, |
|
"learning_rate": 1.7317806104245597e-05, |
|
"loss": 3.4779, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.8262826282628263, |
|
"grad_norm": 3.203869342803955, |
|
"learning_rate": 1.6984221980010455e-05, |
|
"loss": 3.7277, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.828082808280828, |
|
"grad_norm": 3.483610153198242, |
|
"learning_rate": 1.665322345816746e-05, |
|
"loss": 3.8941, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.8298829882988299, |
|
"grad_norm": 3.1454741954803467, |
|
"learning_rate": 1.6324836460582498e-05, |
|
"loss": 3.883, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.8316831683168316, |
|
"grad_norm": 3.6109461784362793, |
|
"learning_rate": 1.599908670460216e-05, |
|
"loss": 3.2933, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.8334833483348335, |
|
"grad_norm": 3.4250762462615967, |
|
"learning_rate": 1.5675999701039735e-05, |
|
"loss": 3.628, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.8352835283528353, |
|
"grad_norm": 3.624478578567505, |
|
"learning_rate": 1.535560075217731e-05, |
|
"loss": 4.2137, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.8370837083708371, |
|
"grad_norm": 3.289508819580078, |
|
"learning_rate": 1.5037914949784299e-05, |
|
"loss": 3.5101, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.8388838883888389, |
|
"grad_norm": 3.1444578170776367, |
|
"learning_rate": 1.4722967173152353e-05, |
|
"loss": 4.0805, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.8406840684068407, |
|
"grad_norm": 2.974825143814087, |
|
"learning_rate": 1.441078208714698e-05, |
|
"loss": 3.5221, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.8424842484248425, |
|
"grad_norm": 3.4931812286376953, |
|
"learning_rate": 1.4101384140275947e-05, |
|
"loss": 3.6931, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.8442844284428442, |
|
"grad_norm": 3.261122703552246, |
|
"learning_rate": 1.3794797562774598e-05, |
|
"loss": 3.769, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.8460846084608461, |
|
"grad_norm": 3.2930214405059814, |
|
"learning_rate": 1.3491046364708293e-05, |
|
"loss": 3.6359, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.8478847884788479, |
|
"grad_norm": 3.166865587234497, |
|
"learning_rate": 1.319015433409208e-05, |
|
"loss": 4.2264, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.8496849684968497, |
|
"grad_norm": 3.187593936920166, |
|
"learning_rate": 1.2892145035027758e-05, |
|
"loss": 4.2423, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.8514851485148515, |
|
"grad_norm": 2.961714506149292, |
|
"learning_rate": 1.2597041805858468e-05, |
|
"loss": 3.7844, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.8532853285328533, |
|
"grad_norm": 3.1579463481903076, |
|
"learning_rate": 1.2304867757340972e-05, |
|
"loss": 3.9704, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.8550855085508551, |
|
"grad_norm": 3.4631705284118652, |
|
"learning_rate": 1.2015645770835764e-05, |
|
"loss": 3.943, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.8568856885688569, |
|
"grad_norm": 2.9861361980438232, |
|
"learning_rate": 1.1729398496515126e-05, |
|
"loss": 3.6184, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.8586858685868587, |
|
"grad_norm": 3.6547658443450928, |
|
"learning_rate": 1.1446148351589297e-05, |
|
"loss": 4.8532, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.8604860486048604, |
|
"grad_norm": 3.7734615802764893, |
|
"learning_rate": 1.1165917518550911e-05, |
|
"loss": 4.3348, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.8622862286228623, |
|
"grad_norm": 3.309903621673584, |
|
"learning_rate": 1.0888727943437748e-05, |
|
"loss": 4.2671, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.8640864086408641, |
|
"grad_norm": 3.503133535385132, |
|
"learning_rate": 1.0614601334114099e-05, |
|
"loss": 4.4343, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.8658865886588659, |
|
"grad_norm": 3.389491558074951, |
|
"learning_rate": 1.0343559158570704e-05, |
|
"loss": 4.7319, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.8676867686768677, |
|
"grad_norm": 3.3840572834014893, |
|
"learning_rate": 1.0075622643243499e-05, |
|
"loss": 4.5601, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.8694869486948695, |
|
"grad_norm": 3.25216007232666, |
|
"learning_rate": 9.810812771351335e-06, |
|
"loss": 4.2966, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.8712871287128713, |
|
"grad_norm": 3.635775089263916, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 4.7205, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.873087308730873, |
|
"grad_norm": 3.3356218338012695, |
|
"learning_rate": 9.290655664821296e-06, |
|
"loss": 4.9521, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.8748874887488749, |
|
"grad_norm": 3.6561996936798096, |
|
"learning_rate": 9.035349165841923e-06, |
|
"loss": 4.7275, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.8766876687668766, |
|
"grad_norm": 3.564394235610962, |
|
"learning_rate": 8.783250778424428e-06, |
|
"loss": 4.1435, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.8784878487848785, |
|
"grad_norm": 3.7181074619293213, |
|
"learning_rate": 8.534380245438211e-06, |
|
"loss": 5.1263, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.8802880288028803, |
|
"grad_norm": 3.6209585666656494, |
|
"learning_rate": 8.288757056966018e-06, |
|
"loss": 5.0345, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.8820882088208821, |
|
"grad_norm": 4.174393653869629, |
|
"learning_rate": 8.046400448777574e-06, |
|
"loss": 5.2958, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.8838883888388839, |
|
"grad_norm": 3.6721696853637695, |
|
"learning_rate": 7.807329400823177e-06, |
|
"loss": 5.1729, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.8856885688568857, |
|
"grad_norm": 3.6310222148895264, |
|
"learning_rate": 7.571562635747304e-06, |
|
"loss": 4.2984, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.8874887488748875, |
|
"grad_norm": 3.5798494815826416, |
|
"learning_rate": 7.3391186174223245e-06, |
|
"loss": 4.7273, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.8892889288928892, |
|
"grad_norm": 3.331014633178711, |
|
"learning_rate": 7.1100155495025464e-06, |
|
"loss": 5.1268, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.8910891089108911, |
|
"grad_norm": 3.6740384101867676, |
|
"learning_rate": 6.8842713739986075e-06, |
|
"loss": 5.3065, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.8928892889288929, |
|
"grad_norm": 3.8056387901306152, |
|
"learning_rate": 6.661903769872363e-06, |
|
"loss": 5.405, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.8946894689468947, |
|
"grad_norm": 3.7739763259887695, |
|
"learning_rate": 6.442930151652371e-06, |
|
"loss": 5.9443, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.8964896489648965, |
|
"grad_norm": 3.922481060028076, |
|
"learning_rate": 6.227367668070083e-06, |
|
"loss": 5.1865, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.8982898289828983, |
|
"grad_norm": 3.815195083618164, |
|
"learning_rate": 6.015233200716874e-06, |
|
"loss": 5.4361, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.9000900090009001, |
|
"grad_norm": 4.479306697845459, |
|
"learning_rate": 5.806543362721945e-06, |
|
"loss": 6.0537, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.9018901890189019, |
|
"grad_norm": 25.004121780395508, |
|
"learning_rate": 5.601314497451316e-06, |
|
"loss": 3.6237, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.9036903690369037, |
|
"grad_norm": 3.7974557876586914, |
|
"learning_rate": 5.399562677227876e-06, |
|
"loss": 2.1349, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.9054905490549054, |
|
"grad_norm": 3.4542431831359863, |
|
"learning_rate": 5.201303702072724e-06, |
|
"loss": 2.8379, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.9072907290729073, |
|
"grad_norm": 3.14579176902771, |
|
"learning_rate": 5.006553098467764e-06, |
|
"loss": 2.9554, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.9090909090909091, |
|
"grad_norm": 2.649793863296509, |
|
"learning_rate": 4.8153261181398125e-06, |
|
"loss": 2.2224, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.9108910891089109, |
|
"grad_norm": 3.07043719291687, |
|
"learning_rate": 4.627637736866119e-06, |
|
"loss": 3.5249, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.9126912691269127, |
|
"grad_norm": 3.646077871322632, |
|
"learning_rate": 4.443502653301601e-06, |
|
"loss": 3.5183, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.9144914491449145, |
|
"grad_norm": 3.1195287704467773, |
|
"learning_rate": 4.262935287827696e-06, |
|
"loss": 3.507, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.9162916291629163, |
|
"grad_norm": 3.2357397079467773, |
|
"learning_rate": 4.085949781423048e-06, |
|
"loss": 3.3765, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.918091809180918, |
|
"grad_norm": 3.1325178146362305, |
|
"learning_rate": 3.912559994556086e-06, |
|
"loss": 3.5724, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.9198919891989199, |
|
"grad_norm": 3.3491556644439697, |
|
"learning_rate": 3.7427795060995306e-06, |
|
"loss": 3.8159, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.9216921692169217, |
|
"grad_norm": 3.5712788105010986, |
|
"learning_rate": 3.5766216122670003e-06, |
|
"loss": 3.6621, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.9234923492349235, |
|
"grad_norm": 3.1630241870880127, |
|
"learning_rate": 3.4140993255717126e-06, |
|
"loss": 3.6589, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.9252925292529253, |
|
"grad_norm": 3.174769401550293, |
|
"learning_rate": 3.2552253738074077e-06, |
|
"loss": 3.8955, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.9270927092709271, |
|
"grad_norm": 3.325157880783081, |
|
"learning_rate": 3.1000121990516274e-06, |
|
"loss": 3.8899, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.9288928892889289, |
|
"grad_norm": 3.770094394683838, |
|
"learning_rate": 2.9484719566912735e-06, |
|
"loss": 4.0021, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.9306930693069307, |
|
"grad_norm": 3.7571139335632324, |
|
"learning_rate": 2.800616514470694e-06, |
|
"loss": 4.1601, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.9324932493249325, |
|
"grad_norm": 3.390287160873413, |
|
"learning_rate": 2.656457451562283e-06, |
|
"loss": 4.0102, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.9342934293429342, |
|
"grad_norm": 3.436528444290161, |
|
"learning_rate": 2.5160060576596366e-06, |
|
"loss": 4.2173, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.9360936093609361, |
|
"grad_norm": 2.9943151473999023, |
|
"learning_rate": 2.3792733320934346e-06, |
|
"loss": 3.7667, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.9378937893789379, |
|
"grad_norm": 3.1079506874084473, |
|
"learning_rate": 2.2462699829700286e-06, |
|
"loss": 3.9543, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.9396939693969397, |
|
"grad_norm": 3.511355400085449, |
|
"learning_rate": 2.1170064263328514e-06, |
|
"loss": 4.3681, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.9414941494149415, |
|
"grad_norm": 3.2926292419433594, |
|
"learning_rate": 1.991492785346677e-06, |
|
"loss": 4.2362, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.9432943294329433, |
|
"grad_norm": 3.607163667678833, |
|
"learning_rate": 1.8697388895048518e-06, |
|
"loss": 4.1022, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.9450945094509451, |
|
"grad_norm": 3.624814510345459, |
|
"learning_rate": 1.7517542738595071e-06, |
|
"loss": 3.7475, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.946894689468947, |
|
"grad_norm": 3.673523426055908, |
|
"learning_rate": 1.637548178274806e-06, |
|
"loss": 3.8728, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.9486948694869487, |
|
"grad_norm": 3.480508804321289, |
|
"learning_rate": 1.5271295467033653e-06, |
|
"loss": 4.1873, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.9504950495049505, |
|
"grad_norm": 3.1145169734954834, |
|
"learning_rate": 1.42050702648579e-06, |
|
"loss": 4.2106, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.9522952295229523, |
|
"grad_norm": 3.012479782104492, |
|
"learning_rate": 1.3176889676734693e-06, |
|
"loss": 4.4033, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.9540954095409541, |
|
"grad_norm": 3.321237564086914, |
|
"learning_rate": 1.2186834223746612e-06, |
|
"loss": 3.8453, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.9558955895589559, |
|
"grad_norm": 3.554750680923462, |
|
"learning_rate": 1.1234981441239022e-06, |
|
"loss": 4.2771, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.9576957695769577, |
|
"grad_norm": 3.595700979232788, |
|
"learning_rate": 1.0321405872747603e-06, |
|
"loss": 4.7381, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.9594959495949595, |
|
"grad_norm": 3.4314756393432617, |
|
"learning_rate": 9.446179064161009e-07, |
|
"loss": 4.7025, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.9612961296129613, |
|
"grad_norm": 3.4042766094207764, |
|
"learning_rate": 8.609369558117531e-07, |
|
"loss": 4.5694, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.963096309630963, |
|
"grad_norm": 3.677279472351074, |
|
"learning_rate": 7.81104288863721e-07, |
|
"loss": 4.4108, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.9648964896489649, |
|
"grad_norm": 4.0047712326049805, |
|
"learning_rate": 7.051261575989787e-07, |
|
"loss": 5.2683, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.9666966696669667, |
|
"grad_norm": 3.8668036460876465, |
|
"learning_rate": 6.330085121798279e-07, |
|
"loss": 5.0821, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.9684968496849685, |
|
"grad_norm": 3.524517297744751, |
|
"learning_rate": 5.647570004379432e-07, |
|
"loss": 4.7731, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.9702970297029703, |
|
"grad_norm": 4.2224955558776855, |
|
"learning_rate": 5.003769674320369e-07, |
|
"loss": 5.4911, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.9720972097209721, |
|
"grad_norm": 3.456015110015869, |
|
"learning_rate": 4.3987345502927157e-07, |
|
"loss": 4.9279, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.9738973897389739, |
|
"grad_norm": 4.04163122177124, |
|
"learning_rate": 3.832512015104317e-07, |
|
"loss": 4.8574, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.9756975697569757, |
|
"grad_norm": 3.488652467727661, |
|
"learning_rate": 3.3051464119883136e-07, |
|
"loss": 4.5105, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.9774977497749775, |
|
"grad_norm": 3.6072845458984375, |
|
"learning_rate": 2.816679041130477e-07, |
|
"loss": 5.0124, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.9792979297929792, |
|
"grad_norm": 3.613863229751587, |
|
"learning_rate": 2.3671481564347397e-07, |
|
"loss": 4.7951, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.9810981098109811, |
|
"grad_norm": 3.676605701446533, |
|
"learning_rate": 1.9565889625275946e-07, |
|
"loss": 5.7295, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.9828982898289829, |
|
"grad_norm": 3.8148982524871826, |
|
"learning_rate": 1.5850336120007414e-07, |
|
"loss": 5.0033, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.9846984698469847, |
|
"grad_norm": 3.664438486099243, |
|
"learning_rate": 1.252511202893436e-07, |
|
"loss": 4.9703, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.9864986498649865, |
|
"grad_norm": 3.756563663482666, |
|
"learning_rate": 9.590477764135353e-08, |
|
"loss": 5.3585, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.9882988298829883, |
|
"grad_norm": 3.9945781230926514, |
|
"learning_rate": 7.046663148979616e-08, |
|
"loss": 5.4977, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.9900990099009901, |
|
"grad_norm": 4.365446090698242, |
|
"learning_rate": 4.89386740013198e-08, |
|
"loss": 5.6052, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.991899189918992, |
|
"grad_norm": 3.4431326389312744, |
|
"learning_rate": 3.1322591119481436e-08, |
|
"loss": 2.9021, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.9936993699369937, |
|
"grad_norm": 3.6631813049316406, |
|
"learning_rate": 1.7619762432730113e-08, |
|
"loss": 4.0126, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.9954995499549955, |
|
"grad_norm": 3.126596212387085, |
|
"learning_rate": 7.831261066371109e-09, |
|
"loss": 3.7145, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.9972997299729973, |
|
"grad_norm": 3.9898624420166016, |
|
"learning_rate": 1.9578535984998705e-09, |
|
"loss": 4.723, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.9990999099909991, |
|
"grad_norm": 3.7214019298553467, |
|
"learning_rate": 0.0, |
|
"loss": 5.0888, |
|
"step": 555 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 555, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 239, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.5203413975262822e+18, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|