|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.29428534642903126, |
|
"eval_steps": 500, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0007357133660725781, |
|
"grad_norm": 223.60693359375, |
|
"learning_rate": 1.0000000000000002e-06, |
|
"loss": 20.4521, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.0014714267321451562, |
|
"grad_norm": 115.68708038330078, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 17.0724, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.0022071400982177345, |
|
"grad_norm": 9.73855972290039, |
|
"learning_rate": 3e-06, |
|
"loss": 11.1765, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0029428534642903124, |
|
"grad_norm": 4.239206314086914, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 9.9334, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.0036785668303628907, |
|
"grad_norm": 5.939801216125488, |
|
"learning_rate": 5e-06, |
|
"loss": 9.6538, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.004414280196435469, |
|
"grad_norm": 7.083678245544434, |
|
"learning_rate": 6e-06, |
|
"loss": 9.4877, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0051499935625080465, |
|
"grad_norm": 4.155125617980957, |
|
"learning_rate": 7.000000000000001e-06, |
|
"loss": 9.2998, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.005885706928580625, |
|
"grad_norm": 8.080842018127441, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 9.1653, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.006621420294653203, |
|
"grad_norm": 5.2444353103637695, |
|
"learning_rate": 9e-06, |
|
"loss": 9.0503, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0073571336607257815, |
|
"grad_norm": 5.591372966766357, |
|
"learning_rate": 1e-05, |
|
"loss": 8.9182, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.008092847026798359, |
|
"grad_norm": 6.641251564025879, |
|
"learning_rate": 1.1000000000000001e-05, |
|
"loss": 8.7308, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.008828560392870938, |
|
"grad_norm": 8.2617769241333, |
|
"learning_rate": 1.2e-05, |
|
"loss": 8.4667, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.009564273758943516, |
|
"grad_norm": 8.181135177612305, |
|
"learning_rate": 1.3000000000000001e-05, |
|
"loss": 8.1982, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.010299987125016093, |
|
"grad_norm": 5.930114269256592, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 7.8692, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.011035700491088672, |
|
"grad_norm": 4.546728610992432, |
|
"learning_rate": 1.5e-05, |
|
"loss": 7.5773, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.01177141385716125, |
|
"grad_norm": 4.127258777618408, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 7.3816, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.012507127223233829, |
|
"grad_norm": 4.3951849937438965, |
|
"learning_rate": 1.7000000000000003e-05, |
|
"loss": 7.2603, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.013242840589306406, |
|
"grad_norm": 2.085538625717163, |
|
"learning_rate": 1.8e-05, |
|
"loss": 7.16, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.013978553955378984, |
|
"grad_norm": 3.098052501678467, |
|
"learning_rate": 1.9e-05, |
|
"loss": 7.0236, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.014714267321451563, |
|
"grad_norm": 4.054460525512695, |
|
"learning_rate": 2e-05, |
|
"loss": 7.0169, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.01544998068752414, |
|
"grad_norm": 1.6470402479171753, |
|
"learning_rate": 2.1e-05, |
|
"loss": 6.9933, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.016185694053596718, |
|
"grad_norm": 1.3456400632858276, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 6.9151, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.016921407419669295, |
|
"grad_norm": 1.3457653522491455, |
|
"learning_rate": 2.3000000000000003e-05, |
|
"loss": 6.9246, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.017657120785741876, |
|
"grad_norm": 2.5111804008483887, |
|
"learning_rate": 2.4e-05, |
|
"loss": 6.8615, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.018392834151814454, |
|
"grad_norm": 2.082517147064209, |
|
"learning_rate": 2.5e-05, |
|
"loss": 6.8109, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.01912854751788703, |
|
"grad_norm": 2.9603984355926514, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 6.7036, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.01986426088395961, |
|
"grad_norm": 1.7493945360183716, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 6.6403, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.020599974250032186, |
|
"grad_norm": 3.83150315284729, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 6.6216, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.021335687616104767, |
|
"grad_norm": 2.9999842643737793, |
|
"learning_rate": 2.9e-05, |
|
"loss": 6.6013, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.022071400982177344, |
|
"grad_norm": 2.799868583679199, |
|
"learning_rate": 3e-05, |
|
"loss": 6.4751, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.022807114348249922, |
|
"grad_norm": 3.548393487930298, |
|
"learning_rate": 3.1e-05, |
|
"loss": 6.5082, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.0235428277143225, |
|
"grad_norm": 3.523336410522461, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 6.4566, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.024278541080395077, |
|
"grad_norm": 2.0755739212036133, |
|
"learning_rate": 3.3e-05, |
|
"loss": 6.3776, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.025014254446467658, |
|
"grad_norm": 3.288764238357544, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 6.3559, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.025749967812540235, |
|
"grad_norm": 3.3932132720947266, |
|
"learning_rate": 3.5e-05, |
|
"loss": 6.3234, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.026485681178612813, |
|
"grad_norm": 2.7775251865386963, |
|
"learning_rate": 3.6e-05, |
|
"loss": 6.2551, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.02722139454468539, |
|
"grad_norm": 2.6773834228515625, |
|
"learning_rate": 3.7e-05, |
|
"loss": 6.206, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.027957107910757967, |
|
"grad_norm": 2.556666612625122, |
|
"learning_rate": 3.8e-05, |
|
"loss": 6.1396, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.02869282127683055, |
|
"grad_norm": 3.2724132537841797, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 6.106, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.029428534642903126, |
|
"grad_norm": 2.354327440261841, |
|
"learning_rate": 4e-05, |
|
"loss": 6.1258, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.030164248008975703, |
|
"grad_norm": 1.804317593574524, |
|
"learning_rate": 4.1e-05, |
|
"loss": 6.0516, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.03089996137504828, |
|
"grad_norm": 1.7134747505187988, |
|
"learning_rate": 4.2e-05, |
|
"loss": 5.9582, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.03163567474112086, |
|
"grad_norm": 1.4939428567886353, |
|
"learning_rate": 4.3e-05, |
|
"loss": 5.9328, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.032371388107193436, |
|
"grad_norm": 1.9662060737609863, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 5.8578, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.03310710147326602, |
|
"grad_norm": 1.748353123664856, |
|
"learning_rate": 4.5e-05, |
|
"loss": 5.7874, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.03384281483933859, |
|
"grad_norm": 2.1945955753326416, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 5.7787, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.03457852820541117, |
|
"grad_norm": 2.1684770584106445, |
|
"learning_rate": 4.7e-05, |
|
"loss": 5.7138, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.03531424157148375, |
|
"grad_norm": 1.728945255279541, |
|
"learning_rate": 4.8e-05, |
|
"loss": 5.6774, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.036049954937556326, |
|
"grad_norm": 1.8206098079681396, |
|
"learning_rate": 4.9e-05, |
|
"loss": 5.6214, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.03678566830362891, |
|
"grad_norm": 1.5937659740447998, |
|
"learning_rate": 5e-05, |
|
"loss": 5.5198, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.03752138166970148, |
|
"grad_norm": 1.617659568786621, |
|
"learning_rate": 5.1000000000000006e-05, |
|
"loss": 5.4818, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.03825709503577406, |
|
"grad_norm": 1.7942918539047241, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 5.5156, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.03899280840184664, |
|
"grad_norm": 1.830945611000061, |
|
"learning_rate": 5.300000000000001e-05, |
|
"loss": 5.3836, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.03972852176791922, |
|
"grad_norm": 1.5295898914337158, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 5.3989, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.0404642351339918, |
|
"grad_norm": 1.527603268623352, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 5.3724, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.04119994850006437, |
|
"grad_norm": 1.6225807666778564, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 5.2788, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.04193566186613695, |
|
"grad_norm": 2.0076022148132324, |
|
"learning_rate": 5.6999999999999996e-05, |
|
"loss": 5.2227, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.042671375232209534, |
|
"grad_norm": 1.5891458988189697, |
|
"learning_rate": 5.8e-05, |
|
"loss": 5.2265, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.04340708859828211, |
|
"grad_norm": 1.5814419984817505, |
|
"learning_rate": 5.9e-05, |
|
"loss": 5.1948, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.04414280196435469, |
|
"grad_norm": 1.7139537334442139, |
|
"learning_rate": 6e-05, |
|
"loss": 5.1036, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.04487851533042726, |
|
"grad_norm": 1.2224972248077393, |
|
"learning_rate": 6.1e-05, |
|
"loss": 5.1932, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.045614228696499844, |
|
"grad_norm": 1.6268278360366821, |
|
"learning_rate": 6.2e-05, |
|
"loss": 5.068, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.046349942062572425, |
|
"grad_norm": 1.4944841861724854, |
|
"learning_rate": 6.3e-05, |
|
"loss": 5.032, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.047085655428645, |
|
"grad_norm": 1.442824363708496, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 5.0077, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.04782136879471758, |
|
"grad_norm": 1.6031944751739502, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 4.9808, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.04855708216079015, |
|
"grad_norm": 1.6006144285202026, |
|
"learning_rate": 6.6e-05, |
|
"loss": 4.9239, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.049292795526862734, |
|
"grad_norm": 1.3961182832717896, |
|
"learning_rate": 6.7e-05, |
|
"loss": 4.9158, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.050028508892935315, |
|
"grad_norm": 1.4314249753952026, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 4.8126, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.05076422225900789, |
|
"grad_norm": 1.4509848356246948, |
|
"learning_rate": 6.9e-05, |
|
"loss": 4.8713, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.05149993562508047, |
|
"grad_norm": 1.5618196725845337, |
|
"learning_rate": 7e-05, |
|
"loss": 4.7814, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.052235648991153044, |
|
"grad_norm": 1.5423110723495483, |
|
"learning_rate": 7.1e-05, |
|
"loss": 4.7409, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.052971362357225625, |
|
"grad_norm": 1.2559527158737183, |
|
"learning_rate": 7.2e-05, |
|
"loss": 4.73, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.053707075723298206, |
|
"grad_norm": 1.1119533777236938, |
|
"learning_rate": 7.3e-05, |
|
"loss": 4.7055, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.05444278908937078, |
|
"grad_norm": 1.2400840520858765, |
|
"learning_rate": 7.4e-05, |
|
"loss": 4.6501, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.05517850245544336, |
|
"grad_norm": 1.2191784381866455, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 4.6239, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.055914215821515935, |
|
"grad_norm": 1.269572377204895, |
|
"learning_rate": 7.6e-05, |
|
"loss": 4.6498, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.056649929187588516, |
|
"grad_norm": 1.275608777999878, |
|
"learning_rate": 7.7e-05, |
|
"loss": 4.6326, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.0573856425536611, |
|
"grad_norm": 1.3013856410980225, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 4.5869, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.05812135591973367, |
|
"grad_norm": 1.37381112575531, |
|
"learning_rate": 7.900000000000001e-05, |
|
"loss": 4.5785, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.05885706928580625, |
|
"grad_norm": 1.2166635990142822, |
|
"learning_rate": 8e-05, |
|
"loss": 4.5126, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.059592782651878826, |
|
"grad_norm": 1.4644134044647217, |
|
"learning_rate": 8.1e-05, |
|
"loss": 4.5228, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.06032849601795141, |
|
"grad_norm": 1.206525206565857, |
|
"learning_rate": 8.2e-05, |
|
"loss": 4.4874, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.06106420938402399, |
|
"grad_norm": 1.1188849210739136, |
|
"learning_rate": 8.3e-05, |
|
"loss": 4.4619, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.06179992275009656, |
|
"grad_norm": 1.2260874509811401, |
|
"learning_rate": 8.4e-05, |
|
"loss": 4.4118, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.06253563611616914, |
|
"grad_norm": 1.19864022731781, |
|
"learning_rate": 8.5e-05, |
|
"loss": 4.3522, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.06327134948224172, |
|
"grad_norm": 1.1710054874420166, |
|
"learning_rate": 8.6e-05, |
|
"loss": 4.4048, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.0640070628483143, |
|
"grad_norm": 0.8916666507720947, |
|
"learning_rate": 8.7e-05, |
|
"loss": 4.3865, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.06474277621438687, |
|
"grad_norm": 1.135651707649231, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 4.3554, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.06547848958045946, |
|
"grad_norm": 0.9037818312644958, |
|
"learning_rate": 8.900000000000001e-05, |
|
"loss": 4.4055, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.06621420294653203, |
|
"grad_norm": 1.1230660676956177, |
|
"learning_rate": 9e-05, |
|
"loss": 4.3131, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.06694991631260461, |
|
"grad_norm": 1.149318814277649, |
|
"learning_rate": 9.1e-05, |
|
"loss": 4.2923, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.06768562967867718, |
|
"grad_norm": 1.1154922246932983, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 4.2774, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.06842134304474977, |
|
"grad_norm": 1.0399177074432373, |
|
"learning_rate": 9.300000000000001e-05, |
|
"loss": 4.2766, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.06915705641082234, |
|
"grad_norm": 1.0165122747421265, |
|
"learning_rate": 9.4e-05, |
|
"loss": 4.2479, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.06989276977689492, |
|
"grad_norm": 1.3305314779281616, |
|
"learning_rate": 9.5e-05, |
|
"loss": 4.2443, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.0706284831429675, |
|
"grad_norm": 0.8728047609329224, |
|
"learning_rate": 9.6e-05, |
|
"loss": 4.3267, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.07136419650904008, |
|
"grad_norm": 1.117966890335083, |
|
"learning_rate": 9.7e-05, |
|
"loss": 4.2435, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.07209990987511265, |
|
"grad_norm": 0.934165894985199, |
|
"learning_rate": 9.8e-05, |
|
"loss": 4.2125, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.07283562324118524, |
|
"grad_norm": 1.0023953914642334, |
|
"learning_rate": 9.900000000000001e-05, |
|
"loss": 4.1705, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.07357133660725781, |
|
"grad_norm": 0.9707255959510803, |
|
"learning_rate": 0.0001, |
|
"loss": 4.202, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.07430704997333039, |
|
"grad_norm": 0.988510012626648, |
|
"learning_rate": 9.999998440456413e-05, |
|
"loss": 4.1781, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.07504276333940296, |
|
"grad_norm": 1.0348843336105347, |
|
"learning_rate": 9.99999376182662e-05, |
|
"loss": 4.1589, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.07577847670547555, |
|
"grad_norm": 1.0255194902420044, |
|
"learning_rate": 9.999985964113542e-05, |
|
"loss": 4.1653, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.07651419007154812, |
|
"grad_norm": 0.831902801990509, |
|
"learning_rate": 9.999975047322044e-05, |
|
"loss": 4.1595, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.0772499034376207, |
|
"grad_norm": 0.964509904384613, |
|
"learning_rate": 9.999961011458933e-05, |
|
"loss": 4.1414, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.07798561680369329, |
|
"grad_norm": 0.796290397644043, |
|
"learning_rate": 9.99994385653297e-05, |
|
"loss": 4.0971, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.07872133016976586, |
|
"grad_norm": 0.8830526471138, |
|
"learning_rate": 9.99992358255485e-05, |
|
"loss": 4.1182, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.07945704353583843, |
|
"grad_norm": 1.0227144956588745, |
|
"learning_rate": 9.999900189537226e-05, |
|
"loss": 4.0917, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.08019275690191102, |
|
"grad_norm": 1.0185173749923706, |
|
"learning_rate": 9.999873677494689e-05, |
|
"loss": 4.1066, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.0809284702679836, |
|
"grad_norm": 0.8834981322288513, |
|
"learning_rate": 9.999844046443776e-05, |
|
"loss": 4.0413, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.08166418363405617, |
|
"grad_norm": 0.9037182331085205, |
|
"learning_rate": 9.999811296402975e-05, |
|
"loss": 4.1012, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.08239989700012874, |
|
"grad_norm": 0.9970824122428894, |
|
"learning_rate": 9.99977542739271e-05, |
|
"loss": 4.0548, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.08313561036620133, |
|
"grad_norm": 0.9786319136619568, |
|
"learning_rate": 9.999736439435364e-05, |
|
"loss": 4.0613, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.0838713237322739, |
|
"grad_norm": 0.8991515636444092, |
|
"learning_rate": 9.999694332555253e-05, |
|
"loss": 4.07, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.08460703709834648, |
|
"grad_norm": 0.8191851377487183, |
|
"learning_rate": 9.999649106778646e-05, |
|
"loss": 4.0169, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.08534275046441907, |
|
"grad_norm": 0.7964187860488892, |
|
"learning_rate": 9.999600762133756e-05, |
|
"loss": 4.0185, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.08607846383049164, |
|
"grad_norm": 0.8330606818199158, |
|
"learning_rate": 9.99954929865074e-05, |
|
"loss": 4.0695, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.08681417719656422, |
|
"grad_norm": 0.8456650376319885, |
|
"learning_rate": 9.999494716361703e-05, |
|
"loss": 3.971, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.0875498905626368, |
|
"grad_norm": 0.7506271600723267, |
|
"learning_rate": 9.999437015300694e-05, |
|
"loss": 4.0053, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.08828560392870938, |
|
"grad_norm": 0.7525317668914795, |
|
"learning_rate": 9.999376195503709e-05, |
|
"loss": 3.9699, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.08902131729478195, |
|
"grad_norm": 0.7871759533882141, |
|
"learning_rate": 9.999312257008685e-05, |
|
"loss": 3.9137, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.08975703066085453, |
|
"grad_norm": 0.8398749828338623, |
|
"learning_rate": 9.999245199855512e-05, |
|
"loss": 3.9667, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.09049274402692711, |
|
"grad_norm": 0.7666105031967163, |
|
"learning_rate": 9.99917502408602e-05, |
|
"loss": 3.9273, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.09122845739299969, |
|
"grad_norm": 0.8470506072044373, |
|
"learning_rate": 9.999101729743985e-05, |
|
"loss": 3.9694, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.09196417075907226, |
|
"grad_norm": 0.7060391306877136, |
|
"learning_rate": 9.999025316875129e-05, |
|
"loss": 3.9618, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.09269988412514485, |
|
"grad_norm": 0.7745850682258606, |
|
"learning_rate": 9.99894578552712e-05, |
|
"loss": 3.9252, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.09343559749121742, |
|
"grad_norm": 0.8749181032180786, |
|
"learning_rate": 9.998863135749575e-05, |
|
"loss": 3.9104, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.09417131085729, |
|
"grad_norm": 0.906620979309082, |
|
"learning_rate": 9.998777367594046e-05, |
|
"loss": 3.9174, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.09490702422336257, |
|
"grad_norm": 0.7657135725021362, |
|
"learning_rate": 9.998688481114039e-05, |
|
"loss": 3.9238, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.09564273758943516, |
|
"grad_norm": 0.6726455688476562, |
|
"learning_rate": 9.998596476365006e-05, |
|
"loss": 3.8952, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.09637845095550773, |
|
"grad_norm": 0.7561966776847839, |
|
"learning_rate": 9.998501353404336e-05, |
|
"loss": 3.9123, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.0971141643215803, |
|
"grad_norm": 0.8008501529693604, |
|
"learning_rate": 9.998403112291372e-05, |
|
"loss": 3.8617, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.0978498776876529, |
|
"grad_norm": 0.7718631029129028, |
|
"learning_rate": 9.998301753087398e-05, |
|
"loss": 3.8859, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.09858559105372547, |
|
"grad_norm": 0.8178690671920776, |
|
"learning_rate": 9.998197275855644e-05, |
|
"loss": 3.8473, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.09932130441979804, |
|
"grad_norm": 0.7664726376533508, |
|
"learning_rate": 9.998089680661282e-05, |
|
"loss": 3.8305, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.10005701778587063, |
|
"grad_norm": 0.8279010653495789, |
|
"learning_rate": 9.997978967571436e-05, |
|
"loss": 3.8802, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.1007927311519432, |
|
"grad_norm": 0.7610680460929871, |
|
"learning_rate": 9.997865136655166e-05, |
|
"loss": 3.8869, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.10152844451801578, |
|
"grad_norm": 0.7211320400238037, |
|
"learning_rate": 9.997748187983487e-05, |
|
"loss": 3.9122, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.10226415788408835, |
|
"grad_norm": 0.7668148279190063, |
|
"learning_rate": 9.997628121629349e-05, |
|
"loss": 3.8238, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.10299987125016094, |
|
"grad_norm": 0.6940723061561584, |
|
"learning_rate": 9.997504937667654e-05, |
|
"loss": 3.8464, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.10373558461623351, |
|
"grad_norm": 0.681667149066925, |
|
"learning_rate": 9.997378636175245e-05, |
|
"loss": 3.8182, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.10447129798230609, |
|
"grad_norm": 0.6430841684341431, |
|
"learning_rate": 9.99724921723091e-05, |
|
"loss": 3.8379, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.10520701134837868, |
|
"grad_norm": 0.6482489705085754, |
|
"learning_rate": 9.997116680915389e-05, |
|
"loss": 3.8117, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 0.10594272471445125, |
|
"grad_norm": 0.6649356484413147, |
|
"learning_rate": 9.996981027311352e-05, |
|
"loss": 3.8452, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.10667843808052382, |
|
"grad_norm": 0.757146954536438, |
|
"learning_rate": 9.996842256503428e-05, |
|
"loss": 3.7886, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 0.10741415144659641, |
|
"grad_norm": 0.6373863816261292, |
|
"learning_rate": 9.99670036857818e-05, |
|
"loss": 3.7975, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 0.10814986481266899, |
|
"grad_norm": 0.6672409176826477, |
|
"learning_rate": 9.996555363624124e-05, |
|
"loss": 3.843, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.10888557817874156, |
|
"grad_norm": 0.6595909595489502, |
|
"learning_rate": 9.996407241731717e-05, |
|
"loss": 3.8397, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 0.10962129154481413, |
|
"grad_norm": 0.725779116153717, |
|
"learning_rate": 9.996256002993358e-05, |
|
"loss": 3.8264, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 0.11035700491088672, |
|
"grad_norm": 0.6590442061424255, |
|
"learning_rate": 9.99610164750339e-05, |
|
"loss": 3.8375, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.1110927182769593, |
|
"grad_norm": 0.707144558429718, |
|
"learning_rate": 9.99594417535811e-05, |
|
"loss": 3.7805, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 0.11182843164303187, |
|
"grad_norm": 0.7135934233665466, |
|
"learning_rate": 9.995783586655745e-05, |
|
"loss": 3.7696, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 0.11256414500910446, |
|
"grad_norm": 0.7143461108207703, |
|
"learning_rate": 9.995619881496474e-05, |
|
"loss": 3.7558, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.11329985837517703, |
|
"grad_norm": 0.6768718957901001, |
|
"learning_rate": 9.995453059982422e-05, |
|
"loss": 3.8221, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 0.1140355717412496, |
|
"grad_norm": 0.6788365244865417, |
|
"learning_rate": 9.995283122217653e-05, |
|
"loss": 3.7792, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 0.1147712851073222, |
|
"grad_norm": 0.6880599856376648, |
|
"learning_rate": 9.995110068308179e-05, |
|
"loss": 3.7191, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.11550699847339477, |
|
"grad_norm": 0.6511822938919067, |
|
"learning_rate": 9.994933898361951e-05, |
|
"loss": 3.7585, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 0.11624271183946734, |
|
"grad_norm": 0.6109036803245544, |
|
"learning_rate": 9.99475461248887e-05, |
|
"loss": 3.7555, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 0.11697842520553992, |
|
"grad_norm": 0.6387907862663269, |
|
"learning_rate": 9.994572210800777e-05, |
|
"loss": 3.7283, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.1177141385716125, |
|
"grad_norm": 0.6535074710845947, |
|
"learning_rate": 9.994386693411455e-05, |
|
"loss": 3.7781, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 0.11844985193768508, |
|
"grad_norm": 0.6718204617500305, |
|
"learning_rate": 9.994198060436637e-05, |
|
"loss": 3.7622, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 0.11918556530375765, |
|
"grad_norm": 0.75942063331604, |
|
"learning_rate": 9.994006311993991e-05, |
|
"loss": 3.6595, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.11992127866983024, |
|
"grad_norm": 0.6169456243515015, |
|
"learning_rate": 9.993811448203136e-05, |
|
"loss": 3.6921, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 0.12065699203590281, |
|
"grad_norm": 0.6598173379898071, |
|
"learning_rate": 9.993613469185631e-05, |
|
"loss": 3.6957, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 0.12139270540197539, |
|
"grad_norm": 0.6640283465385437, |
|
"learning_rate": 9.993412375064979e-05, |
|
"loss": 3.7507, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.12212841876804797, |
|
"grad_norm": 0.720782458782196, |
|
"learning_rate": 9.993208165966623e-05, |
|
"loss": 3.741, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 0.12286413213412055, |
|
"grad_norm": 0.6343806385993958, |
|
"learning_rate": 9.993000842017957e-05, |
|
"loss": 3.7112, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 0.12359984550019312, |
|
"grad_norm": 0.72925865650177, |
|
"learning_rate": 9.99279040334831e-05, |
|
"loss": 3.7455, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.1243355588662657, |
|
"grad_norm": 0.7056775093078613, |
|
"learning_rate": 9.99257685008896e-05, |
|
"loss": 3.7007, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 0.12507127223233827, |
|
"grad_norm": 0.6219266057014465, |
|
"learning_rate": 9.992360182373122e-05, |
|
"loss": 3.6704, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 0.12580698559841086, |
|
"grad_norm": 0.8017805814743042, |
|
"learning_rate": 9.992140400335958e-05, |
|
"loss": 3.7098, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.12654269896448345, |
|
"grad_norm": 0.6517687439918518, |
|
"learning_rate": 9.991917504114574e-05, |
|
"loss": 3.7145, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 0.127278412330556, |
|
"grad_norm": 0.6560338735580444, |
|
"learning_rate": 9.991691493848015e-05, |
|
"loss": 3.7046, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 0.1280141256966286, |
|
"grad_norm": 0.7055456638336182, |
|
"learning_rate": 9.991462369677267e-05, |
|
"loss": 3.6855, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.12874983906270118, |
|
"grad_norm": 0.6441034078598022, |
|
"learning_rate": 9.991230131745268e-05, |
|
"loss": 3.7338, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 0.12948555242877374, |
|
"grad_norm": 0.6746709942817688, |
|
"learning_rate": 9.990994780196889e-05, |
|
"loss": 3.7237, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 0.13022126579484633, |
|
"grad_norm": 0.6609126925468445, |
|
"learning_rate": 9.990756315178945e-05, |
|
"loss": 3.7393, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.13095697916091892, |
|
"grad_norm": 0.648073673248291, |
|
"learning_rate": 9.990514736840197e-05, |
|
"loss": 3.7057, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 0.13169269252699148, |
|
"grad_norm": 0.6735105514526367, |
|
"learning_rate": 9.990270045331344e-05, |
|
"loss": 3.7574, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 0.13242840589306407, |
|
"grad_norm": 0.6125868558883667, |
|
"learning_rate": 9.99002224080503e-05, |
|
"loss": 3.6548, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.13316411925913665, |
|
"grad_norm": 0.5899696946144104, |
|
"learning_rate": 9.98977132341584e-05, |
|
"loss": 3.624, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.13389983262520921, |
|
"grad_norm": 0.6936771273612976, |
|
"learning_rate": 9.989517293320299e-05, |
|
"loss": 3.7022, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.1346355459912818, |
|
"grad_norm": 0.6176466345787048, |
|
"learning_rate": 9.989260150676876e-05, |
|
"loss": 3.697, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.13537125935735436, |
|
"grad_norm": 0.6583747863769531, |
|
"learning_rate": 9.988999895645981e-05, |
|
"loss": 3.7048, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.13610697272342695, |
|
"grad_norm": 0.6942030191421509, |
|
"learning_rate": 9.988736528389969e-05, |
|
"loss": 3.6784, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.13684268608949954, |
|
"grad_norm": 0.6578043103218079, |
|
"learning_rate": 9.988470049073126e-05, |
|
"loss": 3.6411, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.1375783994555721, |
|
"grad_norm": 0.648202657699585, |
|
"learning_rate": 9.988200457861694e-05, |
|
"loss": 3.6472, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.13831411282164469, |
|
"grad_norm": 0.6567854285240173, |
|
"learning_rate": 9.987927754923843e-05, |
|
"loss": 3.6734, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.13904982618771727, |
|
"grad_norm": 0.6515029668807983, |
|
"learning_rate": 9.987651940429695e-05, |
|
"loss": 3.6463, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.13978553955378983, |
|
"grad_norm": 0.597316324710846, |
|
"learning_rate": 9.987373014551302e-05, |
|
"loss": 3.5951, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.14052125291986242, |
|
"grad_norm": 0.6288499236106873, |
|
"learning_rate": 9.987090977462668e-05, |
|
"loss": 3.675, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.141256966285935, |
|
"grad_norm": 0.6451112627983093, |
|
"learning_rate": 9.986805829339729e-05, |
|
"loss": 3.6552, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.14199267965200757, |
|
"grad_norm": 0.6517444252967834, |
|
"learning_rate": 9.986517570360368e-05, |
|
"loss": 3.6206, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.14272839301808016, |
|
"grad_norm": 0.6413518786430359, |
|
"learning_rate": 9.986226200704404e-05, |
|
"loss": 3.6848, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.14346410638415275, |
|
"grad_norm": 0.6431183815002441, |
|
"learning_rate": 9.9859317205536e-05, |
|
"loss": 3.6339, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.1441998197502253, |
|
"grad_norm": 0.7160407900810242, |
|
"learning_rate": 9.985634130091657e-05, |
|
"loss": 3.6306, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.1449355331162979, |
|
"grad_norm": 0.602085292339325, |
|
"learning_rate": 9.985333429504216e-05, |
|
"loss": 3.6556, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.14567124648237048, |
|
"grad_norm": 0.6392213702201843, |
|
"learning_rate": 9.985029618978863e-05, |
|
"loss": 3.6195, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.14640695984844304, |
|
"grad_norm": 0.5774185657501221, |
|
"learning_rate": 9.984722698705115e-05, |
|
"loss": 3.6463, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.14714267321451563, |
|
"grad_norm": 0.6097477674484253, |
|
"learning_rate": 9.984412668874442e-05, |
|
"loss": 3.6759, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.1478783865805882, |
|
"grad_norm": 0.5511993765830994, |
|
"learning_rate": 9.984099529680237e-05, |
|
"loss": 3.6254, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.14861409994666078, |
|
"grad_norm": 0.6293554306030273, |
|
"learning_rate": 9.983783281317847e-05, |
|
"loss": 3.6285, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.14934981331273336, |
|
"grad_norm": 0.5784502625465393, |
|
"learning_rate": 9.983463923984551e-05, |
|
"loss": 3.5952, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.15008552667880593, |
|
"grad_norm": 0.5587512254714966, |
|
"learning_rate": 9.983141457879572e-05, |
|
"loss": 3.5877, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.1508212400448785, |
|
"grad_norm": 0.5835023522377014, |
|
"learning_rate": 9.982815883204068e-05, |
|
"loss": 3.6139, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.1515569534109511, |
|
"grad_norm": 0.6096065044403076, |
|
"learning_rate": 9.982487200161139e-05, |
|
"loss": 3.6261, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.15229266677702366, |
|
"grad_norm": 0.6385859847068787, |
|
"learning_rate": 9.982155408955822e-05, |
|
"loss": 3.6228, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.15302838014309625, |
|
"grad_norm": 0.6276549100875854, |
|
"learning_rate": 9.981820509795096e-05, |
|
"loss": 3.6525, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.15376409350916884, |
|
"grad_norm": 0.5835126638412476, |
|
"learning_rate": 9.981482502887878e-05, |
|
"loss": 3.6081, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.1544998068752414, |
|
"grad_norm": 0.6056969165802002, |
|
"learning_rate": 9.981141388445019e-05, |
|
"loss": 3.6241, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.15523552024131398, |
|
"grad_norm": 0.6270286440849304, |
|
"learning_rate": 9.980797166679314e-05, |
|
"loss": 3.6231, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.15597123360738657, |
|
"grad_norm": 0.6744200587272644, |
|
"learning_rate": 9.980449837805494e-05, |
|
"loss": 3.5789, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.15670694697345913, |
|
"grad_norm": 0.5492793321609497, |
|
"learning_rate": 9.98009940204023e-05, |
|
"loss": 3.58, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.15744266033953172, |
|
"grad_norm": 0.601390540599823, |
|
"learning_rate": 9.979745859602129e-05, |
|
"loss": 3.6059, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.1581783737056043, |
|
"grad_norm": 0.6107606887817383, |
|
"learning_rate": 9.979389210711737e-05, |
|
"loss": 3.5928, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.15891408707167687, |
|
"grad_norm": 0.6118071675300598, |
|
"learning_rate": 9.979029455591538e-05, |
|
"loss": 3.5771, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.15964980043774946, |
|
"grad_norm": 0.6007488369941711, |
|
"learning_rate": 9.978666594465953e-05, |
|
"loss": 3.5729, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.16038551380382204, |
|
"grad_norm": 0.5910535454750061, |
|
"learning_rate": 9.978300627561343e-05, |
|
"loss": 3.5953, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.1611212271698946, |
|
"grad_norm": 0.5529370903968811, |
|
"learning_rate": 9.977931555106002e-05, |
|
"loss": 3.6167, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.1618569405359672, |
|
"grad_norm": 0.6961104273796082, |
|
"learning_rate": 9.977559377330163e-05, |
|
"loss": 3.5541, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.16259265390203975, |
|
"grad_norm": 0.5334804654121399, |
|
"learning_rate": 9.977184094466001e-05, |
|
"loss": 3.6368, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.16332836726811234, |
|
"grad_norm": 0.6472899913787842, |
|
"learning_rate": 9.976805706747622e-05, |
|
"loss": 3.5896, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.16406408063418493, |
|
"grad_norm": 0.6276665329933167, |
|
"learning_rate": 9.976424214411071e-05, |
|
"loss": 3.5907, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.1647997940002575, |
|
"grad_norm": 0.5503706932067871, |
|
"learning_rate": 9.976039617694328e-05, |
|
"loss": 3.6037, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.16553550736633008, |
|
"grad_norm": 0.5655608773231506, |
|
"learning_rate": 9.975651916837313e-05, |
|
"loss": 3.6157, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.16627122073240266, |
|
"grad_norm": 0.5357344746589661, |
|
"learning_rate": 9.975261112081879e-05, |
|
"loss": 3.583, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.16700693409847522, |
|
"grad_norm": 0.5251188278198242, |
|
"learning_rate": 9.974867203671819e-05, |
|
"loss": 3.5705, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.1677426474645478, |
|
"grad_norm": 0.5895013809204102, |
|
"learning_rate": 9.974470191852858e-05, |
|
"loss": 3.5648, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.1684783608306204, |
|
"grad_norm": 0.5837428569793701, |
|
"learning_rate": 9.974070076872662e-05, |
|
"loss": 3.5607, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.16921407419669296, |
|
"grad_norm": 0.5866957306861877, |
|
"learning_rate": 9.973666858980824e-05, |
|
"loss": 3.5689, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.16994978756276555, |
|
"grad_norm": 0.5927156209945679, |
|
"learning_rate": 9.973260538428884e-05, |
|
"loss": 3.5806, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.17068550092883814, |
|
"grad_norm": 0.5668119788169861, |
|
"learning_rate": 9.972851115470307e-05, |
|
"loss": 3.5247, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.1714212142949107, |
|
"grad_norm": 0.5907779335975647, |
|
"learning_rate": 9.972438590360503e-05, |
|
"loss": 3.5757, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.17215692766098328, |
|
"grad_norm": 0.5702281594276428, |
|
"learning_rate": 9.97202296335681e-05, |
|
"loss": 3.5698, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.17289264102705587, |
|
"grad_norm": 0.5785143375396729, |
|
"learning_rate": 9.971604234718504e-05, |
|
"loss": 3.5547, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.17362835439312843, |
|
"grad_norm": 0.5959532856941223, |
|
"learning_rate": 9.971182404706792e-05, |
|
"loss": 3.4787, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.17436406775920102, |
|
"grad_norm": 0.5923709869384766, |
|
"learning_rate": 9.970757473584824e-05, |
|
"loss": 3.5127, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.1750997811252736, |
|
"grad_norm": 0.6314843893051147, |
|
"learning_rate": 9.970329441617676e-05, |
|
"loss": 3.5123, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.17583549449134617, |
|
"grad_norm": 0.6579912304878235, |
|
"learning_rate": 9.969898309072363e-05, |
|
"loss": 3.5295, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.17657120785741875, |
|
"grad_norm": 0.5852422118186951, |
|
"learning_rate": 9.969464076217831e-05, |
|
"loss": 3.5761, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.17730692122349132, |
|
"grad_norm": 0.5590935945510864, |
|
"learning_rate": 9.969026743324967e-05, |
|
"loss": 3.5697, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.1780426345895639, |
|
"grad_norm": 0.5512566566467285, |
|
"learning_rate": 9.968586310666583e-05, |
|
"loss": 3.5069, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.1787783479556365, |
|
"grad_norm": 0.6014286279678345, |
|
"learning_rate": 9.968142778517429e-05, |
|
"loss": 3.5985, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.17951406132170905, |
|
"grad_norm": 0.5852993726730347, |
|
"learning_rate": 9.967696147154187e-05, |
|
"loss": 3.4978, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.18024977468778164, |
|
"grad_norm": 0.5915689468383789, |
|
"learning_rate": 9.967246416855475e-05, |
|
"loss": 3.5926, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.18098548805385423, |
|
"grad_norm": 0.5336611866950989, |
|
"learning_rate": 9.966793587901844e-05, |
|
"loss": 3.5764, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.1817212014199268, |
|
"grad_norm": 0.6106781959533691, |
|
"learning_rate": 9.966337660575775e-05, |
|
"loss": 3.5095, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.18245691478599937, |
|
"grad_norm": 0.5323026180267334, |
|
"learning_rate": 9.965878635161682e-05, |
|
"loss": 3.4732, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.18319262815207196, |
|
"grad_norm": 0.5882947444915771, |
|
"learning_rate": 9.965416511945915e-05, |
|
"loss": 3.56, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.18392834151814452, |
|
"grad_norm": 0.5978416800498962, |
|
"learning_rate": 9.964951291216755e-05, |
|
"loss": 3.4982, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.1846640548842171, |
|
"grad_norm": 0.5574873089790344, |
|
"learning_rate": 9.964482973264413e-05, |
|
"loss": 3.5024, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.1853997682502897, |
|
"grad_norm": 0.6445927023887634, |
|
"learning_rate": 9.964011558381035e-05, |
|
"loss": 3.4935, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.18613548161636226, |
|
"grad_norm": 0.5893427729606628, |
|
"learning_rate": 9.963537046860697e-05, |
|
"loss": 3.5344, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.18687119498243485, |
|
"grad_norm": 0.5860798954963684, |
|
"learning_rate": 9.963059438999409e-05, |
|
"loss": 3.5498, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.18760690834850743, |
|
"grad_norm": 0.5362831950187683, |
|
"learning_rate": 9.962578735095109e-05, |
|
"loss": 3.4655, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.18834262171458, |
|
"grad_norm": 0.5293241143226624, |
|
"learning_rate": 9.96209493544767e-05, |
|
"loss": 3.5236, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.18907833508065258, |
|
"grad_norm": 0.6069090366363525, |
|
"learning_rate": 9.961608040358894e-05, |
|
"loss": 3.5332, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.18981404844672514, |
|
"grad_norm": 0.5620616674423218, |
|
"learning_rate": 9.961118050132517e-05, |
|
"loss": 3.4838, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.19054976181279773, |
|
"grad_norm": 0.5729257464408875, |
|
"learning_rate": 9.9606249650742e-05, |
|
"loss": 3.4809, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.19128547517887032, |
|
"grad_norm": 0.5876139998435974, |
|
"learning_rate": 9.96012878549154e-05, |
|
"loss": 3.4935, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.19202118854494288, |
|
"grad_norm": 0.5324752330780029, |
|
"learning_rate": 9.959629511694061e-05, |
|
"loss": 3.5621, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.19275690191101547, |
|
"grad_norm": 0.5787373781204224, |
|
"learning_rate": 9.959127143993219e-05, |
|
"loss": 3.4896, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.19349261527708805, |
|
"grad_norm": 0.5609232783317566, |
|
"learning_rate": 9.958621682702403e-05, |
|
"loss": 3.4973, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.1942283286431606, |
|
"grad_norm": 0.5869426131248474, |
|
"learning_rate": 9.958113128136923e-05, |
|
"loss": 3.5203, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.1949640420092332, |
|
"grad_norm": 0.609747052192688, |
|
"learning_rate": 9.957601480614029e-05, |
|
"loss": 3.5297, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.1956997553753058, |
|
"grad_norm": 0.5092261433601379, |
|
"learning_rate": 9.957086740452894e-05, |
|
"loss": 3.4951, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.19643546874137835, |
|
"grad_norm": 0.5409265756607056, |
|
"learning_rate": 9.956568907974621e-05, |
|
"loss": 3.528, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.19717118210745094, |
|
"grad_norm": 0.5979306101799011, |
|
"learning_rate": 9.956047983502245e-05, |
|
"loss": 3.5356, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.19790689547352353, |
|
"grad_norm": 0.5668174624443054, |
|
"learning_rate": 9.955523967360725e-05, |
|
"loss": 3.5797, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.19864260883959609, |
|
"grad_norm": 0.5992650389671326, |
|
"learning_rate": 9.954996859876953e-05, |
|
"loss": 3.5107, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.19937832220566867, |
|
"grad_norm": 0.5725685358047485, |
|
"learning_rate": 9.954466661379748e-05, |
|
"loss": 3.5161, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.20011403557174126, |
|
"grad_norm": 0.5398104786872864, |
|
"learning_rate": 9.953933372199855e-05, |
|
"loss": 3.5191, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.20084974893781382, |
|
"grad_norm": 0.5120781064033508, |
|
"learning_rate": 9.953396992669953e-05, |
|
"loss": 3.4936, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.2015854623038864, |
|
"grad_norm": 0.5185467004776001, |
|
"learning_rate": 9.952857523124641e-05, |
|
"loss": 3.4757, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.202321175669959, |
|
"grad_norm": 0.6107959747314453, |
|
"learning_rate": 9.952314963900453e-05, |
|
"loss": 3.4972, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.20305688903603156, |
|
"grad_norm": 0.5769765377044678, |
|
"learning_rate": 9.951769315335844e-05, |
|
"loss": 3.4983, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.20379260240210414, |
|
"grad_norm": 0.5694507956504822, |
|
"learning_rate": 9.951220577771201e-05, |
|
"loss": 3.5063, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.2045283157681767, |
|
"grad_norm": 0.5544769763946533, |
|
"learning_rate": 9.950668751548835e-05, |
|
"loss": 3.507, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.2052640291342493, |
|
"grad_norm": 0.5395680665969849, |
|
"learning_rate": 9.950113837012984e-05, |
|
"loss": 3.5315, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.20599974250032188, |
|
"grad_norm": 0.575812816619873, |
|
"learning_rate": 9.949555834509817e-05, |
|
"loss": 3.4873, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.20673545586639444, |
|
"grad_norm": 0.5248579978942871, |
|
"learning_rate": 9.948994744387419e-05, |
|
"loss": 3.4849, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.20747116923246703, |
|
"grad_norm": 0.5287348031997681, |
|
"learning_rate": 9.948430566995816e-05, |
|
"loss": 3.5028, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.20820688259853962, |
|
"grad_norm": 0.5448064804077148, |
|
"learning_rate": 9.947863302686945e-05, |
|
"loss": 3.4801, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.20894259596461218, |
|
"grad_norm": 0.583583652973175, |
|
"learning_rate": 9.947292951814679e-05, |
|
"loss": 3.4899, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.20967830933068476, |
|
"grad_norm": 0.5897386074066162, |
|
"learning_rate": 9.946719514734813e-05, |
|
"loss": 3.5659, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.21041402269675735, |
|
"grad_norm": 0.5261275172233582, |
|
"learning_rate": 9.946142991805062e-05, |
|
"loss": 3.4662, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.2111497360628299, |
|
"grad_norm": 0.5870264768600464, |
|
"learning_rate": 9.945563383385079e-05, |
|
"loss": 3.5123, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.2118854494289025, |
|
"grad_norm": 0.5746486186981201, |
|
"learning_rate": 9.944980689836429e-05, |
|
"loss": 3.4351, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.2126211627949751, |
|
"grad_norm": 0.6177893877029419, |
|
"learning_rate": 9.944394911522606e-05, |
|
"loss": 3.5206, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.21335687616104765, |
|
"grad_norm": 0.5619845390319824, |
|
"learning_rate": 9.94380604880903e-05, |
|
"loss": 3.4801, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.21409258952712024, |
|
"grad_norm": 0.5628277063369751, |
|
"learning_rate": 9.943214102063043e-05, |
|
"loss": 3.5564, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.21482830289319282, |
|
"grad_norm": 0.5306517481803894, |
|
"learning_rate": 9.942619071653914e-05, |
|
"loss": 3.5094, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.21556401625926538, |
|
"grad_norm": 0.6212943196296692, |
|
"learning_rate": 9.942020957952831e-05, |
|
"loss": 3.5182, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.21629972962533797, |
|
"grad_norm": 0.5883834362030029, |
|
"learning_rate": 9.941419761332908e-05, |
|
"loss": 3.4715, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.21703544299141056, |
|
"grad_norm": 0.5104158520698547, |
|
"learning_rate": 9.940815482169184e-05, |
|
"loss": 3.4712, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.21777115635748312, |
|
"grad_norm": 0.5349141955375671, |
|
"learning_rate": 9.940208120838616e-05, |
|
"loss": 3.4517, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.2185068697235557, |
|
"grad_norm": 0.555456280708313, |
|
"learning_rate": 9.939597677720089e-05, |
|
"loss": 3.4984, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.21924258308962827, |
|
"grad_norm": 0.5867215991020203, |
|
"learning_rate": 9.938984153194406e-05, |
|
"loss": 3.4821, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.21997829645570086, |
|
"grad_norm": 0.5781378149986267, |
|
"learning_rate": 9.938367547644296e-05, |
|
"loss": 3.4437, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.22071400982177344, |
|
"grad_norm": 0.5571138858795166, |
|
"learning_rate": 9.937747861454407e-05, |
|
"loss": 3.4935, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.221449723187846, |
|
"grad_norm": 0.47858932614326477, |
|
"learning_rate": 9.93712509501131e-05, |
|
"loss": 3.499, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.2221854365539186, |
|
"grad_norm": 0.500126838684082, |
|
"learning_rate": 9.936499248703499e-05, |
|
"loss": 3.4822, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.22292114991999118, |
|
"grad_norm": 0.5339503288269043, |
|
"learning_rate": 9.935870322921387e-05, |
|
"loss": 3.5028, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.22365686328606374, |
|
"grad_norm": 0.5465017557144165, |
|
"learning_rate": 9.93523831805731e-05, |
|
"loss": 3.4338, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.22439257665213633, |
|
"grad_norm": 0.5432729125022888, |
|
"learning_rate": 9.934603234505519e-05, |
|
"loss": 3.4831, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.22512829001820892, |
|
"grad_norm": 0.5051527619361877, |
|
"learning_rate": 9.933965072662197e-05, |
|
"loss": 3.4473, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.22586400338428148, |
|
"grad_norm": 0.5150954127311707, |
|
"learning_rate": 9.933323832925437e-05, |
|
"loss": 3.4755, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.22659971675035406, |
|
"grad_norm": 0.5967200398445129, |
|
"learning_rate": 9.932679515695253e-05, |
|
"loss": 3.4938, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.22733543011642665, |
|
"grad_norm": 0.5499495267868042, |
|
"learning_rate": 9.932032121373587e-05, |
|
"loss": 3.5033, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.2280711434824992, |
|
"grad_norm": 0.502854585647583, |
|
"learning_rate": 9.931381650364291e-05, |
|
"loss": 3.3716, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.2288068568485718, |
|
"grad_norm": 0.5126801133155823, |
|
"learning_rate": 9.93072810307314e-05, |
|
"loss": 3.4727, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.2295425702146444, |
|
"grad_norm": 0.48045796155929565, |
|
"learning_rate": 9.930071479907831e-05, |
|
"loss": 3.4312, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.23027828358071695, |
|
"grad_norm": 0.5664313435554504, |
|
"learning_rate": 9.929411781277974e-05, |
|
"loss": 3.4225, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.23101399694678953, |
|
"grad_norm": 0.5236510634422302, |
|
"learning_rate": 9.928749007595101e-05, |
|
"loss": 3.4255, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.23174971031286212, |
|
"grad_norm": 0.5875115990638733, |
|
"learning_rate": 9.928083159272666e-05, |
|
"loss": 3.4699, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.23248542367893468, |
|
"grad_norm": 0.5420896410942078, |
|
"learning_rate": 9.92741423672603e-05, |
|
"loss": 3.3887, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.23322113704500727, |
|
"grad_norm": 0.5325738191604614, |
|
"learning_rate": 9.926742240372483e-05, |
|
"loss": 3.4807, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.23395685041107983, |
|
"grad_norm": 0.5513830780982971, |
|
"learning_rate": 9.926067170631227e-05, |
|
"loss": 3.4647, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.23469256377715242, |
|
"grad_norm": 0.5124702453613281, |
|
"learning_rate": 9.925389027923382e-05, |
|
"loss": 3.4392, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.235428277143225, |
|
"grad_norm": 0.532706081867218, |
|
"learning_rate": 9.924707812671985e-05, |
|
"loss": 3.4456, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.23616399050929757, |
|
"grad_norm": 0.5108603835105896, |
|
"learning_rate": 9.924023525301991e-05, |
|
"loss": 3.4621, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.23689970387537015, |
|
"grad_norm": 0.5478849411010742, |
|
"learning_rate": 9.92333616624027e-05, |
|
"loss": 3.4602, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.23763541724144274, |
|
"grad_norm": 0.481143057346344, |
|
"learning_rate": 9.922645735915608e-05, |
|
"loss": 3.4647, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.2383711306075153, |
|
"grad_norm": 0.5544288754463196, |
|
"learning_rate": 9.921952234758709e-05, |
|
"loss": 3.4312, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.2391068439735879, |
|
"grad_norm": 0.5568479299545288, |
|
"learning_rate": 9.921255663202189e-05, |
|
"loss": 3.4306, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.23984255733966048, |
|
"grad_norm": 0.5197471380233765, |
|
"learning_rate": 9.92055602168058e-05, |
|
"loss": 3.4488, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.24057827070573304, |
|
"grad_norm": 0.5361900329589844, |
|
"learning_rate": 9.919853310630336e-05, |
|
"loss": 3.395, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.24131398407180563, |
|
"grad_norm": 0.5017499327659607, |
|
"learning_rate": 9.919147530489816e-05, |
|
"loss": 3.4654, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.24204969743787821, |
|
"grad_norm": 0.5076097846031189, |
|
"learning_rate": 9.9184386816993e-05, |
|
"loss": 3.3991, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.24278541080395077, |
|
"grad_norm": 0.5834246873855591, |
|
"learning_rate": 9.917726764700981e-05, |
|
"loss": 3.4058, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.24352112417002336, |
|
"grad_norm": 0.5632571578025818, |
|
"learning_rate": 9.917011779938961e-05, |
|
"loss": 3.4442, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.24425683753609595, |
|
"grad_norm": 0.5473082065582275, |
|
"learning_rate": 9.916293727859265e-05, |
|
"loss": 3.4536, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.2449925509021685, |
|
"grad_norm": 0.5029418468475342, |
|
"learning_rate": 9.915572608909824e-05, |
|
"loss": 3.4263, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.2457282642682411, |
|
"grad_norm": 0.5429378151893616, |
|
"learning_rate": 9.914848423540483e-05, |
|
"loss": 3.4652, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.24646397763431366, |
|
"grad_norm": 0.5347001552581787, |
|
"learning_rate": 9.914121172203005e-05, |
|
"loss": 3.4009, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.24719969100038625, |
|
"grad_norm": 0.538618266582489, |
|
"learning_rate": 9.913390855351058e-05, |
|
"loss": 3.457, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.24793540436645883, |
|
"grad_norm": 0.5618345141410828, |
|
"learning_rate": 9.912657473440232e-05, |
|
"loss": 3.4588, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.2486711177325314, |
|
"grad_norm": 0.5444225072860718, |
|
"learning_rate": 9.911921026928019e-05, |
|
"loss": 3.4401, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.24940683109860398, |
|
"grad_norm": 0.5804809927940369, |
|
"learning_rate": 9.911181516273826e-05, |
|
"loss": 3.3904, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.25014254446467654, |
|
"grad_norm": 0.5047810673713684, |
|
"learning_rate": 9.910438941938978e-05, |
|
"loss": 3.42, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.25087825783074913, |
|
"grad_norm": 0.5213153958320618, |
|
"learning_rate": 9.9096933043867e-05, |
|
"loss": 3.4572, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.2516139711968217, |
|
"grad_norm": 0.49462226033210754, |
|
"learning_rate": 9.908944604082138e-05, |
|
"loss": 3.432, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.2523496845628943, |
|
"grad_norm": 0.5353688597679138, |
|
"learning_rate": 9.908192841492343e-05, |
|
"loss": 3.4174, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.2530853979289669, |
|
"grad_norm": 0.4931166470050812, |
|
"learning_rate": 9.907438017086277e-05, |
|
"loss": 3.4175, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.2538211112950394, |
|
"grad_norm": 0.5079531669616699, |
|
"learning_rate": 9.906680131334813e-05, |
|
"loss": 3.478, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.254556824661112, |
|
"grad_norm": 0.5377930402755737, |
|
"learning_rate": 9.905919184710733e-05, |
|
"loss": 3.4353, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.2552925380271846, |
|
"grad_norm": 0.5525156855583191, |
|
"learning_rate": 9.90515517768873e-05, |
|
"loss": 3.4084, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.2560282513932572, |
|
"grad_norm": 0.5057874321937561, |
|
"learning_rate": 9.904388110745403e-05, |
|
"loss": 3.4503, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.2567639647593298, |
|
"grad_norm": 0.5415982007980347, |
|
"learning_rate": 9.903617984359263e-05, |
|
"loss": 3.412, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.25749967812540236, |
|
"grad_norm": 0.5116530656814575, |
|
"learning_rate": 9.902844799010729e-05, |
|
"loss": 3.3947, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.2582353914914749, |
|
"grad_norm": 0.49354326725006104, |
|
"learning_rate": 9.902068555182124e-05, |
|
"loss": 3.4183, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.2589711048575475, |
|
"grad_norm": 0.5207719206809998, |
|
"learning_rate": 9.901289253357688e-05, |
|
"loss": 3.4225, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.2597068182236201, |
|
"grad_norm": 0.5140487551689148, |
|
"learning_rate": 9.900506894023558e-05, |
|
"loss": 3.4375, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.26044253158969266, |
|
"grad_norm": 0.5068781971931458, |
|
"learning_rate": 9.899721477667785e-05, |
|
"loss": 3.3917, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.26117824495576525, |
|
"grad_norm": 0.5903786420822144, |
|
"learning_rate": 9.898933004780328e-05, |
|
"loss": 3.4376, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.26191395832183784, |
|
"grad_norm": 0.5345304012298584, |
|
"learning_rate": 9.898141475853046e-05, |
|
"loss": 3.4211, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.26264967168791037, |
|
"grad_norm": 0.5153301954269409, |
|
"learning_rate": 9.89734689137971e-05, |
|
"loss": 3.4378, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.26338538505398296, |
|
"grad_norm": 0.48121339082717896, |
|
"learning_rate": 9.896549251855998e-05, |
|
"loss": 3.4369, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.26412109842005554, |
|
"grad_norm": 0.49092957377433777, |
|
"learning_rate": 9.89574855777949e-05, |
|
"loss": 3.3808, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.26485681178612813, |
|
"grad_norm": 0.49230051040649414, |
|
"learning_rate": 9.894944809649671e-05, |
|
"loss": 3.4153, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.2655925251522007, |
|
"grad_norm": 0.542644739151001, |
|
"learning_rate": 9.894138007967935e-05, |
|
"loss": 3.4127, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.2663282385182733, |
|
"grad_norm": 0.49647256731987, |
|
"learning_rate": 9.893328153237578e-05, |
|
"loss": 3.3944, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.26706395188434584, |
|
"grad_norm": 0.4804913103580475, |
|
"learning_rate": 9.892515245963803e-05, |
|
"loss": 3.4299, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.26779966525041843, |
|
"grad_norm": 0.5026209354400635, |
|
"learning_rate": 9.891699286653714e-05, |
|
"loss": 3.4199, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.268535378616491, |
|
"grad_norm": 0.5007442235946655, |
|
"learning_rate": 9.890880275816322e-05, |
|
"loss": 3.4058, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.2692710919825636, |
|
"grad_norm": 0.49342918395996094, |
|
"learning_rate": 9.890058213962538e-05, |
|
"loss": 3.408, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.2700068053486362, |
|
"grad_norm": 0.48717033863067627, |
|
"learning_rate": 9.889233101605184e-05, |
|
"loss": 3.3731, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.2707425187147087, |
|
"grad_norm": 0.5098682045936584, |
|
"learning_rate": 9.888404939258973e-05, |
|
"loss": 3.3768, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.2714782320807813, |
|
"grad_norm": 0.48432040214538574, |
|
"learning_rate": 9.88757372744053e-05, |
|
"loss": 3.4547, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.2722139454468539, |
|
"grad_norm": 0.487289160490036, |
|
"learning_rate": 9.886739466668379e-05, |
|
"loss": 3.4094, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.2729496588129265, |
|
"grad_norm": 0.5253422260284424, |
|
"learning_rate": 9.885902157462948e-05, |
|
"loss": 3.445, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.2736853721789991, |
|
"grad_norm": 0.5656958222389221, |
|
"learning_rate": 9.885061800346563e-05, |
|
"loss": 3.3925, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.27442108554507166, |
|
"grad_norm": 0.5607571601867676, |
|
"learning_rate": 9.884218395843452e-05, |
|
"loss": 3.386, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.2751567989111442, |
|
"grad_norm": 0.48992305994033813, |
|
"learning_rate": 9.883371944479749e-05, |
|
"loss": 3.4261, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.2758925122772168, |
|
"grad_norm": 0.5179588794708252, |
|
"learning_rate": 9.882522446783484e-05, |
|
"loss": 3.3555, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.27662822564328937, |
|
"grad_norm": 0.5124242305755615, |
|
"learning_rate": 9.88166990328459e-05, |
|
"loss": 3.4095, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.27736393900936196, |
|
"grad_norm": 0.5088645815849304, |
|
"learning_rate": 9.880814314514894e-05, |
|
"loss": 3.4152, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.27809965237543455, |
|
"grad_norm": 0.5194174647331238, |
|
"learning_rate": 9.87995568100813e-05, |
|
"loss": 3.3998, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.27883536574150714, |
|
"grad_norm": 0.5403995513916016, |
|
"learning_rate": 9.879094003299928e-05, |
|
"loss": 3.3553, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.27957107910757967, |
|
"grad_norm": 0.5247926712036133, |
|
"learning_rate": 9.87822928192782e-05, |
|
"loss": 3.4321, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.28030679247365226, |
|
"grad_norm": 0.5156255960464478, |
|
"learning_rate": 9.877361517431231e-05, |
|
"loss": 3.3851, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.28104250583972484, |
|
"grad_norm": 0.4857509732246399, |
|
"learning_rate": 9.876490710351489e-05, |
|
"loss": 3.3663, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.28177821920579743, |
|
"grad_norm": 0.5125151872634888, |
|
"learning_rate": 9.875616861231819e-05, |
|
"loss": 3.3637, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.28251393257187, |
|
"grad_norm": 0.47020360827445984, |
|
"learning_rate": 9.874739970617341e-05, |
|
"loss": 3.4038, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.28324964593794255, |
|
"grad_norm": 0.48255953192710876, |
|
"learning_rate": 9.87386003905508e-05, |
|
"loss": 3.3459, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.28398535930401514, |
|
"grad_norm": 0.47846198081970215, |
|
"learning_rate": 9.872977067093947e-05, |
|
"loss": 3.376, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.2847210726700877, |
|
"grad_norm": 0.5100752115249634, |
|
"learning_rate": 9.872091055284756e-05, |
|
"loss": 3.3609, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.2854567860361603, |
|
"grad_norm": 0.5000672936439514, |
|
"learning_rate": 9.87120200418022e-05, |
|
"loss": 3.4512, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.2861924994022329, |
|
"grad_norm": 0.5057271718978882, |
|
"learning_rate": 9.870309914334942e-05, |
|
"loss": 3.3253, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.2869282127683055, |
|
"grad_norm": 0.5031580328941345, |
|
"learning_rate": 9.869414786305424e-05, |
|
"loss": 3.3809, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.287663926134378, |
|
"grad_norm": 0.49000057578086853, |
|
"learning_rate": 9.868516620650062e-05, |
|
"loss": 3.4302, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.2883996395004506, |
|
"grad_norm": 0.4677979052066803, |
|
"learning_rate": 9.867615417929147e-05, |
|
"loss": 3.3782, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.2891353528665232, |
|
"grad_norm": 0.4826011061668396, |
|
"learning_rate": 9.866711178704869e-05, |
|
"loss": 3.3509, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.2898710662325958, |
|
"grad_norm": 0.5133295655250549, |
|
"learning_rate": 9.865803903541301e-05, |
|
"loss": 3.4039, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.2906067795986684, |
|
"grad_norm": 0.5011733174324036, |
|
"learning_rate": 9.864893593004422e-05, |
|
"loss": 3.3433, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.29134249296474096, |
|
"grad_norm": 0.49504631757736206, |
|
"learning_rate": 9.863980247662099e-05, |
|
"loss": 3.3795, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.2920782063308135, |
|
"grad_norm": 0.5216391682624817, |
|
"learning_rate": 9.863063868084093e-05, |
|
"loss": 3.4249, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.2928139196968861, |
|
"grad_norm": 0.5909170508384705, |
|
"learning_rate": 9.862144454842055e-05, |
|
"loss": 3.341, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.29354963306295867, |
|
"grad_norm": 0.5585739016532898, |
|
"learning_rate": 9.861222008509534e-05, |
|
"loss": 3.4334, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.29428534642903126, |
|
"grad_norm": 0.5350683927536011, |
|
"learning_rate": 9.860296529661966e-05, |
|
"loss": 3.4152, |
|
"step": 4000 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 40776, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 4000, |
|
"total_flos": 3.62650798129152e+17, |
|
"train_batch_size": 4, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|