{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.444444444444445,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.008888888888888889, "grad_norm": 10.75, "learning_rate": 2.0000000000000003e-06, "loss": 1.4464, "step": 1},
    {"epoch": 0.017777777777777778, "grad_norm": 13.0, "learning_rate": 4.000000000000001e-06, "loss": 1.2646, "step": 2},
    {"epoch": 0.02666666666666667, "grad_norm": 11.3125, "learning_rate": 6e-06, "loss": 1.4738, "step": 3},
    {"epoch": 0.035555555555555556, "grad_norm": 12.875, "learning_rate": 8.000000000000001e-06, "loss": 1.3848, "step": 4},
    {"epoch": 0.044444444444444446, "grad_norm": 11.0, "learning_rate": 1e-05, "loss": 1.5257, "step": 5},
    {"epoch": 0.05333333333333334, "grad_norm": 11.8125, "learning_rate": 1.2e-05, "loss": 1.4105, "step": 6},
    {"epoch": 0.06222222222222222, "grad_norm": 8.0625, "learning_rate": 1.4000000000000001e-05, "loss": 1.287, "step": 7},
    {"epoch": 0.07111111111111111, "grad_norm": 9.4375, "learning_rate": 1.6000000000000003e-05, "loss": 1.2099, "step": 8},
    {"epoch": 0.08, "grad_norm": 26.25, "learning_rate": 1.8e-05, "loss": 1.311, "step": 9},
    {"epoch": 0.08888888888888889, "grad_norm": 588.0, "learning_rate": 2e-05, "loss": 2.4975, "step": 10},
    {"epoch": 0.09777777777777778, "grad_norm": 73.5, "learning_rate": 2.2000000000000003e-05, "loss": 1.2125, "step": 11},
    {"epoch": 0.10666666666666667, "grad_norm": 9.125, "learning_rate": 2.4e-05, "loss": 1.3688, "step": 12},
    {"epoch": 0.11555555555555555, "grad_norm": 4.96875, "learning_rate": 2.6000000000000002e-05, "loss": 1.2372, "step": 13},
    {"epoch": 0.12444444444444444, "grad_norm": 11.1875, "learning_rate": 2.8000000000000003e-05, "loss": 1.3227, "step": 14},
    {"epoch": 0.13333333333333333, "grad_norm": 5.15625, "learning_rate": 3e-05, "loss": 1.1693, "step": 15},
    {"epoch": 0.14222222222222222, "grad_norm": 4.0625, "learning_rate": 3.2000000000000005e-05, "loss": 1.0922, "step": 16},
    {"epoch": 0.1511111111111111, "grad_norm": 4.65625, "learning_rate": 3.4000000000000007e-05, "loss": 1.0916, "step": 17},
    {"epoch": 0.16, "grad_norm": 5.25, "learning_rate": 3.6e-05, "loss": 1.1219, "step": 18},
    {"epoch": 0.1688888888888889, "grad_norm": 4.875, "learning_rate": 3.8e-05, "loss": 1.2237, "step": 19},
    {"epoch": 0.17777777777777778, "grad_norm": 4.9375, "learning_rate": 4e-05, "loss": 1.0106, "step": 20},
    {"epoch": 0.18666666666666668, "grad_norm": 5.6875, "learning_rate": 4.2e-05, "loss": 1.0266, "step": 21},
    {"epoch": 0.19555555555555557, "grad_norm": 4.59375, "learning_rate": 4.4000000000000006e-05, "loss": 0.9007, "step": 22},
    {"epoch": 0.20444444444444446, "grad_norm": 5.3125, "learning_rate": 4.600000000000001e-05, "loss": 0.9784, "step": 23},
    {"epoch": 0.21333333333333335, "grad_norm": 3.671875, "learning_rate": 4.8e-05, "loss": 1.1803, "step": 24},
    {"epoch": 0.2222222222222222, "grad_norm": 4.75, "learning_rate": 5e-05, "loss": 1.2705, "step": 25},
    {"epoch": 0.2311111111111111, "grad_norm": 4.5, "learning_rate": 5.2000000000000004e-05, "loss": 1.0653, "step": 26},
    {"epoch": 0.24, "grad_norm": 4.25, "learning_rate": 5.4000000000000005e-05, "loss": 0.988, "step": 27},
    {"epoch": 0.24888888888888888, "grad_norm": 13.625, "learning_rate": 5.6000000000000006e-05, "loss": 1.138, "step": 28},
    {"epoch": 0.2577777777777778, "grad_norm": 3.015625, "learning_rate": 5.8e-05, "loss": 0.9095, "step": 29},
    {"epoch": 0.26666666666666666, "grad_norm": 4.1875, "learning_rate": 6e-05, "loss": 1.1132, "step": 30},
    {"epoch": 0.27555555555555555, "grad_norm": 4.8125, "learning_rate": 6.2e-05, "loss": 0.9967, "step": 31},
    {"epoch": 0.28444444444444444, "grad_norm": 3.859375, "learning_rate": 6.400000000000001e-05, "loss": 0.9615, "step": 32},
    {"epoch": 0.29333333333333333, "grad_norm": 4.53125, "learning_rate": 6.6e-05, "loss": 1.0561, "step": 33},
    {"epoch": 0.3022222222222222, "grad_norm": 3.546875, "learning_rate": 6.800000000000001e-05, "loss": 1.1543, "step": 34},
    {"epoch": 0.3111111111111111, "grad_norm": 8.125, "learning_rate": 7e-05, "loss": 1.1004, "step": 35},
    {"epoch": 0.32, "grad_norm": 4.21875, "learning_rate": 7.2e-05, "loss": 1.0279, "step": 36},
    {"epoch": 0.3288888888888889, "grad_norm": 3.40625, "learning_rate": 7.4e-05, "loss": 0.9469, "step": 37},
    {"epoch": 0.3377777777777778, "grad_norm": 10.8125, "learning_rate": 7.6e-05, "loss": 1.0991, "step": 38},
    {"epoch": 0.3466666666666667, "grad_norm": 3.96875, "learning_rate": 7.800000000000001e-05, "loss": 1.0668, "step": 39},
    {"epoch": 0.35555555555555557, "grad_norm": 3.359375, "learning_rate": 8e-05, "loss": 0.9899, "step": 40},
    {"epoch": 0.36444444444444446, "grad_norm": 3.296875, "learning_rate": 8.2e-05, "loss": 1.0636, "step": 41},
    {"epoch": 0.37333333333333335, "grad_norm": 2.96875, "learning_rate": 8.4e-05, "loss": 1.0076, "step": 42},
    {"epoch": 0.38222222222222224, "grad_norm": 3.359375, "learning_rate": 8.6e-05, "loss": 0.9301, "step": 43},
    {"epoch": 0.39111111111111113, "grad_norm": 2.90625, "learning_rate": 8.800000000000001e-05, "loss": 1.171, "step": 44},
    {"epoch": 0.4, "grad_norm": 2.671875, "learning_rate": 9e-05, "loss": 0.9985, "step": 45},
    {"epoch": 0.4088888888888889, "grad_norm": 2.40625, "learning_rate": 9.200000000000001e-05, "loss": 1.034, "step": 46},
    {"epoch": 0.4177777777777778, "grad_norm": 2.140625, "learning_rate": 9.4e-05, "loss": 0.9552, "step": 47},
    {"epoch": 0.4266666666666667, "grad_norm": 2.546875, "learning_rate": 9.6e-05, "loss": 1.2027, "step": 48},
    {"epoch": 0.43555555555555553, "grad_norm": 4.3125, "learning_rate": 9.8e-05, "loss": 1.2453, "step": 49},
    {"epoch": 0.4444444444444444, "grad_norm": 7.15625, "learning_rate": 0.0001, "loss": 1.1497, "step": 50},
    {"epoch": 0.4533333333333333, "grad_norm": 12.6875, "learning_rate": 9.999878153526974e-05, "loss": 1.0698, "step": 51},
    {"epoch": 0.4622222222222222, "grad_norm": 7.625, "learning_rate": 9.999512620046522e-05, "loss": 1.093, "step": 52},
    {"epoch": 0.4711111111111111, "grad_norm": 7.46875, "learning_rate": 9.998903417374228e-05, "loss": 1.0031, "step": 53},
    {"epoch": 0.48, "grad_norm": 7.46875, "learning_rate": 9.998050575201771e-05, "loss": 1.2479, "step": 54},
    {"epoch": 0.4888888888888889, "grad_norm": 7.59375, "learning_rate": 9.99695413509548e-05, "loss": 1.1337, "step": 55},
    {"epoch": 0.49777777777777776, "grad_norm": 7.96875, "learning_rate": 9.995614150494293e-05, "loss": 1.0747, "step": 56},
    {"epoch": 0.5066666666666667, "grad_norm": 9.0, "learning_rate": 9.99403068670717e-05, "loss": 1.1863, "step": 57},
    {"epoch": 0.5155555555555555, "grad_norm": 5.59375, "learning_rate": 9.992203820909906e-05, "loss": 0.8643, "step": 58},
    {"epoch": 0.5244444444444445, "grad_norm": 6.40625, "learning_rate": 9.990133642141359e-05, "loss": 1.0411, "step": 59},
    {"epoch": 0.5333333333333333, "grad_norm": 5.25, "learning_rate": 9.987820251299122e-05, "loss": 1.0888, "step": 60},
    {"epoch": 0.5422222222222223, "grad_norm": 5.03125, "learning_rate": 9.985263761134602e-05, "loss": 0.9589, "step": 61},
    {"epoch": 0.5511111111111111, "grad_norm": 5.75, "learning_rate": 9.982464296247522e-05, "loss": 1.0835, "step": 62},
    {"epoch": 0.56, "grad_norm": 6.5625, "learning_rate": 9.979421993079852e-05, "loss": 0.9976, "step": 63},
    {"epoch": 0.5688888888888889, "grad_norm": 5.8125, "learning_rate": 9.976136999909156e-05, "loss": 1.0755, "step": 64},
    {"epoch": 0.5777777777777777, "grad_norm": 7.34375, "learning_rate": 9.972609476841367e-05, "loss": 1.0238, "step": 65},
    {"epoch": 0.5866666666666667, "grad_norm": 6.03125, "learning_rate": 9.968839595802982e-05, "loss": 0.9071, "step": 66},
    {"epoch": 0.5955555555555555, "grad_norm": 5.96875, "learning_rate": 9.964827540532685e-05, "loss": 0.9407, "step": 67},
    {"epoch": 0.6044444444444445, "grad_norm": 6.53125, "learning_rate": 9.96057350657239e-05, "loss": 1.1039, "step": 68},
    {"epoch": 0.6133333333333333, "grad_norm": 6.28125, "learning_rate": 9.956077701257709e-05, "loss": 1.0651, "step": 69},
    {"epoch": 0.6222222222222222, "grad_norm": 5.4375, "learning_rate": 9.951340343707852e-05, "loss": 1.0068, "step": 70},
    {"epoch": 0.6311111111111111, "grad_norm": 5.28125, "learning_rate": 9.946361664814943e-05, "loss": 0.9485, "step": 71},
    {"epoch": 0.64, "grad_norm": 7.75, "learning_rate": 9.941141907232765e-05, "loss": 1.247, "step": 72},
    {"epoch": 0.6488888888888888, "grad_norm": 4.8125, "learning_rate": 9.93568132536494e-05, "loss": 1.0063, "step": 73},
    {"epoch": 0.6577777777777778, "grad_norm": 6.15625, "learning_rate": 9.929980185352526e-05, "loss": 0.9367, "step": 74},
    {"epoch": 0.6666666666666666, "grad_norm": 6.875, "learning_rate": 9.924038765061042e-05, "loss": 1.05, "step": 75},
    {"epoch": 0.6755555555555556, "grad_norm": 9.375, "learning_rate": 9.917857354066931e-05, "loss": 1.1569, "step": 76},
    {"epoch": 0.6844444444444444, "grad_norm": 5.84375, "learning_rate": 9.911436253643445e-05, "loss": 0.8825, "step": 77},
    {"epoch": 0.6933333333333334, "grad_norm": 5.5, "learning_rate": 9.904775776745958e-05, "loss": 0.8632, "step": 78},
    {"epoch": 0.7022222222222222, "grad_norm": 5.71875, "learning_rate": 9.89787624799672e-05, "loss": 0.9715, "step": 79},
    {"epoch": 0.7111111111111111, "grad_norm": 5.59375, "learning_rate": 9.890738003669029e-05, "loss": 1.0032, "step": 80},
    {"epoch": 0.72, "grad_norm": 6.28125, "learning_rate": 9.88336139167084e-05, "loss": 1.1563, "step": 81},
    {"epoch": 0.7288888888888889, "grad_norm": 7.09375, "learning_rate": 9.875746771527816e-05, "loss": 1.0159, "step": 82},
    {"epoch": 0.7377777777777778, "grad_norm": 6.03125, "learning_rate": 9.867894514365802e-05, "loss": 1.1233, "step": 83},
    {"epoch": 0.7466666666666667, "grad_norm": 6.84375, "learning_rate": 9.859805002892732e-05, "loss": 1.0673, "step": 84},
    {"epoch": 0.7555555555555555, "grad_norm": 6.8125, "learning_rate": 9.851478631379982e-05, "loss": 0.9338, "step": 85},
    {"epoch": 0.7644444444444445, "grad_norm": 6.4375, "learning_rate": 9.842915805643155e-05, "loss": 1.0843, "step": 86},
    {"epoch": 0.7733333333333333, "grad_norm": 5.34375, "learning_rate": 9.834116943022298e-05, "loss": 0.9081, "step": 87},
    {"epoch": 0.7822222222222223, "grad_norm": 5.875, "learning_rate": 9.825082472361557e-05, "loss": 0.9989, "step": 88},
    {"epoch": 0.7911111111111111, "grad_norm": 5.78125, "learning_rate": 9.815812833988291e-05, "loss": 0.8834, "step": 89},
    {"epoch": 0.8, "grad_norm": 5.5625, "learning_rate": 9.806308479691595e-05, "loss": 0.9779, "step": 90},
    {"epoch": 0.8088888888888889, "grad_norm": 6.53125, "learning_rate": 9.796569872700288e-05, "loss": 1.0189, "step": 91},
    {"epoch": 0.8177777777777778, "grad_norm": 6.125, "learning_rate": 9.786597487660337e-05, "loss": 1.0696, "step": 92},
    {"epoch": 0.8266666666666667, "grad_norm": 5.6875, "learning_rate": 9.776391810611718e-05, "loss": 1.059, "step": 93},
    {"epoch": 0.8355555555555556, "grad_norm": 5.4375, "learning_rate": 9.765953338964735e-05, "loss": 0.8577, "step": 94},
    {"epoch": 0.8444444444444444, "grad_norm": 5.84375, "learning_rate": 9.755282581475769e-05, "loss": 0.9671, "step": 95},
    {"epoch": 0.8533333333333334, "grad_norm": 5.875, "learning_rate": 9.744380058222483e-05, "loss": 1.0792, "step": 96},
    {"epoch": 0.8622222222222222, "grad_norm": 7.84375, "learning_rate": 9.733246300578483e-05, "loss": 1.0958, "step": 97},
    {"epoch": 0.8711111111111111, "grad_norm": 6.1875, "learning_rate": 9.721881851187406e-05, "loss": 0.966, "step": 98},
    {"epoch": 0.88, "grad_norm": 5.84375, "learning_rate": 9.710287263936484e-05, "loss": 0.8868, "step": 99},
    {"epoch": 0.8888888888888888, "grad_norm": 1.5703125, "learning_rate": 9.698463103929542e-05, "loss": 1.0577, "step": 100},
    {"epoch": 0.8977777777777778, "grad_norm": 1.265625, "learning_rate": 9.686409947459458e-05, "loss": 0.7904, "step": 101},
    {"epoch": 0.9066666666666666, "grad_norm": 1.703125, "learning_rate": 9.674128381980072e-05, "loss": 0.8927, "step": 102},
    {"epoch": 0.9155555555555556, "grad_norm": 1.890625, "learning_rate": 9.661619006077561e-05, "loss": 1.0477, "step": 103},
    {"epoch": 0.9244444444444444, "grad_norm": 2.1875, "learning_rate": 9.648882429441257e-05, "loss": 1.0107, "step": 104},
    {"epoch": 0.9333333333333333, "grad_norm": 2.0625, "learning_rate": 9.635919272833938e-05, "loss": 1.0277, "step": 105},
    {"epoch": 0.9422222222222222, "grad_norm": 2.34375, "learning_rate": 9.622730168061567e-05, "loss": 0.9528, "step": 106},
    {"epoch": 0.9511111111111111, "grad_norm": 2.140625, "learning_rate": 9.609315757942503e-05, "loss": 0.8783, "step": 107},
    {"epoch": 0.96, "grad_norm": 2.0625, "learning_rate": 9.595676696276172e-05, "loss": 0.991, "step": 108},
    {"epoch": 0.9688888888888889, "grad_norm": 1.453125, "learning_rate": 9.581813647811198e-05, "loss": 1.0017, "step": 109},
    {"epoch": 0.9777777777777777, "grad_norm": 1.4765625, "learning_rate": 9.567727288213005e-05, "loss": 1.0257, "step": 110},
    {"epoch": 0.9866666666666667, "grad_norm": 1.1953125, "learning_rate": 9.553418304030886e-05, "loss": 0.9087, "step": 111},
    {"epoch": 0.9955555555555555, "grad_norm": 1.2421875, "learning_rate": 9.538887392664544e-05, "loss": 1.002, "step": 112},
    {"epoch": 1.0044444444444445, "grad_norm": 1.15625, "learning_rate": 9.524135262330098e-05, "loss": 0.7493, "step": 113},
    {"epoch": 1.0133333333333334, "grad_norm": 1.34375, "learning_rate": 9.50916263202557e-05, "loss": 0.8562, "step": 114},
    {"epoch": 1.0222222222222221, "grad_norm": 1.1328125, "learning_rate": 9.493970231495835e-05, "loss": 0.9812, "step": 115},
    {"epoch": 1.031111111111111, "grad_norm": 0.91015625, "learning_rate": 9.478558801197065e-05, "loss": 0.8069, "step": 116},
    {"epoch": 1.04, "grad_norm": 1.3359375, "learning_rate": 9.462929092260628e-05, "loss": 0.8336, "step": 117},
    {"epoch": 1.048888888888889, "grad_norm": 1.6484375, "learning_rate": 9.447081866456489e-05, "loss": 0.9364, "step": 118},
    {"epoch": 1.0577777777777777, "grad_norm": 1.3984375, "learning_rate": 9.431017896156074e-05, "loss": 0.7365, "step": 119},
    {"epoch": 1.0666666666666667, "grad_norm": 1.2421875, "learning_rate": 9.414737964294636e-05, "loss": 1.0947, "step": 120},
    {"epoch": 1.0755555555555556, "grad_norm": 1.421875, "learning_rate": 9.398242864333083e-05, "loss": 0.9642, "step": 121},
    {"epoch": 1.0844444444444445, "grad_norm": 1.2578125, "learning_rate": 9.381533400219318e-05, "loss": 0.824, "step": 122},
    {"epoch": 1.0933333333333333, "grad_norm": 1.015625, "learning_rate": 9.364610386349049e-05, "loss": 0.814, "step": 123},
    {"epoch": 1.1022222222222222, "grad_norm": 1.359375, "learning_rate": 9.347474647526095e-05, "loss": 0.7891, "step": 124},
    {"epoch": 1.1111111111111112, "grad_norm": 1.265625, "learning_rate": 9.330127018922194e-05, "loss": 0.8481, "step": 125},
    {"epoch": 1.12, "grad_norm": 1.109375, "learning_rate": 9.312568346036288e-05, "loss": 0.7348, "step": 126},
    {"epoch": 1.1288888888888888, "grad_norm": 1.7734375, "learning_rate": 9.294799484653323e-05, "loss": 1.1175, "step": 127},
    {"epoch": 1.1377777777777778, "grad_norm": 1.1953125, "learning_rate": 9.276821300802534e-05, "loss": 1.1577, "step": 128},
    {"epoch": 1.1466666666666667, "grad_norm": 1.328125, "learning_rate": 9.258634670715238e-05, "loss": 0.8026, "step": 129},
    {"epoch": 1.1555555555555554, "grad_norm": 1.875, "learning_rate": 9.24024048078213e-05, "loss": 0.8051, "step": 130},
    {"epoch": 1.1644444444444444, "grad_norm": 1.296875, "learning_rate": 9.221639627510076e-05, "loss": 1.0332, "step": 131},
    {"epoch": 1.1733333333333333, "grad_norm": 1.2890625, "learning_rate": 9.202833017478422e-05, "loss": 0.9912, "step": 132},
    {"epoch": 1.1822222222222223, "grad_norm": 1.15625, "learning_rate": 9.183821567294809e-05, "loss": 0.7942, "step": 133},
    {"epoch": 1.1911111111111112, "grad_norm": 1.328125, "learning_rate": 9.164606203550497e-05, "loss": 0.8423, "step": 134},
    {"epoch": 1.2, "grad_norm": 0.98828125, "learning_rate": 9.145187862775209e-05, "loss": 0.7237, "step": 135},
    {"epoch": 1.208888888888889, "grad_norm": 1.140625, "learning_rate": 9.125567491391476e-05, "loss": 0.8881, "step": 136},
    {"epoch": 1.2177777777777778, "grad_norm": 1.671875, "learning_rate": 9.105746045668521e-05, "loss": 0.8953, "step": 137},
    {"epoch": 1.2266666666666666, "grad_norm": 1.2109375, "learning_rate": 9.085724491675642e-05, "loss": 1.0433, "step": 138},
    {"epoch": 1.2355555555555555, "grad_norm": 1.59375, "learning_rate": 9.065503805235138e-05, "loss": 0.9354, "step": 139},
    {"epoch": 1.2444444444444445, "grad_norm": 1.5546875, "learning_rate": 9.045084971874738e-05, "loss": 0.9633, "step": 140},
    {"epoch": 1.2533333333333334, "grad_norm": 1.6171875, "learning_rate": 9.02446898677957e-05, "loss": 1.0975, "step": 141},
    {"epoch": 1.2622222222222224, "grad_norm": 1.1328125, "learning_rate": 9.003656854743667e-05, "loss": 0.8621, "step": 142},
    {"epoch": 1.271111111111111, "grad_norm": 1.3125, "learning_rate": 8.982649590120982e-05, "loss": 0.8958, "step": 143},
    {"epoch": 1.28, "grad_norm": 1.375, "learning_rate": 8.961448216775954e-05, "loss": 0.9975, "step": 144},
    {"epoch": 1.2888888888888888, "grad_norm": 1.453125, "learning_rate": 8.940053768033609e-05, "loss": 0.9486, "step": 145},
    {"epoch": 1.2977777777777777, "grad_norm": 1.296875, "learning_rate": 8.9184672866292e-05, "loss": 0.876, "step": 146},
    {"epoch": 1.3066666666666666, "grad_norm": 1.3828125, "learning_rate": 8.896689824657372e-05, "loss": 0.8772, "step": 147},
    {"epoch": 1.3155555555555556, "grad_norm": 1.2421875, "learning_rate": 8.874722443520899e-05, "loss": 0.8023, "step": 148},
    {"epoch": 1.3244444444444445, "grad_norm": 1.453125, "learning_rate": 8.852566213878947e-05, "loss": 1.0432, "step": 149},
    {"epoch": 1.3333333333333333, "grad_norm": 1.921875, "learning_rate": 8.83022221559489e-05, "loss": 0.9147, "step": 150},
    {"epoch": 1.3422222222222222, "grad_norm": 2.3125, "learning_rate": 8.807691537683685e-05, "loss": 0.9798, "step": 151},
    {"epoch": 1.3511111111111112, "grad_norm": 1.5234375, "learning_rate": 8.784975278258783e-05, "loss": 0.8849, "step": 152},
    {"epoch": 1.3599999999999999, "grad_norm": 2.578125, "learning_rate": 8.762074544478623e-05, "loss": 0.9281, "step": 153},
    {"epoch": 1.3688888888888888, "grad_norm": 1.375, "learning_rate": 8.73899045249266e-05, "loss": 1.0482, "step": 154},
    {"epoch": 1.3777777777777778, "grad_norm": 1.6484375, "learning_rate": 8.715724127386972e-05, "loss": 1.0076, "step": 155},
    {"epoch": 1.3866666666666667, "grad_norm": 1.203125, "learning_rate": 8.692276703129421e-05, "loss": 0.817, "step": 156},
    {"epoch": 1.3955555555555557, "grad_norm": 1.4765625, "learning_rate": 8.668649322514382e-05, "loss": 1.0906, "step": 157},
    {"epoch": 1.4044444444444444, "grad_norm": 1.3828125, "learning_rate": 8.644843137107059e-05, "loss": 0.9078, "step": 158},
    {"epoch": 1.4133333333333333, "grad_norm": 1.3828125, "learning_rate": 8.620859307187339e-05, "loss": 0.9268, "step": 159},
    {"epoch": 1.4222222222222223, "grad_norm": 1.40625, "learning_rate": 8.596699001693255e-05, "loss": 0.8873, "step": 160},
    {"epoch": 1.431111111111111, "grad_norm": 1.296875, "learning_rate": 8.572363398164017e-05, "loss": 0.8488, "step": 161},
    {"epoch": 1.44, "grad_norm": 1.375, "learning_rate": 8.547853682682604e-05, "loss": 1.001, "step": 162},
    {"epoch": 1.448888888888889, "grad_norm": 1.7265625, "learning_rate": 8.523171049817974e-05, "loss": 1.0562, "step": 163},
    {"epoch": 1.4577777777777778, "grad_norm": 1.3984375, "learning_rate": 8.498316702566828e-05, "loss": 0.845, "step": 164},
    {"epoch": 1.4666666666666668, "grad_norm": 1.2109375, "learning_rate": 8.473291852294987e-05, "loss": 0.7172, "step": 165},
    {"epoch": 1.4755555555555555, "grad_norm": 1.1953125, "learning_rate": 8.44809771867835e-05, "loss": 0.9064, "step": 166},
    {"epoch": 1.4844444444444445, "grad_norm": 2.390625, "learning_rate": 8.422735529643444e-05, "loss": 0.888, "step": 167},
    {"epoch": 1.4933333333333334, "grad_norm": 1.2109375, "learning_rate": 8.397206521307584e-05, "loss": 0.8158, "step": 168},
    {"epoch": 1.5022222222222221, "grad_norm": 1.1875, "learning_rate": 8.371511937918616e-05, "loss": 0.8498, "step": 169},
    {"epoch": 1.511111111111111, "grad_norm": 1.5546875, "learning_rate": 8.345653031794292e-05, "loss": 0.93, "step": 170},
    {"epoch": 1.52, "grad_norm": 2.109375, "learning_rate": 8.319631063261209e-05, "loss": 1.2189, "step": 171},
    {"epoch": 1.528888888888889, "grad_norm": 1.4921875, "learning_rate": 8.293447300593402e-05, "loss": 1.0049, "step": 172},
    {"epoch": 1.537777777777778, "grad_norm": 1.15625, "learning_rate": 8.267103019950529e-05, "loss": 0.9254, "step": 173},
    {"epoch": 1.5466666666666666, "grad_norm": 1.4921875, "learning_rate": 8.240599505315655e-05, "loss": 0.8603, "step": 174},
    {"epoch": 1.5555555555555556, "grad_norm": 1.5859375, "learning_rate": 8.213938048432697e-05, "loss": 0.9305, "step": 175},
    {"epoch": 1.5644444444444443, "grad_norm": 1.828125, "learning_rate": 8.18711994874345e-05, "loss": 1.0136, "step": 176},
    {"epoch": 1.5733333333333333, "grad_norm": 1.9609375, "learning_rate": 8.160146513324254e-05, "loss": 1.0119, "step": 177},
    {"epoch": 1.5822222222222222, "grad_norm": 1.53125, "learning_rate": 8.133019056822304e-05, "loss": 0.8887, "step": 178},
    {"epoch": 1.5911111111111111, "grad_norm": 1.421875, "learning_rate": 8.105738901391552e-05, "loss": 1.0235, "step": 179},
    {"epoch": 1.6, "grad_norm": 1.078125, "learning_rate": 8.07830737662829e-05, "loss": 0.8074, "step": 180},
    {"epoch": 1.608888888888889, "grad_norm": 0.90625, "learning_rate": 8.05072581950634e-05, "loss": 0.861, "step": 181},
    {"epoch": 1.6177777777777778, "grad_norm": 1.5234375, "learning_rate": 8.022995574311876e-05, "loss": 0.9879, "step": 182},
    {"epoch": 1.6266666666666667, "grad_norm": 1.0546875, "learning_rate": 7.99511799257793e-05, "loss": 0.7969, "step": 183},
    {"epoch": 1.6355555555555554, "grad_norm": 1.140625, "learning_rate": 7.967094433018508e-05, "loss": 0.85, "step": 184},
    {"epoch": 1.6444444444444444, "grad_norm": 1.078125, "learning_rate": 7.938926261462366e-05, "loss": 0.765, "step": 185},
    {"epoch": 1.6533333333333333, "grad_norm": 1.390625, "learning_rate": 7.910614850786448e-05, "loss": 1.0234, "step": 186},
    {"epoch": 1.6622222222222223, "grad_norm": 1.2421875, "learning_rate": 7.882161580848967e-05, "loss": 0.9385, "step": 187},
    {"epoch": 1.6711111111111112, "grad_norm": 1.2421875, "learning_rate": 7.85356783842216e-05, "loss": 0.8661, "step": 188},
    {"epoch": 1.6800000000000002, "grad_norm": 1.3671875, "learning_rate": 7.82483501712469e-05, "loss": 1.1284, "step": 189},
    {"epoch": 1.6888888888888889, "grad_norm": 1.6953125, "learning_rate": 7.795964517353735e-05, "loss": 0.9681, "step": 190},
    {"epoch": 1.6977777777777778, "grad_norm": 1.3671875, "learning_rate": 7.766957746216721e-05, "loss": 0.8778, "step": 191},
    {"epoch": 1.7066666666666666, "grad_norm": 1.5703125, "learning_rate": 7.737816117462752e-05, "loss": 0.9828, "step": 192},
    {"epoch": 1.7155555555555555, "grad_norm": 1.34375, "learning_rate": 7.7085410514137e-05, "loss": 0.9298, "step": 193},
    {"epoch": 1.7244444444444444, "grad_norm": 1.2578125, "learning_rate": 7.679133974894983e-05, "loss": 0.9032, "step": 194},
    {"epoch": 1.7333333333333334, "grad_norm": 1.4375, "learning_rate": 7.649596321166024e-05, "loss": 1.0169, "step": 195},
    {"epoch": 1.7422222222222223, "grad_norm": 1.484375, "learning_rate": 7.619929529850397e-05, "loss": 0.8967, "step": 196},
    {"epoch": 1.751111111111111, "grad_norm": 1.75, "learning_rate": 7.590135046865651e-05, "loss": 0.926, "step": 197},
    {"epoch": 1.76, "grad_norm": 1.34375, "learning_rate": 7.560214324352858e-05, "loss": 1.0398, "step": 198},
    {"epoch": 1.7688888888888887, "grad_norm": 0.9765625, "learning_rate": 7.530168820605818e-05, "loss": 0.9307, "step": 199},
    {"epoch": 1.7777777777777777, "grad_norm": 1.7421875, "learning_rate": 7.500000000000001e-05, "loss": 0.8868, "step": 200},
    {"epoch": 1.7866666666666666, "grad_norm": 2.1875, "learning_rate": 7.469709332921155e-05, "loss": 0.8817, "step": 201},
    {"epoch": 1.7955555555555556, "grad_norm": 2.96875, "learning_rate": 7.439298295693665e-05, "loss": 1.1957, "step": 202},
    {"epoch": 1.8044444444444445, "grad_norm": 1.9765625, "learning_rate": 7.408768370508576e-05, "loss": 1.087, "step": 203},
    {"epoch": 1.8133333333333335, "grad_norm": 1.5, "learning_rate": 7.378121045351378e-05, "loss": 0.9296, "step": 204},
    {"epoch": 1.8222222222222222, "grad_norm": 1.6484375, "learning_rate": 7.347357813929454e-05, "loss": 1.0053, "step": 205},
    {"epoch": 1.8311111111111111, "grad_norm": 1.59375, "learning_rate": 7.316480175599309e-05, "loss": 0.8588, "step": 206},
    {"epoch": 1.8399999999999999, "grad_norm": 1.609375, "learning_rate": 7.285489635293472e-05, "loss": 0.8839, "step": 207},
    {"epoch": 1.8488888888888888, "grad_norm": 1.640625, "learning_rate": 7.254387703447154e-05, "loss": 0.9149, "step": 208},
    {"epoch": 1.8577777777777778, "grad_norm": 1.53125, "learning_rate": 7.223175895924638e-05, "loss": 0.8972, "step": 209},
    {"epoch": 1.8666666666666667, "grad_norm": 1.703125, "learning_rate": 7.191855733945387e-05, "loss": 1.0323, "step": 210},
    {"epoch": 1.8755555555555556, "grad_norm": 1.6171875, "learning_rate": 7.160428744009912e-05, "loss": 1.0817, "step": 211},
    {"epoch": 1.8844444444444446, "grad_norm": 1.71875, "learning_rate": 7.128896457825364e-05, "loss": 1.0261, "step": 212},
    {"epoch": 1.8933333333333333, "grad_norm": 1.5390625, "learning_rate": 7.097260412230886e-05, "loss": 0.9723, "step": 213},
    {"epoch": 1.9022222222222223, "grad_norm": 1.453125, "learning_rate": 7.06552214912271e-05, "loss": 1.1117, "step": 214},
    {"epoch": 1.911111111111111, "grad_norm": 1.328125, "learning_rate": 7.033683215379002e-05, "loss": 0.8511, "step": 215},
    {"epoch": 1.92, "grad_norm": 1.1640625, "learning_rate": 7.001745162784477e-05, "loss": 0.9363, "step": 216},
    {"epoch": 1.9288888888888889, "grad_norm": 1.4765625, "learning_rate": 6.969709547954756e-05, "loss": 0.9447, "step": 217},
    {"epoch": 1.9377777777777778, "grad_norm": 1.25, "learning_rate": 6.937577932260515e-05, "loss": 0.7898, "step": 218},
    {"epoch": 1.9466666666666668, "grad_norm": 1.3359375, "learning_rate": 6.905351881751372e-05, "loss": 0.8289, "step": 219},
    {"epoch": 1.9555555555555557, "grad_norm": 1.125, "learning_rate": 6.873032967079561e-05, "loss": 0.8569, "step": 220},
    {"epoch": 1.9644444444444444, "grad_norm": 1.2734375, "learning_rate": 6.840622763423391e-05, "loss": 0.8994, "step": 221},
    {"epoch": 1.9733333333333334, "grad_norm": 1.4375, "learning_rate": 6.808122850410461e-05, "loss": 0.8751, "step": 222},
    {"epoch": 1.982222222222222, "grad_norm": 1.5703125, "learning_rate": 6.775534812040685e-05, "loss": 1.0663, "step": 223},
    {"epoch": 1.991111111111111, "grad_norm": 1.6328125, "learning_rate": 6.742860236609077e-05, "loss": 1.1425, "step": 224},
    {"epoch": 2.0, "grad_norm": 1.3828125, "learning_rate": 6.710100716628344e-05, "loss": 0.9115, "step": 225},
    {"epoch": 2.008888888888889, "grad_norm": 1.1953125, "learning_rate": 6.677257848751277e-05, "loss": 0.9296, "step": 226},
    {"epoch": 2.017777777777778, "grad_norm": 1.3828125, "learning_rate": 6.644333233692916e-05, "loss": 1.0117, "step": 227},
    {"epoch": 2.026666666666667, "grad_norm": 1.765625, "learning_rate": 6.611328476152557e-05, "loss": 0.9275, "step": 228},
    {"epoch": 2.0355555555555553, "grad_norm": 1.140625, "learning_rate": 6.578245184735513e-05, "loss": 0.9839, "step": 229},
    {"epoch": 2.0444444444444443, "grad_norm": 1.1015625, "learning_rate": 6.545084971874738e-05, "loss": 0.6496, "step": 230},
    {"epoch": 2.0533333333333332, "grad_norm": 1.1640625, "learning_rate": 6.511849453752223e-05, "loss": 0.903, "step": 231},
    {"epoch": 2.062222222222222, "grad_norm": 1.1015625, "learning_rate": 6.478540250220234e-05, "loss": 0.9134, "step": 232},
    {"epoch": 2.071111111111111, "grad_norm": 1.0234375, "learning_rate": 6.445158984722358e-05, "loss": 0.7132, "step": 233},
    {"epoch": 2.08, "grad_norm": 1.3828125, "learning_rate": 6.411707284214384e-05, "loss": 0.7823, "step": 234},
    {"epoch": 2.088888888888889, "grad_norm": 1.453125, "learning_rate": 6.378186779084995e-05, "loss": 0.8642, "step": 235},
    {"epoch": 2.097777777777778, "grad_norm": 1.40625, "learning_rate": 6.344599103076329e-05, "loss": 0.8489, "step": 236},
    {"epoch": 2.1066666666666665, "grad_norm": 1.4296875, "learning_rate": 6.310945893204324e-05, "loss": 0.8013, "step": 237},
    {"epoch": 2.1155555555555554, "grad_norm": 1.3125, "learning_rate": 6.277228789678953e-05, "loss": 0.8753, "step": 238},
    {"epoch": 2.1244444444444444, "grad_norm": 1.1875, "learning_rate": 6.243449435824276e-05, "loss": 0.8545, "step": 239},
    {"epoch": 2.1333333333333333, "grad_norm": 1.4140625, "learning_rate": 6.209609477998338e-05, "loss": 0.7864, "step": 240},
    {"epoch": 2.1422222222222222, "grad_norm": 1.453125, "learning_rate": 6.17571056551295e-05, "loss": 0.9452, "step": 241},
    {"epoch": 2.151111111111111, "grad_norm": 1.1171875, "learning_rate": 6.141754350553279e-05, "loss": 0.8245, "step": 242},
    {"epoch": 2.16, "grad_norm": 1.0390625, "learning_rate": 6.107742488097338e-05, "loss": 0.8336, "step": 243},
    {"epoch": 2.168888888888889, "grad_norm": 1.046875, "learning_rate": 6.073676635835317e-05, "loss": 0.729, "step": 244},
    {"epoch": 2.1777777777777776, "grad_norm": 1.0546875, "learning_rate": 6.0395584540887963e-05, "loss": 0.7696, "step": 245},
    {"epoch": 2.1866666666666665, "grad_norm": 0.89453125, "learning_rate": 6.005389605729824e-05, "loss": 0.754, "step": 246},
    {"epoch": 2.1955555555555555, "grad_norm": 1.109375, "learning_rate": 5.97117175609986e-05, "loss": 0.8451, "step": 247},
    {"epoch": 2.2044444444444444, "grad_norm": 1.28125, "learning_rate": 5.9369065729286245e-05, "loss": 0.9647, "step": 248},
    {"epoch": 2.2133333333333334, "grad_norm": 1.296875, "learning_rate": 5.902595726252801e-05, "loss": 0.9317, "step": 249},
    {"epoch": 2.2222222222222223, "grad_norm": 2.0, "learning_rate": 5.868240888334653e-05, "loss": 1.1104, "step": 250},
    {"epoch": 2.2311111111111113, "grad_norm": 1.8203125, "learning_rate": 5.833843733580512e-05, "loss": 0.8364, "step": 251},
    {"epoch": 2.24, "grad_norm": 2.140625, "learning_rate": 5.799405938459175e-05, "loss": 0.9288, "step": 252},
    {"epoch": 2.2488888888888887, "grad_norm": 1.3359375, "learning_rate": 5.764929181420191e-05, "loss": 0.7471, "step": 253},
    {"epoch": 2.2577777777777777, "grad_norm": 1.5703125, "learning_rate": 5.730415142812059e-05, "loss": 0.826, "step": 254},
    {"epoch": 2.2666666666666666, "grad_norm": 2.25, "learning_rate": 5.695865504800327e-05, "loss": 0.9825, "step": 255},
    {"epoch": 2.2755555555555556, "grad_norm": 1.5, "learning_rate": 5.661281951285613e-05, "loss": 0.8868, "step": 256},
    {"epoch": 2.2844444444444445, "grad_norm": 1.4921875, "learning_rate": 5.6266661678215216e-05, "loss": 0.8671, "step": 257},
    {"epoch": 2.2933333333333334, "grad_norm": 1.53125, "learning_rate": 5.5920198415325064e-05, "loss": 0.6903, "step": 258},
    {"epoch": 2.3022222222222224, "grad_norm": 1.359375, "learning_rate": 5.557344661031627e-05, "loss": 0.7786, "step": 259},
    {"epoch": 2.311111111111111, "grad_norm": 1.5703125, "learning_rate": 5.522642316338268e-05, "loss": 1.0231, "step": 260},
    {"epoch": 2.32, "grad_norm": 1.3359375, "learning_rate": 5.487914498795747e-05, "loss": 0.7585, "step": 261},
    {"epoch": 2.328888888888889, "grad_norm": 1.6640625, "learning_rate": 5.453162900988902e-05, "loss": 0.9553, "step": 262},
    {"epoch": 2.3377777777777777, "grad_norm": 1.3203125, "learning_rate": 5.418389216661579e-05, "loss": 0.7379, "step": 263},
    {"epoch": 2.3466666666666667, "grad_norm": 1.2578125, "learning_rate": 5.383595140634093e-05, "loss": 0.7197, "step": 264},
    {"epoch": 2.3555555555555556, "grad_norm": 1.4765625, "learning_rate": 5.348782368720626e-05, "loss": 0.9332, "step": 265},
    {"epoch": 2.3644444444444446, "grad_norm": 1.21875, "learning_rate": 5.313952597646568e-05, "loss": 0.8006, "step": 266},
    {"epoch": 2.3733333333333335, "grad_norm": 1.4609375, "learning_rate": 5.279107524965819e-05, "loss": 0.8995, "step": 267},
    {"epoch": 2.3822222222222225, "grad_norm": 1.71875, "learning_rate": 5.244248848978067e-05, "loss": 1.009, "step": 268},
    {"epoch": 2.391111111111111, "grad_norm": 1.4140625, "learning_rate": 5.209378268645998e-05, "loss": 1.0106, "step": 269},
    {"epoch": 2.4, "grad_norm": 1.6875, "learning_rate": 5.174497483512506e-05, "loss": 0.8872, "step": 270},
    {"epoch": 2.408888888888889, "grad_norm": 1.40625, "learning_rate": 5.139608193617845e-05, "loss": 0.7844, "step": 271},
    {"epoch": 2.417777777777778, "grad_norm": 1.65625, "learning_rate": 5.104712099416785e-05, "loss": 0.9682, "step": 272},
    {"epoch": 2.4266666666666667, "grad_norm": 1.296875, "learning_rate": 5.0698109016957274e-05, "loss": 0.7677, "step": 273},
    {"epoch": 2.4355555555555557, "grad_norm": 1.3125, "learning_rate": 5.034906301489808e-05, "loss": 0.807, "step": 274},
    {"epoch": 2.4444444444444446, "grad_norm": 1.6171875, "learning_rate": 5e-05, "loss": 0.8616, "step": 275},
    {"epoch": 2.453333333333333, "grad_norm": 1.546875, "learning_rate": 4.965093698510193e-05, "loss": 0.8555, "step": 276},
    {"epoch": 2.462222222222222, "grad_norm": 1.2734375, "learning_rate": 4.9301890983042744e-05, "loss": 0.9175, "step": 277},
    {"epoch": 2.471111111111111, "grad_norm": 1.25, "learning_rate": 4.895287900583216e-05, "loss": 0.8347, "step": 278},
    {"epoch": 2.48, "grad_norm": 1.1875, "learning_rate": 4.860391806382157e-05, "loss": 0.6997, "step": 279},
    {"epoch": 2.488888888888889, "grad_norm": 1.5546875, "learning_rate": 4.825502516487497e-05, "loss": 0.7535, "step": 280},
    {"epoch": 2.497777777777778, "grad_norm": 1.75, "learning_rate": 4.790621731354003e-05, "loss": 0.6498, "step": 281},
    {"epoch": 2.506666666666667, "grad_norm": 1.59375, "learning_rate": 4.755751151021934e-05, "loss": 0.8289, "step": 282},
    {"epoch": 2.5155555555555553, "grad_norm": 1.1796875, "learning_rate": 4.720892475034181e-05, "loss": 0.7781, "step": 283},
    {"epoch": 2.5244444444444447, "grad_norm": 1.3359375, "learning_rate": 4.6860474023534335e-05, "loss": 0.887, "step": 284},
    {"epoch": 2.533333333333333, "grad_norm": 1.546875, "learning_rate": 4.6512176312793736e-05, "loss": 0.9711, "step": 285},
    {"epoch": 2.542222222222222, "grad_norm": 1.03125, "learning_rate": 4.616404859365907e-05, "loss": 0.7761, "step": 286},
    {"epoch": 2.551111111111111, "grad_norm": 1.203125, "learning_rate": 4.5816107833384234e-05, "loss": 0.8821, "step": 287},
    {"epoch": 2.56, "grad_norm": 1.0859375, "learning_rate": 4.5468370990111006e-05, "loss": 0.9093, "step": 288},
    {"epoch": 2.568888888888889, "grad_norm": 1.296875, "learning_rate": 4.512085501204253e-05, "loss": 0.8367, "step": 289},
    {"epoch": 2.5777777777777775, "grad_norm": 1.2265625, "learning_rate": 4.477357683661734e-05, "loss": 0.8892, "step": 290},
    {"epoch": 2.586666666666667, "grad_norm": 1.234375, "learning_rate": 4.442655338968373e-05, "loss": 0.8095, "step": 291},
    {"epoch": 2.5955555555555554, "grad_norm": 1.2421875, "learning_rate": 4.407980158467495e-05, "loss": 0.6999, "step": 292},
    {"epoch": 2.6044444444444443, "grad_norm": 1.375, "learning_rate": 4.373333832178478e-05, "loss": 0.9533, "step": 293},
    {"epoch": 2.6133333333333333, "grad_norm": 1.1484375, "learning_rate": 4.3387180487143876e-05, "loss": 1.0029, "step": 294},
    {"epoch": 2.6222222222222222, "grad_norm": 1.140625, "learning_rate": 4.3041344951996746e-05, "loss": 0.8176, "step": 295},
    {"epoch": 2.631111111111111, "grad_norm": 1.140625, "learning_rate": 4.269584857187943e-05, "loss": 0.7422, "step": 296},
    {"epoch": 2.64, "grad_norm": 1.6484375, "learning_rate": 4.23507081857981e-05, "loss": 0.9981, "step": 297},
    {"epoch": 2.648888888888889, "grad_norm": 1.4921875, "learning_rate": 4.2005940615408264e-05, "loss": 0.7285, "step": 298},
    {"epoch": 2.6577777777777776, "grad_norm": 1.1484375, "learning_rate": 4.166156266419489e-05, "loss": 0.7904, "step": 299},
    {"epoch": 2.6666666666666665, "grad_norm": 1.7890625, "learning_rate": 4.131759111665349e-05, "loss": 0.8762, "step": 300},
    {"epoch": 2.6755555555555555, "grad_norm": 1.8671875, "learning_rate": 4.0974042737472006e-05, "loss": 0.7147, "step": 301},
    {"epoch": 2.6844444444444444, "grad_norm": 1.8671875, "learning_rate": 4.063093427071376e-05, "loss": 0.8428, "step": 302},
    {"epoch": 2.6933333333333334, "grad_norm": 1.9765625, "learning_rate": 4.028828243900141e-05, "loss": 0.9927, "step": 303},
    {"epoch": 2.7022222222222223, "grad_norm": 2.25, "learning_rate": 3.9946103942701777e-05, "loss": 0.882, "step": 304},
    {"epoch": 2.7111111111111112, "grad_norm": 2.3125, "learning_rate": 3.960441545911204e-05, "loss": 0.9207, "step": 305},
    {"epoch": 2.7199999999999998, "grad_norm": 1.5390625, "learning_rate": 3.926323364164684e-05, "loss": 0.9289, "step": 306},
    {"epoch": 2.728888888888889, "grad_norm": 1.8671875, "learning_rate": 3.892257511902664e-05, "loss": 0.972, "step": 307},
    {"epoch": 2.7377777777777776, "grad_norm": 1.8515625, "learning_rate": 3.858245649446721e-05, "loss": 0.8648, "step": 308},
    {"epoch": 2.7466666666666666, "grad_norm": 1.734375, "learning_rate": 3.82428943448705e-05, "loss": 0.7915, "step": 309},
    {"epoch": 2.7555555555555555, "grad_norm": 1.703125, "learning_rate": 3.790390522001662e-05, "loss": 0.9658, "step": 310},
    {"epoch": 2.7644444444444445, "grad_norm": 1.7734375, "learning_rate": 3.756550564175727e-05, "loss": 0.9002, "step": 311},
    {"epoch": 2.7733333333333334, "grad_norm": 1.25, "learning_rate": 3.7227712103210486e-05, "loss": 0.671, "step": 312},
    {"epoch": 2.7822222222222224, "grad_norm": 1.4375, "learning_rate": 3.6890541067956776e-05, "loss": 0.8574, "step": 313},
    {"epoch": 2.7911111111111113, "grad_norm": 1.84375, "learning_rate": 3.655400896923672e-05, "loss": 1.0365, "step": 314},
    {"epoch": 2.8, "grad_norm": 1.359375, "learning_rate": 3.6218132209150045e-05, "loss": 0.8598, "step": 315},
    {"epoch": 2.8088888888888888, "grad_norm": 1.3125, "learning_rate": 3.588292715785617e-05, "loss": 0.8167, "step": 316},
    {"epoch": 2.8177777777777777, "grad_norm": 1.4921875, "learning_rate": 3.554841015277641e-05, "loss": 0.8554, "step": 317},
    {"epoch": 2.8266666666666667, "grad_norm": 1.4140625, "learning_rate": 3.5214597497797684e-05, "loss": 0.9625, "step": 318},
    {"epoch": 2.8355555555555556, "grad_norm": 1.6875, "learning_rate": 3.488150546247778e-05, "loss": 0.9035, "step": 319},
    {"epoch": 2.8444444444444446, "grad_norm": 1.671875, "learning_rate": 3.4549150281252636e-05, "loss": 0.85, "step": 320},
    {"epoch": 2.8533333333333335, "grad_norm": 1.4296875, "learning_rate": 3.4217548152644885e-05, "loss": 1.1274, "step": 321},
    {"epoch": 2.862222222222222, "grad_norm": 1.296875, "learning_rate": 3.388671523847445e-05, "loss": 0.7901, "step": 322},
    {"epoch": 2.871111111111111, "grad_norm": 1.234375, "learning_rate": 3.355666766307084e-05, "loss": 0.9324, "step": 323},
    {"epoch": 2.88, "grad_norm": 1.109375, "learning_rate": 3.322742151248725e-05, "loss": 0.8024, "step": 324},
    {"epoch": 2.888888888888889, "grad_norm": 2.4375, "learning_rate": 3.289899283371657e-05, "loss": 0.9827, "step": 325},
    {"epoch": 2.897777777777778, "grad_norm": 1.1875, "learning_rate": 3.257139763390925e-05, "loss": 0.8115, "step": 326},
    {"epoch": 2.9066666666666667, "grad_norm": 1.15625, "learning_rate": 3.224465187959316e-05, "loss": 0.8628, "step": 327},
    {"epoch": 2.9155555555555557, "grad_norm": 1.125, "learning_rate": 3.1918771495895396e-05, "loss": 0.8138, "step": 328},
    {"epoch": 2.924444444444444, "grad_norm": 1.34375, "learning_rate": 3.1593772365766105e-05, "loss": 0.7686, "step": 329},
    {"epoch": 2.9333333333333336, "grad_norm": 1.171875, "learning_rate": 3.12696703292044e-05, "loss": 0.861, "step": 330},
    {"epoch": 2.942222222222222, "grad_norm": 1.15625, "learning_rate": 3.09464811824863e-05, "loss": 0.7815, "step": 331},
    {"epoch": 2.951111111111111, "grad_norm": 1.296875, "learning_rate": 3.062422067739485e-05, "loss": 0.8253, "step": 332},
    {"epoch": 2.96, "grad_norm": 1.2890625, "learning_rate": 3.0302904520452447e-05, "loss": 0.8101, "step": 333},
    {"epoch": 2.968888888888889, "grad_norm": 1.421875, "learning_rate": 2.9982548372155263e-05, "loss": 0.828, "step": 334},
    {"epoch": 2.977777777777778, "grad_norm": 1.6953125, "learning_rate": 2.9663167846209998e-05, "loss": 0.7968, "step": 335},
    {"epoch": 2.986666666666667, "grad_norm": 1.640625, "learning_rate": 2.934477850877292e-05, "loss": 1.0188, "step": 336},
    {"epoch": 2.9955555555555557, "grad_norm": 1.421875, "learning_rate": 2.9027395877691144e-05, "loss": 0.8255, "step": 337},
    {"epoch": 3.0044444444444443, "grad_norm": 1.109375, "learning_rate": 2.8711035421746367e-05, "loss": 0.6085, "step": 338},
    {"epoch": 3.013333333333333, "grad_norm": 0.99609375, "learning_rate": 2.8395712559900877e-05, "loss": 0.7823, "step": 339},
    {"epoch": 3.022222222222222, "grad_norm": 1.2265625, "learning_rate": 2.8081442660546125e-05, "loss": 0.8947, "step": 340},
    {"epoch": 3.031111111111111, "grad_norm": 1.140625, "learning_rate": 2.776824104075364e-05, "loss": 0.8595, "step": 341},
    {"epoch": 3.04, "grad_norm": 1.265625, "learning_rate": 2.7456122965528475e-05, "loss": 0.829, "step": 342},
    {"epoch": 3.048888888888889, "grad_norm": 0.94921875, "learning_rate": 2.7145103647065308e-05, "loss": 0.5761, "step": 343},
    {"epoch": 3.057777777777778, "grad_norm": 1.1328125, "learning_rate": 2.6835198244006927e-05, "loss": 0.7939, "step": 344},
    {"epoch": 3.066666666666667, "grad_norm": 1.0703125, "learning_rate": 2.6526421860705473e-05, "loss": 0.6288, "step": 345},
    {"epoch": 3.0755555555555554, "grad_norm": 1.15625, "learning_rate": 2.6218789546486234e-05, "loss": 0.7359, "step": 346},
    {"epoch": 3.0844444444444443, "grad_norm": 1.3359375, "learning_rate": 2.591231629491423e-05, "loss": 0.7755, "step": 347},
    {"epoch": 3.0933333333333333, "grad_norm": 1.2734375, "learning_rate": 2.560701704306336e-05, "loss": 0.6429, "step": 348},
    {"epoch": 3.102222222222222, "grad_norm": 1.28125, "learning_rate": 2.5302906670788462e-05, "loss": 0.7758, "step": 349},
    {"epoch": 3.111111111111111, "grad_norm": 2.109375, "learning_rate": 2.500000000000001e-05, "loss": 0.6468, "step": 350},
    {"epoch": 3.12, "grad_norm": 2.0, "learning_rate": 2.469831179394182e-05, "loss": 0.6939, "step": 351},
    {"epoch": 3.128888888888889, "grad_norm": 2.109375, "learning_rate": 2.4397856756471432e-05, "loss": 0.8737, "step": 352},
    {"epoch": 3.137777777777778, "grad_norm": 2.0, "learning_rate": 2.4098649531343497e-05, "loss": 0.6743, "step": 353},
    {"epoch": 3.1466666666666665, "grad_norm": 2.03125, "learning_rate": 2.3800704701496053e-05, "loss": 0.7847, "step": 354},
    {"epoch": 3.1555555555555554, "grad_norm": 2.09375, "learning_rate": 2.350403678833976e-05, "loss": 0.8001, "step": 355},
    {"epoch": 3.1644444444444444, "grad_norm": 1.578125, "learning_rate": 2.3208660251050158e-05, "loss": 0.7351, "step": 356},
    {"epoch": 3.1733333333333333, "grad_norm": 1.9296875, "learning_rate": 2.2914589485863014e-05, "loss": 0.7505, "step": 357},
    {"epoch": 3.1822222222222223, "grad_norm": 2.609375, "learning_rate": 2.2621838825372493e-05, "loss": 0.8125, "step": 358},
    {"epoch": 3.1911111111111112, "grad_norm": 1.8046875, "learning_rate": 2.23304225378328e-05, "loss": 0.8101, "step": 359},
    {"epoch": 3.2, "grad_norm": 1.6484375, "learning_rate": 2.2040354826462668e-05, "loss": 0.8313, "step": 360},
    {"epoch": 3.2088888888888887, "grad_norm": 1.390625, "learning_rate": 2.1751649828753106e-05, "loss": 0.6678, "step": 361},
    {"epoch": 3.2177777777777776, "grad_norm": 1.59375, "learning_rate": 2.1464321615778422e-05, "loss": 0.7392, "step": 362},
    {"epoch": 3.2266666666666666, "grad_norm": 1.484375, "learning_rate": 2.117838419151034e-05, "loss": 0.6166, "step": 363},
    {"epoch": 3.2355555555555555, "grad_norm": 1.3515625, "learning_rate": 2.0893851492135537e-05, "loss": 0.5738, "step": 364},
    {"epoch": 3.2444444444444445, "grad_norm": 1.453125, "learning_rate": 2.061073738537635e-05, "loss": 0.6822, "step": 365},
    {"epoch": 3.2533333333333334, "grad_norm": 1.625, "learning_rate": 2.0329055669814934e-05, "loss": 0.9337, "step": 366},
    {"epoch": 3.2622222222222224, "grad_norm": 1.828125, "learning_rate": 2.0048820074220715e-05, "loss": 0.8167, "step": 367},
    {"epoch": 3.2711111111111113, "grad_norm": 1.515625, "learning_rate": 1.977004425688126e-05, "loss": 0.7508, "step": 368},
    {"epoch": 3.2800000000000002, "grad_norm": 1.359375, "learning_rate": 1.9492741804936622e-05, "loss": 0.745, "step": 369},
    {"epoch": 3.2888888888888888, "grad_norm": 1.671875, "learning_rate": 1.9216926233717085e-05, "loss": 0.8252, "step": 370},
    {"epoch": 3.2977777777777777, "grad_norm": 1.4453125, "learning_rate": 1.8942610986084486e-05, "loss": 0.6201, "step": 371},
    {"epoch": 3.3066666666666666, "grad_norm": 1.5, "learning_rate": 1.866980943177699e-05, "loss": 0.6965, "step": 372},
    {"epoch": 3.3155555555555556, "grad_norm": 1.765625, "learning_rate": 1.8398534866757454e-05, "loss": 0.7522, "step": 373},
    {"epoch": 3.3244444444444445, "grad_norm": 1.9296875, "learning_rate": 1.8128800512565513e-05, "loss": 0.7584, "step": 374},
    {"epoch": 3.3333333333333335, "grad_norm": 1.46875, "learning_rate": 1.7860619515673033e-05, "loss": 0.6047, "step": 375},
    {"epoch": 3.3422222222222224, "grad_norm": 2.0625, "learning_rate": 1.7594004946843456e-05, "loss": 0.77, "step": 376},
    {"epoch": 3.351111111111111, "grad_norm": 1.984375, "learning_rate": 1.7328969800494726e-05, "loss": 0.7815, "step": 377},
    {"epoch": 3.36, "grad_norm": 1.6796875, "learning_rate": 1.7065526994065973e-05, "loss": 0.7838, "step": 378},
    {"epoch": 3.368888888888889, "grad_norm": 1.7265625, "learning_rate": 1.680368936738792e-05, "loss": 0.7368, "step": 379},
    {"epoch": 3.3777777777777778, "grad_norm": 1.609375, "learning_rate": 1.6543469682057106e-05, "loss": 0.8079, "step": 380},
    {"epoch": 3.3866666666666667, "grad_norm": 1.7734375, "learning_rate": 1.6284880620813848e-05, "loss": 0.9025, "step": 381},
    {"epoch": 3.3955555555555557, "grad_norm": 1.5, "learning_rate": 1.602793478692419e-05, "loss": 0.8089, "step": 382},
    {"epoch": 3.4044444444444446, "grad_norm": 1.5078125, "learning_rate": 1.5772644703565565e-05, "loss": 0.6786, "step": 383},
    {"epoch": 3.413333333333333, "grad_norm": 1.5, "learning_rate": 1.551902281321651e-05, "loss": 0.6152, "step": 384},
    {"epoch": 3.422222222222222, "grad_norm": 1.359375, "learning_rate": 1.526708147705013e-05, "loss": 0.5888, "step": 385},
    {"epoch": 3.431111111111111, "grad_norm": 1.8125, "learning_rate": 1.5016832974331724e-05, "loss": 0.8046, "step": 386},
    {"epoch": 3.44, "grad_norm": 1.6328125, "learning_rate": 1.4768289501820265e-05, "loss": 0.7949, "step": 387},
    {"epoch": 3.448888888888889, "grad_norm": 1.5078125, "learning_rate": 1.4521463173173965e-05, "loss": 0.6952, "step": 388},
    {"epoch": 3.457777777777778, "grad_norm": 1.4609375, "learning_rate": 1.4276366018359844e-05, "loss": 0.7142, "step": 389},
    {"epoch": 3.466666666666667, "grad_norm": 1.578125, "learning_rate": 1.4033009983067452e-05, "loss": 0.8104, "step": 390},
    {"epoch": 3.4755555555555557, "grad_norm": 2.203125, "learning_rate": 1.3791406928126638e-05, "loss": 0.8418, "step": 391},
    {"epoch": 3.4844444444444447, "grad_norm": 1.7734375, "learning_rate": 1.3551568628929434e-05, "loss": 0.6546, "step": 392},
    {"epoch": 3.493333333333333, "grad_norm": 1.7109375, "learning_rate": 1.3313506774856177e-05, "loss": 0.8296, "step": 393},
    {"epoch": 3.502222222222222, "grad_norm": 1.3046875, "learning_rate": 1.3077232968705805e-05, "loss": 0.7203, "step": 394},
    {"epoch": 3.511111111111111, "grad_norm": 1.2734375, "learning_rate": 1.2842758726130283e-05, "loss": 0.6301, "step": 395},
    {"epoch": 3.52, "grad_norm": 1.578125, "learning_rate": 1.2610095475073414e-05, "loss": 0.7751, "step": 396},
    {"epoch": 3.528888888888889, "grad_norm": 1.59375, "learning_rate": 1.2379254555213788e-05, "loss": 0.7448, "step": 397},
    {"epoch": 3.537777777777778, "grad_norm": 1.6640625, "learning_rate": 1.2150247217412186e-05, "loss": 0.6768, "step": 398},
    {"epoch": 3.546666666666667, "grad_norm": 1.4765625, "learning_rate": 1.1923084623163172e-05, "loss": 0.7193, "step": 399},
    {"epoch": 3.5555555555555554, "grad_norm": 1.90625, "learning_rate": 1.1697777844051105e-05, "loss": 0.6328, "step": 400},
    {"epoch": 3.5644444444444443, "grad_norm": 1.90625, "learning_rate": 1.1474337861210543e-05, "loss": 0.727, "step": 401},
    {"epoch": 3.5733333333333333, "grad_norm": 1.8828125, "learning_rate": 1.1252775564791024e-05, "loss": 0.7467, "step": 402},
|
{ |
|
"epoch": 3.582222222222222, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 1.1033101753426283e-05, |
|
"loss": 0.7672, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 3.591111111111111, |
|
"grad_norm": 1.84375, |
|
"learning_rate": 1.0815327133708015e-05, |
|
"loss": 0.7215, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"grad_norm": 1.5703125, |
|
"learning_rate": 1.0599462319663905e-05, |
|
"loss": 0.6078, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 3.608888888888889, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 1.0385517832240471e-05, |
|
"loss": 0.7894, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 3.6177777777777775, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 1.0173504098790187e-05, |
|
"loss": 0.7391, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 3.626666666666667, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 9.963431452563332e-06, |
|
"loss": 0.6962, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 3.6355555555555554, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 9.755310132204298e-06, |
|
"loss": 0.8193, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 3.6444444444444444, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.7433, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.6533333333333333, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 9.344961947648623e-06, |
|
"loss": 0.5545, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 3.6622222222222223, |
|
"grad_norm": 2.546875, |
|
"learning_rate": 9.142755083243576e-06, |
|
"loss": 0.938, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 3.671111111111111, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 8.9425395433148e-06, |
|
"loss": 0.8629, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 8.744325086085248e-06, |
|
"loss": 0.8981, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 3.688888888888889, |
|
"grad_norm": 2.0, |
|
"learning_rate": 8.548121372247918e-06, |
|
"loss": 0.7164, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 3.6977777777777776, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 8.353937964495029e-06, |
|
"loss": 0.7385, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 3.7066666666666666, |
|
"grad_norm": 1.921875, |
|
"learning_rate": 8.16178432705192e-06, |
|
"loss": 0.6595, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 3.7155555555555555, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 7.971669825215788e-06, |
|
"loss": 0.7062, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 3.7244444444444444, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 7.783603724899257e-06, |
|
"loss": 0.8843, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 3.7333333333333334, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 7.597595192178702e-06, |
|
"loss": 0.6351, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.7422222222222223, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 7.413653292847617e-06, |
|
"loss": 0.7729, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 3.7511111111111113, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 7.2317869919746705e-06, |
|
"loss": 0.8741, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 7.052005153466779e-06, |
|
"loss": 0.7252, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 3.7688888888888887, |
|
"grad_norm": 1.90625, |
|
"learning_rate": 6.874316539637127e-06, |
|
"loss": 0.6773, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 3.7777777777777777, |
|
"grad_norm": 2.28125, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 0.6376, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 3.7866666666666666, |
|
"grad_norm": 1.7421875, |
|
"learning_rate": 6.52525352473905e-06, |
|
"loss": 0.7645, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 3.7955555555555556, |
|
"grad_norm": 2.0625, |
|
"learning_rate": 6.353896136509524e-06, |
|
"loss": 0.797, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 3.8044444444444445, |
|
"grad_norm": 1.6796875, |
|
"learning_rate": 6.184665997806832e-06, |
|
"loss": 0.7175, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 3.8133333333333335, |
|
"grad_norm": 1.9921875, |
|
"learning_rate": 6.017571356669183e-06, |
|
"loss": 0.8561, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 3.822222222222222, |
|
"grad_norm": 1.8359375, |
|
"learning_rate": 5.852620357053651e-06, |
|
"loss": 0.5715, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.8311111111111114, |
|
"grad_norm": 1.6875, |
|
"learning_rate": 5.689821038439263e-06, |
|
"loss": 0.68, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"grad_norm": 2.125, |
|
"learning_rate": 5.529181335435124e-06, |
|
"loss": 0.7089, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 3.848888888888889, |
|
"grad_norm": 1.734375, |
|
"learning_rate": 5.370709077393721e-06, |
|
"loss": 0.7589, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 3.8577777777777778, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 5.214411988029355e-06, |
|
"loss": 0.7617, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 3.8666666666666667, |
|
"grad_norm": 1.78125, |
|
"learning_rate": 5.060297685041659e-06, |
|
"loss": 0.9029, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 3.8755555555555556, |
|
"grad_norm": 1.953125, |
|
"learning_rate": 4.908373679744316e-06, |
|
"loss": 0.6779, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 3.8844444444444446, |
|
"grad_norm": 2.0, |
|
"learning_rate": 4.758647376699032e-06, |
|
"loss": 0.8606, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 3.8933333333333335, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 4.611126073354571e-06, |
|
"loss": 0.7648, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 3.902222222222222, |
|
"grad_norm": 1.6328125, |
|
"learning_rate": 4.465816959691149e-06, |
|
"loss": 0.6418, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 3.911111111111111, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 0.7436, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 4.181863521888019e-06, |
|
"loss": 0.7485, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 3.928888888888889, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 4.043233037238281e-06, |
|
"loss": 0.7783, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 3.937777777777778, |
|
"grad_norm": 1.6640625, |
|
"learning_rate": 3.90684242057498e-06, |
|
"loss": 0.5753, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 3.9466666666666668, |
|
"grad_norm": 1.7109375, |
|
"learning_rate": 3.772698319384349e-06, |
|
"loss": 0.6734, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 3.9555555555555557, |
|
"grad_norm": 1.6953125, |
|
"learning_rate": 3.6408072716606346e-06, |
|
"loss": 0.7748, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 3.964444444444444, |
|
"grad_norm": 1.765625, |
|
"learning_rate": 3.511175705587433e-06, |
|
"loss": 0.7126, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 3.9733333333333336, |
|
"grad_norm": 1.875, |
|
"learning_rate": 3.3838099392243916e-06, |
|
"loss": 0.7099, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 3.982222222222222, |
|
"grad_norm": 2.296875, |
|
"learning_rate": 3.258716180199278e-06, |
|
"loss": 0.9139, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 3.991111111111111, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 3.1359005254054273e-06, |
|
"loss": 0.7272, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"grad_norm": 1.875, |
|
"learning_rate": 3.0153689607045845e-06, |
|
"loss": 0.8561, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.0088888888888885, |
|
"grad_norm": 1.8515625, |
|
"learning_rate": 2.8971273606351658e-06, |
|
"loss": 0.7015, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 4.017777777777778, |
|
"grad_norm": 1.71875, |
|
"learning_rate": 2.7811814881259503e-06, |
|
"loss": 0.6025, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 4.026666666666666, |
|
"grad_norm": 2.125, |
|
"learning_rate": 2.667536994215186e-06, |
|
"loss": 0.7018, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 4.035555555555556, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 2.5561994177751737e-06, |
|
"loss": 0.6452, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 4.044444444444444, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.573, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.053333333333334, |
|
"grad_norm": 2.34375, |
|
"learning_rate": 2.340466610352654e-06, |
|
"loss": 0.6177, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 4.062222222222222, |
|
"grad_norm": 1.7890625, |
|
"learning_rate": 2.2360818938828187e-06, |
|
"loss": 0.6479, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 4.071111111111111, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 2.134025123396638e-06, |
|
"loss": 0.6464, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 4.08, |
|
"grad_norm": 2.140625, |
|
"learning_rate": 2.0343012729971243e-06, |
|
"loss": 0.6545, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 4.088888888888889, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 1.9369152030840556e-06, |
|
"loss": 0.6401, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.097777777777778, |
|
"grad_norm": 1.75, |
|
"learning_rate": 1.841871660117095e-06, |
|
"loss": 0.5654, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 4.1066666666666665, |
|
"grad_norm": 1.859375, |
|
"learning_rate": 1.7491752763844293e-06, |
|
"loss": 0.6355, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 4.115555555555556, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 1.6588305697770313e-06, |
|
"loss": 0.8252, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 4.124444444444444, |
|
"grad_norm": 2.078125, |
|
"learning_rate": 1.5708419435684462e-06, |
|
"loss": 0.573, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 4.133333333333334, |
|
"grad_norm": 2.8125, |
|
"learning_rate": 1.4852136862001764e-06, |
|
"loss": 0.6218, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.142222222222222, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.4019499710726913e-06, |
|
"loss": 0.5241, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 4.151111111111111, |
|
"grad_norm": 1.8671875, |
|
"learning_rate": 1.3210548563419856e-06, |
|
"loss": 0.6395, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 4.16, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 1.2425322847218368e-06, |
|
"loss": 0.7088, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 4.168888888888889, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 1.166386083291604e-06, |
|
"loss": 0.7894, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 4.177777777777778, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 0.6565, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.1866666666666665, |
|
"grad_norm": 1.8046875, |
|
"learning_rate": 1.0212375200327973e-06, |
|
"loss": 0.5306, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 4.195555555555556, |
|
"grad_norm": 1.8125, |
|
"learning_rate": 9.522422325404235e-07, |
|
"loss": 0.6762, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 4.204444444444444, |
|
"grad_norm": 1.6171875, |
|
"learning_rate": 8.856374635655695e-07, |
|
"loss": 0.5428, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 4.213333333333333, |
|
"grad_norm": 1.9375, |
|
"learning_rate": 8.214264593307098e-07, |
|
"loss": 0.6506, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 4.222222222222222, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 7.596123493895991e-07, |
|
"loss": 0.5546, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.231111111111111, |
|
"grad_norm": 1.7578125, |
|
"learning_rate": 7.001981464747565e-07, |
|
"loss": 0.6947, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 6.431867463506048e-07, |
|
"loss": 0.8005, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 4.248888888888889, |
|
"grad_norm": 2.03125, |
|
"learning_rate": 5.885809276723608e-07, |
|
"loss": 0.7249, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 4.257777777777778, |
|
"grad_norm": 2.171875, |
|
"learning_rate": 5.363833518505834e-07, |
|
"loss": 0.7854, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 4.266666666666667, |
|
"grad_norm": 2.484375, |
|
"learning_rate": 4.865965629214819e-07, |
|
"loss": 0.762, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.275555555555556, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 4.392229874229159e-07, |
|
"loss": 0.7091, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 4.2844444444444445, |
|
"grad_norm": 1.8671875, |
|
"learning_rate": 3.9426493427611177e-07, |
|
"loss": 0.6231, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 4.293333333333333, |
|
"grad_norm": 2.203125, |
|
"learning_rate": 3.517245946731529e-07, |
|
"loss": 0.7348, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 4.302222222222222, |
|
"grad_norm": 2.09375, |
|
"learning_rate": 3.1160404197018154e-07, |
|
"loss": 0.531, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 4.311111111111111, |
|
"grad_norm": 2.015625, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 0.5834, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"grad_norm": 1.875, |
|
"learning_rate": 2.386300009084408e-07, |
|
"loss": 0.5736, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 4.328888888888889, |
|
"grad_norm": 1.8828125, |
|
"learning_rate": 2.057800692014833e-07, |
|
"loss": 0.4758, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 4.337777777777778, |
|
"grad_norm": 1.890625, |
|
"learning_rate": 1.753570375247815e-07, |
|
"loss": 0.4747, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 4.346666666666667, |
|
"grad_norm": 1.8203125, |
|
"learning_rate": 1.4736238865398765e-07, |
|
"loss": 0.6023, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 4.355555555555555, |
|
"grad_norm": 2.40625, |
|
"learning_rate": 1.2179748700879012e-07, |
|
"loss": 0.6376, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.364444444444445, |
|
"grad_norm": 2.1875, |
|
"learning_rate": 9.866357858642205e-08, |
|
"loss": 0.5588, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 4.373333333333333, |
|
"grad_norm": 2.0, |
|
"learning_rate": 7.796179090094891e-08, |
|
"loss": 0.6635, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 4.3822222222222225, |
|
"grad_norm": 2.109375, |
|
"learning_rate": 5.969313292830125e-08, |
|
"loss": 0.5795, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 4.391111111111111, |
|
"grad_norm": 1.7734375, |
|
"learning_rate": 4.385849505708084e-08, |
|
"loss": 0.5087, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"grad_norm": 2.265625, |
|
"learning_rate": 3.04586490452119e-08, |
|
"loss": 0.5711, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.408888888888889, |
|
"grad_norm": 2.46875, |
|
"learning_rate": 1.949424798228239e-08, |
|
"loss": 0.6284, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 4.417777777777777, |
|
"grad_norm": 2.5, |
|
"learning_rate": 1.096582625772502e-08, |
|
"loss": 0.7002, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 4.426666666666667, |
|
"grad_norm": 2.046875, |
|
"learning_rate": 4.873799534788059e-09, |
|
"loss": 0.5831, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 4.435555555555555, |
|
"grad_norm": 1.9609375, |
|
"learning_rate": 1.2184647302626583e-09, |
|
"loss": 0.5923, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"grad_norm": 2.15625, |
|
"learning_rate": 0.0, |
|
"loss": 0.6272, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"eval_loss": 0.996656596660614, |
|
"eval_runtime": 10.8023, |
|
"eval_samples_per_second": 9.257, |
|
"eval_steps_per_second": 9.257, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.444444444444445, |
|
"step": 500, |
|
"total_flos": 2.953325949045965e+16, |
|
"train_loss": 0.8754597247838974, |
|
"train_runtime": 1354.7094, |
|
"train_samples_per_second": 2.953, |
|
"train_steps_per_second": 0.369 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 500, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 5, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.953325949045965e+16, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|