{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.7575757575757576,
  "eval_steps": 38,
  "global_step": 225,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003367003367003367,
      "grad_norm": 2.0,
      "learning_rate": 2.2727272727272729e-07,
      "loss": 1.0833,
      "step": 1
    },
    {
      "epoch": 0.003367003367003367,
      "eval_loss": 1.032954454421997,
      "eval_runtime": 8.7985,
      "eval_samples_per_second": 56.828,
      "eval_steps_per_second": 3.637,
      "step": 1
    },
    {
      "epoch": 0.006734006734006734,
      "grad_norm": 2.265625,
      "learning_rate": 4.5454545454545457e-07,
      "loss": 0.996,
      "step": 2
    },
    {
      "epoch": 0.010101010101010102,
      "grad_norm": 1.640625,
      "learning_rate": 6.818181818181818e-07,
      "loss": 1.0489,
      "step": 3
    },
    {
      "epoch": 0.013468013468013467,
      "grad_norm": 2.0625,
      "learning_rate": 9.090909090909091e-07,
      "loss": 1.0546,
      "step": 4
    },
    {
      "epoch": 0.016835016835016835,
      "grad_norm": 2.453125,
      "learning_rate": 1.1363636363636364e-06,
      "loss": 1.0341,
      "step": 5
    },
    {
      "epoch": 0.020202020202020204,
      "grad_norm": 1.9921875,
      "learning_rate": 1.3636363636363636e-06,
      "loss": 1.0136,
      "step": 6
    },
    {
      "epoch": 0.02356902356902357,
      "grad_norm": 2.640625,
      "learning_rate": 1.590909090909091e-06,
      "loss": 1.0305,
      "step": 7
    },
    {
      "epoch": 0.026936026936026935,
      "grad_norm": 2.359375,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 1.0226,
      "step": 8
    },
    {
      "epoch": 0.030303030303030304,
      "grad_norm": 2.3125,
      "learning_rate": 2.0454545454545457e-06,
      "loss": 1.0745,
      "step": 9
    },
    {
      "epoch": 0.03367003367003367,
      "grad_norm": 2.359375,
      "learning_rate": 2.2727272727272728e-06,
      "loss": 1.0232,
      "step": 10
    },
    {
      "epoch": 0.037037037037037035,
      "grad_norm": 2.53125,
      "learning_rate": 2.5e-06,
      "loss": 1.0106,
      "step": 11
    },
    {
      "epoch": 0.04040404040404041,
      "grad_norm": 2.125,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 1.0541,
      "step": 12
    },
    {
      "epoch": 0.04377104377104377,
      "grad_norm": 2.15625,
      "learning_rate": 2.954545454545455e-06,
      "loss": 1.0693,
      "step": 13
    },
    {
      "epoch": 0.04713804713804714,
      "grad_norm": 1.8359375,
      "learning_rate": 3.181818181818182e-06,
      "loss": 1.0338,
      "step": 14
    },
    {
      "epoch": 0.050505050505050504,
      "grad_norm": 2.0,
      "learning_rate": 3.409090909090909e-06,
      "loss": 1.0464,
      "step": 15
    },
    {
      "epoch": 0.05387205387205387,
      "grad_norm": 2.34375,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 1.0684,
      "step": 16
    },
    {
      "epoch": 0.05723905723905724,
      "grad_norm": 1.8125,
      "learning_rate": 3.863636363636364e-06,
      "loss": 1.0117,
      "step": 17
    },
    {
      "epoch": 0.06060606060606061,
      "grad_norm": 1.90625,
      "learning_rate": 4.0909090909090915e-06,
      "loss": 1.0138,
      "step": 18
    },
    {
      "epoch": 0.06397306397306397,
      "grad_norm": 1.59375,
      "learning_rate": 4.3181818181818185e-06,
      "loss": 0.9873,
      "step": 19
    },
    {
      "epoch": 0.06734006734006734,
      "grad_norm": 1.59375,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 1.0524,
      "step": 20
    },
    {
      "epoch": 0.0707070707070707,
      "grad_norm": 1.3984375,
      "learning_rate": 4.772727272727273e-06,
      "loss": 1.0479,
      "step": 21
    },
    {
      "epoch": 0.07407407407407407,
      "grad_norm": 1.609375,
      "learning_rate": 5e-06,
      "loss": 0.9894,
      "step": 22
    },
    {
      "epoch": 0.07744107744107744,
      "grad_norm": 1.3671875,
      "learning_rate": 5.2272727272727274e-06,
      "loss": 0.983,
      "step": 23
    },
    {
      "epoch": 0.08080808080808081,
      "grad_norm": 1.6328125,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 1.0563,
      "step": 24
    },
    {
      "epoch": 0.08417508417508418,
      "grad_norm": 1.34375,
      "learning_rate": 5.681818181818183e-06,
      "loss": 0.9966,
      "step": 25
    },
    {
      "epoch": 0.08754208754208755,
      "grad_norm": 1.25,
      "learning_rate": 5.90909090909091e-06,
      "loss": 0.9922,
      "step": 26
    },
    {
      "epoch": 0.09090909090909091,
      "grad_norm": 1.3984375,
      "learning_rate": 6.136363636363637e-06,
      "loss": 1.0677,
      "step": 27
    },
    {
      "epoch": 0.09427609427609428,
      "grad_norm": 1.3828125,
      "learning_rate": 6.363636363636364e-06,
      "loss": 1.0325,
      "step": 28
    },
    {
      "epoch": 0.09764309764309764,
      "grad_norm": 1.1953125,
      "learning_rate": 6.590909090909091e-06,
      "loss": 1.0159,
      "step": 29
    },
    {
      "epoch": 0.10101010101010101,
      "grad_norm": 1.3125,
      "learning_rate": 6.818181818181818e-06,
      "loss": 1.0384,
      "step": 30
    },
    {
      "epoch": 0.10437710437710437,
      "grad_norm": 1.1171875,
      "learning_rate": 7.045454545454546e-06,
      "loss": 0.9949,
      "step": 31
    },
    {
      "epoch": 0.10774410774410774,
      "grad_norm": 1.2421875,
      "learning_rate": 7.272727272727273e-06,
      "loss": 1.0284,
      "step": 32
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 1.15625,
      "learning_rate": 7.500000000000001e-06,
      "loss": 1.0245,
      "step": 33
    },
    {
      "epoch": 0.11447811447811448,
      "grad_norm": 1.1875,
      "learning_rate": 7.727272727272727e-06,
      "loss": 1.0103,
      "step": 34
    },
    {
      "epoch": 0.11784511784511785,
      "grad_norm": 1.125,
      "learning_rate": 7.954545454545455e-06,
      "loss": 1.0129,
      "step": 35
    },
    {
      "epoch": 0.12121212121212122,
      "grad_norm": 1.03125,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.965,
      "step": 36
    },
    {
      "epoch": 0.12457912457912458,
      "grad_norm": 1.109375,
      "learning_rate": 8.40909090909091e-06,
      "loss": 1.0027,
      "step": 37
    },
    {
      "epoch": 0.12794612794612795,
      "grad_norm": 1.0859375,
      "learning_rate": 8.636363636363637e-06,
      "loss": 1.0093,
      "step": 38
    },
    {
      "epoch": 0.12794612794612795,
      "eval_loss": 0.9909602999687195,
      "eval_runtime": 8.9064,
      "eval_samples_per_second": 56.139,
      "eval_steps_per_second": 3.593,
      "step": 38
    },
    {
      "epoch": 0.13131313131313133,
      "grad_norm": 0.96484375,
      "learning_rate": 8.863636363636365e-06,
      "loss": 0.98,
      "step": 39
    },
    {
      "epoch": 0.13468013468013468,
      "grad_norm": 1.0078125,
      "learning_rate": 9.090909090909091e-06,
      "loss": 1.0,
      "step": 40
    },
    {
      "epoch": 0.13804713804713806,
      "grad_norm": 1.109375,
      "learning_rate": 9.318181818181819e-06,
      "loss": 0.9658,
      "step": 41
    },
    {
      "epoch": 0.1414141414141414,
      "grad_norm": 0.95703125,
      "learning_rate": 9.545454545454547e-06,
      "loss": 0.9656,
      "step": 42
    },
    {
      "epoch": 0.1447811447811448,
      "grad_norm": 1.0,
      "learning_rate": 9.772727272727273e-06,
      "loss": 0.9528,
      "step": 43
    },
    {
      "epoch": 0.14814814814814814,
      "grad_norm": 0.88671875,
      "learning_rate": 1e-05,
      "loss": 0.9546,
      "step": 44
    },
    {
      "epoch": 0.15151515151515152,
      "grad_norm": 0.984375,
      "learning_rate": 9.999965606811854e-06,
      "loss": 0.9406,
      "step": 45
    },
    {
      "epoch": 0.15488215488215487,
      "grad_norm": 1.0625,
      "learning_rate": 9.99986242772057e-06,
      "loss": 0.9975,
      "step": 46
    },
    {
      "epoch": 0.15824915824915825,
      "grad_norm": 0.94140625,
      "learning_rate": 9.99969046414561e-06,
      "loss": 0.9567,
      "step": 47
    },
    {
      "epoch": 0.16161616161616163,
      "grad_norm": 0.96484375,
      "learning_rate": 9.999449718452724e-06,
      "loss": 0.9707,
      "step": 48
    },
    {
      "epoch": 0.16498316498316498,
      "grad_norm": 1.0,
      "learning_rate": 9.999140193953923e-06,
      "loss": 0.9559,
      "step": 49
    },
    {
      "epoch": 0.16835016835016836,
      "grad_norm": 0.99609375,
      "learning_rate": 9.998761894907414e-06,
      "loss": 0.9398,
      "step": 50
    },
    {
      "epoch": 0.1717171717171717,
      "grad_norm": 0.83203125,
      "learning_rate": 9.998314826517564e-06,
      "loss": 0.9329,
      "step": 51
    },
    {
      "epoch": 0.1750841750841751,
      "grad_norm": 0.91015625,
      "learning_rate": 9.997798994934812e-06,
      "loss": 0.9478,
      "step": 52
    },
    {
      "epoch": 0.17845117845117844,
      "grad_norm": 0.94140625,
      "learning_rate": 9.997214407255602e-06,
      "loss": 0.9062,
      "step": 53
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 0.83203125,
      "learning_rate": 9.996561071522264e-06,
      "loss": 0.9089,
      "step": 54
    },
    {
      "epoch": 0.18518518518518517,
      "grad_norm": 1.0078125,
      "learning_rate": 9.995838996722916e-06,
      "loss": 0.9561,
      "step": 55
    },
    {
      "epoch": 0.18855218855218855,
      "grad_norm": 0.796875,
      "learning_rate": 9.995048192791341e-06,
      "loss": 0.8948,
      "step": 56
    },
    {
      "epoch": 0.1919191919191919,
      "grad_norm": 0.91796875,
      "learning_rate": 9.994188670606845e-06,
      "loss": 0.9095,
      "step": 57
    },
    {
      "epoch": 0.19528619528619529,
      "grad_norm": 0.8515625,
      "learning_rate": 9.993260441994116e-06,
      "loss": 0.9137,
      "step": 58
    },
    {
      "epoch": 0.19865319865319866,
      "grad_norm": 0.8671875,
      "learning_rate": 9.992263519723046e-06,
      "loss": 0.9012,
      "step": 59
    },
    {
      "epoch": 0.20202020202020202,
      "grad_norm": 0.8046875,
      "learning_rate": 9.99119791750857e-06,
      "loss": 0.9074,
      "step": 60
    },
    {
      "epoch": 0.2053872053872054,
      "grad_norm": 1.0859375,
      "learning_rate": 9.990063650010473e-06,
      "loss": 0.9106,
      "step": 61
    },
    {
      "epoch": 0.20875420875420875,
      "grad_norm": 1.0625,
      "learning_rate": 9.988860732833183e-06,
      "loss": 0.9012,
      "step": 62
    },
    {
      "epoch": 0.21212121212121213,
      "grad_norm": 0.93359375,
      "learning_rate": 9.987589182525562e-06,
      "loss": 0.8928,
      "step": 63
    },
    {
      "epoch": 0.21548821548821548,
      "grad_norm": 0.921875,
      "learning_rate": 9.98624901658068e-06,
      "loss": 0.9487,
      "step": 64
    },
    {
      "epoch": 0.21885521885521886,
      "grad_norm": 0.92578125,
      "learning_rate": 9.984840253435569e-06,
      "loss": 0.8887,
      "step": 65
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 0.8671875,
      "learning_rate": 9.983362912470967e-06,
      "loss": 0.9152,
      "step": 66
    },
    {
      "epoch": 0.2255892255892256,
      "grad_norm": 0.83984375,
      "learning_rate": 9.981817014011066e-06,
      "loss": 0.8849,
      "step": 67
    },
    {
      "epoch": 0.22895622895622897,
      "grad_norm": 0.94921875,
      "learning_rate": 9.980202579323212e-06,
      "loss": 0.9386,
      "step": 68
    },
    {
      "epoch": 0.23232323232323232,
      "grad_norm": 0.8203125,
      "learning_rate": 9.978519630617632e-06,
      "loss": 0.8651,
      "step": 69
    },
    {
      "epoch": 0.2356902356902357,
      "grad_norm": 0.7109375,
      "learning_rate": 9.976768191047109e-06,
      "loss": 0.9044,
      "step": 70
    },
    {
      "epoch": 0.23905723905723905,
      "grad_norm": 1.03125,
      "learning_rate": 9.974948284706684e-06,
      "loss": 0.8702,
      "step": 71
    },
    {
      "epoch": 0.24242424242424243,
      "grad_norm": 0.91796875,
      "learning_rate": 9.973059936633308e-06,
      "loss": 0.8748,
      "step": 72
    },
    {
      "epoch": 0.24579124579124578,
      "grad_norm": 0.66796875,
      "learning_rate": 9.971103172805504e-06,
      "loss": 0.8443,
      "step": 73
    },
    {
      "epoch": 0.24915824915824916,
      "grad_norm": 0.671875,
      "learning_rate": 9.969078020143013e-06,
      "loss": 0.8605,
      "step": 74
    },
    {
      "epoch": 0.25252525252525254,
      "grad_norm": 0.6796875,
      "learning_rate": 9.966984506506415e-06,
      "loss": 0.8661,
      "step": 75
    },
    {
      "epoch": 0.2558922558922559,
      "grad_norm": 0.953125,
      "learning_rate": 9.964822660696753e-06,
      "loss": 0.9169,
      "step": 76
    },
    {
      "epoch": 0.2558922558922559,
      "eval_loss": 0.8667835593223572,
      "eval_runtime": 8.6819,
      "eval_samples_per_second": 57.591,
      "eval_steps_per_second": 3.686,
      "step": 76
    },
    {
      "epoch": 0.25925925925925924,
      "grad_norm": 0.86328125,
      "learning_rate": 9.96259251245514e-06,
      "loss": 0.8719,
      "step": 77
    },
    {
      "epoch": 0.26262626262626265,
      "grad_norm": 0.66796875,
      "learning_rate": 9.960294092462332e-06,
      "loss": 0.8438,
      "step": 78
    },
    {
      "epoch": 0.265993265993266,
      "grad_norm": 0.66796875,
      "learning_rate": 9.957927432338332e-06,
      "loss": 0.8723,
      "step": 79
    },
    {
      "epoch": 0.26936026936026936,
      "grad_norm": 1.3828125,
      "learning_rate": 9.95549256464193e-06,
      "loss": 0.8987,
      "step": 80
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 0.69921875,
      "learning_rate": 9.952989522870275e-06,
      "loss": 0.8188,
      "step": 81
    },
    {
      "epoch": 0.2760942760942761,
      "grad_norm": 0.9375,
      "learning_rate": 9.950418341458398e-06,
      "loss": 0.8521,
      "step": 82
    },
    {
      "epoch": 0.27946127946127947,
      "grad_norm": 0.828125,
      "learning_rate": 9.947779055778752e-06,
      "loss": 0.8305,
      "step": 83
    },
    {
      "epoch": 0.2828282828282828,
      "grad_norm": 0.9453125,
      "learning_rate": 9.945071702140716e-06,
      "loss": 0.856,
      "step": 84
    },
    {
      "epoch": 0.28619528619528617,
      "grad_norm": 0.609375,
      "learning_rate": 9.9422963177901e-06,
      "loss": 0.8512,
      "step": 85
    },
    {
      "epoch": 0.2895622895622896,
      "grad_norm": 1.546875,
      "learning_rate": 9.939452940908627e-06,
      "loss": 0.818,
      "step": 86
    },
    {
      "epoch": 0.29292929292929293,
      "grad_norm": 0.640625,
      "learning_rate": 9.936541610613417e-06,
      "loss": 0.8071,
      "step": 87
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 0.82421875,
      "learning_rate": 9.933562366956445e-06,
      "loss": 0.8559,
      "step": 88
    },
    {
      "epoch": 0.2996632996632997,
      "grad_norm": 1.046875,
      "learning_rate": 9.930515250923985e-06,
      "loss": 0.8646,
      "step": 89
    },
    {
      "epoch": 0.30303030303030304,
      "grad_norm": 0.76953125,
      "learning_rate": 9.927400304436048e-06,
      "loss": 0.8179,
      "step": 90
    },
    {
      "epoch": 0.3063973063973064,
      "grad_norm": 1.09375,
      "learning_rate": 9.924217570345814e-06,
      "loss": 0.8093,
      "step": 91
    },
    {
      "epoch": 0.30976430976430974,
      "grad_norm": 0.6015625,
      "learning_rate": 9.920967092439028e-06,
      "loss": 0.8073,
      "step": 92
    },
    {
      "epoch": 0.31313131313131315,
      "grad_norm": 0.84375,
      "learning_rate": 9.917648915433413e-06,
      "loss": 0.9031,
      "step": 93
    },
    {
      "epoch": 0.3164983164983165,
      "grad_norm": 0.76171875,
      "learning_rate": 9.914263084978041e-06,
      "loss": 0.8568,
      "step": 94
    },
    {
      "epoch": 0.31986531986531985,
      "grad_norm": 0.8671875,
      "learning_rate": 9.910809647652716e-06,
      "loss": 0.7819,
      "step": 95
    },
    {
      "epoch": 0.32323232323232326,
      "grad_norm": 0.84765625,
      "learning_rate": 9.907288650967324e-06,
      "loss": 0.8236,
      "step": 96
    },
    {
      "epoch": 0.3265993265993266,
      "grad_norm": 0.70703125,
      "learning_rate": 9.903700143361185e-06,
      "loss": 0.8418,
      "step": 97
    },
    {
      "epoch": 0.32996632996632996,
      "grad_norm": 0.94921875,
      "learning_rate": 9.900044174202389e-06,
      "loss": 0.8313,
      "step": 98
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.8125,
      "learning_rate": 9.896320793787106e-06,
      "loss": 0.8158,
      "step": 99
    },
    {
      "epoch": 0.3367003367003367,
      "grad_norm": 0.76171875,
      "learning_rate": 9.892530053338909e-06,
      "loss": 0.7956,
      "step": 100
    },
    {
      "epoch": 0.3400673400673401,
      "grad_norm": 0.94921875,
      "learning_rate": 9.888672005008054e-06,
      "loss": 0.8154,
      "step": 101
    },
    {
      "epoch": 0.3434343434343434,
      "grad_norm": 0.6328125,
      "learning_rate": 9.884746701870778e-06,
      "loss": 0.8322,
      "step": 102
    },
    {
      "epoch": 0.3468013468013468,
      "grad_norm": 0.62890625,
      "learning_rate": 9.880754197928553e-06,
      "loss": 0.8133,
      "step": 103
    },
    {
      "epoch": 0.3501683501683502,
      "grad_norm": 0.76953125,
      "learning_rate": 9.876694548107358e-06,
      "loss": 0.7882,
      "step": 104
    },
    {
      "epoch": 0.35353535353535354,
      "grad_norm": 1.03125,
      "learning_rate": 9.87256780825691e-06,
      "loss": 0.8264,
      "step": 105
    },
    {
      "epoch": 0.3569023569023569,
      "grad_norm": 0.7421875,
      "learning_rate": 9.868374035149906e-06,
      "loss": 0.7894,
      "step": 106
    },
    {
      "epoch": 0.3602693602693603,
      "grad_norm": 0.6171875,
      "learning_rate": 9.864113286481237e-06,
      "loss": 0.7807,
      "step": 107
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 1.015625,
      "learning_rate": 9.859785620867197e-06,
      "loss": 0.7895,
      "step": 108
    },
    {
      "epoch": 0.367003367003367,
      "grad_norm": 1.0625,
      "learning_rate": 9.855391097844671e-06,
      "loss": 0.7936,
      "step": 109
    },
    {
      "epoch": 0.37037037037037035,
      "grad_norm": 0.8984375,
      "learning_rate": 9.850929777870324e-06,
      "loss": 0.8024,
      "step": 110
    },
    {
      "epoch": 0.37373737373737376,
      "grad_norm": 0.6640625,
      "learning_rate": 9.846401722319759e-06,
      "loss": 0.7542,
      "step": 111
    },
    {
      "epoch": 0.3771043771043771,
      "grad_norm": 1.1640625,
      "learning_rate": 9.841806993486686e-06,
      "loss": 0.7689,
      "step": 112
    },
    {
      "epoch": 0.38047138047138046,
      "grad_norm": 0.765625,
      "learning_rate": 9.837145654582055e-06,
      "loss": 0.7133,
      "step": 113
    },
    {
      "epoch": 0.3838383838383838,
      "grad_norm": 0.87109375,
      "learning_rate": 9.832417769733185e-06,
      "loss": 0.795,
      "step": 114
    },
    {
      "epoch": 0.3838383838383838,
      "eval_loss": 0.7676474452018738,
      "eval_runtime": 8.7789,
      "eval_samples_per_second": 56.955,
      "eval_steps_per_second": 3.645,
      "step": 114
    },
    {
      "epoch": 0.3872053872053872,
      "grad_norm": 0.86328125,
      "learning_rate": 9.827623403982893e-06,
      "loss": 0.7992,
      "step": 115
    },
    {
      "epoch": 0.39057239057239057,
      "grad_norm": 0.7421875,
      "learning_rate": 9.822762623288584e-06,
      "loss": 0.7678,
      "step": 116
    },
    {
      "epoch": 0.3939393939393939,
      "grad_norm": 0.85546875,
      "learning_rate": 9.81783549452136e-06,
      "loss": 0.7861,
      "step": 117
    },
    {
      "epoch": 0.39730639730639733,
      "grad_norm": 0.71484375,
      "learning_rate": 9.812842085465086e-06,
      "loss": 0.7447,
      "step": 118
    },
    {
      "epoch": 0.4006734006734007,
      "grad_norm": 0.5703125,
      "learning_rate": 9.807782464815463e-06,
      "loss": 0.7218,
      "step": 119
    },
    {
      "epoch": 0.40404040404040403,
      "grad_norm": 0.6171875,
      "learning_rate": 9.802656702179088e-06,
      "loss": 0.7637,
      "step": 120
    },
    {
      "epoch": 0.4074074074074074,
      "grad_norm": 0.54296875,
      "learning_rate": 9.797464868072489e-06,
      "loss": 0.7588,
      "step": 121
    },
    {
      "epoch": 0.4107744107744108,
      "grad_norm": 0.6171875,
      "learning_rate": 9.792207033921152e-06,
      "loss": 0.7539,
      "step": 122
    },
    {
      "epoch": 0.41414141414141414,
      "grad_norm": 0.8046875,
      "learning_rate": 9.786883272058554e-06,
      "loss": 0.7524,
      "step": 123
    },
    {
      "epoch": 0.4175084175084175,
      "grad_norm": 0.6953125,
      "learning_rate": 9.781493655725149e-06,
      "loss": 0.731,
      "step": 124
    },
    {
      "epoch": 0.4208754208754209,
      "grad_norm": 0.64453125,
      "learning_rate": 9.776038259067375e-06,
      "loss": 0.8022,
      "step": 125
    },
    {
      "epoch": 0.42424242424242425,
      "grad_norm": 0.48046875,
      "learning_rate": 9.770517157136625e-06,
      "loss": 0.7528,
      "step": 126
    },
    {
      "epoch": 0.4276094276094276,
      "grad_norm": 0.5546875,
      "learning_rate": 9.764930425888216e-06,
      "loss": 0.7037,
      "step": 127
    },
    {
      "epoch": 0.43097643097643096,
      "grad_norm": 0.546875,
      "learning_rate": 9.759278142180348e-06,
      "loss": 0.7519,
      "step": 128
    },
    {
      "epoch": 0.43434343434343436,
      "grad_norm": 0.61328125,
      "learning_rate": 9.753560383773046e-06,
      "loss": 0.8163,
      "step": 129
    },
    {
      "epoch": 0.4377104377104377,
      "grad_norm": 0.53125,
      "learning_rate": 9.747777229327085e-06,
      "loss": 0.7292,
      "step": 130
    },
    {
      "epoch": 0.44107744107744107,
      "grad_norm": 0.58203125,
      "learning_rate": 9.741928758402912e-06,
      "loss": 0.7351,
      "step": 131
    },
    {
      "epoch": 0.4444444444444444,
      "grad_norm": 0.5234375,
      "learning_rate": 9.736015051459551e-06,
      "loss": 0.7429,
      "step": 132
    },
    {
      "epoch": 0.4478114478114478,
      "grad_norm": 0.65234375,
      "learning_rate": 9.730036189853497e-06,
      "loss": 0.7616,
      "step": 133
    },
    {
      "epoch": 0.4511784511784512,
      "grad_norm": 0.53125,
      "learning_rate": 9.723992255837596e-06,
      "loss": 0.7263,
      "step": 134
    },
    {
      "epoch": 0.45454545454545453,
      "grad_norm": 0.5546875,
      "learning_rate": 9.717883332559911e-06,
      "loss": 0.7427,
      "step": 135
    },
    {
      "epoch": 0.45791245791245794,
      "grad_norm": 0.66796875,
      "learning_rate": 9.71170950406258e-06,
      "loss": 0.7261,
      "step": 136
    },
    {
      "epoch": 0.4612794612794613,
      "grad_norm": 0.58984375,
      "learning_rate": 9.705470855280661e-06,
      "loss": 0.754,
      "step": 137
    },
    {
      "epoch": 0.46464646464646464,
      "grad_norm": 0.6015625,
      "learning_rate": 9.699167472040965e-06,
      "loss": 0.7554,
      "step": 138
    },
    {
      "epoch": 0.468013468013468,
      "grad_norm": 0.5546875,
      "learning_rate": 9.692799441060868e-06,
      "loss": 0.7488,
      "step": 139
    },
    {
      "epoch": 0.4713804713804714,
      "grad_norm": 0.53125,
      "learning_rate": 9.686366849947126e-06,
      "loss": 0.7066,
      "step": 140
    },
    {
      "epoch": 0.47474747474747475,
      "grad_norm": 0.55078125,
      "learning_rate": 9.679869787194664e-06,
      "loss": 0.7818,
      "step": 141
    },
    {
      "epoch": 0.4781144781144781,
      "grad_norm": 0.4765625,
      "learning_rate": 9.673308342185366e-06,
      "loss": 0.7289,
      "step": 142
    },
    {
      "epoch": 0.48148148148148145,
      "grad_norm": 0.62109375,
      "learning_rate": 9.666682605186834e-06,
      "loss": 0.7301,
      "step": 143
    },
    {
      "epoch": 0.48484848484848486,
      "grad_norm": 0.54296875,
      "learning_rate": 9.659992667351157e-06,
      "loss": 0.7673,
      "step": 144
    },
    {
      "epoch": 0.4882154882154882,
      "grad_norm": 0.486328125,
      "learning_rate": 9.653238620713652e-06,
      "loss": 0.6884,
      "step": 145
    },
    {
      "epoch": 0.49158249158249157,
      "grad_norm": 0.6484375,
      "learning_rate": 9.646420558191596e-06,
      "loss": 0.7043,
      "step": 146
    },
    {
      "epoch": 0.494949494949495,
      "grad_norm": 0.51171875,
      "learning_rate": 9.639538573582952e-06,
      "loss": 0.7122,
      "step": 147
    },
    {
      "epoch": 0.4983164983164983,
      "grad_norm": 0.498046875,
      "learning_rate": 9.632592761565078e-06,
      "loss": 0.7232,
      "step": 148
    },
    {
      "epoch": 0.5016835016835017,
      "grad_norm": 0.5625,
      "learning_rate": 9.625583217693419e-06,
      "loss": 0.7311,
      "step": 149
    },
    {
      "epoch": 0.5050505050505051,
      "grad_norm": 0.63671875,
      "learning_rate": 9.618510038400203e-06,
      "loss": 0.7322,
      "step": 150
    },
    {
      "epoch": 0.5084175084175084,
      "grad_norm": 0.75390625,
      "learning_rate": 9.611373320993104e-06,
      "loss": 0.6983,
      "step": 151
    },
    {
      "epoch": 0.5117845117845118,
      "grad_norm": 0.53125,
      "learning_rate": 9.604173163653906e-06,
      "loss": 0.6999,
      "step": 152
    },
    {
      "epoch": 0.5117845117845118,
      "eval_loss": 0.7242563366889954,
      "eval_runtime": 8.7081,
      "eval_samples_per_second": 57.418,
      "eval_steps_per_second": 3.675,
      "step": 152
    },
    {
      "epoch": 0.5151515151515151,
      "grad_norm": 0.55859375,
      "learning_rate": 9.596909665437155e-06,
      "loss": 0.7469,
      "step": 153
    },
    {
      "epoch": 0.5185185185185185,
      "grad_norm": 0.5078125,
      "learning_rate": 9.589582926268798e-06,
      "loss": 0.7176,
      "step": 154
    },
    {
      "epoch": 0.5218855218855218,
      "grad_norm": 0.4921875,
      "learning_rate": 9.582193046944799e-06,
      "loss": 0.7455,
      "step": 155
    },
    {
      "epoch": 0.5252525252525253,
      "grad_norm": 0.51171875,
      "learning_rate": 9.574740129129767e-06,
      "loss": 0.7204,
      "step": 156
    },
    {
      "epoch": 0.5286195286195287,
      "grad_norm": 0.474609375,
      "learning_rate": 9.567224275355538e-06,
      "loss": 0.7321,
      "step": 157
    },
    {
      "epoch": 0.531986531986532,
      "grad_norm": 0.53125,
      "learning_rate": 9.559645589019787e-06,
      "loss": 0.7455,
      "step": 158
    },
    {
      "epoch": 0.5353535353535354,
      "grad_norm": 0.494140625,
      "learning_rate": 9.552004174384582e-06,
      "loss": 0.7464,
      "step": 159
    },
    {
      "epoch": 0.5387205387205387,
      "grad_norm": 0.5234375,
      "learning_rate": 9.544300136574973e-06,
      "loss": 0.7138,
      "step": 160
    },
    {
      "epoch": 0.5420875420875421,
      "grad_norm": 0.59375,
      "learning_rate": 9.536533581577526e-06,
      "loss": 0.7321,
      "step": 161
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.57421875,
      "learning_rate": 9.528704616238875e-06,
      "loss": 0.7205,
      "step": 162
    },
    {
      "epoch": 0.5488215488215489,
      "grad_norm": 0.484375,
      "learning_rate": 9.520813348264252e-06,
      "loss": 0.7107,
      "step": 163
    },
    {
      "epoch": 0.5521885521885522,
      "grad_norm": 0.455078125,
      "learning_rate": 9.512859886216003e-06,
      "loss": 0.6975,
      "step": 164
    },
    {
      "epoch": 0.5555555555555556,
      "grad_norm": 0.46875,
      "learning_rate": 9.504844339512096e-06,
      "loss": 0.7209,
      "step": 165
    },
    {
      "epoch": 0.5589225589225589,
      "grad_norm": 0.50390625,
      "learning_rate": 9.496766818424612e-06,
      "loss": 0.6932,
      "step": 166
    },
    {
      "epoch": 0.5622895622895623,
      "grad_norm": 0.69140625,
      "learning_rate": 9.488627434078232e-06,
      "loss": 0.6676,
      "step": 167
    },
    {
      "epoch": 0.5656565656565656,
      "grad_norm": 0.443359375,
      "learning_rate": 9.480426298448706e-06,
      "loss": 0.6909,
      "step": 168
    },
    {
      "epoch": 0.569023569023569,
      "grad_norm": 0.4609375,
      "learning_rate": 9.472163524361317e-06,
      "loss": 0.7142,
      "step": 169
    },
    {
      "epoch": 0.5723905723905723,
      "grad_norm": 0.474609375,
      "learning_rate": 9.46383922548932e-06,
      "loss": 0.7168,
      "step": 170
    },
    {
      "epoch": 0.5757575757575758,
      "grad_norm": 0.435546875,
      "learning_rate": 9.455453516352385e-06,
      "loss": 0.6618,
      "step": 171
    },
    {
      "epoch": 0.5791245791245792,
      "grad_norm": 0.45703125,
      "learning_rate": 9.447006512315025e-06,
      "loss": 0.694,
      "step": 172
    },
    {
      "epoch": 0.5824915824915825,
      "grad_norm": 0.59765625,
      "learning_rate": 9.438498329584995e-06,
      "loss": 0.6858,
      "step": 173
    },
    {
      "epoch": 0.5858585858585859,
      "grad_norm": 0.48046875,
      "learning_rate": 9.429929085211711e-06,
      "loss": 0.7419,
      "step": 174
    },
    {
      "epoch": 0.5892255892255892,
      "grad_norm": 0.4375,
      "learning_rate": 9.421298897084622e-06,
      "loss": 0.6734,
      "step": 175
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 0.494140625,
      "learning_rate": 9.412607883931608e-06,
      "loss": 0.7188,
      "step": 176
    },
    {
      "epoch": 0.5959595959595959,
      "grad_norm": 0.41796875,
      "learning_rate": 9.403856165317322e-06,
      "loss": 0.6732,
      "step": 177
    },
    {
      "epoch": 0.5993265993265994,
      "grad_norm": 0.59765625,
      "learning_rate": 9.39504386164157e-06,
      "loss": 0.6594,
      "step": 178
    },
    {
      "epoch": 0.6026936026936027,
      "grad_norm": 0.490234375,
      "learning_rate": 9.38617109413764e-06,
      "loss": 0.7153,
      "step": 179
    },
    {
      "epoch": 0.6060606060606061,
      "grad_norm": 0.60546875,
      "learning_rate": 9.377237984870636e-06,
      "loss": 0.6762,
      "step": 180
    },
    {
      "epoch": 0.6094276094276094,
      "grad_norm": 0.53125,
      "learning_rate": 9.3682446567358e-06,
      "loss": 0.6839,
      "step": 181
    },
    {
      "epoch": 0.6127946127946128,
      "grad_norm": 0.515625,
      "learning_rate": 9.359191233456821e-06,
      "loss": 0.6949,
      "step": 182
    },
    {
      "epoch": 0.6161616161616161,
      "grad_norm": 0.50390625,
      "learning_rate": 9.350077839584139e-06,
      "loss": 0.7045,
      "step": 183
    },
    {
      "epoch": 0.6195286195286195,
      "grad_norm": 0.42578125,
      "learning_rate": 9.34090460049322e-06,
      "loss": 0.668,
      "step": 184
    },
    {
      "epoch": 0.622895622895623,
      "grad_norm": 0.54296875,
      "learning_rate": 9.33167164238284e-06,
      "loss": 0.7144,
      "step": 185
    },
    {
      "epoch": 0.6262626262626263,
      "grad_norm": 0.44921875,
      "learning_rate": 9.322379092273345e-06,
      "loss": 0.7084,
      "step": 186
    },
    {
      "epoch": 0.6296296296296297,
      "grad_norm": 0.427734375,
      "learning_rate": 9.313027078004903e-06,
      "loss": 0.7555,
      "step": 187
    },
    {
      "epoch": 0.632996632996633,
      "grad_norm": 0.44140625,
      "learning_rate": 9.303615728235753e-06,
      "loss": 0.6963,
      "step": 188
    },
    {
      "epoch": 0.6363636363636364,
      "grad_norm": 0.515625,
      "learning_rate": 9.29414517244042e-06,
      "loss": 0.6785,
      "step": 189
    },
    {
      "epoch": 0.6397306397306397,
      "grad_norm": 0.5078125,
      "learning_rate": 9.284615540907947e-06,
      "loss": 0.7246,
      "step": 190
    },
    {
      "epoch": 0.6397306397306397,
      "eval_loss": 0.6989265084266663,
      "eval_runtime": 8.6934,
      "eval_samples_per_second": 57.515,
      "eval_steps_per_second": 3.681,
      "step": 190
    },
    {
      "epoch": 0.6430976430976431,
      "grad_norm": 0.54296875,
      "learning_rate": 9.275026964740101e-06,
      "loss": 0.6995,
      "step": 191
    },
    {
      "epoch": 0.6464646464646465,
      "grad_norm": 0.427734375,
      "learning_rate": 9.265379575849561e-06,
      "loss": 0.6988,
      "step": 192
    },
    {
      "epoch": 0.6498316498316499,
      "grad_norm": 0.48046875,
      "learning_rate": 9.255673506958114e-06,
      "loss": 0.6569,
      "step": 193
    },
    {
      "epoch": 0.6531986531986532,
      "grad_norm": 0.486328125,
      "learning_rate": 9.245908891594818e-06,
      "loss": 0.6645,
      "step": 194
    },
    {
      "epoch": 0.6565656565656566,
      "grad_norm": 0.58984375,
      "learning_rate": 9.236085864094178e-06,
      "loss": 0.6745,
      "step": 195
    },
    {
      "epoch": 0.6599326599326599,
      "grad_norm": 0.609375,
      "learning_rate": 9.226204559594284e-06,
      "loss": 0.7157,
      "step": 196
    },
    {
      "epoch": 0.6632996632996633,
      "grad_norm": 0.59375,
      "learning_rate": 9.216265114034964e-06,
      "loss": 0.7298,
      "step": 197
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.451171875,
      "learning_rate": 9.206267664155906e-06,
      "loss": 0.6439,
      "step": 198
    },
    {
      "epoch": 0.67003367003367,
      "grad_norm": 0.447265625,
      "learning_rate": 9.19621234749478e-06,
      "loss": 0.6719,
      "step": 199
    },
    {
      "epoch": 0.6734006734006734,
      "grad_norm": 0.41796875,
      "learning_rate": 9.186099302385345e-06,
      "loss": 0.6496,
      "step": 200
    },
    {
      "epoch": 0.6767676767676768,
      "grad_norm": 0.482421875,
      "learning_rate": 9.175928667955547e-06,
      "loss": 0.709,
      "step": 201
    },
    {
      "epoch": 0.6801346801346801,
      "grad_norm": 0.50390625,
      "learning_rate": 9.1657005841256e-06,
      "loss": 0.7054,
      "step": 202
    },
    {
      "epoch": 0.6835016835016835,
      "grad_norm": 0.455078125,
      "learning_rate": 9.155415191606074e-06,
      "loss": 0.6941,
      "step": 203
    },
    {
      "epoch": 0.6868686868686869,
      "grad_norm": 0.53515625,
      "learning_rate": 9.145072631895942e-06,
      "loss": 0.6919,
      "step": 204
    },
    {
      "epoch": 0.6902356902356902,
      "grad_norm": 0.62890625,
      "learning_rate": 9.134673047280644e-06,
      "loss": 0.7098,
      "step": 205
    },
    {
      "epoch": 0.6936026936026936,
      "grad_norm": 0.546875,
      "learning_rate": 9.12421658083013e-06,
      "loss": 0.6902,
      "step": 206
    },
    {
      "epoch": 0.696969696969697,
      "grad_norm": 0.58984375,
      "learning_rate": 9.113703376396885e-06,
      "loss": 0.7228,
      "step": 207
    },
    {
      "epoch": 0.7003367003367004,
      "grad_norm": 0.484375,
      "learning_rate": 9.103133578613959e-06,
      "loss": 0.6961,
      "step": 208
    },
    {
      "epoch": 0.7037037037037037,
      "grad_norm": 0.515625,
      "learning_rate": 9.092507332892968e-06,
      "loss": 0.6689,
      "step": 209
    },
    {
      "epoch": 0.7070707070707071,
      "grad_norm": 0.5390625,
      "learning_rate": 9.081824785422099e-06,
      "loss": 0.6552,
      "step": 210
    },
    {
      "epoch": 0.7104377104377104,
      "grad_norm": 0.462890625,
      "learning_rate": 9.071086083164099e-06,
      "loss": 0.6739,
      "step": 211
    },
    {
      "epoch": 0.7138047138047138,
      "grad_norm": 0.435546875,
      "learning_rate": 9.060291373854252e-06,
      "loss": 0.699,
      "step": 212
    },
    {
      "epoch": 0.7171717171717171,
      "grad_norm": 0.49609375,
      "learning_rate": 9.04944080599834e-06,
      "loss": 0.6964,
      "step": 213
    },
    {
      "epoch": 0.7205387205387206,
      "grad_norm": 0.47265625,
      "learning_rate": 9.038534528870618e-06,
      "loss": 0.678,
      "step": 214
    },
    {
      "epoch": 0.7239057239057239,
      "grad_norm": 0.455078125,
      "learning_rate": 9.027572692511739e-06,
      "loss": 0.6847,
      "step": 215
    },
    {
      "epoch": 0.7272727272727273,
      "grad_norm": 0.53515625,
      "learning_rate": 9.016555447726704e-06,
      "loss": 0.6811,
      "step": 216
    },
    {
      "epoch": 0.7306397306397306,
      "grad_norm": 0.482421875,
      "learning_rate": 9.005482946082784e-06,
      "loss": 0.6831,
      "step": 217
    },
    {
      "epoch": 0.734006734006734,
      "grad_norm": 0.474609375,
      "learning_rate": 8.994355339907429e-06,
      "loss": 0.6803,
      "step": 218
    },
    {
      "epoch": 0.7373737373737373,
      "grad_norm": 0.443359375,
      "learning_rate": 8.98317278228618e-06,
      "loss": 0.6639,
      "step": 219
    },
    {
      "epoch": 0.7407407407407407,
      "grad_norm": 0.474609375,
      "learning_rate": 8.971935427060563e-06,
      "loss": 0.6509,
      "step": 220
    },
    {
      "epoch": 0.7441077441077442,
      "grad_norm": 0.47265625,
      "learning_rate": 8.960643428825965e-06,
      "loss": 0.647,
      "step": 221
    },
    {
      "epoch": 0.7474747474747475,
      "grad_norm": 0.498046875,
      "learning_rate": 8.949296942929515e-06,
      "loss": 0.6686,
      "step": 222
    },
    {
      "epoch": 0.7508417508417509,
      "grad_norm": 0.5078125,
      "learning_rate": 8.937896125467941e-06,
      "loss": 0.6512,
      "step": 223
    },
    {
      "epoch": 0.7542087542087542,
      "grad_norm": 0.60546875,
      "learning_rate": 8.92644113328543e-06,
      "loss": 0.6544,
      "step": 224
    },
    {
      "epoch": 0.7575757575757576,
      "grad_norm": 0.474609375,
      "learning_rate": 8.91493212397146e-06,
      "loss": 0.7091,
      "step": 225
    }
  ],
  "logging_steps": 1,
  "max_steps": 891,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 75,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.09313017331712e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}