{
  "best_metric": 0.8594623804092407,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.5145797598627787,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003430531732418525,
      "grad_norm": 11.61568832397461,
      "learning_rate": 5e-06,
      "loss": 5.0126,
      "step": 1
    },
    {
      "epoch": 0.003430531732418525,
      "eval_loss": 1.4074695110321045,
      "eval_runtime": 40.4473,
      "eval_samples_per_second": 12.139,
      "eval_steps_per_second": 6.082,
      "step": 1
    },
    {
      "epoch": 0.00686106346483705,
      "grad_norm": 13.636929512023926,
      "learning_rate": 1e-05,
      "loss": 4.982,
      "step": 2
    },
    {
      "epoch": 0.010291595197255575,
      "grad_norm": 14.98882007598877,
      "learning_rate": 1.5e-05,
      "loss": 4.99,
      "step": 3
    },
    {
      "epoch": 0.0137221269296741,
      "grad_norm": 10.213421821594238,
      "learning_rate": 2e-05,
      "loss": 4.9476,
      "step": 4
    },
    {
      "epoch": 0.017152658662092625,
      "grad_norm": 8.086559295654297,
      "learning_rate": 2.5e-05,
      "loss": 4.9681,
      "step": 5
    },
    {
      "epoch": 0.02058319039451115,
      "grad_norm": 9.853785514831543,
      "learning_rate": 3e-05,
      "loss": 4.7214,
      "step": 6
    },
    {
      "epoch": 0.024013722126929673,
      "grad_norm": 8.948219299316406,
      "learning_rate": 3.5e-05,
      "loss": 4.8078,
      "step": 7
    },
    {
      "epoch": 0.0274442538593482,
      "grad_norm": 7.905782699584961,
      "learning_rate": 4e-05,
      "loss": 4.7025,
      "step": 8
    },
    {
      "epoch": 0.030874785591766724,
      "grad_norm": 7.111548900604248,
      "learning_rate": 4.5e-05,
      "loss": 4.6991,
      "step": 9
    },
    {
      "epoch": 0.03430531732418525,
      "grad_norm": 7.046090602874756,
      "learning_rate": 5e-05,
      "loss": 4.5764,
      "step": 10
    },
    {
      "epoch": 0.03773584905660377,
      "grad_norm": 6.978516101837158,
      "learning_rate": 5.500000000000001e-05,
      "loss": 4.5503,
      "step": 11
    },
    {
      "epoch": 0.0411663807890223,
      "grad_norm": 6.4846320152282715,
      "learning_rate": 6e-05,
      "loss": 4.2789,
      "step": 12
    },
    {
      "epoch": 0.044596912521440824,
      "grad_norm": 6.363194942474365,
      "learning_rate": 6.500000000000001e-05,
      "loss": 4.2641,
      "step": 13
    },
    {
      "epoch": 0.048027444253859346,
      "grad_norm": 7.184693336486816,
      "learning_rate": 7e-05,
      "loss": 4.4203,
      "step": 14
    },
    {
      "epoch": 0.051457975986277875,
      "grad_norm": 6.728307723999023,
      "learning_rate": 7.500000000000001e-05,
      "loss": 4.5207,
      "step": 15
    },
    {
      "epoch": 0.0548885077186964,
      "grad_norm": 5.945248126983643,
      "learning_rate": 8e-05,
      "loss": 4.2506,
      "step": 16
    },
    {
      "epoch": 0.058319039451114926,
      "grad_norm": 6.138426303863525,
      "learning_rate": 8.5e-05,
      "loss": 4.3399,
      "step": 17
    },
    {
      "epoch": 0.06174957118353345,
      "grad_norm": 6.568054676055908,
      "learning_rate": 9e-05,
      "loss": 4.3236,
      "step": 18
    },
    {
      "epoch": 0.06518010291595197,
      "grad_norm": 5.957953453063965,
      "learning_rate": 9.5e-05,
      "loss": 4.2521,
      "step": 19
    },
    {
      "epoch": 0.0686106346483705,
      "grad_norm": 6.30268669128418,
      "learning_rate": 0.0001,
      "loss": 4.3019,
      "step": 20
    },
    {
      "epoch": 0.07204116638078903,
      "grad_norm": 5.6199493408203125,
      "learning_rate": 9.999238475781957e-05,
      "loss": 3.9036,
      "step": 21
    },
    {
      "epoch": 0.07547169811320754,
      "grad_norm": 6.134159564971924,
      "learning_rate": 9.99695413509548e-05,
      "loss": 4.1892,
      "step": 22
    },
    {
      "epoch": 0.07890222984562607,
      "grad_norm": 5.793374538421631,
      "learning_rate": 9.99314767377287e-05,
      "loss": 4.14,
      "step": 23
    },
    {
      "epoch": 0.0823327615780446,
      "grad_norm": 6.174600124359131,
      "learning_rate": 9.987820251299122e-05,
      "loss": 3.9926,
      "step": 24
    },
    {
      "epoch": 0.08576329331046312,
      "grad_norm": 6.1931023597717285,
      "learning_rate": 9.980973490458728e-05,
      "loss": 4.3605,
      "step": 25
    },
    {
      "epoch": 0.08919382504288165,
      "grad_norm": 5.781077861785889,
      "learning_rate": 9.972609476841367e-05,
      "loss": 4.0821,
      "step": 26
    },
    {
      "epoch": 0.09262435677530018,
      "grad_norm": 5.663956165313721,
      "learning_rate": 9.962730758206611e-05,
      "loss": 3.8834,
      "step": 27
    },
    {
      "epoch": 0.09605488850771869,
      "grad_norm": 5.542593479156494,
      "learning_rate": 9.951340343707852e-05,
      "loss": 4.05,
      "step": 28
    },
    {
      "epoch": 0.09948542024013722,
      "grad_norm": 5.556862831115723,
      "learning_rate": 9.938441702975689e-05,
      "loss": 3.7908,
      "step": 29
    },
    {
      "epoch": 0.10291595197255575,
      "grad_norm": 5.6052093505859375,
      "learning_rate": 9.924038765061042e-05,
      "loss": 3.9222,
      "step": 30
    },
    {
      "epoch": 0.10634648370497427,
      "grad_norm": 5.743956565856934,
      "learning_rate": 9.908135917238321e-05,
      "loss": 4.0824,
      "step": 31
    },
    {
      "epoch": 0.1097770154373928,
      "grad_norm": 5.386566162109375,
      "learning_rate": 9.890738003669029e-05,
      "loss": 3.8401,
      "step": 32
    },
    {
      "epoch": 0.11320754716981132,
      "grad_norm": 5.688109874725342,
      "learning_rate": 9.871850323926177e-05,
      "loss": 3.7746,
      "step": 33
    },
    {
      "epoch": 0.11663807890222985,
      "grad_norm": 5.799955368041992,
      "learning_rate": 9.851478631379982e-05,
      "loss": 3.8266,
      "step": 34
    },
    {
      "epoch": 0.12006861063464837,
      "grad_norm": 5.360304832458496,
      "learning_rate": 9.829629131445342e-05,
      "loss": 3.5762,
      "step": 35
    },
    {
      "epoch": 0.1234991423670669,
      "grad_norm": 6.067436695098877,
      "learning_rate": 9.806308479691595e-05,
      "loss": 3.8938,
      "step": 36
    },
    {
      "epoch": 0.1269296740994854,
      "grad_norm": 6.037448406219482,
      "learning_rate": 9.781523779815179e-05,
      "loss": 3.677,
      "step": 37
    },
    {
      "epoch": 0.13036020583190394,
      "grad_norm": 6.000154972076416,
      "learning_rate": 9.755282581475769e-05,
      "loss": 4.0107,
      "step": 38
    },
    {
      "epoch": 0.13379073756432247,
      "grad_norm": 6.464019775390625,
      "learning_rate": 9.727592877996585e-05,
      "loss": 4.0673,
      "step": 39
    },
    {
      "epoch": 0.137221269296741,
      "grad_norm": 5.797187805175781,
      "learning_rate": 9.698463103929542e-05,
      "loss": 3.724,
      "step": 40
    },
    {
      "epoch": 0.14065180102915953,
      "grad_norm": 6.833194732666016,
      "learning_rate": 9.667902132486009e-05,
      "loss": 4.3818,
      "step": 41
    },
    {
      "epoch": 0.14408233276157806,
      "grad_norm": 5.868514060974121,
      "learning_rate": 9.635919272833938e-05,
      "loss": 3.9079,
      "step": 42
    },
    {
      "epoch": 0.14751286449399656,
      "grad_norm": 6.458984851837158,
      "learning_rate": 9.602524267262203e-05,
      "loss": 3.9524,
      "step": 43
    },
    {
      "epoch": 0.1509433962264151,
      "grad_norm": 6.594856262207031,
      "learning_rate": 9.567727288213005e-05,
      "loss": 3.6577,
      "step": 44
    },
    {
      "epoch": 0.15437392795883362,
      "grad_norm": 7.024727821350098,
      "learning_rate": 9.53153893518325e-05,
      "loss": 3.9407,
      "step": 45
    },
    {
      "epoch": 0.15780445969125215,
      "grad_norm": 7.5185136795043945,
      "learning_rate": 9.493970231495835e-05,
      "loss": 4.1704,
      "step": 46
    },
    {
      "epoch": 0.16123499142367068,
      "grad_norm": 7.434786319732666,
      "learning_rate": 9.45503262094184e-05,
      "loss": 3.7691,
      "step": 47
    },
    {
      "epoch": 0.1646655231560892,
      "grad_norm": 7.97662353515625,
      "learning_rate": 9.414737964294636e-05,
      "loss": 4.1047,
      "step": 48
    },
    {
      "epoch": 0.1680960548885077,
      "grad_norm": 7.469397068023682,
      "learning_rate": 9.373098535696979e-05,
      "loss": 4.0323,
      "step": 49
    },
    {
      "epoch": 0.17152658662092624,
      "grad_norm": 9.097603797912598,
      "learning_rate": 9.330127018922194e-05,
      "loss": 4.0814,
      "step": 50
    },
    {
      "epoch": 0.17152658662092624,
      "eval_loss": 1.0552418231964111,
      "eval_runtime": 41.0958,
      "eval_samples_per_second": 11.948,
      "eval_steps_per_second": 5.986,
      "step": 50
    },
    {
      "epoch": 0.17495711835334476,
      "grad_norm": 7.74439811706543,
      "learning_rate": 9.285836503510562e-05,
      "loss": 4.8988,
      "step": 51
    },
    {
      "epoch": 0.1783876500857633,
      "grad_norm": 5.500019073486328,
      "learning_rate": 9.24024048078213e-05,
      "loss": 4.8596,
      "step": 52
    },
    {
      "epoch": 0.18181818181818182,
      "grad_norm": 3.838832378387451,
      "learning_rate": 9.193352839727121e-05,
      "loss": 4.4589,
      "step": 53
    },
    {
      "epoch": 0.18524871355060035,
      "grad_norm": 3.795142650604248,
      "learning_rate": 9.145187862775209e-05,
      "loss": 4.2005,
      "step": 54
    },
    {
      "epoch": 0.18867924528301888,
      "grad_norm": 3.751390218734741,
      "learning_rate": 9.09576022144496e-05,
      "loss": 4.1903,
      "step": 55
    },
    {
      "epoch": 0.19210977701543738,
      "grad_norm": 3.823895215988159,
      "learning_rate": 9.045084971874738e-05,
      "loss": 3.9744,
      "step": 56
    },
    {
      "epoch": 0.1955403087478559,
      "grad_norm": 3.9442529678344727,
      "learning_rate": 8.993177550236464e-05,
      "loss": 4.3034,
      "step": 57
    },
    {
      "epoch": 0.19897084048027444,
      "grad_norm": 4.070255279541016,
      "learning_rate": 8.940053768033609e-05,
      "loss": 4.0883,
      "step": 58
    },
    {
      "epoch": 0.20240137221269297,
      "grad_norm": 3.7813539505004883,
      "learning_rate": 8.885729807284856e-05,
      "loss": 3.9695,
      "step": 59
    },
    {
      "epoch": 0.2058319039451115,
      "grad_norm": 3.8070478439331055,
      "learning_rate": 8.83022221559489e-05,
      "loss": 3.7485,
      "step": 60
    },
    {
      "epoch": 0.20926243567753003,
      "grad_norm": 3.9176182746887207,
      "learning_rate": 8.773547901113862e-05,
      "loss": 4.011,
      "step": 61
    },
    {
      "epoch": 0.21269296740994853,
      "grad_norm": 4.07338809967041,
      "learning_rate": 8.715724127386972e-05,
      "loss": 4.0232,
      "step": 62
    },
    {
      "epoch": 0.21612349914236706,
      "grad_norm": 4.0383782386779785,
      "learning_rate": 8.656768508095853e-05,
      "loss": 3.978,
      "step": 63
    },
    {
      "epoch": 0.2195540308747856,
      "grad_norm": 3.6648452281951904,
      "learning_rate": 8.596699001693255e-05,
      "loss": 3.4885,
      "step": 64
    },
    {
      "epoch": 0.22298456260720412,
      "grad_norm": 3.8878605365753174,
      "learning_rate": 8.535533905932738e-05,
      "loss": 3.8533,
      "step": 65
    },
    {
      "epoch": 0.22641509433962265,
      "grad_norm": 4.185667991638184,
      "learning_rate": 8.473291852294987e-05,
      "loss": 3.6215,
      "step": 66
    },
    {
      "epoch": 0.22984562607204118,
      "grad_norm": 4.071247100830078,
      "learning_rate": 8.409991800312493e-05,
      "loss": 3.531,
      "step": 67
    },
    {
      "epoch": 0.2332761578044597,
      "grad_norm": 4.344533443450928,
      "learning_rate": 8.345653031794292e-05,
      "loss": 3.7186,
      "step": 68
    },
    {
      "epoch": 0.2367066895368782,
      "grad_norm": 4.426985740661621,
      "learning_rate": 8.280295144952536e-05,
      "loss": 3.6128,
      "step": 69
    },
    {
      "epoch": 0.24013722126929674,
      "grad_norm": 4.162020206451416,
      "learning_rate": 8.213938048432697e-05,
      "loss": 3.3318,
      "step": 70
    },
    {
      "epoch": 0.24356775300171526,
      "grad_norm": 4.368765354156494,
      "learning_rate": 8.146601955249188e-05,
      "loss": 3.4157,
      "step": 71
    },
    {
      "epoch": 0.2469982847341338,
      "grad_norm": 4.309731960296631,
      "learning_rate": 8.07830737662829e-05,
      "loss": 3.5808,
      "step": 72
    },
    {
      "epoch": 0.2504288164665523,
      "grad_norm": 4.442713260650635,
      "learning_rate": 8.009075115760243e-05,
      "loss": 3.4605,
      "step": 73
    },
    {
      "epoch": 0.2538593481989708,
      "grad_norm": 4.398868083953857,
      "learning_rate": 7.938926261462366e-05,
      "loss": 3.6249,
      "step": 74
    },
    {
      "epoch": 0.25728987993138935,
      "grad_norm": 4.782496929168701,
      "learning_rate": 7.86788218175523e-05,
      "loss": 3.7258,
      "step": 75
    },
    {
      "epoch": 0.2607204116638079,
      "grad_norm": 4.372551918029785,
      "learning_rate": 7.795964517353735e-05,
      "loss": 3.7134,
      "step": 76
    },
    {
      "epoch": 0.2641509433962264,
      "grad_norm": 4.290441989898682,
      "learning_rate": 7.723195175075136e-05,
      "loss": 3.7541,
      "step": 77
    },
    {
      "epoch": 0.26758147512864494,
      "grad_norm": 4.507718086242676,
      "learning_rate": 7.649596321166024e-05,
      "loss": 3.9408,
      "step": 78
    },
    {
      "epoch": 0.27101200686106347,
      "grad_norm": 4.117275714874268,
      "learning_rate": 7.575190374550272e-05,
      "loss": 3.4982,
      "step": 79
    },
    {
      "epoch": 0.274442538593482,
      "grad_norm": 4.557469844818115,
      "learning_rate": 7.500000000000001e-05,
      "loss": 3.7516,
      "step": 80
    },
    {
      "epoch": 0.27787307032590053,
      "grad_norm": 5.15295934677124,
      "learning_rate": 7.424048101231686e-05,
      "loss": 3.5183,
      "step": 81
    },
    {
      "epoch": 0.28130360205831906,
      "grad_norm": 4.726450443267822,
      "learning_rate": 7.347357813929454e-05,
      "loss": 3.526,
      "step": 82
    },
    {
      "epoch": 0.2847341337907376,
      "grad_norm": 4.847685813903809,
      "learning_rate": 7.269952498697734e-05,
      "loss": 3.858,
      "step": 83
    },
    {
      "epoch": 0.2881646655231561,
      "grad_norm": 4.568885803222656,
      "learning_rate": 7.191855733945387e-05,
      "loss": 3.6064,
      "step": 84
    },
    {
      "epoch": 0.2915951972555746,
      "grad_norm": 4.916790962219238,
      "learning_rate": 7.113091308703498e-05,
      "loss": 3.383,
      "step": 85
    },
    {
      "epoch": 0.2950257289879931,
      "grad_norm": 5.058016777038574,
      "learning_rate": 7.033683215379002e-05,
      "loss": 3.8242,
      "step": 86
    },
    {
      "epoch": 0.29845626072041165,
      "grad_norm": 4.883471965789795,
      "learning_rate": 6.953655642446368e-05,
      "loss": 3.9667,
      "step": 87
    },
    {
      "epoch": 0.3018867924528302,
      "grad_norm": 4.773396968841553,
      "learning_rate": 6.873032967079561e-05,
      "loss": 3.5179,
      "step": 88
    },
    {
      "epoch": 0.3053173241852487,
      "grad_norm": 5.102062225341797,
      "learning_rate": 6.7918397477265e-05,
      "loss": 3.7781,
      "step": 89
    },
    {
      "epoch": 0.30874785591766724,
      "grad_norm": 4.849315643310547,
      "learning_rate": 6.710100716628344e-05,
      "loss": 3.59,
      "step": 90
    },
    {
      "epoch": 0.31217838765008576,
      "grad_norm": 5.05657958984375,
      "learning_rate": 6.627840772285784e-05,
      "loss": 3.8574,
      "step": 91
    },
    {
      "epoch": 0.3156089193825043,
      "grad_norm": 5.094900608062744,
      "learning_rate": 6.545084971874738e-05,
      "loss": 3.6297,
      "step": 92
    },
    {
      "epoch": 0.3190394511149228,
      "grad_norm": 5.497709274291992,
      "learning_rate": 6.461858523613684e-05,
      "loss": 3.4883,
      "step": 93
    },
    {
      "epoch": 0.32246998284734135,
      "grad_norm": 5.846022605895996,
      "learning_rate": 6.378186779084995e-05,
      "loss": 3.5978,
      "step": 94
    },
    {
      "epoch": 0.3259005145797599,
      "grad_norm": 5.982120990753174,
      "learning_rate": 6.294095225512603e-05,
      "loss": 3.7544,
      "step": 95
    },
    {
      "epoch": 0.3293310463121784,
      "grad_norm": 5.516687870025635,
      "learning_rate": 6.209609477998338e-05,
      "loss": 3.3577,
      "step": 96
    },
    {
      "epoch": 0.33276157804459694,
      "grad_norm": 5.789833068847656,
      "learning_rate": 6.124755271719325e-05,
      "loss": 3.3941,
      "step": 97
    },
    {
      "epoch": 0.3361921097770154,
      "grad_norm": 5.645137310028076,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 3.3852,
      "step": 98
    },
    {
      "epoch": 0.33962264150943394,
      "grad_norm": 7.209895610809326,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 3.7848,
      "step": 99
    },
    {
      "epoch": 0.34305317324185247,
      "grad_norm": 9.69320011138916,
      "learning_rate": 5.868240888334653e-05,
      "loss": 3.4986,
      "step": 100
    },
    {
      "epoch": 0.34305317324185247,
      "eval_loss": 0.9467170834541321,
      "eval_runtime": 41.1492,
      "eval_samples_per_second": 11.932,
      "eval_steps_per_second": 5.978,
      "step": 100
    },
    {
      "epoch": 0.346483704974271,
      "grad_norm": 6.344611644744873,
      "learning_rate": 5.782172325201155e-05,
      "loss": 4.4591,
      "step": 101
    },
    {
      "epoch": 0.34991423670668953,
      "grad_norm": 5.29261589050293,
      "learning_rate": 5.695865504800327e-05,
      "loss": 4.3903,
      "step": 102
    },
    {
      "epoch": 0.35334476843910806,
      "grad_norm": 3.7538249492645264,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 4.1005,
      "step": 103
    },
    {
      "epoch": 0.3567753001715266,
      "grad_norm": 3.1446287631988525,
      "learning_rate": 5.522642316338268e-05,
      "loss": 3.7796,
      "step": 104
    },
    {
      "epoch": 0.3602058319039451,
      "grad_norm": 2.960186719894409,
      "learning_rate": 5.435778713738292e-05,
      "loss": 3.916,
      "step": 105
    },
    {
      "epoch": 0.36363636363636365,
      "grad_norm": 3.2721056938171387,
      "learning_rate": 5.348782368720626e-05,
      "loss": 3.9345,
      "step": 106
    },
    {
      "epoch": 0.3670668953687822,
      "grad_norm": 3.109593629837036,
      "learning_rate": 5.26167978121472e-05,
      "loss": 3.6805,
      "step": 107
    },
    {
      "epoch": 0.3704974271012007,
      "grad_norm": 3.3663551807403564,
      "learning_rate": 5.174497483512506e-05,
      "loss": 4.1047,
      "step": 108
    },
    {
      "epoch": 0.37392795883361923,
      "grad_norm": 3.3839170932769775,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 3.7359,
      "step": 109
    },
    {
      "epoch": 0.37735849056603776,
      "grad_norm": 3.2243165969848633,
      "learning_rate": 5e-05,
      "loss": 3.369,
      "step": 110
    },
    {
      "epoch": 0.38078902229845624,
      "grad_norm": 3.484187364578247,
      "learning_rate": 4.912737967813583e-05,
      "loss": 3.868,
      "step": 111
    },
    {
      "epoch": 0.38421955403087477,
      "grad_norm": 3.4150843620300293,
      "learning_rate": 4.825502516487497e-05,
      "loss": 3.5268,
      "step": 112
    },
    {
      "epoch": 0.3876500857632933,
      "grad_norm": 3.528571367263794,
      "learning_rate": 4.738320218785281e-05,
      "loss": 3.4223,
      "step": 113
    },
    {
      "epoch": 0.3910806174957118,
      "grad_norm": 3.6814565658569336,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 3.5602,
      "step": 114
    },
    {
      "epoch": 0.39451114922813035,
      "grad_norm": 3.4944686889648438,
      "learning_rate": 4.564221286261709e-05,
      "loss": 3.3631,
      "step": 115
    },
    {
      "epoch": 0.3979416809605489,
      "grad_norm": 3.5598247051239014,
      "learning_rate": 4.477357683661734e-05,
      "loss": 3.2197,
      "step": 116
    },
    {
      "epoch": 0.4013722126929674,
      "grad_norm": 3.637119770050049,
      "learning_rate": 4.390653282974264e-05,
      "loss": 3.6207,
      "step": 117
    },
    {
      "epoch": 0.40480274442538594,
      "grad_norm": 3.621417760848999,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 3.66,
      "step": 118
    },
    {
      "epoch": 0.40823327615780447,
      "grad_norm": 3.71958065032959,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 3.4363,
      "step": 119
    },
    {
      "epoch": 0.411663807890223,
      "grad_norm": 3.881575107574463,
      "learning_rate": 4.131759111665349e-05,
      "loss": 3.6837,
      "step": 120
    },
    {
      "epoch": 0.41509433962264153,
      "grad_norm": 3.5961995124816895,
      "learning_rate": 4.045955023117276e-05,
      "loss": 3.4126,
      "step": 121
    },
    {
      "epoch": 0.41852487135506006,
      "grad_norm": 3.635561943054199,
      "learning_rate": 3.960441545911204e-05,
      "loss": 3.4925,
      "step": 122
    },
    {
      "epoch": 0.4219554030874786,
      "grad_norm": 3.558687448501587,
      "learning_rate": 3.875244728280676e-05,
      "loss": 3.1293,
      "step": 123
    },
    {
      "epoch": 0.42538593481989706,
      "grad_norm": 3.7333993911743164,
      "learning_rate": 3.790390522001662e-05,
      "loss": 3.1551,
      "step": 124
    },
    {
      "epoch": 0.4288164665523156,
      "grad_norm": 4.045219898223877,
      "learning_rate": 3.705904774487396e-05,
      "loss": 3.5025,
      "step": 125
    },
    {
      "epoch": 0.4322469982847341,
      "grad_norm": 4.082742691040039,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 3.7588,
      "step": 126
    },
    {
      "epoch": 0.43567753001715265,
      "grad_norm": 4.03236722946167,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 3.6083,
      "step": 127
    },
    {
      "epoch": 0.4391080617495712,
      "grad_norm": 4.0244879722595215,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 3.5657,
      "step": 128
    },
    {
      "epoch": 0.4425385934819897,
      "grad_norm": 3.9571797847747803,
      "learning_rate": 3.372159227714218e-05,
      "loss": 3.5047,
      "step": 129
    },
    {
      "epoch": 0.44596912521440824,
      "grad_norm": 4.1520161628723145,
      "learning_rate": 3.289899283371657e-05,
      "loss": 3.4308,
      "step": 130
    },
    {
      "epoch": 0.44939965694682676,
      "grad_norm": 3.731208086013794,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 3.3306,
      "step": 131
    },
    {
      "epoch": 0.4528301886792453,
      "grad_norm": 4.370545864105225,
      "learning_rate": 3.12696703292044e-05,
      "loss": 3.4649,
      "step": 132
    },
    {
      "epoch": 0.4562607204116638,
      "grad_norm": 4.4801025390625,
      "learning_rate": 3.046344357553632e-05,
      "loss": 3.3989,
      "step": 133
    },
    {
      "epoch": 0.45969125214408235,
      "grad_norm": 4.434890270233154,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 3.3249,
      "step": 134
    },
    {
      "epoch": 0.4631217838765009,
      "grad_norm": 4.163226127624512,
      "learning_rate": 2.886908691296504e-05,
      "loss": 3.2922,
      "step": 135
    },
    {
      "epoch": 0.4665523156089194,
      "grad_norm": 4.4532151222229,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 3.7554,
      "step": 136
    },
    {
      "epoch": 0.4699828473413379,
      "grad_norm": 4.65369987487793,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 3.5652,
      "step": 137
    },
    {
      "epoch": 0.4734133790737564,
      "grad_norm": 4.77457857131958,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 3.6568,
      "step": 138
    },
    {
      "epoch": 0.47684391080617494,
      "grad_norm": 4.440027713775635,
      "learning_rate": 2.575951898768315e-05,
      "loss": 3.459,
      "step": 139
    },
    {
      "epoch": 0.48027444253859347,
      "grad_norm": 5.082893371582031,
      "learning_rate": 2.500000000000001e-05,
      "loss": 3.4999,
      "step": 140
    },
    {
      "epoch": 0.483704974271012,
      "grad_norm": 4.5789103507995605,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 3.4001,
      "step": 141
    },
    {
      "epoch": 0.48713550600343053,
      "grad_norm": 4.885765552520752,
      "learning_rate": 2.350403678833976e-05,
      "loss": 3.5424,
      "step": 142
    },
    {
      "epoch": 0.49056603773584906,
      "grad_norm": 5.264125347137451,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 3.5362,
      "step": 143
    },
    {
      "epoch": 0.4939965694682676,
      "grad_norm": 4.9319539070129395,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 3.3161,
      "step": 144
    },
    {
      "epoch": 0.4974271012006861,
      "grad_norm": 5.442240238189697,
      "learning_rate": 2.132117818244771e-05,
      "loss": 3.5072,
      "step": 145
    },
    {
      "epoch": 0.5008576329331046,
      "grad_norm": 5.463189125061035,
      "learning_rate": 2.061073738537635e-05,
      "loss": 3.3095,
      "step": 146
    },
    {
      "epoch": 0.5042881646655232,
      "grad_norm": 5.8514862060546875,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 3.3437,
      "step": 147
    },
    {
      "epoch": 0.5077186963979416,
      "grad_norm": 6.340907096862793,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 3.4434,
      "step": 148
    },
    {
      "epoch": 0.5111492281303602,
      "grad_norm": 7.861410140991211,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 4.1433,
      "step": 149
    },
    {
      "epoch": 0.5145797598627787,
      "grad_norm": 8.327228546142578,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 4.0453,
      "step": 150
    },
    {
      "epoch": 0.5145797598627787,
      "eval_loss": 0.8594623804092407,
      "eval_runtime": 41.113,
      "eval_samples_per_second": 11.943,
      "eval_steps_per_second": 5.984,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.2144694189083853e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}