{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.10481081647626035,
  "eval_steps": 42,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0002096216329525207,
      "eval_loss": 1.4208108186721802,
      "eval_runtime": 163.5996,
      "eval_samples_per_second": 49.114,
      "eval_steps_per_second": 6.143,
      "step": 1
    },
    {
      "epoch": 0.0006288648988575621,
      "grad_norm": 0.39683395624160767,
      "learning_rate": 3e-05,
      "loss": 1.4354,
      "step": 3
    },
    {
      "epoch": 0.0012577297977151242,
      "grad_norm": 0.4367702305316925,
      "learning_rate": 6e-05,
      "loss": 1.4342,
      "step": 6
    },
    {
      "epoch": 0.0018865946965726864,
      "grad_norm": 0.37110769748687744,
      "learning_rate": 9e-05,
      "loss": 1.3479,
      "step": 9
    },
    {
      "epoch": 0.0025154595954302483,
      "grad_norm": 0.36730247735977173,
      "learning_rate": 9.999588943391597e-05,
      "loss": 1.361,
      "step": 12
    },
    {
      "epoch": 0.0031443244942878103,
      "grad_norm": 0.35089993476867676,
      "learning_rate": 9.99743108100344e-05,
      "loss": 1.2814,
      "step": 15
    },
    {
      "epoch": 0.0037731893931453727,
      "grad_norm": 0.38672298192977905,
      "learning_rate": 9.993424445916923e-05,
      "loss": 1.3016,
      "step": 18
    },
    {
      "epoch": 0.004402054292002935,
      "grad_norm": 0.321205198764801,
      "learning_rate": 9.987570520365104e-05,
      "loss": 1.2163,
      "step": 21
    },
    {
      "epoch": 0.005030919190860497,
      "grad_norm": 0.3282659649848938,
      "learning_rate": 9.979871469976196e-05,
      "loss": 1.2312,
      "step": 24
    },
    {
      "epoch": 0.005659784089718059,
      "grad_norm": 0.3265891373157501,
      "learning_rate": 9.970330142972401e-05,
      "loss": 1.2505,
      "step": 27
    },
    {
      "epoch": 0.006288648988575621,
      "grad_norm": 0.3450949192047119,
      "learning_rate": 9.95895006911623e-05,
      "loss": 1.1982,
      "step": 30
    },
    {
      "epoch": 0.0069175138874331835,
      "grad_norm": 0.3153296411037445,
      "learning_rate": 9.945735458404681e-05,
      "loss": 1.1669,
      "step": 33
    },
    {
      "epoch": 0.0075463787862907455,
      "grad_norm": 0.3419332802295685,
      "learning_rate": 9.930691199511775e-05,
      "loss": 1.2492,
      "step": 36
    },
    {
      "epoch": 0.008175243685148307,
      "grad_norm": 0.2867058515548706,
      "learning_rate": 9.91382285798002e-05,
      "loss": 1.1652,
      "step": 39
    },
    {
      "epoch": 0.00880410858400587,
      "grad_norm": 0.3220820426940918,
      "learning_rate": 9.895136674161465e-05,
      "loss": 1.1996,
      "step": 42
    },
    {
      "epoch": 0.00880410858400587,
      "eval_loss": 1.1905547380447388,
      "eval_runtime": 165.0006,
      "eval_samples_per_second": 48.697,
      "eval_steps_per_second": 6.091,
      "step": 42
    },
    {
      "epoch": 0.009432973482863432,
      "grad_norm": 0.30218949913978577,
      "learning_rate": 9.874639560909117e-05,
      "loss": 1.1916,
      "step": 45
    },
    {
      "epoch": 0.010061838381720993,
      "grad_norm": 0.3343711197376251,
      "learning_rate": 9.852339101019574e-05,
      "loss": 1.1487,
      "step": 48
    },
    {
      "epoch": 0.010690703280578556,
      "grad_norm": 0.3268805146217346,
      "learning_rate": 9.828243544427796e-05,
      "loss": 1.1781,
      "step": 51
    },
    {
      "epoch": 0.011319568179436117,
      "grad_norm": 0.34138286113739014,
      "learning_rate": 9.802361805155097e-05,
      "loss": 1.1215,
      "step": 54
    },
    {
      "epoch": 0.01194843307829368,
      "grad_norm": 0.3412030339241028,
      "learning_rate": 9.774703458011453e-05,
      "loss": 1.2044,
      "step": 57
    },
    {
      "epoch": 0.012577297977151241,
      "grad_norm": 0.3168179392814636,
      "learning_rate": 9.745278735053343e-05,
      "loss": 1.1087,
      "step": 60
    },
    {
      "epoch": 0.013206162876008804,
      "grad_norm": 0.3409186005592346,
      "learning_rate": 9.714098521798465e-05,
      "loss": 1.2062,
      "step": 63
    },
    {
      "epoch": 0.013835027774866367,
      "grad_norm": 0.29721856117248535,
      "learning_rate": 9.681174353198687e-05,
      "loss": 1.1158,
      "step": 66
    },
    {
      "epoch": 0.014463892673723928,
      "grad_norm": 0.3369769752025604,
      "learning_rate": 9.64651840937276e-05,
      "loss": 1.1834,
      "step": 69
    },
    {
      "epoch": 0.015092757572581491,
      "grad_norm": 0.31268638372421265,
      "learning_rate": 9.610143511100354e-05,
      "loss": 1.1367,
      "step": 72
    },
    {
      "epoch": 0.015721622471439052,
      "grad_norm": 0.333360493183136,
      "learning_rate": 9.572063115079063e-05,
      "loss": 1.1805,
      "step": 75
    },
    {
      "epoch": 0.016350487370296613,
      "grad_norm": 0.2951726019382477,
      "learning_rate": 9.53229130894619e-05,
      "loss": 1.1459,
      "step": 78
    },
    {
      "epoch": 0.016979352269154178,
      "grad_norm": 0.36082521080970764,
      "learning_rate": 9.490842806067095e-05,
      "loss": 1.17,
      "step": 81
    },
    {
      "epoch": 0.01760821716801174,
      "grad_norm": 0.33050239086151123,
      "learning_rate": 9.44773294009206e-05,
      "loss": 1.1611,
      "step": 84
    },
    {
      "epoch": 0.01760821716801174,
      "eval_loss": 1.147385835647583,
      "eval_runtime": 165.0588,
      "eval_samples_per_second": 48.68,
      "eval_steps_per_second": 6.089,
      "step": 84
    },
    {
      "epoch": 0.0182370820668693,
      "grad_norm": 0.3466218411922455,
      "learning_rate": 9.40297765928369e-05,
      "loss": 1.181,
      "step": 87
    },
    {
      "epoch": 0.018865946965726865,
      "grad_norm": 0.3219515085220337,
      "learning_rate": 9.356593520616948e-05,
      "loss": 1.1236,
      "step": 90
    },
    {
      "epoch": 0.019494811864584426,
      "grad_norm": 0.35251516103744507,
      "learning_rate": 9.308597683653975e-05,
      "loss": 1.1749,
      "step": 93
    },
    {
      "epoch": 0.020123676763441987,
      "grad_norm": 0.3366820514202118,
      "learning_rate": 9.259007904196023e-05,
      "loss": 1.1411,
      "step": 96
    },
    {
      "epoch": 0.020752541662299548,
      "grad_norm": 0.3501774072647095,
      "learning_rate": 9.207842527714767e-05,
      "loss": 1.122,
      "step": 99
    },
    {
      "epoch": 0.021381406561157112,
      "grad_norm": 0.3260366916656494,
      "learning_rate": 9.155120482565521e-05,
      "loss": 1.1032,
      "step": 102
    },
    {
      "epoch": 0.022010271460014674,
      "grad_norm": 0.3130464255809784,
      "learning_rate": 9.10086127298478e-05,
      "loss": 1.1025,
      "step": 105
    },
    {
      "epoch": 0.022639136358872235,
      "grad_norm": 0.3319336473941803,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.1279,
      "step": 108
    },
    {
      "epoch": 0.0232680012577298,
      "grad_norm": 0.3683924973011017,
      "learning_rate": 8.987812213377424e-05,
      "loss": 1.1384,
      "step": 111
    },
    {
      "epoch": 0.02389686615658736,
      "grad_norm": 0.3043256998062134,
      "learning_rate": 8.929064185241213e-05,
      "loss": 1.106,
      "step": 114
    },
    {
      "epoch": 0.02452573105544492,
      "grad_norm": 0.3852984607219696,
      "learning_rate": 8.868862620982534e-05,
      "loss": 1.123,
      "step": 117
    },
    {
      "epoch": 0.025154595954302483,
      "grad_norm": 0.36246711015701294,
      "learning_rate": 8.807229791845673e-05,
      "loss": 1.1248,
      "step": 120
    },
    {
      "epoch": 0.025783460853160047,
      "grad_norm": 0.3592873215675354,
      "learning_rate": 8.744188498563641e-05,
      "loss": 1.0983,
      "step": 123
    },
    {
      "epoch": 0.02641232575201761,
      "grad_norm": 0.3461982309818268,
      "learning_rate": 8.679762062923175e-05,
      "loss": 1.1211,
      "step": 126
    },
    {
      "epoch": 0.02641232575201761,
      "eval_loss": 1.1246975660324097,
      "eval_runtime": 165.051,
      "eval_samples_per_second": 48.682,
      "eval_steps_per_second": 6.089,
      "step": 126
    },
    {
      "epoch": 0.02704119065087517,
      "grad_norm": 0.3487012982368469,
      "learning_rate": 8.613974319136958e-05,
      "loss": 1.1212,
      "step": 129
    },
    {
      "epoch": 0.027670055549732734,
      "grad_norm": 0.37176796793937683,
      "learning_rate": 8.54684960502629e-05,
      "loss": 1.0707,
      "step": 132
    },
    {
      "epoch": 0.028298920448590295,
      "grad_norm": 0.3854920268058777,
      "learning_rate": 8.478412753017433e-05,
      "loss": 1.1204,
      "step": 135
    },
    {
      "epoch": 0.028927785347447856,
      "grad_norm": 0.3561486303806305,
      "learning_rate": 8.408689080954998e-05,
      "loss": 1.1238,
      "step": 138
    },
    {
      "epoch": 0.029556650246305417,
      "grad_norm": 0.35637167096138,
      "learning_rate": 8.33770438273574e-05,
      "loss": 1.1415,
      "step": 141
    },
    {
      "epoch": 0.030185515145162982,
      "grad_norm": 0.37608301639556885,
      "learning_rate": 8.265484918766243e-05,
      "loss": 1.1374,
      "step": 144
    },
    {
      "epoch": 0.030814380044020543,
      "grad_norm": 0.3702179789543152,
      "learning_rate": 8.192057406248028e-05,
      "loss": 1.1001,
      "step": 147
    },
    {
      "epoch": 0.031443244942878104,
      "grad_norm": 0.35054582357406616,
      "learning_rate": 8.117449009293668e-05,
      "loss": 1.122,
      "step": 150
    },
    {
      "epoch": 0.03207210984173567,
      "grad_norm": 0.35157763957977295,
      "learning_rate": 8.041687328877567e-05,
      "loss": 1.1126,
      "step": 153
    },
    {
      "epoch": 0.032700974740593226,
      "grad_norm": 0.3567587435245514,
      "learning_rate": 7.964800392625129e-05,
      "loss": 1.1196,
      "step": 156
    },
    {
      "epoch": 0.03332983963945079,
      "grad_norm": 0.33376169204711914,
      "learning_rate": 7.886816644444098e-05,
      "loss": 1.1269,
      "step": 159
    },
    {
      "epoch": 0.033958704538308356,
      "grad_norm": 0.33005964756011963,
      "learning_rate": 7.807764934001874e-05,
      "loss": 1.0881,
      "step": 162
    },
    {
      "epoch": 0.03458756943716591,
      "grad_norm": 0.3465365171432495,
      "learning_rate": 7.727674506052743e-05,
      "loss": 1.0912,
      "step": 165
    },
    {
      "epoch": 0.03521643433602348,
      "grad_norm": 0.3523496985435486,
      "learning_rate": 7.646574989618938e-05,
      "loss": 1.0962,
      "step": 168
    },
    {
      "epoch": 0.03521643433602348,
      "eval_loss": 1.1077728271484375,
      "eval_runtime": 165.0063,
      "eval_samples_per_second": 48.695,
      "eval_steps_per_second": 6.091,
      "step": 168
    },
    {
      "epoch": 0.03584529923488104,
      "grad_norm": 0.3241373300552368,
      "learning_rate": 7.564496387029532e-05,
      "loss": 1.1243,
      "step": 171
    },
    {
      "epoch": 0.0364741641337386,
      "grad_norm": 0.33574438095092773,
      "learning_rate": 7.481469062821252e-05,
      "loss": 1.1031,
      "step": 174
    },
    {
      "epoch": 0.037103029032596165,
      "grad_norm": 0.38013267517089844,
      "learning_rate": 7.39752373250527e-05,
      "loss": 1.1191,
      "step": 177
    },
    {
      "epoch": 0.03773189393145373,
      "grad_norm": 0.35769525170326233,
      "learning_rate": 7.312691451204178e-05,
      "loss": 1.0992,
      "step": 180
    },
    {
      "epoch": 0.03836075883031129,
      "grad_norm": 0.32820338010787964,
      "learning_rate": 7.227003602163295e-05,
      "loss": 1.0621,
      "step": 183
    },
    {
      "epoch": 0.03898962372916885,
      "grad_norm": 0.32962238788604736,
      "learning_rate": 7.14049188514063e-05,
      "loss": 1.0828,
      "step": 186
    },
    {
      "epoch": 0.03961848862802641,
      "grad_norm": 0.34197545051574707,
      "learning_rate": 7.05318830467969e-05,
      "loss": 1.1168,
      "step": 189
    },
    {
      "epoch": 0.040247353526883974,
      "grad_norm": 0.37021368741989136,
      "learning_rate": 6.965125158269619e-05,
      "loss": 1.1374,
      "step": 192
    },
    {
      "epoch": 0.04087621842574154,
      "grad_norm": 0.3301369547843933,
      "learning_rate": 6.876335024396872e-05,
      "loss": 1.054,
      "step": 195
    },
    {
      "epoch": 0.041505083324599096,
      "grad_norm": 0.35479190945625305,
      "learning_rate": 6.786850750493006e-05,
      "loss": 1.0923,
      "step": 198
    },
    {
      "epoch": 0.04213394822345666,
      "grad_norm": 0.35211730003356934,
      "learning_rate": 6.696705440782938e-05,
      "loss": 1.1124,
      "step": 201
    },
    {
      "epoch": 0.042762813122314225,
      "grad_norm": 0.4026525616645813,
      "learning_rate": 6.605932444038229e-05,
      "loss": 1.1229,
      "step": 204
    },
    {
      "epoch": 0.04339167802117178,
      "grad_norm": 0.40407177805900574,
      "learning_rate": 6.514565341239861e-05,
      "loss": 1.066,
      "step": 207
    },
    {
      "epoch": 0.04402054292002935,
      "grad_norm": 0.3534255027770996,
      "learning_rate": 6.422637933155162e-05,
      "loss": 1.0462,
      "step": 210
    },
    {
      "epoch": 0.04402054292002935,
      "eval_loss": 1.0946003198623657,
      "eval_runtime": 165.0602,
      "eval_samples_per_second": 48.679,
      "eval_steps_per_second": 6.089,
      "step": 210
    },
    {
      "epoch": 0.04464940781888691,
      "grad_norm": 0.3846144676208496,
      "learning_rate": 6.330184227833376e-05,
      "loss": 1.0922,
      "step": 213
    },
    {
      "epoch": 0.04527827271774447,
      "grad_norm": 0.37346020340919495,
      "learning_rate": 6.237238428024572e-05,
      "loss": 1.0727,
      "step": 216
    },
    {
      "epoch": 0.045907137616602034,
      "grad_norm": 0.3563274145126343,
      "learning_rate": 6.143834918526527e-05,
      "loss": 1.144,
      "step": 219
    },
    {
      "epoch": 0.0465360025154596,
      "grad_norm": 0.3684130907058716,
      "learning_rate": 6.0500082534642464e-05,
      "loss": 1.0526,
      "step": 222
    },
    {
      "epoch": 0.047164867414317156,
      "grad_norm": 0.37623313069343567,
      "learning_rate": 5.955793143506863e-05,
      "loss": 1.0694,
      "step": 225
    },
    {
      "epoch": 0.04779373231317472,
      "grad_norm": 0.335705429315567,
      "learning_rate": 5.861224443026595e-05,
      "loss": 1.0766,
      "step": 228
    },
    {
      "epoch": 0.04842259721203228,
      "grad_norm": 0.35654446482658386,
      "learning_rate": 5.766337137204579e-05,
      "loss": 1.091,
      "step": 231
    },
    {
      "epoch": 0.04905146211088984,
      "grad_norm": 0.33855140209198,
      "learning_rate": 5.6711663290882776e-05,
      "loss": 1.0715,
      "step": 234
    },
    {
      "epoch": 0.04968032700974741,
      "grad_norm": 0.3658079206943512,
      "learning_rate": 5.575747226605298e-05,
      "loss": 1.0915,
      "step": 237
    },
    {
      "epoch": 0.050309191908604965,
      "grad_norm": 0.3918761610984802,
      "learning_rate": 5.480115129538409e-05,
      "loss": 1.073,
      "step": 240
    },
    {
      "epoch": 0.05093805680746253,
      "grad_norm": 0.37080520391464233,
      "learning_rate": 5.384305416466584e-05,
      "loss": 1.0234,
      "step": 243
    },
    {
      "epoch": 0.051566921706320094,
      "grad_norm": 0.315390020608902,
      "learning_rate": 5.288353531676873e-05,
      "loss": 1.0456,
      "step": 246
    },
    {
      "epoch": 0.05219578660517765,
      "grad_norm": 0.34985366463661194,
      "learning_rate": 5.192294972051992e-05,
      "loss": 1.0505,
      "step": 249
    },
    {
      "epoch": 0.05282465150403522,
      "grad_norm": 0.35391372442245483,
      "learning_rate": 5.0961652739384356e-05,
      "loss": 1.0728,
      "step": 252
    },
    {
      "epoch": 0.05282465150403522,
      "eval_loss": 1.0837104320526123,
      "eval_runtime": 164.9887,
      "eval_samples_per_second": 48.7,
      "eval_steps_per_second": 6.091,
      "step": 252
    },
    {
      "epoch": 0.05345351640289278,
      "grad_norm": 0.36929386854171753,
      "learning_rate": 5e-05,
      "loss": 1.0279,
      "step": 255
    },
    {
      "epoch": 0.05408238130175034,
      "grad_norm": 0.34936413168907166,
      "learning_rate": 4.903834726061565e-05,
      "loss": 1.0564,
      "step": 258
    },
    {
      "epoch": 0.0547112462006079,
      "grad_norm": 0.33937400579452515,
      "learning_rate": 4.807705027948008e-05,
      "loss": 1.0349,
      "step": 261
    },
    {
      "epoch": 0.05534011109946547,
      "grad_norm": 0.3877185583114624,
      "learning_rate": 4.711646468323129e-05,
      "loss": 1.0518,
      "step": 264
    },
    {
      "epoch": 0.055968975998323026,
      "grad_norm": 0.36920610070228577,
      "learning_rate": 4.6156945835334184e-05,
      "loss": 1.0907,
      "step": 267
    },
    {
      "epoch": 0.05659784089718059,
      "grad_norm": 0.3572535216808319,
      "learning_rate": 4.5198848704615914e-05,
      "loss": 1.0811,
      "step": 270
    },
    {
      "epoch": 0.05722670579603815,
      "grad_norm": 0.3565980792045593,
      "learning_rate": 4.424252773394704e-05,
      "loss": 1.0598,
      "step": 273
    },
    {
      "epoch": 0.05785557069489571,
      "grad_norm": 0.3740125298500061,
      "learning_rate": 4.328833670911724e-05,
      "loss": 1.0403,
      "step": 276
    },
    {
      "epoch": 0.05848443559375328,
      "grad_norm": 0.382012277841568,
      "learning_rate": 4.23366286279542e-05,
      "loss": 1.0877,
      "step": 279
    },
    {
      "epoch": 0.059113300492610835,
      "grad_norm": 0.359355092048645,
      "learning_rate": 4.138775556973406e-05,
      "loss": 1.0644,
      "step": 282
    },
    {
      "epoch": 0.0597421653914684,
      "grad_norm": 0.35905787348747253,
      "learning_rate": 4.04420685649314e-05,
      "loss": 1.0783,
      "step": 285
    },
    {
      "epoch": 0.060371030290325964,
      "grad_norm": 0.4150475263595581,
      "learning_rate": 3.9499917465357534e-05,
      "loss": 1.0996,
      "step": 288
    },
    {
      "epoch": 0.06099989518918352,
      "grad_norm": 0.3718733489513397,
      "learning_rate": 3.856165081473474e-05,
      "loss": 1.0191,
      "step": 291
    },
    {
      "epoch": 0.061628760088041086,
      "grad_norm": 0.36830800771713257,
      "learning_rate": 3.762761571975429e-05,
      "loss": 1.1456,
      "step": 294
    },
    {
      "epoch": 0.061628760088041086,
      "eval_loss": 1.0759488344192505,
      "eval_runtime": 164.9861,
      "eval_samples_per_second": 48.701,
      "eval_steps_per_second": 6.091,
      "step": 294
    },
    {
      "epoch": 0.06225762498689865,
      "grad_norm": 0.36433523893356323,
      "learning_rate": 3.6698157721666246e-05,
      "loss": 1.0619,
      "step": 297
    },
    {
      "epoch": 0.06288648988575621,
      "grad_norm": 0.382252961397171,
      "learning_rate": 3.5773620668448384e-05,
      "loss": 1.0544,
      "step": 300
    },
    {
      "epoch": 0.06351535478461377,
      "grad_norm": 0.4051714539527893,
      "learning_rate": 3.48543465876014e-05,
      "loss": 0.9961,
      "step": 303
    },
    {
      "epoch": 0.06414421968347134,
      "grad_norm": 0.34751641750335693,
      "learning_rate": 3.3940675559617724e-05,
      "loss": 1.0396,
      "step": 306
    },
    {
      "epoch": 0.0647730845823289,
      "grad_norm": 0.38340121507644653,
      "learning_rate": 3.303294559217063e-05,
      "loss": 1.0916,
      "step": 309
    },
    {
      "epoch": 0.06540194948118645,
      "grad_norm": 0.41020599007606506,
      "learning_rate": 3.213149249506997e-05,
      "loss": 1.0738,
      "step": 312
    },
    {
      "epoch": 0.06603081438004402,
      "grad_norm": 0.3864920735359192,
      "learning_rate": 3.12366497560313e-05,
      "loss": 1.0936,
      "step": 315
    },
    {
      "epoch": 0.06665967927890158,
      "grad_norm": 0.35983094573020935,
      "learning_rate": 3.0348748417303823e-05,
      "loss": 1.1001,
      "step": 318
    },
    {
      "epoch": 0.06728854417775915,
      "grad_norm": 0.39053815603256226,
      "learning_rate": 2.9468116953203107e-05,
      "loss": 1.0366,
      "step": 321
    },
    {
      "epoch": 0.06791740907661671,
      "grad_norm": 0.3519478142261505,
      "learning_rate": 2.8595081148593738e-05,
      "loss": 1.044,
      "step": 324
    },
    {
      "epoch": 0.06854627397547428,
      "grad_norm": 0.37353023886680603,
      "learning_rate": 2.772996397836704e-05,
      "loss": 1.0449,
      "step": 327
    },
    {
      "epoch": 0.06917513887433183,
      "grad_norm": 0.357658714056015,
      "learning_rate": 2.687308548795825e-05,
      "loss": 1.0519,
      "step": 330
    },
    {
      "epoch": 0.06980400377318939,
      "grad_norm": 0.40195825695991516,
      "learning_rate": 2.6024762674947313e-05,
      "loss": 1.0915,
      "step": 333
    },
    {
      "epoch": 0.07043286867204696,
      "grad_norm": 0.3929975628852844,
      "learning_rate": 2.5185309371787513e-05,
      "loss": 1.0375,
      "step": 336
    },
    {
      "epoch": 0.07043286867204696,
      "eval_loss": 1.0703336000442505,
      "eval_runtime": 165.0152,
      "eval_samples_per_second": 48.692,
      "eval_steps_per_second": 6.09,
      "step": 336
    },
    {
      "epoch": 0.07106173357090452,
      "grad_norm": 0.4022517502307892,
      "learning_rate": 2.43550361297047e-05,
      "loss": 1.0749,
      "step": 339
    },
    {
      "epoch": 0.07169059846976208,
      "grad_norm": 0.35164332389831543,
      "learning_rate": 2.353425010381063e-05,
      "loss": 1.0352,
      "step": 342
    },
    {
      "epoch": 0.07231946336861964,
      "grad_norm": 0.40566059947013855,
      "learning_rate": 2.272325493947257e-05,
      "loss": 1.0625,
      "step": 345
    },
    {
      "epoch": 0.0729483282674772,
      "grad_norm": 0.37188711762428284,
      "learning_rate": 2.192235065998126e-05,
      "loss": 1.072,
      "step": 348
    },
    {
      "epoch": 0.07357719316633476,
      "grad_norm": 0.3737729489803314,
      "learning_rate": 2.1131833555559037e-05,
      "loss": 1.0546,
      "step": 351
    },
    {
      "epoch": 0.07420605806519233,
      "grad_norm": 0.35681530833244324,
      "learning_rate": 2.0351996073748713e-05,
      "loss": 1.0669,
      "step": 354
    },
    {
      "epoch": 0.0748349229640499,
      "grad_norm": 0.3745366334915161,
      "learning_rate": 1.9583126711224343e-05,
      "loss": 1.0731,
      "step": 357
    },
    {
      "epoch": 0.07546378786290746,
      "grad_norm": 0.3998556137084961,
      "learning_rate": 1.8825509907063327e-05,
      "loss": 1.1069,
      "step": 360
    },
    {
      "epoch": 0.07609265276176501,
      "grad_norm": 0.4358106255531311,
      "learning_rate": 1.807942593751973e-05,
      "loss": 1.0876,
      "step": 363
    },
    {
      "epoch": 0.07672151766062257,
      "grad_norm": 0.3841058313846588,
      "learning_rate": 1.7345150812337564e-05,
      "loss": 1.0822,
      "step": 366
    },
    {
      "epoch": 0.07735038255948014,
      "grad_norm": 0.4276648759841919,
      "learning_rate": 1.66229561726426e-05,
      "loss": 1.0894,
      "step": 369
    },
    {
      "epoch": 0.0779792474583377,
      "grad_norm": 0.40756258368492126,
      "learning_rate": 1.5913109190450032e-05,
      "loss": 1.0673,
      "step": 372
    },
    {
      "epoch": 0.07860811235719527,
      "grad_norm": 0.34232285618782043,
      "learning_rate": 1.5215872469825682e-05,
      "loss": 1.0659,
      "step": 375
    },
    {
      "epoch": 0.07923697725605282,
      "grad_norm": 0.35964226722717285,
      "learning_rate": 1.4531503949737108e-05,
      "loss": 1.0067,
      "step": 378
    },
    {
      "epoch": 0.07923697725605282,
      "eval_loss": 1.0661753416061401,
      "eval_runtime": 165.0357,
      "eval_samples_per_second": 48.686,
      "eval_steps_per_second": 6.09,
      "step": 378
    },
    {
      "epoch": 0.07986584215491038,
      "grad_norm": 0.3825712203979492,
      "learning_rate": 1.3860256808630428e-05,
      "loss": 1.1019,
      "step": 381
    },
    {
      "epoch": 0.08049470705376795,
      "grad_norm": 0.39544931054115295,
      "learning_rate": 1.3202379370768252e-05,
      "loss": 1.0646,
      "step": 384
    },
    {
      "epoch": 0.08112357195262551,
      "grad_norm": 0.3967124819755554,
      "learning_rate": 1.2558115014363592e-05,
      "loss": 1.0394,
      "step": 387
    },
    {
      "epoch": 0.08175243685148308,
      "grad_norm": 0.39682498574256897,
      "learning_rate": 1.1927702081543279e-05,
      "loss": 1.0968,
      "step": 390
    },
    {
      "epoch": 0.08238130175034064,
      "grad_norm": 0.3636738359928131,
      "learning_rate": 1.1311373790174657e-05,
      "loss": 1.031,
      "step": 393
    },
    {
      "epoch": 0.08301016664919819,
      "grad_norm": 0.32566869258880615,
      "learning_rate": 1.0709358147587884e-05,
      "loss": 1.094,
      "step": 396
    },
    {
      "epoch": 0.08363903154805576,
      "grad_norm": 0.38560715317726135,
      "learning_rate": 1.0121877866225781e-05,
      "loss": 1.1144,
      "step": 399
    },
    {
      "epoch": 0.08426789644691332,
      "grad_norm": 0.32941296696662903,
      "learning_rate": 9.549150281252633e-06,
      "loss": 1.0141,
      "step": 402
    },
    {
      "epoch": 0.08489676134577089,
      "grad_norm": 0.36348846554756165,
      "learning_rate": 8.991387270152201e-06,
      "loss": 1.043,
      "step": 405
    },
    {
      "epoch": 0.08552562624462845,
      "grad_norm": 0.37229597568511963,
      "learning_rate": 8.448795174344804e-06,
      "loss": 1.077,
      "step": 408
    },
    {
      "epoch": 0.08615449114348601,
      "grad_norm": 0.3720276653766632,
      "learning_rate": 7.921574722852343e-06,
      "loss": 0.9869,
      "step": 411
    },
    {
      "epoch": 0.08678335604234357,
      "grad_norm": 0.3702375590801239,
      "learning_rate": 7.409920958039795e-06,
      "loss": 0.9772,
      "step": 414
    },
    {
      "epoch": 0.08741222094120113,
      "grad_norm": 0.37576282024383545,
      "learning_rate": 6.9140231634602485e-06,
      "loss": 1.0302,
      "step": 417
    },
    {
      "epoch": 0.0880410858400587,
      "grad_norm": 0.39544618129730225,
      "learning_rate": 6.43406479383053e-06,
      "loss": 1.0528,
      "step": 420
    },
    {
      "epoch": 0.0880410858400587,
      "eval_loss": 1.0637660026550293,
      "eval_runtime": 165.1277,
      "eval_samples_per_second": 48.659,
      "eval_steps_per_second": 6.086,
      "step": 420
    },
    {
      "epoch": 0.08866995073891626,
      "grad_norm": 0.4187450110912323,
      "learning_rate": 5.9702234071631e-06,
      "loss": 1.0578,
      "step": 423
    },
    {
      "epoch": 0.08929881563777382,
      "grad_norm": 0.3692411780357361,
      "learning_rate": 5.5226705990794155e-06,
      "loss": 1.0522,
      "step": 426
    },
    {
      "epoch": 0.08992768053663137,
      "grad_norm": 0.4078660309314728,
      "learning_rate": 5.091571939329048e-06,
      "loss": 1.0434,
      "step": 429
    },
    {
      "epoch": 0.09055654543548894,
      "grad_norm": 0.4134742319583893,
      "learning_rate": 4.677086910538092e-06,
      "loss": 1.074,
      "step": 432
    },
    {
      "epoch": 0.0911854103343465,
      "grad_norm": 0.36238303780555725,
      "learning_rate": 4.279368849209381e-06,
      "loss": 1.0455,
      "step": 435
    },
    {
      "epoch": 0.09181427523320407,
      "grad_norm": 0.4054090976715088,
      "learning_rate": 3.898564888996476e-06,
      "loss": 1.0706,
      "step": 438
    },
    {
      "epoch": 0.09244314013206163,
      "grad_norm": 0.3747381567955017,
      "learning_rate": 3.534815906272404e-06,
      "loss": 1.0354,
      "step": 441
    },
    {
      "epoch": 0.0930720050309192,
      "grad_norm": 0.3801961839199066,
      "learning_rate": 3.18825646801314e-06,
      "loss": 1.0818,
      "step": 444
    },
    {
      "epoch": 0.09370086992977675,
      "grad_norm": 0.3710772395133972,
      "learning_rate": 2.8590147820153513e-06,
      "loss": 1.0605,
      "step": 447
    },
    {
      "epoch": 0.09432973482863431,
      "grad_norm": 0.3717288672924042,
      "learning_rate": 2.547212649466568e-06,
      "loss": 1.0153,
      "step": 450
    },
    {
      "epoch": 0.09495859972749188,
      "grad_norm": 0.370351105928421,
      "learning_rate": 2.2529654198854835e-06,
      "loss": 1.037,
      "step": 453
    },
    {
      "epoch": 0.09558746462634944,
      "grad_norm": 0.38800954818725586,
      "learning_rate": 1.9763819484490355e-06,
      "loss": 1.0668,
      "step": 456
    },
    {
      "epoch": 0.096216329525207,
      "grad_norm": 0.39085131883621216,
      "learning_rate": 1.7175645557220566e-06,
      "loss": 1.0601,
      "step": 459
    },
    {
      "epoch": 0.09684519442406456,
      "grad_norm": 0.38424739241600037,
      "learning_rate": 1.4766089898042678e-06,
      "loss": 1.0742,
      "step": 462
    },
    {
      "epoch": 0.09684519442406456,
      "eval_loss": 1.0629161596298218,
      "eval_runtime": 165.0993,
      "eval_samples_per_second": 48.668,
      "eval_steps_per_second": 6.087,
      "step": 462
    },
    {
      "epoch": 0.09747405932292212,
      "grad_norm": 0.34824374318122864,
      "learning_rate": 1.2536043909088191e-06,
      "loss": 1.0597,
      "step": 465
    },
    {
      "epoch": 0.09810292422177969,
      "grad_norm": 0.3800143599510193,
      "learning_rate": 1.0486332583853563e-06,
      "loss": 1.0733,
      "step": 468
    },
    {
      "epoch": 0.09873178912063725,
      "grad_norm": 0.39402127265930176,
      "learning_rate": 8.617714201998084e-07,
      "loss": 1.0444,
      "step": 471
    },
    {
      "epoch": 0.09936065401949482,
      "grad_norm": 0.3665209114551544,
      "learning_rate": 6.93088004882253e-07,
      "loss": 1.0652,
      "step": 474
    },
    {
      "epoch": 0.09998951891835238,
      "grad_norm": 0.3840101659297943,
      "learning_rate": 5.426454159531913e-07,
      "loss": 1.0667,
      "step": 477
    },
    {
      "epoch": 0.10061838381720993,
      "grad_norm": 0.40634945034980774,
      "learning_rate": 4.104993088376974e-07,
      "loss": 1.024,
      "step": 480
    },
    {
      "epoch": 0.1012472487160675,
      "grad_norm": 0.4399307370185852,
      "learning_rate": 2.966985702759828e-07,
      "loss": 1.0763,
      "step": 483
    },
    {
      "epoch": 0.10187611361492506,
      "grad_norm": 0.3584943115711212,
      "learning_rate": 2.012853002380466e-07,
      "loss": 1.0044,
      "step": 486
    },
    {
      "epoch": 0.10250497851378262,
      "grad_norm": 0.3854651153087616,
      "learning_rate": 1.2429479634897267e-07,
      "loss": 1.0373,
      "step": 489
    },
    {
      "epoch": 0.10313384341264019,
      "grad_norm": 0.3579770028591156,
      "learning_rate": 6.575554083078084e-08,
      "loss": 1.0518,
      "step": 492
    },
    {
      "epoch": 0.10376270831149775,
      "grad_norm": 0.38003799319267273,
      "learning_rate": 2.568918996560532e-08,
      "loss": 1.1002,
      "step": 495
    },
    {
      "epoch": 0.1043915732103553,
      "grad_norm": 0.35773521661758423,
      "learning_rate": 4.110566084036816e-09,
      "loss": 1.062,
      "step": 498
    }
  ],
  "logging_steps": 3,
  "max_steps": 500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 42,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.06657392623616e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}