|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.04402054292002935,
  "eval_steps": 42,
  "global_step": 210,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0002096216329525207,
      "eval_loss": 1.4208108186721802,
      "eval_runtime": 163.5996,
      "eval_samples_per_second": 49.114,
      "eval_steps_per_second": 6.143,
      "step": 1
    },
    {
      "epoch": 0.0006288648988575621,
      "grad_norm": 0.39683395624160767,
      "learning_rate": 3e-05,
      "loss": 1.4354,
      "step": 3
    },
    {
      "epoch": 0.0012577297977151242,
      "grad_norm": 0.4367702305316925,
      "learning_rate": 6e-05,
      "loss": 1.4342,
      "step": 6
    },
    {
      "epoch": 0.0018865946965726864,
      "grad_norm": 0.37110769748687744,
      "learning_rate": 9e-05,
      "loss": 1.3479,
      "step": 9
    },
    {
      "epoch": 0.0025154595954302483,
      "grad_norm": 0.36730247735977173,
      "learning_rate": 9.999588943391597e-05,
      "loss": 1.361,
      "step": 12
    },
    {
      "epoch": 0.0031443244942878103,
      "grad_norm": 0.35089993476867676,
      "learning_rate": 9.99743108100344e-05,
      "loss": 1.2814,
      "step": 15
    },
    {
      "epoch": 0.0037731893931453727,
      "grad_norm": 0.38672298192977905,
      "learning_rate": 9.993424445916923e-05,
      "loss": 1.3016,
      "step": 18
    },
    {
      "epoch": 0.004402054292002935,
      "grad_norm": 0.321205198764801,
      "learning_rate": 9.987570520365104e-05,
      "loss": 1.2163,
      "step": 21
    },
    {
      "epoch": 0.005030919190860497,
      "grad_norm": 0.3282659649848938,
      "learning_rate": 9.979871469976196e-05,
      "loss": 1.2312,
      "step": 24
    },
    {
      "epoch": 0.005659784089718059,
      "grad_norm": 0.3265891373157501,
      "learning_rate": 9.970330142972401e-05,
      "loss": 1.2505,
      "step": 27
    },
    {
      "epoch": 0.006288648988575621,
      "grad_norm": 0.3450949192047119,
      "learning_rate": 9.95895006911623e-05,
      "loss": 1.1982,
      "step": 30
    },
    {
      "epoch": 0.0069175138874331835,
      "grad_norm": 0.3153296411037445,
      "learning_rate": 9.945735458404681e-05,
      "loss": 1.1669,
      "step": 33
    },
    {
      "epoch": 0.0075463787862907455,
      "grad_norm": 0.3419332802295685,
      "learning_rate": 9.930691199511775e-05,
      "loss": 1.2492,
      "step": 36
    },
    {
      "epoch": 0.008175243685148307,
      "grad_norm": 0.2867058515548706,
      "learning_rate": 9.91382285798002e-05,
      "loss": 1.1652,
      "step": 39
    },
    {
      "epoch": 0.00880410858400587,
      "grad_norm": 0.3220820426940918,
      "learning_rate": 9.895136674161465e-05,
      "loss": 1.1996,
      "step": 42
    },
    {
      "epoch": 0.00880410858400587,
      "eval_loss": 1.1905547380447388,
      "eval_runtime": 165.0006,
      "eval_samples_per_second": 48.697,
      "eval_steps_per_second": 6.091,
      "step": 42
    },
    {
      "epoch": 0.009432973482863432,
      "grad_norm": 0.30218949913978577,
      "learning_rate": 9.874639560909117e-05,
      "loss": 1.1916,
      "step": 45
    },
    {
      "epoch": 0.010061838381720993,
      "grad_norm": 0.3343711197376251,
      "learning_rate": 9.852339101019574e-05,
      "loss": 1.1487,
      "step": 48
    },
    {
      "epoch": 0.010690703280578556,
      "grad_norm": 0.3268805146217346,
      "learning_rate": 9.828243544427796e-05,
      "loss": 1.1781,
      "step": 51
    },
    {
      "epoch": 0.011319568179436117,
      "grad_norm": 0.34138286113739014,
      "learning_rate": 9.802361805155097e-05,
      "loss": 1.1215,
      "step": 54
    },
    {
      "epoch": 0.01194843307829368,
      "grad_norm": 0.3412030339241028,
      "learning_rate": 9.774703458011453e-05,
      "loss": 1.2044,
      "step": 57
    },
    {
      "epoch": 0.012577297977151241,
      "grad_norm": 0.3168179392814636,
      "learning_rate": 9.745278735053343e-05,
      "loss": 1.1087,
      "step": 60
    },
    {
      "epoch": 0.013206162876008804,
      "grad_norm": 0.3409186005592346,
      "learning_rate": 9.714098521798465e-05,
      "loss": 1.2062,
      "step": 63
    },
    {
      "epoch": 0.013835027774866367,
      "grad_norm": 0.29721856117248535,
      "learning_rate": 9.681174353198687e-05,
      "loss": 1.1158,
      "step": 66
    },
    {
      "epoch": 0.014463892673723928,
      "grad_norm": 0.3369769752025604,
      "learning_rate": 9.64651840937276e-05,
      "loss": 1.1834,
      "step": 69
    },
    {
      "epoch": 0.015092757572581491,
      "grad_norm": 0.31268638372421265,
      "learning_rate": 9.610143511100354e-05,
      "loss": 1.1367,
      "step": 72
    },
    {
      "epoch": 0.015721622471439052,
      "grad_norm": 0.333360493183136,
      "learning_rate": 9.572063115079063e-05,
      "loss": 1.1805,
      "step": 75
    },
    {
      "epoch": 0.016350487370296613,
      "grad_norm": 0.2951726019382477,
      "learning_rate": 9.53229130894619e-05,
      "loss": 1.1459,
      "step": 78
    },
    {
      "epoch": 0.016979352269154178,
      "grad_norm": 0.36082521080970764,
      "learning_rate": 9.490842806067095e-05,
      "loss": 1.17,
      "step": 81
    },
    {
      "epoch": 0.01760821716801174,
      "grad_norm": 0.33050239086151123,
      "learning_rate": 9.44773294009206e-05,
      "loss": 1.1611,
      "step": 84
    },
    {
      "epoch": 0.01760821716801174,
      "eval_loss": 1.147385835647583,
      "eval_runtime": 165.0588,
      "eval_samples_per_second": 48.68,
      "eval_steps_per_second": 6.089,
      "step": 84
    },
    {
      "epoch": 0.0182370820668693,
      "grad_norm": 0.3466218411922455,
      "learning_rate": 9.40297765928369e-05,
      "loss": 1.181,
      "step": 87
    },
    {
      "epoch": 0.018865946965726865,
      "grad_norm": 0.3219515085220337,
      "learning_rate": 9.356593520616948e-05,
      "loss": 1.1236,
      "step": 90
    },
    {
      "epoch": 0.019494811864584426,
      "grad_norm": 0.35251516103744507,
      "learning_rate": 9.308597683653975e-05,
      "loss": 1.1749,
      "step": 93
    },
    {
      "epoch": 0.020123676763441987,
      "grad_norm": 0.3366820514202118,
      "learning_rate": 9.259007904196023e-05,
      "loss": 1.1411,
      "step": 96
    },
    {
      "epoch": 0.020752541662299548,
      "grad_norm": 0.3501774072647095,
      "learning_rate": 9.207842527714767e-05,
      "loss": 1.122,
      "step": 99
    },
    {
      "epoch": 0.021381406561157112,
      "grad_norm": 0.3260366916656494,
      "learning_rate": 9.155120482565521e-05,
      "loss": 1.1032,
      "step": 102
    },
    {
      "epoch": 0.022010271460014674,
      "grad_norm": 0.3130464255809784,
      "learning_rate": 9.10086127298478e-05,
      "loss": 1.1025,
      "step": 105
    },
    {
      "epoch": 0.022639136358872235,
      "grad_norm": 0.3319336473941803,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.1279,
      "step": 108
    },
    {
      "epoch": 0.0232680012577298,
      "grad_norm": 0.3683924973011017,
      "learning_rate": 8.987812213377424e-05,
      "loss": 1.1384,
      "step": 111
    },
    {
      "epoch": 0.02389686615658736,
      "grad_norm": 0.3043256998062134,
      "learning_rate": 8.929064185241213e-05,
      "loss": 1.106,
      "step": 114
    },
    {
      "epoch": 0.02452573105544492,
      "grad_norm": 0.3852984607219696,
      "learning_rate": 8.868862620982534e-05,
      "loss": 1.123,
      "step": 117
    },
    {
      "epoch": 0.025154595954302483,
      "grad_norm": 0.36246711015701294,
      "learning_rate": 8.807229791845673e-05,
      "loss": 1.1248,
      "step": 120
    },
    {
      "epoch": 0.025783460853160047,
      "grad_norm": 0.3592873215675354,
      "learning_rate": 8.744188498563641e-05,
      "loss": 1.0983,
      "step": 123
    },
    {
      "epoch": 0.02641232575201761,
      "grad_norm": 0.3461982309818268,
      "learning_rate": 8.679762062923175e-05,
      "loss": 1.1211,
      "step": 126
    },
    {
      "epoch": 0.02641232575201761,
      "eval_loss": 1.1246975660324097,
      "eval_runtime": 165.051,
      "eval_samples_per_second": 48.682,
      "eval_steps_per_second": 6.089,
      "step": 126
    },
    {
      "epoch": 0.02704119065087517,
      "grad_norm": 0.3487012982368469,
      "learning_rate": 8.613974319136958e-05,
      "loss": 1.1212,
      "step": 129
    },
    {
      "epoch": 0.027670055549732734,
      "grad_norm": 0.37176796793937683,
      "learning_rate": 8.54684960502629e-05,
      "loss": 1.0707,
      "step": 132
    },
    {
      "epoch": 0.028298920448590295,
      "grad_norm": 0.3854920268058777,
      "learning_rate": 8.478412753017433e-05,
      "loss": 1.1204,
      "step": 135
    },
    {
      "epoch": 0.028927785347447856,
      "grad_norm": 0.3561486303806305,
      "learning_rate": 8.408689080954998e-05,
      "loss": 1.1238,
      "step": 138
    },
    {
      "epoch": 0.029556650246305417,
      "grad_norm": 0.35637167096138,
      "learning_rate": 8.33770438273574e-05,
      "loss": 1.1415,
      "step": 141
    },
    {
      "epoch": 0.030185515145162982,
      "grad_norm": 0.37608301639556885,
      "learning_rate": 8.265484918766243e-05,
      "loss": 1.1374,
      "step": 144
    },
    {
      "epoch": 0.030814380044020543,
      "grad_norm": 0.3702179789543152,
      "learning_rate": 8.192057406248028e-05,
      "loss": 1.1001,
      "step": 147
    },
    {
      "epoch": 0.031443244942878104,
      "grad_norm": 0.35054582357406616,
      "learning_rate": 8.117449009293668e-05,
      "loss": 1.122,
      "step": 150
    },
    {
      "epoch": 0.03207210984173567,
      "grad_norm": 0.35157763957977295,
      "learning_rate": 8.041687328877567e-05,
      "loss": 1.1126,
      "step": 153
    },
    {
      "epoch": 0.032700974740593226,
      "grad_norm": 0.3567587435245514,
      "learning_rate": 7.964800392625129e-05,
      "loss": 1.1196,
      "step": 156
    },
    {
      "epoch": 0.03332983963945079,
      "grad_norm": 0.33376169204711914,
      "learning_rate": 7.886816644444098e-05,
      "loss": 1.1269,
      "step": 159
    },
    {
      "epoch": 0.033958704538308356,
      "grad_norm": 0.33005964756011963,
      "learning_rate": 7.807764934001874e-05,
      "loss": 1.0881,
      "step": 162
    },
    {
      "epoch": 0.03458756943716591,
      "grad_norm": 0.3465365171432495,
      "learning_rate": 7.727674506052743e-05,
      "loss": 1.0912,
      "step": 165
    },
    {
      "epoch": 0.03521643433602348,
      "grad_norm": 0.3523496985435486,
      "learning_rate": 7.646574989618938e-05,
      "loss": 1.0962,
      "step": 168
    },
    {
      "epoch": 0.03521643433602348,
      "eval_loss": 1.1077728271484375,
      "eval_runtime": 165.0063,
      "eval_samples_per_second": 48.695,
      "eval_steps_per_second": 6.091,
      "step": 168
    },
    {
      "epoch": 0.03584529923488104,
      "grad_norm": 0.3241373300552368,
      "learning_rate": 7.564496387029532e-05,
      "loss": 1.1243,
      "step": 171
    },
    {
      "epoch": 0.0364741641337386,
      "grad_norm": 0.33574438095092773,
      "learning_rate": 7.481469062821252e-05,
      "loss": 1.1031,
      "step": 174
    },
    {
      "epoch": 0.037103029032596165,
      "grad_norm": 0.38013267517089844,
      "learning_rate": 7.39752373250527e-05,
      "loss": 1.1191,
      "step": 177
    },
    {
      "epoch": 0.03773189393145373,
      "grad_norm": 0.35769525170326233,
      "learning_rate": 7.312691451204178e-05,
      "loss": 1.0992,
      "step": 180
    },
    {
      "epoch": 0.03836075883031129,
      "grad_norm": 0.32820338010787964,
      "learning_rate": 7.227003602163295e-05,
      "loss": 1.0621,
      "step": 183
    },
    {
      "epoch": 0.03898962372916885,
      "grad_norm": 0.32962238788604736,
      "learning_rate": 7.14049188514063e-05,
      "loss": 1.0828,
      "step": 186
    },
    {
      "epoch": 0.03961848862802641,
      "grad_norm": 0.34197545051574707,
      "learning_rate": 7.05318830467969e-05,
      "loss": 1.1168,
      "step": 189
    },
    {
      "epoch": 0.040247353526883974,
      "grad_norm": 0.37021368741989136,
      "learning_rate": 6.965125158269619e-05,
      "loss": 1.1374,
      "step": 192
    },
    {
      "epoch": 0.04087621842574154,
      "grad_norm": 0.3301369547843933,
      "learning_rate": 6.876335024396872e-05,
      "loss": 1.054,
      "step": 195
    },
    {
      "epoch": 0.041505083324599096,
      "grad_norm": 0.35479190945625305,
      "learning_rate": 6.786850750493006e-05,
      "loss": 1.0923,
      "step": 198
    },
    {
      "epoch": 0.04213394822345666,
      "grad_norm": 0.35211730003356934,
      "learning_rate": 6.696705440782938e-05,
      "loss": 1.1124,
      "step": 201
    },
    {
      "epoch": 0.042762813122314225,
      "grad_norm": 0.4026525616645813,
      "learning_rate": 6.605932444038229e-05,
      "loss": 1.1229,
      "step": 204
    },
    {
      "epoch": 0.04339167802117178,
      "grad_norm": 0.40407177805900574,
      "learning_rate": 6.514565341239861e-05,
      "loss": 1.066,
      "step": 207
    },
    {
      "epoch": 0.04402054292002935,
      "grad_norm": 0.3534255027770996,
      "learning_rate": 6.422637933155162e-05,
      "loss": 1.0462,
      "step": 210
    },
    {
      "epoch": 0.04402054292002935,
      "eval_loss": 1.0946003198623657,
      "eval_runtime": 165.0602,
      "eval_samples_per_second": 48.679,
      "eval_steps_per_second": 6.089,
      "step": 210
    }
  ],
  "logging_steps": 3,
  "max_steps": 500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 42,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.479610490191872e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}