{
  "best_metric": 11.930976867675781,
  "best_model_checkpoint": "miner_id_24/checkpoint-75",
  "epoch": 0.6500541711809318,
  "eval_steps": 25,
  "global_step": 75,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00866738894907909,
      "grad_norm": 0.014205647632479668,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 11.9344,
      "step": 1
    },
    {
      "epoch": 0.00866738894907909,
      "eval_loss": 11.943220138549805,
      "eval_runtime": 0.2668,
      "eval_samples_per_second": 187.436,
      "eval_steps_per_second": 48.733,
      "step": 1
    },
    {
      "epoch": 0.01733477789815818,
      "grad_norm": 0.021280059590935707,
      "learning_rate": 6.666666666666667e-05,
      "loss": 11.9345,
      "step": 2
    },
    {
      "epoch": 0.02600216684723727,
      "grad_norm": 0.026625456288456917,
      "learning_rate": 0.0001,
      "loss": 11.9361,
      "step": 3
    },
    {
      "epoch": 0.03466955579631636,
      "grad_norm": 0.031291306018829346,
      "learning_rate": 9.99571699711836e-05,
      "loss": 11.9349,
      "step": 4
    },
    {
      "epoch": 0.04333694474539545,
      "grad_norm": 0.036243926733732224,
      "learning_rate": 9.982876141412856e-05,
      "loss": 11.9361,
      "step": 5
    },
    {
      "epoch": 0.05200433369447454,
      "grad_norm": 0.04476647451519966,
      "learning_rate": 9.961501876182148e-05,
      "loss": 11.9369,
      "step": 6
    },
    {
      "epoch": 0.06067172264355363,
      "grad_norm": 0.05329675227403641,
      "learning_rate": 9.931634888554937e-05,
      "loss": 11.9366,
      "step": 7
    },
    {
      "epoch": 0.06933911159263272,
      "grad_norm": 0.05616913363337517,
      "learning_rate": 9.893332032039701e-05,
      "loss": 11.9381,
      "step": 8
    },
    {
      "epoch": 0.0780065005417118,
      "grad_norm": 0.06590206176042557,
      "learning_rate": 9.846666218300807e-05,
      "loss": 11.9394,
      "step": 9
    },
    {
      "epoch": 0.0866738894907909,
      "grad_norm": 0.08881666511297226,
      "learning_rate": 9.791726278367022e-05,
      "loss": 11.9406,
      "step": 10
    },
    {
      "epoch": 0.09534127843986999,
      "grad_norm": 0.11582574248313904,
      "learning_rate": 9.728616793536588e-05,
      "loss": 11.9429,
      "step": 11
    },
    {
      "epoch": 0.10400866738894908,
      "grad_norm": 0.15351854264736176,
      "learning_rate": 9.657457896300791e-05,
      "loss": 11.9471,
      "step": 12
    },
    {
      "epoch": 0.11267605633802817,
      "grad_norm": 0.02668151818215847,
      "learning_rate": 9.578385041664925e-05,
      "loss": 11.9345,
      "step": 13
    },
    {
      "epoch": 0.12134344528710726,
      "grad_norm": 0.022617211565375328,
      "learning_rate": 9.491548749301997e-05,
      "loss": 11.9325,
      "step": 14
    },
    {
      "epoch": 0.13001083423618634,
      "grad_norm": 0.027192845940589905,
      "learning_rate": 9.397114317029975e-05,
      "loss": 11.9352,
      "step": 15
    },
    {
      "epoch": 0.13867822318526543,
      "grad_norm": 0.03240914270281792,
      "learning_rate": 9.295261506157986e-05,
      "loss": 11.9353,
      "step": 16
    },
    {
      "epoch": 0.14734561213434452,
      "grad_norm": 0.042475830763578415,
      "learning_rate": 9.186184199300464e-05,
      "loss": 11.935,
      "step": 17
    },
    {
      "epoch": 0.1560130010834236,
      "grad_norm": 0.049555882811546326,
      "learning_rate": 9.070090031310558e-05,
      "loss": 11.9354,
      "step": 18
    },
    {
      "epoch": 0.1646803900325027,
      "grad_norm": 0.053182654082775116,
      "learning_rate": 8.947199994035401e-05,
      "loss": 11.935,
      "step": 19
    },
    {
      "epoch": 0.1733477789815818,
      "grad_norm": 0.06439302861690521,
      "learning_rate": 8.817748015645558e-05,
      "loss": 11.936,
      "step": 20
    },
    {
      "epoch": 0.1820151679306609,
      "grad_norm": 0.07124801725149155,
      "learning_rate": 8.681980515339464e-05,
      "loss": 11.9372,
      "step": 21
    },
    {
      "epoch": 0.19068255687973998,
      "grad_norm": 0.09465167671442032,
      "learning_rate": 8.540155934270471e-05,
      "loss": 11.9363,
      "step": 22
    },
    {
      "epoch": 0.19934994582881907,
      "grad_norm": 0.12675674259662628,
      "learning_rate": 8.392544243589427e-05,
      "loss": 11.9384,
      "step": 23
    },
    {
      "epoch": 0.20801733477789816,
      "grad_norm": 0.16636888682842255,
      "learning_rate": 8.239426430539243e-05,
      "loss": 11.9427,
      "step": 24
    },
    {
      "epoch": 0.21668472372697725,
      "grad_norm": 0.2566799223423004,
      "learning_rate": 8.081093963579707e-05,
      "loss": 11.9439,
      "step": 25
    },
    {
      "epoch": 0.21668472372697725,
      "eval_loss": 11.938733100891113,
      "eval_runtime": 0.2609,
      "eval_samples_per_second": 191.665,
      "eval_steps_per_second": 49.833,
      "step": 25
    },
    {
      "epoch": 0.22535211267605634,
      "grad_norm": 0.024506624788045883,
      "learning_rate": 7.917848237560709e-05,
      "loss": 11.932,
      "step": 26
    },
    {
      "epoch": 0.23401950162513543,
      "grad_norm": 0.03506021574139595,
      "learning_rate": 7.75e-05,
      "loss": 11.933,
      "step": 27
    },
    {
      "epoch": 0.24268689057421453,
      "grad_norm": 0.04695020988583565,
      "learning_rate": 7.577868759557654e-05,
      "loss": 11.9338,
      "step": 28
    },
    {
      "epoch": 0.2513542795232936,
      "grad_norm": 0.05981919541954994,
      "learning_rate": 7.401782177833148e-05,
      "loss": 11.9342,
      "step": 29
    },
    {
      "epoch": 0.2600216684723727,
      "grad_norm": 0.061907246708869934,
      "learning_rate": 7.222075445642904e-05,
      "loss": 11.9334,
      "step": 30
    },
    {
      "epoch": 0.26868905742145177,
      "grad_norm": 0.07472109794616699,
      "learning_rate": 7.03909064496551e-05,
      "loss": 11.9334,
      "step": 31
    },
    {
      "epoch": 0.27735644637053086,
      "grad_norm": 0.09128537774085999,
      "learning_rate": 6.853176097769229e-05,
      "loss": 11.9335,
      "step": 32
    },
    {
      "epoch": 0.28602383531960995,
      "grad_norm": 0.10218062251806259,
      "learning_rate": 6.664685702961344e-05,
      "loss": 11.9348,
      "step": 33
    },
    {
      "epoch": 0.29469122426868904,
      "grad_norm": 0.11169303208589554,
      "learning_rate": 6.473978262721463e-05,
      "loss": 11.9348,
      "step": 34
    },
    {
      "epoch": 0.30335861321776814,
      "grad_norm": 0.14492186903953552,
      "learning_rate": 6.281416799501188e-05,
      "loss": 11.9349,
      "step": 35
    },
    {
      "epoch": 0.3120260021668472,
      "grad_norm": 0.18491142988204956,
      "learning_rate": 6.087367864990233e-05,
      "loss": 11.9366,
      "step": 36
    },
    {
      "epoch": 0.3206933911159263,
      "grad_norm": 0.245423823595047,
      "learning_rate": 5.8922008423644624e-05,
      "loss": 11.9378,
      "step": 37
    },
    {
      "epoch": 0.3293607800650054,
      "grad_norm": 0.05013355612754822,
      "learning_rate": 5.696287243144013e-05,
      "loss": 11.9315,
      "step": 38
    },
    {
      "epoch": 0.3380281690140845,
      "grad_norm": 0.047407366335392,
      "learning_rate": 5.500000000000001e-05,
      "loss": 11.9323,
      "step": 39
    },
    {
      "epoch": 0.3466955579631636,
      "grad_norm": 0.055343057960271835,
      "learning_rate": 5.303712756855988e-05,
      "loss": 11.9334,
      "step": 40
    },
    {
      "epoch": 0.3553629469122427,
      "grad_norm": 0.07278693467378616,
      "learning_rate": 5.107799157635538e-05,
      "loss": 11.9319,
      "step": 41
    },
    {
      "epoch": 0.3640303358613218,
      "grad_norm": 0.08519124239683151,
      "learning_rate": 4.912632135009769e-05,
      "loss": 11.9313,
      "step": 42
    },
    {
      "epoch": 0.37269772481040087,
      "grad_norm": 0.09722817689180374,
      "learning_rate": 4.718583200498814e-05,
      "loss": 11.9318,
      "step": 43
    },
    {
      "epoch": 0.38136511375947996,
      "grad_norm": 0.0941312164068222,
      "learning_rate": 4.526021737278538e-05,
      "loss": 11.9312,
      "step": 44
    },
    {
      "epoch": 0.39003250270855905,
      "grad_norm": 0.12124495208263397,
      "learning_rate": 4.3353142970386564e-05,
      "loss": 11.931,
      "step": 45
    },
    {
      "epoch": 0.39869989165763814,
      "grad_norm": 0.12268517166376114,
      "learning_rate": 4.146823902230772e-05,
      "loss": 11.9317,
      "step": 46
    },
    {
      "epoch": 0.40736728060671723,
      "grad_norm": 0.16867737472057343,
      "learning_rate": 3.960909355034491e-05,
      "loss": 11.9314,
      "step": 47
    },
    {
      "epoch": 0.4160346695557963,
      "grad_norm": 0.18943016231060028,
      "learning_rate": 3.777924554357096e-05,
      "loss": 11.933,
      "step": 48
    },
    {
      "epoch": 0.4247020585048754,
      "grad_norm": 0.24677303433418274,
      "learning_rate": 3.598217822166854e-05,
      "loss": 11.9336,
      "step": 49
    },
    {
      "epoch": 0.4333694474539545,
      "grad_norm": 0.34038564562797546,
      "learning_rate": 3.422131240442349e-05,
      "loss": 11.9339,
      "step": 50
    },
    {
      "epoch": 0.4333694474539545,
      "eval_loss": 11.933380126953125,
      "eval_runtime": 0.2591,
      "eval_samples_per_second": 192.978,
      "eval_steps_per_second": 50.174,
      "step": 50
    },
    {
      "epoch": 0.4420368364030336,
      "grad_norm": 0.04358985275030136,
      "learning_rate": 3.250000000000001e-05,
      "loss": 11.931,
      "step": 51
    },
    {
      "epoch": 0.4507042253521127,
      "grad_norm": 0.06944932788610458,
      "learning_rate": 3.082151762439293e-05,
      "loss": 11.931,
      "step": 52
    },
    {
      "epoch": 0.4593716143011918,
      "grad_norm": 0.07410332560539246,
      "learning_rate": 2.9189060364202943e-05,
      "loss": 11.9319,
      "step": 53
    },
    {
      "epoch": 0.46803900325027087,
      "grad_norm": 0.08937946707010269,
      "learning_rate": 2.760573569460757e-05,
      "loss": 11.9307,
      "step": 54
    },
    {
      "epoch": 0.47670639219934996,
      "grad_norm": 0.09634770452976227,
      "learning_rate": 2.6074557564105727e-05,
      "loss": 11.9313,
      "step": 55
    },
    {
      "epoch": 0.48537378114842905,
      "grad_norm": 0.09646016359329224,
      "learning_rate": 2.459844065729529e-05,
      "loss": 11.9307,
      "step": 56
    },
    {
      "epoch": 0.49404117009750814,
      "grad_norm": 0.11758565157651901,
      "learning_rate": 2.3180194846605367e-05,
      "loss": 11.9291,
      "step": 57
    },
    {
      "epoch": 0.5027085590465872,
      "grad_norm": 0.12455835193395615,
      "learning_rate": 2.1822519843544424e-05,
      "loss": 11.9313,
      "step": 58
    },
    {
      "epoch": 0.5113759479956663,
      "grad_norm": 0.1459084004163742,
      "learning_rate": 2.0528000059645997e-05,
      "loss": 11.9316,
      "step": 59
    },
    {
      "epoch": 0.5200433369447454,
      "grad_norm": 0.1777600347995758,
      "learning_rate": 1.9299099686894423e-05,
      "loss": 11.93,
      "step": 60
    },
    {
      "epoch": 0.5287107258938245,
      "grad_norm": 0.2380158007144928,
      "learning_rate": 1.8138158006995364e-05,
      "loss": 11.931,
      "step": 61
    },
    {
      "epoch": 0.5373781148429035,
      "grad_norm": 0.30322137475013733,
      "learning_rate": 1.7047384938420154e-05,
      "loss": 11.9312,
      "step": 62
    },
    {
      "epoch": 0.5460455037919827,
      "grad_norm": 0.0573463998734951,
      "learning_rate": 1.602885682970026e-05,
      "loss": 11.9318,
      "step": 63
    },
    {
      "epoch": 0.5547128927410617,
      "grad_norm": 0.06148889288306236,
      "learning_rate": 1.5084512506980026e-05,
      "loss": 11.9306,
      "step": 64
    },
    {
      "epoch": 0.5633802816901409,
      "grad_norm": 0.07989097386598587,
      "learning_rate": 1.4216149583350754e-05,
      "loss": 11.9313,
      "step": 65
    },
    {
      "epoch": 0.5720476706392199,
      "grad_norm": 0.0917278379201889,
      "learning_rate": 1.3425421036992098e-05,
      "loss": 11.9306,
      "step": 66
    },
    {
      "epoch": 0.580715059588299,
      "grad_norm": 0.09770037978887558,
      "learning_rate": 1.2713832064634126e-05,
      "loss": 11.9305,
      "step": 67
    },
    {
      "epoch": 0.5893824485373781,
      "grad_norm": 0.1190197691321373,
      "learning_rate": 1.2082737216329794e-05,
      "loss": 11.9297,
      "step": 68
    },
    {
      "epoch": 0.5980498374864572,
      "grad_norm": 0.12440388649702072,
      "learning_rate": 1.1533337816991932e-05,
      "loss": 11.9297,
      "step": 69
    },
    {
      "epoch": 0.6067172264355363,
      "grad_norm": 0.1328328549861908,
      "learning_rate": 1.1066679679603e-05,
      "loss": 11.9293,
      "step": 70
    },
    {
      "epoch": 0.6153846153846154,
      "grad_norm": 0.1649710088968277,
      "learning_rate": 1.0683651114450641e-05,
      "loss": 11.9295,
      "step": 71
    },
    {
      "epoch": 0.6240520043336945,
      "grad_norm": 0.16425499320030212,
      "learning_rate": 1.0384981238178534e-05,
      "loss": 11.9306,
      "step": 72
    },
    {
      "epoch": 0.6327193932827736,
      "grad_norm": 0.1935925930738449,
      "learning_rate": 1.017123858587145e-05,
      "loss": 11.9294,
      "step": 73
    },
    {
      "epoch": 0.6413867822318526,
      "grad_norm": 0.2569132447242737,
      "learning_rate": 1.00428300288164e-05,
      "loss": 11.9302,
      "step": 74
    },
    {
      "epoch": 0.6500541711809318,
      "grad_norm": 0.3602008521556854,
      "learning_rate": 1e-05,
      "loss": 11.9282,
      "step": 75
    },
    {
      "epoch": 0.6500541711809318,
      "eval_loss": 11.930976867675781,
      "eval_runtime": 0.2483,
      "eval_samples_per_second": 201.391,
      "eval_steps_per_second": 52.362,
      "step": 75
    }
  ],
  "logging_steps": 1,
  "max_steps": 75,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 25,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 1,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1567487852544.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}