{
  "_comment_removed": null,
"best_metric": 1.1199095752090216e-05,
"best_model_checkpoint": "miner_id_24/checkpoint-25",
"epoch": 0.12101043715020421,
"eval_steps": 25,
"global_step": 50,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.002420208743004084,
"grad_norm": 129.8616485595703,
"learning_rate": 5e-05,
"loss": 111.0611,
"step": 1
},
{
"epoch": 0.002420208743004084,
"eval_loss": 7.585880279541016,
"eval_runtime": 2.9412,
"eval_samples_per_second": 17.0,
"eval_steps_per_second": 4.42,
"step": 1
},
{
"epoch": 0.004840417486008168,
"grad_norm": 131.14813232421875,
"learning_rate": 0.0001,
"loss": 112.1903,
"step": 2
},
{
"epoch": 0.007260626229012252,
"grad_norm": 124.35859680175781,
"learning_rate": 9.990365154573717e-05,
"loss": 106.1069,
"step": 3
},
{
"epoch": 0.009680834972016336,
"grad_norm": 120.47513580322266,
"learning_rate": 9.961501876182148e-05,
"loss": 83.1738,
"step": 4
},
{
"epoch": 0.01210104371502042,
"grad_norm": 112.5082015991211,
"learning_rate": 9.913533761814537e-05,
"loss": 61.3049,
"step": 5
},
{
"epoch": 0.014521252458024504,
"grad_norm": 123.15609741210938,
"learning_rate": 9.846666218300807e-05,
"loss": 37.9838,
"step": 6
},
{
"epoch": 0.016941461201028588,
"grad_norm": 94.98416900634766,
"learning_rate": 9.761185582727977e-05,
"loss": 25.1704,
"step": 7
},
{
"epoch": 0.019361669944032673,
"grad_norm": 70.09339141845703,
"learning_rate": 9.657457896300791e-05,
"loss": 14.7809,
"step": 8
},
{
"epoch": 0.02178187868703676,
"grad_norm": 41.6122932434082,
"learning_rate": 9.535927336897098e-05,
"loss": 7.1772,
"step": 9
},
{
"epoch": 0.02420208743004084,
"grad_norm": 18.32343101501465,
"learning_rate": 9.397114317029975e-05,
"loss": 2.5514,
"step": 10
},
{
"epoch": 0.026622296173044926,
"grad_norm": 10.826604843139648,
"learning_rate": 9.241613255361455e-05,
"loss": 1.1709,
"step": 11
},
{
"epoch": 0.029042504916049008,
"grad_norm": 9.567692756652832,
"learning_rate": 9.070090031310558e-05,
"loss": 0.4779,
"step": 12
},
{
"epoch": 0.03146271365905309,
"grad_norm": 6.14838981628418,
"learning_rate": 8.883279133655399e-05,
"loss": 0.2559,
"step": 13
},
{
"epoch": 0.033882922402057175,
"grad_norm": 3.8644745349884033,
"learning_rate": 8.681980515339464e-05,
"loss": 0.0832,
"step": 14
},
{
"epoch": 0.036303131145061264,
"grad_norm": 0.16630400717258453,
"learning_rate": 8.467056167950311e-05,
"loss": 0.0068,
"step": 15
},
{
"epoch": 0.038723339888065346,
"grad_norm": 0.0314423143863678,
"learning_rate": 8.239426430539243e-05,
"loss": 0.0016,
"step": 16
},
{
"epoch": 0.04114354863106943,
"grad_norm": 0.04940410330891609,
"learning_rate": 8.000066048588211e-05,
"loss": 0.0018,
"step": 17
},
{
"epoch": 0.04356375737407352,
"grad_norm": 0.05370471999049187,
"learning_rate": 7.75e-05,
"loss": 0.0016,
"step": 18
},
{
"epoch": 0.0459839661170776,
"grad_norm": 0.022832291200757027,
"learning_rate": 7.490299105985507e-05,
"loss": 0.0008,
"step": 19
},
{
"epoch": 0.04840417486008168,
"grad_norm": 0.01467802096158266,
"learning_rate": 7.222075445642904e-05,
"loss": 0.0005,
"step": 20
},
{
"epoch": 0.05082438360308577,
"grad_norm": 0.007455690763890743,
"learning_rate": 6.946477593864228e-05,
"loss": 0.0003,
"step": 21
},
{
"epoch": 0.05324459234608985,
"grad_norm": 0.011481430381536484,
"learning_rate": 6.664685702961344e-05,
"loss": 0.0004,
"step": 22
},
{
"epoch": 0.055664801089093933,
"grad_norm": 0.015557793900370598,
"learning_rate": 6.377906449072578e-05,
"loss": 0.0005,
"step": 23
},
{
"epoch": 0.058085009832098015,
"grad_norm": 0.009874274022877216,
"learning_rate": 6.087367864990233e-05,
"loss": 0.0003,
"step": 24
},
{
"epoch": 0.060505218575102104,
"grad_norm": 0.013874717056751251,
"learning_rate": 5.794314081535644e-05,
"loss": 0.0005,
"step": 25
},
{
"epoch": 0.060505218575102104,
"eval_loss": 1.1199095752090216e-05,
"eval_runtime": 2.9887,
"eval_samples_per_second": 16.73,
"eval_steps_per_second": 4.35,
"step": 25
},
{
"epoch": 0.06292542731810619,
"grad_norm": 4.933608055114746,
"learning_rate": 5.500000000000001e-05,
"loss": 0.735,
"step": 26
},
{
"epoch": 0.06534563606111027,
"grad_norm": 0.0033409451134502888,
"learning_rate": 5.205685918464356e-05,
"loss": 0.0001,
"step": 27
},
{
"epoch": 0.06776584480411435,
"grad_norm": 0.003721554297953844,
"learning_rate": 4.912632135009769e-05,
"loss": 0.0001,
"step": 28
},
{
"epoch": 0.07018605354711843,
"grad_norm": 0.20777276158332825,
"learning_rate": 4.6220935509274235e-05,
"loss": 0.0009,
"step": 29
},
{
"epoch": 0.07260626229012253,
"grad_norm": 0.0037672524340450764,
"learning_rate": 4.3353142970386564e-05,
"loss": 0.0002,
"step": 30
},
{
"epoch": 0.07502647103312661,
"grad_norm": 0.0027831741608679295,
"learning_rate": 4.053522406135775e-05,
"loss": 0.0001,
"step": 31
},
{
"epoch": 0.07744667977613069,
"grad_norm": 18.835296630859375,
"learning_rate": 3.777924554357096e-05,
"loss": 0.0246,
"step": 32
},
{
"epoch": 0.07986688851913477,
"grad_norm": 4.418979644775391,
"learning_rate": 3.509700894014496e-05,
"loss": 0.0289,
"step": 33
},
{
"epoch": 0.08228709726213886,
"grad_norm": 0.002538805129006505,
"learning_rate": 3.250000000000001e-05,
"loss": 0.0001,
"step": 34
},
{
"epoch": 0.08470730600514294,
"grad_norm": 0.003876280738040805,
"learning_rate": 2.9999339514117912e-05,
"loss": 0.0002,
"step": 35
},
{
"epoch": 0.08712751474814703,
"grad_norm": 0.004658486228436232,
"learning_rate": 2.760573569460757e-05,
"loss": 0.0002,
"step": 36
},
{
"epoch": 0.08954772349115112,
"grad_norm": 0.005029450170695782,
"learning_rate": 2.53294383204969e-05,
"loss": 0.0002,
"step": 37
},
{
"epoch": 0.0919679322341552,
"grad_norm": 7.445037841796875,
"learning_rate": 2.3180194846605367e-05,
"loss": 1.0627,
"step": 38
},
{
"epoch": 0.09438814097715928,
"grad_norm": 0.004267706535756588,
"learning_rate": 2.1167208663446025e-05,
"loss": 0.0001,
"step": 39
},
{
"epoch": 0.09680834972016336,
"grad_norm": 0.0033585778437554836,
"learning_rate": 1.9299099686894423e-05,
"loss": 0.0001,
"step": 40
},
{
"epoch": 0.09922855846316744,
"grad_norm": 0.004535750951617956,
"learning_rate": 1.758386744638546e-05,
"loss": 0.0001,
"step": 41
},
{
"epoch": 0.10164876720617154,
"grad_norm": 0.0062608541920781136,
"learning_rate": 1.602885682970026e-05,
"loss": 0.0001,
"step": 42
},
{
"epoch": 0.10406897594917562,
"grad_norm": 0.007605577353388071,
"learning_rate": 1.464072663102903e-05,
"loss": 0.0001,
"step": 43
},
{
"epoch": 0.1064891846921797,
"grad_norm": 0.006068273447453976,
"learning_rate": 1.3425421036992098e-05,
"loss": 0.0001,
"step": 44
},
{
"epoch": 0.10890939343518379,
"grad_norm": 0.004641711246222258,
"learning_rate": 1.2388144172720251e-05,
"loss": 0.0001,
"step": 45
},
{
"epoch": 0.11132960217818787,
"grad_norm": 0.007844093255698681,
"learning_rate": 1.1533337816991932e-05,
"loss": 0.0002,
"step": 46
},
{
"epoch": 0.11374981092119195,
"grad_norm": 39.069679260253906,
"learning_rate": 1.0864662381854632e-05,
"loss": 0.6565,
"step": 47
},
{
"epoch": 0.11617001966419603,
"grad_norm": 0.010749392211437225,
"learning_rate": 1.0384981238178534e-05,
"loss": 0.0002,
"step": 48
},
{
"epoch": 0.11859022840720013,
"grad_norm": 0.7709367275238037,
"learning_rate": 1.0096348454262845e-05,
"loss": 0.0029,
"step": 49
},
{
"epoch": 0.12101043715020421,
"grad_norm": 17.80816078186035,
"learning_rate": 1e-05,
"loss": 0.0825,
"step": 50
},
{
"epoch": 0.12101043715020421,
"eval_loss": 1.2612003047252074e-05,
"eval_runtime": 2.9705,
"eval_samples_per_second": 16.832,
"eval_steps_per_second": 4.376,
"step": 50
}
],
"logging_steps": 1,
"max_steps": 50,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 25,
"stateful_callbacks": {
"EarlyStoppingCallback": {
"args": {
"early_stopping_patience": 1,
"early_stopping_threshold": 0.0
},
"attributes": {
"early_stopping_patience_counter": 1
}
},
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 5.262443296860406e+17,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}