{
    "best_metric": null,
    "best_model_checkpoint": null,
    "epoch": 4.0,
    "global_step": 22744,
    "is_hyper_param_search": false,
    "is_local_process_zero": true,
    "is_world_process_zero": true,
    "log_history": [
        {
            "epoch": 0.09,
            "learning_rate": 4.890080900457263e-05,
            "loss": 0.4414,
            "step": 500
        },
        {
            "epoch": 0.18,
            "learning_rate": 4.780161800914527e-05,
            "loss": 0.3578,
            "step": 1000
        },
        {
            "epoch": 0.26,
            "learning_rate": 4.6702427013717904e-05,
            "loss": 0.3321,
            "step": 1500
        },
        {
            "epoch": 0.35,
            "learning_rate": 4.560323601829054e-05,
            "loss": 0.3132,
            "step": 2000
        },
        {
            "epoch": 0.44,
            "learning_rate": 4.450404502286317e-05,
            "loss": 0.3074,
            "step": 2500
        },
        {
            "epoch": 0.53,
            "learning_rate": 4.3404854027435806e-05,
            "loss": 0.2935,
            "step": 3000
        },
        {
            "epoch": 0.62,
            "learning_rate": 4.230566303200844e-05,
            "loss": 0.2856,
            "step": 3500
        },
        {
            "epoch": 0.7,
            "learning_rate": 4.120647203658108e-05,
            "loss": 0.2876,
            "step": 4000
        },
        {
            "epoch": 0.79,
            "learning_rate": 4.010728104115371e-05,
            "loss": 0.2751,
            "step": 4500
        },
        {
            "epoch": 0.88,
            "learning_rate": 3.9008090045726345e-05,
            "loss": 0.2709,
            "step": 5000
        },
        {
            "epoch": 0.97,
            "learning_rate": 3.790889905029898e-05,
            "loss": 0.266,
            "step": 5500
        },
        {
            "epoch": 1.06,
            "learning_rate": 3.680970805487162e-05,
            "loss": 0.2304,
            "step": 6000
        },
        {
            "epoch": 1.14,
            "learning_rate": 3.571051705944425e-05,
            "loss": 0.2132,
            "step": 6500
        },
        {
            "epoch": 1.23,
            "learning_rate": 3.461132606401688e-05,
            "loss": 0.2155,
            "step": 7000
        },
        {
            "epoch": 1.32,
            "learning_rate": 3.351213506858952e-05,
            "loss": 0.2111,
            "step": 7500
        },
        {
            "epoch": 1.41,
            "learning_rate": 3.2412944073162155e-05,
            "loss": 0.2164,
            "step": 8000
        },
        {
            "epoch": 1.49,
            "learning_rate": 3.1313753077734785e-05,
            "loss": 0.2107,
            "step": 8500
        },
        {
            "epoch": 1.58,
            "learning_rate": 3.0214562082307425e-05,
            "loss": 0.2149,
            "step": 9000
        },
        {
            "epoch": 1.67,
            "learning_rate": 2.9115371086880054e-05,
            "loss": 0.211,
            "step": 9500
        },
        {
            "epoch": 1.76,
            "learning_rate": 2.8016180091452694e-05,
            "loss": 0.2102,
            "step": 10000
        },
        {
            "epoch": 1.85,
            "learning_rate": 2.6916989096025323e-05,
            "loss": 0.2099,
            "step": 10500
        },
        {
            "epoch": 1.93,
            "learning_rate": 2.5817798100597963e-05,
            "loss": 0.2047,
            "step": 11000
        },
        {
            "epoch": 2.02,
            "learning_rate": 2.4718607105170596e-05,
            "loss": 0.1848,
            "step": 11500
        },
        {
            "epoch": 2.11,
            "learning_rate": 2.3619416109743232e-05,
            "loss": 0.1338,
            "step": 12000
        },
        {
            "epoch": 2.2,
            "learning_rate": 2.2520225114315865e-05,
            "loss": 0.1315,
            "step": 12500
        },
        {
            "epoch": 2.29,
            "learning_rate": 2.14210341188885e-05,
            "loss": 0.1354,
            "step": 13000
        },
        {
            "epoch": 2.37,
            "learning_rate": 2.0321843123461134e-05,
            "loss": 0.1422,
            "step": 13500
        },
        {
            "epoch": 2.46,
            "learning_rate": 1.922265212803377e-05,
            "loss": 0.1354,
            "step": 14000
        },
        {
            "epoch": 2.55,
            "learning_rate": 1.8123461132606403e-05,
            "loss": 0.1353,
            "step": 14500
        },
        {
            "epoch": 2.64,
            "learning_rate": 1.702427013717904e-05,
            "loss": 0.1352,
            "step": 15000
        },
        {
            "epoch": 2.73,
            "learning_rate": 1.5925079141751672e-05,
            "loss": 0.1315,
            "step": 15500
        },
        {
            "epoch": 2.81,
            "learning_rate": 1.4825888146324307e-05,
            "loss": 0.134,
            "step": 16000
        },
        {
            "epoch": 2.9,
            "learning_rate": 1.3726697150896941e-05,
            "loss": 0.1347,
            "step": 16500
        },
        {
            "epoch": 2.99,
            "learning_rate": 1.2627506155469576e-05,
            "loss": 0.1329,
            "step": 17000
        },
        {
            "epoch": 3.08,
            "learning_rate": 1.1528315160042209e-05,
            "loss": 0.085,
            "step": 17500
        },
        {
            "epoch": 3.17,
            "learning_rate": 1.0429124164614843e-05,
            "loss": 0.0735,
            "step": 18000
        },
        {
            "epoch": 3.25,
            "learning_rate": 9.329933169187478e-06,
            "loss": 0.0739,
            "step": 18500
        },
        {
            "epoch": 3.34,
            "learning_rate": 8.230742173760112e-06,
            "loss": 0.0735,
            "step": 19000
        },
        {
            "epoch": 3.43,
            "learning_rate": 7.131551178332748e-06,
            "loss": 0.0718,
            "step": 19500
        },
        {
            "epoch": 3.52,
            "learning_rate": 6.032360182905382e-06,
            "loss": 0.0708,
            "step": 20000
        },
        {
            "epoch": 3.61,
            "learning_rate": 4.933169187478016e-06,
            "loss": 0.0706,
            "step": 20500
        },
        {
            "epoch": 3.69,
            "learning_rate": 3.833978192050651e-06,
            "loss": 0.0658,
            "step": 21000
        },
        {
            "epoch": 3.78,
            "learning_rate": 2.7347871966232856e-06,
            "loss": 0.0705,
            "step": 21500
        },
        {
            "epoch": 3.87,
            "learning_rate": 1.63559620119592e-06,
            "loss": 0.0692,
            "step": 22000
        },
        {
            "epoch": 3.96,
            "learning_rate": 5.364052057685544e-07,
            "loss": 0.0641,
            "step": 22500
        },
        {
            "epoch": 4.0,
            "step": 22744,
            "total_flos": 8695228732968960.0,
            "train_loss": 0.1816996376236853,
            "train_runtime": 16807.9309,
            "train_samples_per_second": 86.589,
            "train_steps_per_second": 1.353
        }
    ],
    "max_steps": 22744,
    "num_train_epochs": 4,
    "total_flos": 8695228732968960.0,
    "trial_name": null,
    "trial_params": null
}