{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.0,
  "global_step": 28824,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.07,
      "learning_rate": 2.947960033305579e-05,
      "loss": 2.1557,
      "step": 500
    },
    {
      "epoch": 0.14,
      "learning_rate": 2.8959200666111577e-05,
      "loss": 1.6641,
      "step": 1000
    },
    {
      "epoch": 0.21,
      "learning_rate": 2.843880099916736e-05,
      "loss": 1.6261,
      "step": 1500
    },
    {
      "epoch": 0.28,
      "learning_rate": 2.7918401332223147e-05,
      "loss": 1.5307,
      "step": 2000
    },
    {
      "epoch": 0.35,
      "learning_rate": 2.7398001665278935e-05,
      "loss": 1.4856,
      "step": 2500
    },
    {
      "epoch": 0.42,
      "learning_rate": 2.6877601998334723e-05,
      "loss": 1.4515,
      "step": 3000
    },
    {
      "epoch": 0.49,
      "learning_rate": 2.635720233139051e-05,
      "loss": 1.4413,
      "step": 3500
    },
    {
      "epoch": 0.56,
      "learning_rate": 2.5836802664446297e-05,
      "loss": 1.3974,
      "step": 4000
    },
    {
      "epoch": 0.62,
      "learning_rate": 2.531640299750208e-05,
      "loss": 1.3848,
      "step": 4500
    },
    {
      "epoch": 0.69,
      "learning_rate": 2.479600333055787e-05,
      "loss": 1.3741,
      "step": 5000
    },
    {
      "epoch": 0.76,
      "learning_rate": 2.4275603663613655e-05,
      "loss": 1.3435,
      "step": 5500
    },
    {
      "epoch": 0.83,
      "learning_rate": 2.3755203996669443e-05,
      "loss": 1.2997,
      "step": 6000
    },
    {
      "epoch": 0.9,
      "learning_rate": 2.323480432972523e-05,
      "loss": 1.3027,
      "step": 6500
    },
    {
      "epoch": 0.97,
      "learning_rate": 2.2714404662781016e-05,
      "loss": 1.3309,
      "step": 7000
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.21940049958368e-05,
      "loss": 1.1215,
      "step": 7500
    },
    {
      "epoch": 1.11,
      "learning_rate": 2.167360532889259e-05,
      "loss": 1.0274,
      "step": 8000
    },
    {
      "epoch": 1.18,
      "learning_rate": 2.1153205661948377e-05,
      "loss": 1.0156,
      "step": 8500
    },
    {
      "epoch": 1.25,
      "learning_rate": 2.0632805995004166e-05,
      "loss": 0.9939,
      "step": 9000
    },
    {
      "epoch": 1.32,
      "learning_rate": 2.011240632805995e-05,
      "loss": 1.0544,
      "step": 9500
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.9592006661115735e-05,
      "loss": 1.0023,
      "step": 10000
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.9071606994171524e-05,
      "loss": 1.0157,
      "step": 10500
    },
    {
      "epoch": 1.53,
      "learning_rate": 1.8551207327227312e-05,
      "loss": 0.9919,
      "step": 11000
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.8030807660283097e-05,
      "loss": 1.0093,
      "step": 11500
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.7510407993338885e-05,
      "loss": 0.9634,
      "step": 12000
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.6990008326394673e-05,
      "loss": 1.0209,
      "step": 12500
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.6469608659450458e-05,
      "loss": 0.9783,
      "step": 13000
    },
    {
      "epoch": 1.87,
      "learning_rate": 1.5949208992506246e-05,
      "loss": 0.9689,
      "step": 13500
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.542880932556203e-05,
      "loss": 0.9864,
      "step": 14000
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.490840965861782e-05,
      "loss": 0.9052,
      "step": 14500
    },
    {
      "epoch": 2.08,
      "learning_rate": 1.4388009991673606e-05,
      "loss": 0.673,
      "step": 15000
    },
    {
      "epoch": 2.15,
      "learning_rate": 1.3867610324729393e-05,
      "loss": 0.6972,
      "step": 15500
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.3347210657785179e-05,
      "loss": 0.6885,
      "step": 16000
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.2826810990840966e-05,
      "loss": 0.6846,
      "step": 16500
    },
    {
      "epoch": 2.36,
      "learning_rate": 1.2306411323896754e-05,
      "loss": 0.674,
      "step": 17000
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.1786011656952539e-05,
      "loss": 0.6818,
      "step": 17500
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.1265611990008327e-05,
      "loss": 0.6804,
      "step": 18000
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.0745212323064114e-05,
      "loss": 0.6701,
      "step": 18500
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.02248126561199e-05,
      "loss": 0.6729,
      "step": 19000
    },
    {
      "epoch": 2.71,
      "learning_rate": 9.704412989175687e-06,
      "loss": 0.6802,
      "step": 19500
    },
    {
      "epoch": 2.78,
      "learning_rate": 9.184013322231475e-06,
      "loss": 0.6547,
      "step": 20000
    },
    {
      "epoch": 2.84,
      "learning_rate": 8.66361365528726e-06,
      "loss": 0.6525,
      "step": 20500
    },
    {
      "epoch": 2.91,
      "learning_rate": 8.143213988343048e-06,
      "loss": 0.6627,
      "step": 21000
    },
    {
      "epoch": 2.98,
      "learning_rate": 7.622814321398834e-06,
      "loss": 0.6273,
      "step": 21500
    },
    {
      "epoch": 3.05,
      "learning_rate": 7.102414654454621e-06,
      "loss": 0.48,
      "step": 22000
    },
    {
      "epoch": 3.12,
      "learning_rate": 6.582014987510409e-06,
      "loss": 0.4171,
      "step": 22500
    },
    {
      "epoch": 3.19,
      "learning_rate": 6.061615320566195e-06,
      "loss": 0.4214,
      "step": 23000
    },
    {
      "epoch": 3.26,
      "learning_rate": 5.541215653621982e-06,
      "loss": 0.4104,
      "step": 23500
    },
    {
      "epoch": 3.33,
      "learning_rate": 5.020815986677768e-06,
      "loss": 0.403,
      "step": 24000
    },
    {
      "epoch": 3.4,
      "learning_rate": 4.500416319733556e-06,
      "loss": 0.4146,
      "step": 24500
    },
    {
      "epoch": 3.47,
      "learning_rate": 3.980016652789342e-06,
      "loss": 0.3808,
      "step": 25000
    },
    {
      "epoch": 3.54,
      "learning_rate": 3.4596169858451292e-06,
      "loss": 0.4089,
      "step": 25500
    },
    {
      "epoch": 3.61,
      "learning_rate": 2.9392173189009158e-06,
      "loss": 0.4005,
      "step": 26000
    },
    {
      "epoch": 3.68,
      "learning_rate": 2.4188176519567028e-06,
      "loss": 0.4189,
      "step": 26500
    },
    {
      "epoch": 3.75,
      "learning_rate": 1.8984179850124897e-06,
      "loss": 0.3943,
      "step": 27000
    },
    {
      "epoch": 3.82,
      "learning_rate": 1.3780183180682765e-06,
      "loss": 0.3913,
      "step": 27500
    },
    {
      "epoch": 3.89,
      "learning_rate": 8.576186511240633e-07,
      "loss": 0.3854,
      "step": 28000
    },
    {
      "epoch": 3.96,
      "learning_rate": 3.372189841798501e-07,
      "loss": 0.4227,
      "step": 28500
    },
    {
      "epoch": 4.0,
      "step": 28824,
      "total_flos": 8840905399111680.0,
      "train_runtime": 8610.7771,
      "train_samples_per_second": 3.347
    }
  ],
  "max_steps": 28824,
  "num_train_epochs": 4,
  "total_flos": 8840905399111680.0,
  "trial_name": null,
  "trial_params": null
}