{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 27741,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "grad_norm": 24.947961807250977,
      "learning_rate": 4.909880682022999e-05,
      "loss": 1.2957,
      "step": 500
    },
    {
      "epoch": 0.11,
      "grad_norm": 18.850772857666016,
      "learning_rate": 4.819761364045997e-05,
      "loss": 0.9299,
      "step": 1000
    },
    {
      "epoch": 0.16,
      "grad_norm": 23.09844398498535,
      "learning_rate": 4.7296420460689956e-05,
      "loss": 0.8582,
      "step": 1500
    },
    {
      "epoch": 0.22,
      "grad_norm": 15.375988006591797,
      "learning_rate": 4.639522728091994e-05,
      "loss": 0.7935,
      "step": 2000
    },
    {
      "epoch": 0.27,
      "grad_norm": 15.962867736816406,
      "learning_rate": 4.5494034101149925e-05,
      "loss": 0.7439,
      "step": 2500
    },
    {
      "epoch": 0.32,
      "grad_norm": 16.872922897338867,
      "learning_rate": 4.459284092137991e-05,
      "loss": 0.6964,
      "step": 3000
    },
    {
      "epoch": 0.38,
      "grad_norm": 21.044837951660156,
      "learning_rate": 4.3691647741609894e-05,
      "loss": 0.6701,
      "step": 3500
    },
    {
      "epoch": 0.43,
      "grad_norm": 15.188350677490234,
      "learning_rate": 4.279045456183988e-05,
      "loss": 0.6339,
      "step": 4000
    },
    {
      "epoch": 0.49,
      "grad_norm": 12.721724510192871,
      "learning_rate": 4.188926138206986e-05,
      "loss": 0.6221,
      "step": 4500
    },
    {
      "epoch": 0.54,
      "grad_norm": 13.383834838867188,
      "learning_rate": 4.098806820229985e-05,
      "loss": 0.5805,
      "step": 5000
    },
    {
      "epoch": 0.59,
      "grad_norm": 15.311894416809082,
      "learning_rate": 4.008687502252983e-05,
      "loss": 0.561,
      "step": 5500
    },
    {
      "epoch": 0.65,
      "grad_norm": 16.439502716064453,
      "learning_rate": 3.9185681842759816e-05,
      "loss": 0.5484,
      "step": 6000
    },
    {
      "epoch": 0.7,
      "grad_norm": 16.220827102661133,
      "learning_rate": 3.82844886629898e-05,
      "loss": 0.5265,
      "step": 6500
    },
    {
      "epoch": 0.76,
      "grad_norm": 14.172871589660645,
      "learning_rate": 3.7383295483219785e-05,
      "loss": 0.5022,
      "step": 7000
    },
    {
      "epoch": 0.81,
      "grad_norm": 13.379059791564941,
      "learning_rate": 3.648210230344977e-05,
      "loss": 0.4878,
      "step": 7500
    },
    {
      "epoch": 0.87,
      "grad_norm": 21.462778091430664,
      "learning_rate": 3.558090912367975e-05,
      "loss": 0.4657,
      "step": 8000
    },
    {
      "epoch": 0.92,
      "grad_norm": 9.849862098693848,
      "learning_rate": 3.467971594390974e-05,
      "loss": 0.4566,
      "step": 8500
    },
    {
      "epoch": 0.97,
      "grad_norm": 11.99971866607666,
      "learning_rate": 3.3778522764139723e-05,
      "loss": 0.4384,
      "step": 9000
    },
    {
      "epoch": 1.03,
      "grad_norm": 12.555150985717773,
      "learning_rate": 3.287732958436971e-05,
      "loss": 0.3905,
      "step": 9500
    },
    {
      "epoch": 1.08,
      "grad_norm": 14.604622840881348,
      "learning_rate": 3.197613640459969e-05,
      "loss": 0.3573,
      "step": 10000
    },
    {
      "epoch": 1.14,
      "grad_norm": 9.172225952148438,
      "learning_rate": 3.107494322482968e-05,
      "loss": 0.3486,
      "step": 10500
    },
    {
      "epoch": 1.19,
      "grad_norm": 10.893487930297852,
      "learning_rate": 3.017375004505966e-05,
      "loss": 0.3413,
      "step": 11000
    },
    {
      "epoch": 1.24,
      "grad_norm": 9.724763870239258,
      "learning_rate": 2.9272556865289646e-05,
      "loss": 0.3405,
      "step": 11500
    },
    {
      "epoch": 1.3,
      "grad_norm": 9.83249568939209,
      "learning_rate": 2.837136368551963e-05,
      "loss": 0.3397,
      "step": 12000
    },
    {
      "epoch": 1.35,
      "grad_norm": 10.763433456420898,
      "learning_rate": 2.7470170505749615e-05,
      "loss": 0.3312,
      "step": 12500
    },
    {
      "epoch": 1.41,
      "grad_norm": 10.305283546447754,
      "learning_rate": 2.65689773259796e-05,
      "loss": 0.3228,
      "step": 13000
    },
    {
      "epoch": 1.46,
      "grad_norm": 8.631694793701172,
      "learning_rate": 2.5667784146209584e-05,
      "loss": 0.3162,
      "step": 13500
    },
    {
      "epoch": 1.51,
      "grad_norm": 7.709570407867432,
      "learning_rate": 2.476659096643957e-05,
      "loss": 0.3033,
      "step": 14000
    },
    {
      "epoch": 1.57,
      "grad_norm": 8.473445892333984,
      "learning_rate": 2.3865397786669553e-05,
      "loss": 0.2987,
      "step": 14500
    },
    {
      "epoch": 1.62,
      "grad_norm": 5.8854217529296875,
      "learning_rate": 2.2964204606899537e-05,
      "loss": 0.292,
      "step": 15000
    },
    {
      "epoch": 1.68,
      "grad_norm": 5.954896926879883,
      "learning_rate": 2.2063011427129522e-05,
      "loss": 0.2948,
      "step": 15500
    },
    {
      "epoch": 1.73,
      "grad_norm": 6.9837164878845215,
      "learning_rate": 2.1161818247359506e-05,
      "loss": 0.2848,
      "step": 16000
    },
    {
      "epoch": 1.78,
      "grad_norm": 9.53173542022705,
      "learning_rate": 2.026062506758949e-05,
      "loss": 0.279,
      "step": 16500
    },
    {
      "epoch": 1.84,
      "grad_norm": 8.36210823059082,
      "learning_rate": 1.9359431887819472e-05,
      "loss": 0.2692,
      "step": 17000
    },
    {
      "epoch": 1.89,
      "grad_norm": 8.573978424072266,
      "learning_rate": 1.8458238708049457e-05,
      "loss": 0.265,
      "step": 17500
    },
    {
      "epoch": 1.95,
      "grad_norm": 7.375257968902588,
      "learning_rate": 1.755704552827944e-05,
      "loss": 0.2683,
      "step": 18000
    },
    {
      "epoch": 2.0,
      "grad_norm": 6.663829803466797,
      "learning_rate": 1.6655852348509426e-05,
      "loss": 0.2539,
      "step": 18500
    },
    {
      "epoch": 2.05,
      "grad_norm": 5.3059492111206055,
      "learning_rate": 1.575465916873941e-05,
      "loss": 0.1978,
      "step": 19000
    },
    {
      "epoch": 2.11,
      "grad_norm": 6.28593635559082,
      "learning_rate": 1.4853465988969395e-05,
      "loss": 0.2004,
      "step": 19500
    },
    {
      "epoch": 2.16,
      "grad_norm": 7.351054668426514,
      "learning_rate": 1.3952272809199379e-05,
      "loss": 0.1926,
      "step": 20000
    },
    {
      "epoch": 2.22,
      "grad_norm": 6.031446933746338,
      "learning_rate": 1.3051079629429367e-05,
      "loss": 0.1903,
      "step": 20500
    },
    {
      "epoch": 2.27,
      "grad_norm": 4.734748840332031,
      "learning_rate": 1.214988644965935e-05,
      "loss": 0.1852,
      "step": 21000
    },
    {
      "epoch": 2.33,
      "grad_norm": 6.121973514556885,
      "learning_rate": 1.1248693269889334e-05,
      "loss": 0.1764,
      "step": 21500
    },
    {
      "epoch": 2.38,
      "grad_norm": 4.245573043823242,
      "learning_rate": 1.0347500090119317e-05,
      "loss": 0.1808,
      "step": 22000
    },
    {
      "epoch": 2.43,
      "grad_norm": 3.6098544597625732,
      "learning_rate": 9.446306910349303e-06,
      "loss": 0.1781,
      "step": 22500
    },
    {
      "epoch": 2.49,
      "grad_norm": 7.525985240936279,
      "learning_rate": 8.545113730579288e-06,
      "loss": 0.1837,
      "step": 23000
    },
    {
      "epoch": 2.54,
      "grad_norm": 4.0185546875,
      "learning_rate": 7.643920550809272e-06,
      "loss": 0.1734,
      "step": 23500
    },
    {
      "epoch": 2.6,
      "grad_norm": 5.020835876464844,
      "learning_rate": 6.742727371039257e-06,
      "loss": 0.171,
      "step": 24000
    },
    {
      "epoch": 2.65,
      "grad_norm": 5.588749408721924,
      "learning_rate": 5.84153419126924e-06,
      "loss": 0.1699,
      "step": 24500
    },
    {
      "epoch": 2.7,
      "grad_norm": 3.7061429023742676,
      "learning_rate": 4.940341011499225e-06,
      "loss": 0.1621,
      "step": 25000
    },
    {
      "epoch": 2.76,
      "grad_norm": 4.335537433624268,
      "learning_rate": 4.03914783172921e-06,
      "loss": 0.1626,
      "step": 25500
    },
    {
      "epoch": 2.81,
      "grad_norm": 5.078334331512451,
      "learning_rate": 3.1379546519591943e-06,
      "loss": 0.156,
      "step": 26000
    },
    {
      "epoch": 2.87,
      "grad_norm": 5.9595947265625,
      "learning_rate": 2.2367614721891784e-06,
      "loss": 0.1617,
      "step": 26500
    },
    {
      "epoch": 2.92,
      "grad_norm": 5.210651397705078,
      "learning_rate": 1.335568292419163e-06,
      "loss": 0.1533,
      "step": 27000
    },
    {
      "epoch": 2.97,
      "grad_norm": 3.798105478286743,
      "learning_rate": 4.3437511264914753e-07,
      "loss": 0.1521,
      "step": 27500
    },
    {
      "epoch": 3.0,
      "step": 27741,
      "total_flos": 2.3702775297552e+17,
      "train_loss": 0.3772407876341264,
      "train_runtime": 10846.9143,
      "train_samples_per_second": 163.665,
      "train_steps_per_second": 2.558
    }
  ],
  "logging_steps": 500,
  "max_steps": 27741,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 2.3702775297552e+17,
  "train_batch_size": 64,
  "trial_name": null,
  "trial_params": null
}