{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.09637393085170462,
  "eval_steps": 100,
  "global_step": 400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00024093482712926153,
      "eval_loss": 5.2897210121154785,
      "eval_runtime": 49.6033,
      "eval_samples_per_second": 140.918,
      "eval_steps_per_second": 17.62,
      "step": 1
    },
    {
      "epoch": 0.0012046741356463078,
      "grad_norm": 1.0501099824905396,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 5.2061,
      "step": 5
    },
    {
      "epoch": 0.0024093482712926155,
      "grad_norm": 0.9017743468284607,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 5.2067,
      "step": 10
    },
    {
      "epoch": 0.003614022406938923,
      "grad_norm": 1.2880569696426392,
      "learning_rate": 5e-05,
      "loss": 5.246,
      "step": 15
    },
    {
      "epoch": 0.004818696542585231,
      "grad_norm": 1.3013567924499512,
      "learning_rate": 6.666666666666667e-05,
      "loss": 4.9928,
      "step": 20
    },
    {
      "epoch": 0.006023370678231539,
      "grad_norm": 1.3073419332504272,
      "learning_rate": 8.333333333333334e-05,
      "loss": 4.7026,
      "step": 25
    },
    {
      "epoch": 0.007228044813877846,
      "grad_norm": 1.76248037815094,
      "learning_rate": 0.0001,
      "loss": 4.2688,
      "step": 30
    },
    {
      "epoch": 0.008432718949524154,
      "grad_norm": 1.6442067623138428,
      "learning_rate": 9.995494831023409e-05,
      "loss": 3.4073,
      "step": 35
    },
    {
      "epoch": 0.009637393085170462,
      "grad_norm": 1.499051809310913,
      "learning_rate": 9.981987442712633e-05,
      "loss": 2.7618,
      "step": 40
    },
    {
      "epoch": 0.01084206722081677,
      "grad_norm": 1.3366233110427856,
      "learning_rate": 9.959502176294383e-05,
      "loss": 1.8808,
      "step": 45
    },
    {
      "epoch": 0.012046741356463078,
      "grad_norm": 1.3299202919006348,
      "learning_rate": 9.928079551738543e-05,
      "loss": 1.3071,
      "step": 50
    },
    {
      "epoch": 0.013251415492109385,
      "grad_norm": 1.0938165187835693,
      "learning_rate": 9.887776194738432e-05,
      "loss": 0.8839,
      "step": 55
    },
    {
      "epoch": 0.014456089627755691,
      "grad_norm": 1.5741374492645264,
      "learning_rate": 9.838664734667495e-05,
      "loss": 0.6882,
      "step": 60
    },
    {
      "epoch": 0.015660763763402,
      "grad_norm": 1.5812095403671265,
      "learning_rate": 9.780833673696254e-05,
      "loss": 0.5109,
      "step": 65
    },
    {
      "epoch": 0.01686543789904831,
      "grad_norm": 1.116866111755371,
      "learning_rate": 9.714387227305422e-05,
      "loss": 0.3742,
      "step": 70
    },
    {
      "epoch": 0.018070112034694615,
      "grad_norm": 1.1462064981460571,
      "learning_rate": 9.639445136482548e-05,
      "loss": 0.3226,
      "step": 75
    },
    {
      "epoch": 0.019274786170340924,
      "grad_norm": 1.208228588104248,
      "learning_rate": 9.55614245194068e-05,
      "loss": 0.3124,
      "step": 80
    },
    {
      "epoch": 0.02047946030598723,
      "grad_norm": 1.1284897327423096,
      "learning_rate": 9.464629290747842e-05,
      "loss": 0.291,
      "step": 85
    },
    {
      "epoch": 0.02168413444163354,
      "grad_norm": 0.9293462634086609,
      "learning_rate": 9.365070565805941e-05,
      "loss": 0.3135,
      "step": 90
    },
    {
      "epoch": 0.022888808577279846,
      "grad_norm": 1.147952675819397,
      "learning_rate": 9.257645688666556e-05,
      "loss": 0.2827,
      "step": 95
    },
    {
      "epoch": 0.024093482712926155,
      "grad_norm": 0.8131073117256165,
      "learning_rate": 9.142548246219212e-05,
      "loss": 0.2992,
      "step": 100
    },
    {
      "epoch": 0.024093482712926155,
      "eval_loss": 0.30220097303390503,
      "eval_runtime": 49.727,
      "eval_samples_per_second": 140.567,
      "eval_steps_per_second": 17.576,
      "step": 100
    },
    {
      "epoch": 0.02529815684857246,
      "grad_norm": 0.8790974020957947,
      "learning_rate": 9.019985651834703e-05,
      "loss": 0.3103,
      "step": 105
    },
    {
      "epoch": 0.02650283098421877,
      "grad_norm": 0.6411612629890442,
      "learning_rate": 8.890178771592199e-05,
      "loss": 0.2806,
      "step": 110
    },
    {
      "epoch": 0.027707505119865077,
      "grad_norm": 0.6550126671791077,
      "learning_rate": 8.753361526263621e-05,
      "loss": 0.2977,
      "step": 115
    },
    {
      "epoch": 0.028912179255511383,
      "grad_norm": 0.729946494102478,
      "learning_rate": 8.609780469772623e-05,
      "loss": 0.278,
      "step": 120
    },
    {
      "epoch": 0.030116853391157692,
      "grad_norm": 0.673554539680481,
      "learning_rate": 8.459694344887732e-05,
      "loss": 0.3069,
      "step": 125
    },
    {
      "epoch": 0.031321527526804,
      "grad_norm": 0.5400055646896362,
      "learning_rate": 8.303373616950408e-05,
      "loss": 0.2697,
      "step": 130
    },
    {
      "epoch": 0.03252620166245031,
      "grad_norm": 0.5848944783210754,
      "learning_rate": 8.141099986478212e-05,
      "loss": 0.2821,
      "step": 135
    },
    {
      "epoch": 0.03373087579809662,
      "grad_norm": 0.6077755093574524,
      "learning_rate": 7.973165881521434e-05,
      "loss": 0.3083,
      "step": 140
    },
    {
      "epoch": 0.03493554993374292,
      "grad_norm": 0.6799314618110657,
      "learning_rate": 7.799873930687978e-05,
      "loss": 0.2677,
      "step": 145
    },
    {
      "epoch": 0.03614022406938923,
      "grad_norm": 0.8506907224655151,
      "learning_rate": 7.621536417786159e-05,
      "loss": 0.2917,
      "step": 150
    },
    {
      "epoch": 0.03734489820503554,
      "grad_norm": 0.7229334712028503,
      "learning_rate": 7.438474719068173e-05,
      "loss": 0.2842,
      "step": 155
    },
    {
      "epoch": 0.03854957234068185,
      "grad_norm": 0.5894433259963989,
      "learning_rate": 7.251018724088367e-05,
      "loss": 0.2929,
      "step": 160
    },
    {
      "epoch": 0.03975424647632815,
      "grad_norm": 0.4831799864768982,
      "learning_rate": 7.059506241219965e-05,
      "loss": 0.2746,
      "step": 165
    },
    {
      "epoch": 0.04095892061197446,
      "grad_norm": 0.6661810278892517,
      "learning_rate": 6.864282388901544e-05,
      "loss": 0.2661,
      "step": 170
    },
    {
      "epoch": 0.04216359474762077,
      "grad_norm": 0.655542254447937,
      "learning_rate": 6.665698973710288e-05,
      "loss": 0.2984,
      "step": 175
    },
    {
      "epoch": 0.04336826888326708,
      "grad_norm": 0.6977773308753967,
      "learning_rate": 6.464113856382752e-05,
      "loss": 0.2716,
      "step": 180
    },
    {
      "epoch": 0.04457294301891338,
      "grad_norm": 0.7483444213867188,
      "learning_rate": 6.259890306925627e-05,
      "loss": 0.2806,
      "step": 185
    },
    {
      "epoch": 0.04577761715455969,
      "grad_norm": 0.6179220080375671,
      "learning_rate": 6.0533963499786314e-05,
      "loss": 0.2644,
      "step": 190
    },
    {
      "epoch": 0.046982291290206,
      "grad_norm": 0.7248973846435547,
      "learning_rate": 5.8450041016092464e-05,
      "loss": 0.2754,
      "step": 195
    },
    {
      "epoch": 0.04818696542585231,
      "grad_norm": 0.5180511474609375,
      "learning_rate": 5.6350890987343944e-05,
      "loss": 0.2816,
      "step": 200
    },
    {
      "epoch": 0.04818696542585231,
      "eval_loss": 0.2891940772533417,
      "eval_runtime": 49.7145,
      "eval_samples_per_second": 140.603,
      "eval_steps_per_second": 17.58,
      "step": 200
    },
    {
      "epoch": 0.04939163956149861,
      "grad_norm": 0.717217743396759,
      "learning_rate": 5.4240296223775465e-05,
      "loss": 0.2947,
      "step": 205
    },
    {
      "epoch": 0.05059631369714492,
      "grad_norm": 0.8364290595054626,
      "learning_rate": 5.212206015980742e-05,
      "loss": 0.2726,
      "step": 210
    },
    {
      "epoch": 0.05180098783279123,
      "grad_norm": 0.5089383721351624,
      "learning_rate": 5e-05,
      "loss": 0.2751,
      "step": 215
    },
    {
      "epoch": 0.05300566196843754,
      "grad_norm": 0.6700091361999512,
      "learning_rate": 4.78779398401926e-05,
      "loss": 0.2774,
      "step": 220
    },
    {
      "epoch": 0.054210336104083844,
      "grad_norm": 0.4948834180831909,
      "learning_rate": 4.575970377622456e-05,
      "loss": 0.281,
      "step": 225
    },
    {
      "epoch": 0.05541501023973015,
      "grad_norm": 0.3750030994415283,
      "learning_rate": 4.364910901265606e-05,
      "loss": 0.2616,
      "step": 230
    },
    {
      "epoch": 0.05661968437537646,
      "grad_norm": 0.5234203338623047,
      "learning_rate": 4.1549958983907555e-05,
      "loss": 0.2533,
      "step": 235
    },
    {
      "epoch": 0.057824358511022765,
      "grad_norm": 0.6091147661209106,
      "learning_rate": 3.94660365002137e-05,
      "loss": 0.2839,
      "step": 240
    },
    {
      "epoch": 0.059029032646669075,
      "grad_norm": 0.47072502970695496,
      "learning_rate": 3.740109693074375e-05,
      "loss": 0.2761,
      "step": 245
    },
    {
      "epoch": 0.060233706782315384,
      "grad_norm": 0.39634305238723755,
      "learning_rate": 3.5358861436172485e-05,
      "loss": 0.2782,
      "step": 250
    },
    {
      "epoch": 0.061438380917961694,
      "grad_norm": 0.4202629327774048,
      "learning_rate": 3.334301026289712e-05,
      "loss": 0.2804,
      "step": 255
    },
    {
      "epoch": 0.062643055053608,
      "grad_norm": 0.6114380955696106,
      "learning_rate": 3.135717611098458e-05,
      "loss": 0.2726,
      "step": 260
    },
    {
      "epoch": 0.06384772918925431,
      "grad_norm": 0.37326812744140625,
      "learning_rate": 2.9404937587800375e-05,
      "loss": 0.268,
      "step": 265
    },
    {
      "epoch": 0.06505240332490062,
      "grad_norm": 0.48931899666786194,
      "learning_rate": 2.748981275911633e-05,
      "loss": 0.2643,
      "step": 270
    },
    {
      "epoch": 0.06625707746054692,
      "grad_norm": 0.5798832178115845,
      "learning_rate": 2.5615252809318284e-05,
      "loss": 0.2539,
      "step": 275
    },
    {
      "epoch": 0.06746175159619323,
      "grad_norm": 0.540962815284729,
      "learning_rate": 2.3784635822138424e-05,
      "loss": 0.2662,
      "step": 280
    },
    {
      "epoch": 0.06866642573183954,
      "grad_norm": 0.6189134120941162,
      "learning_rate": 2.2001260693120233e-05,
      "loss": 0.2811,
      "step": 285
    },
    {
      "epoch": 0.06987109986748584,
      "grad_norm": 0.39256712794303894,
      "learning_rate": 2.026834118478567e-05,
      "loss": 0.2747,
      "step": 290
    },
    {
      "epoch": 0.07107577400313216,
      "grad_norm": 0.558190643787384,
      "learning_rate": 1.858900013521788e-05,
      "loss": 0.2635,
      "step": 295
    },
    {
      "epoch": 0.07228044813877846,
      "grad_norm": 0.8916624784469604,
      "learning_rate": 1.6966263830495936e-05,
      "loss": 0.2721,
      "step": 300
    },
    {
      "epoch": 0.07228044813877846,
      "eval_loss": 0.2829170823097229,
      "eval_runtime": 49.6478,
      "eval_samples_per_second": 140.792,
      "eval_steps_per_second": 17.604,
      "step": 300
    },
    {
      "epoch": 0.07348512227442477,
      "grad_norm": 0.6186990737915039,
      "learning_rate": 1.5403056551122697e-05,
      "loss": 0.2538,
      "step": 305
    },
    {
      "epoch": 0.07468979641007108,
      "grad_norm": 0.5772135257720947,
      "learning_rate": 1.3902195302273779e-05,
      "loss": 0.261,
      "step": 310
    },
    {
      "epoch": 0.07589447054571738,
      "grad_norm": 0.7393772602081299,
      "learning_rate": 1.246638473736378e-05,
      "loss": 0.2702,
      "step": 315
    },
    {
      "epoch": 0.0770991446813637,
      "grad_norm": 0.49082431197166443,
      "learning_rate": 1.1098212284078036e-05,
      "loss": 0.2562,
      "step": 320
    },
    {
      "epoch": 0.07830381881701,
      "grad_norm": 0.2409713864326477,
      "learning_rate": 9.800143481652979e-06,
      "loss": 0.2712,
      "step": 325
    },
    {
      "epoch": 0.0795084929526563,
      "grad_norm": 0.6244598031044006,
      "learning_rate": 8.574517537807897e-06,
      "loss": 0.266,
      "step": 330
    },
    {
      "epoch": 0.08071316708830262,
      "grad_norm": 0.44633162021636963,
      "learning_rate": 7.423543113334436e-06,
      "loss": 0.2725,
      "step": 335
    },
    {
      "epoch": 0.08191784122394892,
      "grad_norm": 0.4065309166908264,
      "learning_rate": 6.349294341940593e-06,
      "loss": 0.2799,
      "step": 340
    },
    {
      "epoch": 0.08312251535959522,
      "grad_norm": 0.3984888792037964,
      "learning_rate": 5.353707092521582e-06,
      "loss": 0.2763,
      "step": 345
    },
    {
      "epoch": 0.08432718949524154,
      "grad_norm": 0.3939869701862335,
      "learning_rate": 4.43857548059321e-06,
      "loss": 0.276,
      "step": 350
    },
    {
      "epoch": 0.08553186363088784,
      "grad_norm": 0.4459209442138672,
      "learning_rate": 3.605548635174533e-06,
      "loss": 0.2786,
      "step": 355
    },
    {
      "epoch": 0.08673653776653416,
      "grad_norm": 0.5801013708114624,
      "learning_rate": 2.85612772694579e-06,
      "loss": 0.2693,
      "step": 360
    },
    {
      "epoch": 0.08794121190218046,
      "grad_norm": 0.41877418756484985,
      "learning_rate": 2.191663263037458e-06,
      "loss": 0.2655,
      "step": 365
    },
    {
      "epoch": 0.08914588603782676,
      "grad_norm": 0.4572800397872925,
      "learning_rate": 1.6133526533250565e-06,
      "loss": 0.2594,
      "step": 370
    },
    {
      "epoch": 0.09035056017347308,
      "grad_norm": 0.6014503240585327,
      "learning_rate": 1.1222380526156928e-06,
      "loss": 0.2757,
      "step": 375
    },
    {
      "epoch": 0.09155523430911938,
      "grad_norm": 0.7533774375915527,
      "learning_rate": 7.192044826145771e-07,
      "loss": 0.268,
      "step": 380
    },
    {
      "epoch": 0.09275990844476568,
      "grad_norm": 0.6264715790748596,
      "learning_rate": 4.049782370561583e-07,
      "loss": 0.2657,
      "step": 385
    },
    {
      "epoch": 0.093964582580412,
      "grad_norm": 0.33361977338790894,
      "learning_rate": 1.8012557287367392e-07,
      "loss": 0.2725,
      "step": 390
    },
    {
      "epoch": 0.0951692567160583,
      "grad_norm": 0.5748486518859863,
      "learning_rate": 4.5051689765929214e-08,
      "loss": 0.2672,
      "step": 395
    },
    {
      "epoch": 0.09637393085170462,
      "grad_norm": 0.617462694644928,
      "learning_rate": 0.0,
      "loss": 0.2686,
      "step": 400
    },
    {
      "epoch": 0.09637393085170462,
      "eval_loss": 0.28005144000053406,
      "eval_runtime": 50.7695,
      "eval_samples_per_second": 137.681,
      "eval_steps_per_second": 17.215,
      "step": 400
    }
  ],
  "logging_steps": 5,
  "max_steps": 400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.2815911801480806e+17,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}