{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 5.788438098554762,
  "global_step": 75000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.04,
      "learning_rate": 0.00010077565027123787,
      "loss": 0.5268,
      "step": 500
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.00010075885246660077,
      "loss": 0.5021,
      "step": 1000
    },
    {
      "epoch": 0.13,
      "learning_rate": 0.00010073086073069567,
      "loss": 0.4927,
      "step": 1500
    },
    {
      "epoch": 0.17,
      "learning_rate": 0.0001006916819700645,
      "loss": 0.4854,
      "step": 2000
    },
    {
      "epoch": 0.22,
      "learning_rate": 0.00010064132585148025,
      "loss": 0.4834,
      "step": 2500
    },
    {
      "epoch": 0.26,
      "learning_rate": 0.00010057980479956167,
      "loss": 0.4781,
      "step": 3000
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.00010050713399370776,
      "loss": 0.4752,
      "step": 3500
    },
    {
      "epoch": 0.35,
      "learning_rate": 0.00010042333136435246,
      "loss": 0.4725,
      "step": 4000
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.00010032881934692084,
      "loss": 0.4705,
      "step": 4500
    },
    {
      "epoch": 0.44,
      "learning_rate": 0.0001002228621438404,
      "loss": 0.4686,
      "step": 5000
    },
    {
      "epoch": 0.44,
      "eval_loss": 0.45014095306396484,
      "eval_runtime": 54.4029,
      "eval_samples_per_second": 91.907,
      "eval_steps_per_second": 1.452,
      "step": 5000
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.00010010584325708389,
      "loss": 0.466,
      "step": 5500
    },
    {
      "epoch": 0.52,
      "learning_rate": 9.997779155931062e-05,
      "loss": 0.4638,
      "step": 6000
    },
    {
      "epoch": 0.57,
      "learning_rate": 9.983873864536092e-05,
      "loss": 0.4628,
      "step": 6500
    },
    {
      "epoch": 0.61,
      "learning_rate": 9.968871882446063e-05,
      "loss": 0.462,
      "step": 7000
    },
    {
      "epoch": 0.65,
      "learning_rate": 9.9528101893266e-05,
      "loss": 0.4628,
      "step": 7500
    },
    {
      "epoch": 0.7,
      "learning_rate": 9.935628373962941e-05,
      "loss": 0.4592,
      "step": 8000
    },
    {
      "epoch": 0.74,
      "learning_rate": 9.917361771757108e-05,
      "loss": 0.459,
      "step": 8500
    },
    {
      "epoch": 0.78,
      "learning_rate": 9.898014889719868e-05,
      "loss": 0.4576,
      "step": 9000
    },
    {
      "epoch": 0.83,
      "learning_rate": 9.877592501404869e-05,
      "loss": 0.4551,
      "step": 9500
    },
    {
      "epoch": 0.87,
      "learning_rate": 9.856099645730841e-05,
      "loss": 0.4551,
      "step": 10000
    },
    {
      "epoch": 0.87,
      "eval_loss": 0.43748965859413147,
      "eval_runtime": 115.0161,
      "eval_samples_per_second": 43.472,
      "eval_steps_per_second": 0.687,
      "step": 10000
    },
    {
      "epoch": 0.91,
      "learning_rate": 9.833541625738316e-05,
      "loss": 0.4543,
      "step": 10500
    },
    {
      "epoch": 0.96,
      "learning_rate": 9.809924007281187e-05,
      "loss": 0.4532,
      "step": 11000
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.78525261765341e-05,
      "loss": 0.4529,
      "step": 11500
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.759533544151208e-05,
      "loss": 0.4523,
      "step": 12000
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.732827688303682e-05,
      "loss": 0.4506,
      "step": 12500
    },
    {
      "epoch": 1.13,
      "learning_rate": 9.705034604088048e-05,
      "loss": 0.4502,
      "step": 13000
    },
    {
      "epoch": 1.18,
      "learning_rate": 9.676213628592508e-05,
      "loss": 0.4493,
      "step": 13500
    },
    {
      "epoch": 1.22,
      "learning_rate": 9.64637187296151e-05,
      "loss": 0.449,
      "step": 14000
    },
    {
      "epoch": 1.26,
      "learning_rate": 9.615516700201724e-05,
      "loss": 0.448,
      "step": 14500
    },
    {
      "epoch": 1.31,
      "learning_rate": 9.583720443927501e-05,
      "loss": 0.4479,
      "step": 15000
    },
    {
      "epoch": 1.31,
      "eval_loss": 0.4363159239292145,
      "eval_runtime": 91.0649,
      "eval_samples_per_second": 54.906,
      "eval_steps_per_second": 0.868,
      "step": 15000
    },
    {
      "epoch": 1.35,
      "learning_rate": 9.550930216588928e-05,
      "loss": 0.4482,
      "step": 15500
    },
    {
      "epoch": 1.39,
      "learning_rate": 9.517085404351901e-05,
      "loss": 0.4455,
      "step": 16000
    },
    {
      "epoch": 1.44,
      "learning_rate": 9.48225907448766e-05,
      "loss": 0.4449,
      "step": 16500
    },
    {
      "epoch": 1.48,
      "learning_rate": 9.44645981987198e-05,
      "loss": 0.4454,
      "step": 17000
    },
    {
      "epoch": 1.52,
      "learning_rate": 9.409696473435264e-05,
      "loss": 0.4449,
      "step": 17500
    },
    {
      "epoch": 1.57,
      "learning_rate": 9.371978105983142e-05,
      "loss": 0.4436,
      "step": 18000
    },
    {
      "epoch": 1.61,
      "learning_rate": 9.333314023958391e-05,
      "loss": 0.4442,
      "step": 18500
    },
    {
      "epoch": 1.65,
      "learning_rate": 9.293713767144707e-05,
      "loss": 0.4429,
      "step": 19000
    },
    {
      "epoch": 1.7,
      "learning_rate": 9.25326907757735e-05,
      "loss": 0.4427,
      "step": 19500
    },
    {
      "epoch": 1.74,
      "learning_rate": 9.211827834753872e-05,
      "loss": 0.4428,
      "step": 20000
    },
    {
      "epoch": 1.74,
      "eval_loss": 0.4262467920780182,
      "eval_runtime": 57.1744,
      "eval_samples_per_second": 87.452,
      "eval_steps_per_second": 1.382,
      "step": 20000
    },
    {
      "epoch": 1.04,
      "learning_rate": 9.169565984405851e-05,
      "loss": 0.4442,
      "step": 20500
    },
    {
      "epoch": 1.09,
      "learning_rate": 9.126411941080688e-05,
      "loss": 0.4404,
      "step": 21000
    },
    {
      "epoch": 1.13,
      "learning_rate": 9.082376309845848e-05,
      "loss": 0.4418,
      "step": 21500
    },
    {
      "epoch": 1.17,
      "learning_rate": 9.037379052736644e-05,
      "loss": 0.44,
      "step": 22000
    },
    {
      "epoch": 1.22,
      "learning_rate": 8.991518638364486e-05,
      "loss": 0.4388,
      "step": 22500
    },
    {
      "epoch": 1.26,
      "learning_rate": 8.944806382100394e-05,
      "loss": 0.4389,
      "step": 23000
    },
    {
      "epoch": 1.3,
      "learning_rate": 8.897253809494634e-05,
      "loss": 0.4386,
      "step": 23500
    },
    {
      "epoch": 1.35,
      "learning_rate": 8.848872653432954e-05,
      "loss": 0.4373,
      "step": 24000
    },
    {
      "epoch": 1.39,
      "learning_rate": 8.799674851241666e-05,
      "loss": 0.438,
      "step": 24500
    },
    {
      "epoch": 1.44,
      "learning_rate": 8.749672541742298e-05,
      "loss": 0.4379,
      "step": 25000
    },
    {
      "epoch": 1.44,
      "eval_loss": 0.415912002325058,
      "eval_runtime": 93.156,
      "eval_samples_per_second": 53.673,
      "eval_steps_per_second": 0.848,
      "step": 25000
    },
    {
      "epoch": 1.48,
      "learning_rate": 8.698878062256518e-05,
      "loss": 0.4373,
      "step": 25500
    },
    {
      "epoch": 1.52,
      "learning_rate": 8.647407863447136e-05,
      "loss": 0.4359,
      "step": 26000
    },
    {
      "epoch": 1.57,
      "learning_rate": 8.595068355683772e-05,
      "loss": 0.4365,
      "step": 26500
    },
    {
      "epoch": 1.61,
      "learning_rate": 8.541974824203516e-05,
      "loss": 0.4344,
      "step": 27000
    },
    {
      "epoch": 1.65,
      "learning_rate": 8.488140369040804e-05,
      "loss": 0.4344,
      "step": 27500
    },
    {
      "epoch": 1.7,
      "learning_rate": 8.433578273041907e-05,
      "loss": 0.4349,
      "step": 28000
    },
    {
      "epoch": 1.74,
      "learning_rate": 8.378301998587581e-05,
      "loss": 0.4341,
      "step": 28500
    },
    {
      "epoch": 1.78,
      "learning_rate": 8.32232518427143e-05,
      "loss": 0.4346,
      "step": 29000
    },
    {
      "epoch": 1.83,
      "learning_rate": 8.26566164153479e-05,
      "loss": 0.433,
      "step": 29500
    },
    {
      "epoch": 1.87,
      "learning_rate": 8.208325351258954e-05,
      "loss": 0.4335,
      "step": 30000
    },
    {
      "epoch": 1.87,
      "eval_loss": 0.4103819727897644,
      "eval_runtime": 61.8163,
      "eval_samples_per_second": 80.885,
      "eval_steps_per_second": 1.278,
      "step": 30000
    },
    {
      "epoch": 1.91,
      "learning_rate": 8.150330460315606e-05,
      "loss": 0.4323,
      "step": 30500
    },
    {
      "epoch": 1.96,
      "learning_rate": 8.091691278076293e-05,
      "loss": 0.4309,
      "step": 31000
    },
    {
      "epoch": 2.0,
      "learning_rate": 8.032541429761075e-05,
      "loss": 0.4322,
      "step": 31500
    },
    {
      "epoch": 2.04,
      "learning_rate": 7.972658441054002e-05,
      "loss": 0.4309,
      "step": 32000
    },
    {
      "epoch": 2.09,
      "learning_rate": 7.912174998963153e-05,
      "loss": 0.4309,
      "step": 32500
    },
    {
      "epoch": 2.13,
      "learning_rate": 7.851106026872828e-05,
      "loss": 0.4299,
      "step": 33000
    },
    {
      "epoch": 2.18,
      "learning_rate": 7.789466592638097e-05,
      "loss": 0.4299,
      "step": 33500
    },
    {
      "epoch": 2.22,
      "learning_rate": 7.727271904867026e-05,
      "loss": 0.4292,
      "step": 34000
    },
    {
      "epoch": 2.26,
      "learning_rate": 7.664537309168187e-05,
      "loss": 0.4303,
      "step": 34500
    },
    {
      "epoch": 2.31,
      "learning_rate": 7.601405315441079e-05,
      "loss": 0.4293,
      "step": 35000
    },
    {
      "epoch": 2.31,
      "eval_loss": 0.407795786857605,
      "eval_runtime": 90.2448,
      "eval_samples_per_second": 55.405,
      "eval_steps_per_second": 0.875,
      "step": 35000
    },
    {
      "epoch": 2.35,
      "learning_rate": 7.537638471730726e-05,
      "loss": 0.4278,
      "step": 35500
    },
    {
      "epoch": 2.39,
      "learning_rate": 7.473378509304684e-05,
      "loss": 0.4286,
      "step": 36000
    },
    {
      "epoch": 2.44,
      "learning_rate": 7.408641283347138e-05,
      "loss": 0.4277,
      "step": 36500
    },
    {
      "epoch": 2.48,
      "learning_rate": 7.343442766799908e-05,
      "loss": 0.4275,
      "step": 37000
    },
    {
      "epoch": 2.52,
      "learning_rate": 7.27779904642137e-05,
      "loss": 0.4286,
      "step": 37500
    },
    {
      "epoch": 2.57,
      "learning_rate": 7.211858881603193e-05,
      "loss": 0.4275,
      "step": 38000
    },
    {
      "epoch": 2.61,
      "learning_rate": 7.145507628573625e-05,
      "loss": 0.4287,
      "step": 38500
    },
    {
      "epoch": 2.65,
      "learning_rate": 7.078627448111401e-05,
      "loss": 0.4272,
      "step": 39000
    },
    {
      "epoch": 2.7,
      "learning_rate": 7.011367403035232e-05,
      "loss": 0.427,
      "step": 39500
    },
    {
      "epoch": 2.74,
      "learning_rate": 6.943744088754813e-05,
      "loss": 0.4259,
      "step": 40000
    },
    {
      "epoch": 2.74,
      "eval_loss": 0.40401768684387207,
      "eval_runtime": 110.3924,
      "eval_samples_per_second": 45.293,
      "eval_steps_per_second": 0.716,
      "step": 40000
    },
    {
      "epoch": 2.78,
      "learning_rate": 6.875910464865927e-05,
      "loss": 0.4267,
      "step": 40500
    },
    {
      "epoch": 2.83,
      "learning_rate": 6.807611395649667e-05,
      "loss": 0.4258,
      "step": 41000
    },
    {
      "epoch": 2.87,
      "learning_rate": 6.73899933097511e-05,
      "loss": 0.4242,
      "step": 41500
    },
    {
      "epoch": 2.92,
      "learning_rate": 6.670091199842552e-05,
      "loss": 0.4243,
      "step": 42000
    },
    {
      "epoch": 2.96,
      "learning_rate": 6.600904004302253e-05,
      "loss": 0.4238,
      "step": 42500
    },
    {
      "epoch": 3.0,
      "learning_rate": 6.531454815259442e-05,
      "loss": 0.4244,
      "step": 43000
    },
    {
      "epoch": 3.05,
      "learning_rate": 6.461760768262325e-05,
      "loss": 0.4245,
      "step": 43500
    },
    {
      "epoch": 3.09,
      "learning_rate": 6.391839059274147e-05,
      "loss": 0.4226,
      "step": 44000
    },
    {
      "epoch": 3.13,
      "learning_rate": 6.321706940430336e-05,
      "loss": 0.4226,
      "step": 44500
    },
    {
      "epoch": 3.18,
      "learning_rate": 6.251522547425089e-05,
      "loss": 0.4211,
      "step": 45000
    },
    {
      "epoch": 3.18,
      "eval_loss": 0.40038520097732544,
      "eval_runtime": 61.219,
      "eval_samples_per_second": 81.674,
      "eval_steps_per_second": 1.29,
      "step": 45000
    },
    {
      "epoch": 3.22,
      "learning_rate": 6.181021902830414e-05,
      "loss": 0.4206,
      "step": 45500
    },
    {
      "epoch": 3.26,
      "learning_rate": 6.110362864358936e-05,
      "loss": 0.4223,
      "step": 46000
    },
    {
      "epoch": 3.31,
      "learning_rate": 6.039562866071103e-05,
      "loss": 0.4216,
      "step": 46500
    },
    {
      "epoch": 3.35,
      "learning_rate": 5.9686393768070895e-05,
      "loss": 0.4202,
      "step": 47000
    },
    {
      "epoch": 3.39,
      "learning_rate": 5.897609895876622e-05,
      "loss": 0.4201,
      "step": 47500
    },
    {
      "epoch": 3.44,
      "learning_rate": 5.826491948741281e-05,
      "loss": 0.4208,
      "step": 48000
    },
    {
      "epoch": 3.48,
      "learning_rate": 5.7554455195272715e-05,
      "loss": 0.4202,
      "step": 48500
    },
    {
      "epoch": 3.52,
      "learning_rate": 5.684203388517376e-05,
      "loss": 0.4197,
      "step": 49000
    },
    {
      "epoch": 3.57,
      "learning_rate": 5.6129254461650316e-05,
      "loss": 0.4205,
      "step": 49500
    },
    {
      "epoch": 3.61,
      "learning_rate": 5.541629279235954e-05,
      "loss": 0.4191,
      "step": 50000
    },
    {
      "epoch": 3.61,
      "eval_loss": 0.3981700539588928,
      "eval_runtime": 118.5137,
      "eval_samples_per_second": 42.189,
      "eval_steps_per_second": 0.667,
      "step": 50000
    },
    {
      "epoch": 3.66,
      "learning_rate": 5.470332478992507e-05,
      "loss": 0.419,
      "step": 50500
    },
    {
      "epoch": 3.7,
      "learning_rate": 5.399052636853316e-05,
      "loss": 0.4188,
      "step": 51000
    },
    {
      "epoch": 3.74,
      "learning_rate": 5.327807340052843e-05,
      "loss": 0.4188,
      "step": 51500
    },
    {
      "epoch": 3.79,
      "learning_rate": 5.256756489949676e-05,
      "loss": 0.4184,
      "step": 52000
    },
    {
      "epoch": 3.83,
      "learning_rate": 5.18563285019876e-05,
      "loss": 0.4178,
      "step": 52500
    },
    {
      "epoch": 3.87,
      "learning_rate": 5.114596413925449e-05,
      "loss": 0.4169,
      "step": 53000
    },
    {
      "epoch": 3.92,
      "learning_rate": 5.0436647083074565e-05,
      "loss": 0.4176,
      "step": 53500
    },
    {
      "epoch": 3.96,
      "learning_rate": 4.972855234681767e-05,
      "loss": 0.4171,
      "step": 54000
    },
    {
      "epoch": 4.0,
      "learning_rate": 4.902185464226454e-05,
      "loss": 0.4171,
      "step": 54500
    },
    {
      "epoch": 4.05,
      "learning_rate": 4.831672833649923e-05,
      "loss": 0.4167,
      "step": 55000
    },
    {
      "epoch": 4.05,
      "eval_loss": 0.39512524008750916,
      "eval_runtime": 60.7929,
      "eval_samples_per_second": 82.246,
      "eval_steps_per_second": 1.299,
      "step": 55000
    },
    {
      "epoch": 4.09,
      "learning_rate": 4.761334740888664e-05,
      "loss": 0.4157,
      "step": 55500
    },
    {
      "epoch": 4.13,
      "learning_rate": 4.691188540814576e-05,
      "loss": 0.4145,
      "step": 56000
    },
    {
      "epoch": 4.18,
      "learning_rate": 4.621391194692349e-05,
      "loss": 0.4146,
      "step": 56500
    },
    {
      "epoch": 4.22,
      "learning_rate": 4.551680180854116e-05,
      "loss": 0.4142,
      "step": 57000
    },
    {
      "epoch": 4.26,
      "learning_rate": 4.482212788828615e-05,
      "loss": 0.4159,
      "step": 57500
    },
    {
      "epoch": 4.31,
      "learning_rate": 4.413144300314575e-05,
      "loss": 0.4156,
      "step": 58000
    },
    {
      "epoch": 4.35,
      "learning_rate": 4.344214935022219e-05,
      "loss": 0.4134,
      "step": 58500
    },
    {
      "epoch": 4.4,
      "learning_rate": 4.275580380487631e-05,
      "loss": 0.4141,
      "step": 59000
    },
    {
      "epoch": 4.44,
      "learning_rate": 4.2072575712601445e-05,
      "loss": 0.4146,
      "step": 59500
    },
    {
      "epoch": 4.48,
      "learning_rate": 4.1392633649706e-05,
      "loss": 0.4137,
      "step": 60000
    },
    {
      "epoch": 4.48,
      "eval_loss": 0.3925502300262451,
      "eval_runtime": 93.7863,
      "eval_samples_per_second": 53.313,
      "eval_steps_per_second": 0.842,
      "step": 60000
    },
    {
      "epoch": 4.53,
      "learning_rate": 4.0717494800280484e-05,
      "loss": 0.414,
      "step": 60500
    },
    {
      "epoch": 4.57,
      "learning_rate": 4.0044619833152034e-05,
      "loss": 0.4127,
      "step": 61000
    },
    {
      "epoch": 4.61,
      "learning_rate": 3.937553126317129e-05,
      "loss": 0.4129,
      "step": 61500
    },
    {
      "epoch": 4.66,
      "learning_rate": 3.871039417793112e-05,
      "loss": 0.4125,
      "step": 62000
    },
    {
      "epoch": 4.7,
      "learning_rate": 3.804937269005464e-05,
      "loss": 0.4121,
      "step": 62500
    },
    {
      "epoch": 4.74,
      "learning_rate": 3.7392629896702916e-05,
      "loss": 0.4121,
      "step": 63000
    },
    {
      "epoch": 4.79,
      "learning_rate": 3.674032783933316e-05,
      "loss": 0.4119,
      "step": 63500
    },
    {
      "epoch": 4.83,
      "learning_rate": 3.609262746371739e-05,
      "loss": 0.4116,
      "step": 64000
    },
    {
      "epoch": 4.87,
      "learning_rate": 3.544968858023156e-05,
      "loss": 0.4107,
      "step": 64500
    },
    {
      "epoch": 4.92,
      "learning_rate": 3.481294084680979e-05,
      "loss": 0.412,
      "step": 65000
    },
    {
      "epoch": 4.92,
      "eval_loss": 0.38968759775161743,
      "eval_runtime": 78.1248,
      "eval_samples_per_second": 64.0,
      "eval_steps_per_second": 1.011,
      "step": 65000
    },
    {
      "epoch": 4.96,
      "learning_rate": 3.4181250060824136e-05,
      "loss": 0.4105,
      "step": 65500
    },
    {
      "epoch": 5.0,
      "learning_rate": 3.355352132804756e-05,
      "loss": 0.4104,
      "step": 66000
    },
    {
      "epoch": 5.05,
      "learning_rate": 3.293118057384647e-05,
      "loss": 0.4109,
      "step": 66500
    },
    {
      "epoch": 5.09,
      "learning_rate": 3.231438135148965e-05,
      "loss": 0.41,
      "step": 67000
    },
    {
      "epoch": 5.14,
      "learning_rate": 3.17044922751775e-05,
      "loss": 0.4098,
      "step": 67500
    },
    {
      "epoch": 5.18,
      "learning_rate": 3.109921943108062e-05,
      "loss": 0.4084,
      "step": 68000
    },
    {
      "epoch": 5.22,
      "learning_rate": 3.0499940127827767e-05,
      "loss": 0.4083,
      "step": 68500
    },
    {
      "epoch": 5.27,
      "learning_rate": 2.990680222861982e-05,
      "loss": 0.4088,
      "step": 69000
    },
    {
      "epoch": 5.31,
      "learning_rate": 2.9319952081358052e-05,
      "loss": 0.408,
      "step": 69500
    },
    {
      "epoch": 5.35,
      "learning_rate": 2.8739534482535135e-05,
      "loss": 0.4077,
      "step": 70000
    },
    {
      "epoch": 5.35,
      "eval_loss": 0.38781091570854187,
      "eval_runtime": 90.5362,
      "eval_samples_per_second": 55.227,
      "eval_steps_per_second": 0.873,
      "step": 70000
    },
    {
      "epoch": 5.4,
      "learning_rate": 2.8166833668202425e-05,
      "loss": 0.4087,
      "step": 70500
    },
    {
      "epoch": 5.44,
      "learning_rate": 2.759969559696268e-05,
      "loss": 0.4088,
      "step": 71000
    },
    {
      "epoch": 5.48,
      "learning_rate": 2.703941452170851e-05,
      "loss": 0.4073,
      "step": 71500
    },
    {
      "epoch": 5.53,
      "learning_rate": 2.648612868341161e-05,
      "loss": 0.4084,
      "step": 72000
    },
    {
      "epoch": 5.57,
      "learning_rate": 2.5939974597073566e-05,
      "loss": 0.4077,
      "step": 72500
    },
    {
      "epoch": 5.61,
      "learning_rate": 2.5402157452548983e-05,
      "loss": 0.4072,
      "step": 73000
    },
    {
      "epoch": 5.66,
      "learning_rate": 2.487065441284431e-05,
      "loss": 0.4079,
      "step": 73500
    },
    {
      "epoch": 5.7,
      "learning_rate": 2.4346681719201903e-05,
      "loss": 0.4076,
      "step": 74000
    },
    {
      "epoch": 5.74,
      "learning_rate": 2.383036865404354e-05,
      "loss": 0.4069,
      "step": 74500
    },
    {
      "epoch": 5.79,
      "learning_rate": 2.332184260989229e-05,
      "loss": 0.4078,
      "step": 75000
    },
    {
      "epoch": 5.79,
      "eval_loss": 0.3861904740333557,
      "eval_runtime": 75.9418,
      "eval_samples_per_second": 65.84,
      "eval_steps_per_second": 1.04,
      "step": 75000
    }
  ],
  "max_steps": 100000,
  "num_train_epochs": 9,
  "total_flos": 3.5323171476767716e+21,
  "trial_name": null,
  "trial_params": null
}