|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 208.6811352253756, |
|
"global_step": 250000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 7.8213, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 6.0676, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 6e-06, |
|
"loss": 5.2769, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 4.7202, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 1e-05, |
|
"loss": 4.3225, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.2e-05, |
|
"loss": 3.9793, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 3.6914, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 3.4487, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 3.76, |
|
"learning_rate": 1.8e-05, |
|
"loss": 3.2435, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 2e-05, |
|
"loss": 3.0638, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 2.9136, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 2.4e-05, |
|
"loss": 2.7839, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 2.665, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 5.84, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 2.5639, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 3e-05, |
|
"loss": 2.4721, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 2.3918, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 7.1, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 2.3195, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 3.6e-05, |
|
"loss": 2.2511, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 3.8e-05, |
|
"loss": 2.1977, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 4e-05, |
|
"loss": 2.1409, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 4.2e-05, |
|
"loss": 2.093, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 9.18, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 2.0491, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 2.0059, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 10.02, |
|
"learning_rate": 4.8e-05, |
|
"loss": 1.9673, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 5e-05, |
|
"loss": 1.9332, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"eval_loss": 1.8155752420425415, |
|
"eval_runtime": 140.158, |
|
"eval_samples_per_second": 486.158, |
|
"eval_steps_per_second": 3.803, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 1.8996, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 11.27, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 1.868, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 11.69, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 1.8409, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 12.1, |
|
"learning_rate": 5.8e-05, |
|
"loss": 1.8127, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 12.52, |
|
"learning_rate": 6e-05, |
|
"loss": 1.787, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"learning_rate": 6.2e-05, |
|
"loss": 1.7647, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 13.36, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 1.7386, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"learning_rate": 6.6e-05, |
|
"loss": 1.7192, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 14.19, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 1.6991, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 14.61, |
|
"learning_rate": 7e-05, |
|
"loss": 1.6812, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 15.03, |
|
"learning_rate": 7.2e-05, |
|
"loss": 1.661, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 15.44, |
|
"learning_rate": 7.4e-05, |
|
"loss": 1.6418, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 15.86, |
|
"learning_rate": 7.6e-05, |
|
"loss": 1.6236, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 16.28, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 1.6107, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 16.69, |
|
"learning_rate": 8e-05, |
|
"loss": 1.5962, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 17.11, |
|
"learning_rate": 8.2e-05, |
|
"loss": 1.5812, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 17.53, |
|
"learning_rate": 8.4e-05, |
|
"loss": 1.5685, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 17.95, |
|
"learning_rate": 8.6e-05, |
|
"loss": 1.5554, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 18.36, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 1.5439, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 18.78, |
|
"learning_rate": 9e-05, |
|
"loss": 1.5343, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 19.2, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 1.521, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 19.62, |
|
"learning_rate": 9.4e-05, |
|
"loss": 1.5101, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 20.03, |
|
"learning_rate": 9.6e-05, |
|
"loss": 1.5014, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 20.45, |
|
"learning_rate": 9.8e-05, |
|
"loss": 1.4894, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 20.87, |
|
"learning_rate": 0.0001, |
|
"loss": 1.4842, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 20.87, |
|
"eval_loss": 1.4012646675109863, |
|
"eval_runtime": 125.2851, |
|
"eval_samples_per_second": 543.871, |
|
"eval_steps_per_second": 4.254, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 21.29, |
|
"learning_rate": 9.977777777777779e-05, |
|
"loss": 1.4737, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 21.7, |
|
"learning_rate": 9.955555555555556e-05, |
|
"loss": 1.4685, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 22.12, |
|
"learning_rate": 9.933333333333334e-05, |
|
"loss": 1.4549, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 22.54, |
|
"learning_rate": 9.911111111111112e-05, |
|
"loss": 1.4472, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 22.95, |
|
"learning_rate": 9.888888888888889e-05, |
|
"loss": 1.4382, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 23.37, |
|
"learning_rate": 9.866666666666668e-05, |
|
"loss": 1.4292, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 23.79, |
|
"learning_rate": 9.844444444444444e-05, |
|
"loss": 1.4244, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 24.21, |
|
"learning_rate": 9.822222222222223e-05, |
|
"loss": 1.4167, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 24.62, |
|
"learning_rate": 9.8e-05, |
|
"loss": 1.4092, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 25.04, |
|
"learning_rate": 9.777777777777778e-05, |
|
"loss": 1.4061, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 25.46, |
|
"learning_rate": 9.755555555555555e-05, |
|
"loss": 1.3953, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 25.88, |
|
"learning_rate": 9.733333333333335e-05, |
|
"loss": 1.3929, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 26.29, |
|
"learning_rate": 9.711111111111111e-05, |
|
"loss": 1.3849, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 26.71, |
|
"learning_rate": 9.68888888888889e-05, |
|
"loss": 1.3824, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 27.13, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 1.3748, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 27.55, |
|
"learning_rate": 9.644444444444445e-05, |
|
"loss": 1.3683, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 27.96, |
|
"learning_rate": 9.622222222222222e-05, |
|
"loss": 1.3655, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 28.38, |
|
"learning_rate": 9.6e-05, |
|
"loss": 1.359, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 28.8, |
|
"learning_rate": 9.577777777777777e-05, |
|
"loss": 1.3541, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 29.22, |
|
"learning_rate": 9.555555555555557e-05, |
|
"loss": 1.3509, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 29.63, |
|
"learning_rate": 9.533333333333334e-05, |
|
"loss": 1.3463, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 30.05, |
|
"learning_rate": 9.511111111111112e-05, |
|
"loss": 1.3416, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 30.47, |
|
"learning_rate": 9.488888888888889e-05, |
|
"loss": 1.3358, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 30.88, |
|
"learning_rate": 9.466666666666667e-05, |
|
"loss": 1.3326, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 31.3, |
|
"learning_rate": 9.444444444444444e-05, |
|
"loss": 1.3267, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 31.3, |
|
"eval_loss": 1.268040418624878, |
|
"eval_runtime": 125.1415, |
|
"eval_samples_per_second": 544.496, |
|
"eval_steps_per_second": 4.259, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 31.72, |
|
"learning_rate": 9.422222222222223e-05, |
|
"loss": 1.3276, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 32.14, |
|
"learning_rate": 9.4e-05, |
|
"loss": 1.3216, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 32.55, |
|
"learning_rate": 9.377777777777779e-05, |
|
"loss": 1.3181, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 32.97, |
|
"learning_rate": 9.355555555555556e-05, |
|
"loss": 1.3146, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 33.39, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 1.3091, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 33.81, |
|
"learning_rate": 9.311111111111111e-05, |
|
"loss": 1.3072, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 34.22, |
|
"learning_rate": 9.28888888888889e-05, |
|
"loss": 1.3041, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 34.64, |
|
"learning_rate": 9.266666666666666e-05, |
|
"loss": 1.2998, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 35.06, |
|
"learning_rate": 9.244444444444445e-05, |
|
"loss": 1.2995, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 35.48, |
|
"learning_rate": 9.222222222222223e-05, |
|
"loss": 1.2942, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 35.89, |
|
"learning_rate": 9.200000000000001e-05, |
|
"loss": 1.2928, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 36.31, |
|
"learning_rate": 9.177777777777778e-05, |
|
"loss": 1.289, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 36.73, |
|
"learning_rate": 9.155555555555557e-05, |
|
"loss": 1.2874, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 37.15, |
|
"learning_rate": 9.133333333333334e-05, |
|
"loss": 1.2859, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 37.56, |
|
"learning_rate": 9.111111111111112e-05, |
|
"loss": 1.2771, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 37.98, |
|
"learning_rate": 9.088888888888889e-05, |
|
"loss": 1.2809, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 38.4, |
|
"learning_rate": 9.066666666666667e-05, |
|
"loss": 1.2749, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 38.81, |
|
"learning_rate": 9.044444444444445e-05, |
|
"loss": 1.272, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 39.23, |
|
"learning_rate": 9.022222222222224e-05, |
|
"loss": 1.2708, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 39.65, |
|
"learning_rate": 9e-05, |
|
"loss": 1.2685, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 40.07, |
|
"learning_rate": 8.977777777777779e-05, |
|
"loss": 1.2678, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 40.48, |
|
"learning_rate": 8.955555555555556e-05, |
|
"loss": 1.2627, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 40.9, |
|
"learning_rate": 8.933333333333334e-05, |
|
"loss": 1.262, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 41.32, |
|
"learning_rate": 8.911111111111111e-05, |
|
"loss": 1.2555, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 41.74, |
|
"learning_rate": 8.888888888888889e-05, |
|
"loss": 1.2581, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 41.74, |
|
"eval_loss": 1.2052675485610962, |
|
"eval_runtime": 125.1251, |
|
"eval_samples_per_second": 544.567, |
|
"eval_steps_per_second": 4.26, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 42.15, |
|
"learning_rate": 8.866666666666668e-05, |
|
"loss": 1.2531, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 42.57, |
|
"learning_rate": 8.844444444444445e-05, |
|
"loss": 1.2518, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 42.99, |
|
"learning_rate": 8.822222222222223e-05, |
|
"loss": 1.2515, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 43.41, |
|
"learning_rate": 8.800000000000001e-05, |
|
"loss": 1.2481, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 43.82, |
|
"learning_rate": 8.777777777777778e-05, |
|
"loss": 1.2489, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 44.24, |
|
"learning_rate": 8.755555555555556e-05, |
|
"loss": 1.2431, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 44.66, |
|
"learning_rate": 8.733333333333333e-05, |
|
"loss": 1.242, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 45.08, |
|
"learning_rate": 8.711111111111112e-05, |
|
"loss": 1.2403, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 45.49, |
|
"learning_rate": 8.68888888888889e-05, |
|
"loss": 1.2356, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 45.91, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 1.2381, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 46.33, |
|
"learning_rate": 8.644444444444445e-05, |
|
"loss": 1.2336, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 46.74, |
|
"learning_rate": 8.622222222222222e-05, |
|
"loss": 1.2325, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 47.16, |
|
"learning_rate": 8.6e-05, |
|
"loss": 1.2316, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 47.58, |
|
"learning_rate": 8.577777777777777e-05, |
|
"loss": 1.2277, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 48.0, |
|
"learning_rate": 8.555555555555556e-05, |
|
"loss": 1.2287, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 48.41, |
|
"learning_rate": 8.533333333333334e-05, |
|
"loss": 1.2243, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 48.83, |
|
"learning_rate": 8.511111111111112e-05, |
|
"loss": 1.2253, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 49.25, |
|
"learning_rate": 8.488888888888889e-05, |
|
"loss": 1.2209, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 49.67, |
|
"learning_rate": 8.466666666666667e-05, |
|
"loss": 1.2209, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 50.08, |
|
"learning_rate": 8.444444444444444e-05, |
|
"loss": 1.2201, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 50.5, |
|
"learning_rate": 8.422222222222223e-05, |
|
"loss": 1.2164, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 50.92, |
|
"learning_rate": 8.4e-05, |
|
"loss": 1.2168, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 51.34, |
|
"learning_rate": 8.377777777777778e-05, |
|
"loss": 1.2139, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 51.75, |
|
"learning_rate": 8.355555555555556e-05, |
|
"loss": 1.212, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 52.17, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 1.211, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 52.17, |
|
"eval_loss": 1.1704248189926147, |
|
"eval_runtime": 125.3013, |
|
"eval_samples_per_second": 543.801, |
|
"eval_steps_per_second": 4.254, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 52.59, |
|
"learning_rate": 8.311111111111111e-05, |
|
"loss": 1.2118, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 53.01, |
|
"learning_rate": 8.28888888888889e-05, |
|
"loss": 1.2088, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 53.42, |
|
"learning_rate": 8.266666666666667e-05, |
|
"loss": 1.2054, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 53.84, |
|
"learning_rate": 8.244444444444445e-05, |
|
"loss": 1.2065, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 54.26, |
|
"learning_rate": 8.222222222222222e-05, |
|
"loss": 1.2048, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 54.67, |
|
"learning_rate": 8.2e-05, |
|
"loss": 1.202, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 55.09, |
|
"learning_rate": 8.177777777777778e-05, |
|
"loss": 1.2035, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 55.51, |
|
"learning_rate": 8.155555555555557e-05, |
|
"loss": 1.1989, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 55.93, |
|
"learning_rate": 8.133333333333334e-05, |
|
"loss": 1.201, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 56.34, |
|
"learning_rate": 8.111111111111112e-05, |
|
"loss": 1.1975, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 56.76, |
|
"learning_rate": 8.088888888888889e-05, |
|
"loss": 1.1971, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 57.18, |
|
"learning_rate": 8.066666666666667e-05, |
|
"loss": 1.1967, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 57.6, |
|
"learning_rate": 8.044444444444444e-05, |
|
"loss": 1.1931, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 58.01, |
|
"learning_rate": 8.022222222222222e-05, |
|
"loss": 1.1942, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 58.43, |
|
"learning_rate": 8e-05, |
|
"loss": 1.1901, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 58.85, |
|
"learning_rate": 7.977777777777779e-05, |
|
"loss": 1.1903, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 59.27, |
|
"learning_rate": 7.955555555555556e-05, |
|
"loss": 1.189, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 59.68, |
|
"learning_rate": 7.933333333333334e-05, |
|
"loss": 1.1855, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 60.1, |
|
"learning_rate": 7.911111111111111e-05, |
|
"loss": 1.1876, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 60.52, |
|
"learning_rate": 7.88888888888889e-05, |
|
"loss": 1.183, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 60.93, |
|
"learning_rate": 7.866666666666666e-05, |
|
"loss": 1.1853, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 61.35, |
|
"learning_rate": 7.844444444444446e-05, |
|
"loss": 1.1813, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 61.77, |
|
"learning_rate": 7.822222222222223e-05, |
|
"loss": 1.1809, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 62.19, |
|
"learning_rate": 7.800000000000001e-05, |
|
"loss": 1.1796, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 62.6, |
|
"learning_rate": 7.777777777777778e-05, |
|
"loss": 1.18, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 62.6, |
|
"eval_loss": 1.1421879529953003, |
|
"eval_runtime": 125.314, |
|
"eval_samples_per_second": 543.746, |
|
"eval_steps_per_second": 4.253, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 63.02, |
|
"learning_rate": 7.755555555555556e-05, |
|
"loss": 1.1821, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 63.44, |
|
"learning_rate": 7.733333333333333e-05, |
|
"loss": 1.1732, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 63.86, |
|
"learning_rate": 7.711111111111112e-05, |
|
"loss": 1.1786, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 64.27, |
|
"learning_rate": 7.688888888888889e-05, |
|
"loss": 1.1729, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 64.69, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 1.1737, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 65.11, |
|
"learning_rate": 7.644444444444445e-05, |
|
"loss": 1.1727, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 65.53, |
|
"learning_rate": 7.622222222222223e-05, |
|
"loss": 1.1708, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 65.94, |
|
"learning_rate": 7.6e-05, |
|
"loss": 1.1712, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 66.36, |
|
"learning_rate": 7.577777777777779e-05, |
|
"loss": 1.169, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 66.78, |
|
"learning_rate": 7.555555555555556e-05, |
|
"loss": 1.1697, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 67.2, |
|
"learning_rate": 7.533333333333334e-05, |
|
"loss": 1.1681, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 67.61, |
|
"learning_rate": 7.511111111111111e-05, |
|
"loss": 1.1651, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 68.03, |
|
"learning_rate": 7.488888888888889e-05, |
|
"loss": 1.1684, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 68.45, |
|
"learning_rate": 7.466666666666667e-05, |
|
"loss": 1.1644, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 68.86, |
|
"learning_rate": 7.444444444444444e-05, |
|
"loss": 1.1643, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 69.28, |
|
"learning_rate": 7.422222222222223e-05, |
|
"loss": 1.1608, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 69.7, |
|
"learning_rate": 7.4e-05, |
|
"loss": 1.164, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 70.12, |
|
"learning_rate": 7.377777777777778e-05, |
|
"loss": 1.1612, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 70.53, |
|
"learning_rate": 7.355555555555556e-05, |
|
"loss": 1.1593, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 70.95, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 1.1585, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 71.37, |
|
"learning_rate": 7.311111111111111e-05, |
|
"loss": 1.1584, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 71.79, |
|
"learning_rate": 7.28888888888889e-05, |
|
"loss": 1.1571, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 72.2, |
|
"learning_rate": 7.266666666666667e-05, |
|
"loss": 1.1565, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 72.62, |
|
"learning_rate": 7.244444444444445e-05, |
|
"loss": 1.1545, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 73.04, |
|
"learning_rate": 7.222222222222222e-05, |
|
"loss": 1.1545, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 73.04, |
|
"eval_loss": 1.119368314743042, |
|
"eval_runtime": 125.2645, |
|
"eval_samples_per_second": 543.961, |
|
"eval_steps_per_second": 4.255, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 73.46, |
|
"learning_rate": 7.2e-05, |
|
"loss": 1.1528, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 73.87, |
|
"learning_rate": 7.177777777777777e-05, |
|
"loss": 1.1546, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 74.29, |
|
"learning_rate": 7.155555555555555e-05, |
|
"loss": 1.152, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 74.71, |
|
"learning_rate": 7.133333333333334e-05, |
|
"loss": 1.15, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 75.13, |
|
"learning_rate": 7.111111111111112e-05, |
|
"loss": 1.1511, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 75.54, |
|
"learning_rate": 7.088888888888889e-05, |
|
"loss": 1.1492, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 75.96, |
|
"learning_rate": 7.066666666666667e-05, |
|
"loss": 1.1495, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 76.38, |
|
"learning_rate": 7.044444444444444e-05, |
|
"loss": 1.1466, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 76.79, |
|
"learning_rate": 7.022222222222222e-05, |
|
"loss": 1.1464, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 77.21, |
|
"learning_rate": 7e-05, |
|
"loss": 1.1458, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 77.63, |
|
"learning_rate": 6.977777777777779e-05, |
|
"loss": 1.1451, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 78.05, |
|
"learning_rate": 6.955555555555556e-05, |
|
"loss": 1.1452, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 78.46, |
|
"learning_rate": 6.933333333333334e-05, |
|
"loss": 1.1428, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 78.88, |
|
"learning_rate": 6.911111111111111e-05, |
|
"loss": 1.1439, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 79.3, |
|
"learning_rate": 6.88888888888889e-05, |
|
"loss": 1.1433, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 79.72, |
|
"learning_rate": 6.866666666666666e-05, |
|
"loss": 1.143, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 80.13, |
|
"learning_rate": 6.844444444444445e-05, |
|
"loss": 1.1394, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 80.55, |
|
"learning_rate": 6.822222222222222e-05, |
|
"loss": 1.1416, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 80.97, |
|
"learning_rate": 6.800000000000001e-05, |
|
"loss": 1.1392, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 81.39, |
|
"learning_rate": 6.777777777777778e-05, |
|
"loss": 1.1389, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 81.8, |
|
"learning_rate": 6.755555555555557e-05, |
|
"loss": 1.138, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 82.22, |
|
"learning_rate": 6.733333333333333e-05, |
|
"loss": 1.1381, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 82.64, |
|
"learning_rate": 6.711111111111112e-05, |
|
"loss": 1.1357, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 83.06, |
|
"learning_rate": 6.688888888888889e-05, |
|
"loss": 1.137, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 83.47, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 1.1324, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 83.47, |
|
"eval_loss": 1.1047865152359009, |
|
"eval_runtime": 125.3085, |
|
"eval_samples_per_second": 543.77, |
|
"eval_steps_per_second": 4.254, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 83.89, |
|
"learning_rate": 6.644444444444444e-05, |
|
"loss": 1.1346, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 84.31, |
|
"learning_rate": 6.622222222222224e-05, |
|
"loss": 1.134, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 84.72, |
|
"learning_rate": 6.6e-05, |
|
"loss": 1.1329, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 85.14, |
|
"learning_rate": 6.577777777777779e-05, |
|
"loss": 1.1338, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 85.56, |
|
"learning_rate": 6.555555555555556e-05, |
|
"loss": 1.1304, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 85.98, |
|
"learning_rate": 6.533333333333334e-05, |
|
"loss": 1.1303, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 86.39, |
|
"learning_rate": 6.511111111111111e-05, |
|
"loss": 1.1299, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 86.81, |
|
"learning_rate": 6.488888888888889e-05, |
|
"loss": 1.1298, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 87.23, |
|
"learning_rate": 6.466666666666666e-05, |
|
"loss": 1.1276, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 87.65, |
|
"learning_rate": 6.444444444444446e-05, |
|
"loss": 1.1285, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 88.06, |
|
"learning_rate": 6.422222222222223e-05, |
|
"loss": 1.1263, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 88.48, |
|
"learning_rate": 6.400000000000001e-05, |
|
"loss": 1.1243, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 88.9, |
|
"learning_rate": 6.377777777777778e-05, |
|
"loss": 1.1267, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 89.32, |
|
"learning_rate": 6.355555555555556e-05, |
|
"loss": 1.1236, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 89.73, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 1.1242, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 90.15, |
|
"learning_rate": 6.311111111111112e-05, |
|
"loss": 1.1226, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 90.57, |
|
"learning_rate": 6.28888888888889e-05, |
|
"loss": 1.1216, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 90.98, |
|
"learning_rate": 6.266666666666667e-05, |
|
"loss": 1.1233, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 91.4, |
|
"learning_rate": 6.244444444444445e-05, |
|
"loss": 1.1216, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 91.82, |
|
"learning_rate": 6.222222222222222e-05, |
|
"loss": 1.1229, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 92.24, |
|
"learning_rate": 6.2e-05, |
|
"loss": 1.1203, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 92.65, |
|
"learning_rate": 6.177777777777779e-05, |
|
"loss": 1.1184, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 93.07, |
|
"learning_rate": 6.155555555555555e-05, |
|
"loss": 1.1217, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 93.49, |
|
"learning_rate": 6.133333333333334e-05, |
|
"loss": 1.1177, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 93.91, |
|
"learning_rate": 6.111111111111112e-05, |
|
"loss": 1.1171, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 93.91, |
|
"eval_loss": 1.0929380655288696, |
|
"eval_runtime": 125.1906, |
|
"eval_samples_per_second": 544.282, |
|
"eval_steps_per_second": 4.258, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 94.32, |
|
"learning_rate": 6.08888888888889e-05, |
|
"loss": 1.1157, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 94.74, |
|
"learning_rate": 6.066666666666667e-05, |
|
"loss": 1.1171, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 95.16, |
|
"learning_rate": 6.044444444444445e-05, |
|
"loss": 1.1163, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 95.58, |
|
"learning_rate": 6.0222222222222225e-05, |
|
"loss": 1.1163, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 95.99, |
|
"learning_rate": 6e-05, |
|
"loss": 1.1163, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 96.41, |
|
"learning_rate": 5.977777777777778e-05, |
|
"loss": 1.1124, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 96.83, |
|
"learning_rate": 5.9555555555555554e-05, |
|
"loss": 1.1139, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 97.25, |
|
"learning_rate": 5.9333333333333343e-05, |
|
"loss": 1.1134, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 97.66, |
|
"learning_rate": 5.911111111111112e-05, |
|
"loss": 1.1119, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 98.08, |
|
"learning_rate": 5.8888888888888896e-05, |
|
"loss": 1.1138, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 98.5, |
|
"learning_rate": 5.866666666666667e-05, |
|
"loss": 1.1088, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 98.91, |
|
"learning_rate": 5.844444444444445e-05, |
|
"loss": 1.112, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 99.33, |
|
"learning_rate": 5.8222222222222224e-05, |
|
"loss": 1.1076, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 99.75, |
|
"learning_rate": 5.8e-05, |
|
"loss": 1.1097, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 100.17, |
|
"learning_rate": 5.7777777777777776e-05, |
|
"loss": 1.1081, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 100.58, |
|
"learning_rate": 5.755555555555556e-05, |
|
"loss": 1.1051, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 101.0, |
|
"learning_rate": 5.7333333333333336e-05, |
|
"loss": 1.1101, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 101.42, |
|
"learning_rate": 5.711111111111112e-05, |
|
"loss": 1.104, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 101.84, |
|
"learning_rate": 5.6888888888888895e-05, |
|
"loss": 1.108, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 102.25, |
|
"learning_rate": 5.666666666666667e-05, |
|
"loss": 1.1058, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 102.67, |
|
"learning_rate": 5.644444444444445e-05, |
|
"loss": 1.1069, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 103.09, |
|
"learning_rate": 5.622222222222222e-05, |
|
"loss": 1.1042, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 103.51, |
|
"learning_rate": 5.6000000000000006e-05, |
|
"loss": 1.1031, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 103.92, |
|
"learning_rate": 5.577777777777778e-05, |
|
"loss": 1.1032, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 104.34, |
|
"learning_rate": 5.555555555555556e-05, |
|
"loss": 1.1044, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 104.34, |
|
"eval_loss": 1.0788438320159912, |
|
"eval_runtime": 125.2368, |
|
"eval_samples_per_second": 544.081, |
|
"eval_steps_per_second": 4.256, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 104.76, |
|
"learning_rate": 5.5333333333333334e-05, |
|
"loss": 1.1023, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 105.18, |
|
"learning_rate": 5.511111111111111e-05, |
|
"loss": 1.1028, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 105.59, |
|
"learning_rate": 5.488888888888889e-05, |
|
"loss": 1.1027, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 106.01, |
|
"learning_rate": 5.466666666666666e-05, |
|
"loss": 1.1019, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 106.43, |
|
"learning_rate": 5.4444444444444446e-05, |
|
"loss": 1.0994, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 106.84, |
|
"learning_rate": 5.422222222222223e-05, |
|
"loss": 1.1012, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 107.26, |
|
"learning_rate": 5.4000000000000005e-05, |
|
"loss": 1.0995, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 107.68, |
|
"learning_rate": 5.377777777777778e-05, |
|
"loss": 1.0994, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 108.1, |
|
"learning_rate": 5.355555555555556e-05, |
|
"loss": 1.0999, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 108.51, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 1.0971, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 108.93, |
|
"learning_rate": 5.311111111111111e-05, |
|
"loss": 1.0982, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 109.35, |
|
"learning_rate": 5.2888888888888885e-05, |
|
"loss": 1.0954, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 109.77, |
|
"learning_rate": 5.266666666666666e-05, |
|
"loss": 1.0979, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 110.18, |
|
"learning_rate": 5.244444444444445e-05, |
|
"loss": 1.0966, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 110.6, |
|
"learning_rate": 5.222222222222223e-05, |
|
"loss": 1.0954, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 111.02, |
|
"learning_rate": 5.2000000000000004e-05, |
|
"loss": 1.0959, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 111.44, |
|
"learning_rate": 5.177777777777778e-05, |
|
"loss": 1.0943, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 111.85, |
|
"learning_rate": 5.1555555555555556e-05, |
|
"loss": 1.0941, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 112.27, |
|
"learning_rate": 5.133333333333333e-05, |
|
"loss": 1.0937, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 112.69, |
|
"learning_rate": 5.111111111111111e-05, |
|
"loss": 1.093, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 113.11, |
|
"learning_rate": 5.0888888888888884e-05, |
|
"loss": 1.0931, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 113.52, |
|
"learning_rate": 5.0666666666666674e-05, |
|
"loss": 1.0901, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 113.94, |
|
"learning_rate": 5.044444444444445e-05, |
|
"loss": 1.0948, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 114.36, |
|
"learning_rate": 5.0222222222222226e-05, |
|
"loss": 1.0903, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 114.77, |
|
"learning_rate": 5e-05, |
|
"loss": 1.0922, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 114.77, |
|
"eval_loss": 1.0676383972167969, |
|
"eval_runtime": 125.398, |
|
"eval_samples_per_second": 543.382, |
|
"eval_steps_per_second": 4.25, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 115.19, |
|
"learning_rate": 4.977777777777778e-05, |
|
"loss": 1.0883, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 115.61, |
|
"learning_rate": 4.955555555555556e-05, |
|
"loss": 1.0905, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 116.03, |
|
"learning_rate": 4.933333333333334e-05, |
|
"loss": 1.0912, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 116.44, |
|
"learning_rate": 4.9111111111111114e-05, |
|
"loss": 1.0873, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 116.86, |
|
"learning_rate": 4.888888888888889e-05, |
|
"loss": 1.0893, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 117.28, |
|
"learning_rate": 4.866666666666667e-05, |
|
"loss": 1.0857, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 117.7, |
|
"learning_rate": 4.844444444444445e-05, |
|
"loss": 1.0877, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 118.11, |
|
"learning_rate": 4.8222222222222225e-05, |
|
"loss": 1.0879, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 118.53, |
|
"learning_rate": 4.8e-05, |
|
"loss": 1.0858, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 118.95, |
|
"learning_rate": 4.7777777777777784e-05, |
|
"loss": 1.0876, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 119.37, |
|
"learning_rate": 4.755555555555556e-05, |
|
"loss": 1.0849, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 119.78, |
|
"learning_rate": 4.7333333333333336e-05, |
|
"loss": 1.0851, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 120.2, |
|
"learning_rate": 4.711111111111111e-05, |
|
"loss": 1.0833, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 120.62, |
|
"learning_rate": 4.6888888888888895e-05, |
|
"loss": 1.0841, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 121.04, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 1.0848, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 121.45, |
|
"learning_rate": 4.644444444444445e-05, |
|
"loss": 1.0832, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 121.87, |
|
"learning_rate": 4.6222222222222224e-05, |
|
"loss": 1.0829, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 122.29, |
|
"learning_rate": 4.600000000000001e-05, |
|
"loss": 1.0832, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 122.7, |
|
"learning_rate": 4.577777777777778e-05, |
|
"loss": 1.0804, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 123.12, |
|
"learning_rate": 4.555555555555556e-05, |
|
"loss": 1.084, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 123.54, |
|
"learning_rate": 4.5333333333333335e-05, |
|
"loss": 1.0809, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 123.96, |
|
"learning_rate": 4.511111111111112e-05, |
|
"loss": 1.0816, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 124.37, |
|
"learning_rate": 4.4888888888888894e-05, |
|
"loss": 1.0781, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 124.79, |
|
"learning_rate": 4.466666666666667e-05, |
|
"loss": 1.0811, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 125.21, |
|
"learning_rate": 4.4444444444444447e-05, |
|
"loss": 1.0772, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 125.21, |
|
"eval_loss": 1.0623060464859009, |
|
"eval_runtime": 125.1984, |
|
"eval_samples_per_second": 544.248, |
|
"eval_steps_per_second": 4.257, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 125.63, |
|
"learning_rate": 4.422222222222222e-05, |
|
"loss": 1.0786, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 126.04, |
|
"learning_rate": 4.4000000000000006e-05, |
|
"loss": 1.0799, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 126.46, |
|
"learning_rate": 4.377777777777778e-05, |
|
"loss": 1.0782, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 126.88, |
|
"learning_rate": 4.355555555555556e-05, |
|
"loss": 1.0788, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 127.3, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 1.0769, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 127.71, |
|
"learning_rate": 4.311111111111111e-05, |
|
"loss": 1.0752, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 128.13, |
|
"learning_rate": 4.2888888888888886e-05, |
|
"loss": 1.0765, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 128.55, |
|
"learning_rate": 4.266666666666667e-05, |
|
"loss": 1.075, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 128.96, |
|
"learning_rate": 4.2444444444444445e-05, |
|
"loss": 1.0749, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 129.38, |
|
"learning_rate": 4.222222222222222e-05, |
|
"loss": 1.0743, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 129.8, |
|
"learning_rate": 4.2e-05, |
|
"loss": 1.0732, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 130.22, |
|
"learning_rate": 4.177777777777778e-05, |
|
"loss": 1.0753, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 130.63, |
|
"learning_rate": 4.155555555555556e-05, |
|
"loss": 1.0745, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 131.05, |
|
"learning_rate": 4.133333333333333e-05, |
|
"loss": 1.0738, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 131.47, |
|
"learning_rate": 4.111111111111111e-05, |
|
"loss": 1.073, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 131.89, |
|
"learning_rate": 4.088888888888889e-05, |
|
"loss": 1.0712, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 132.3, |
|
"learning_rate": 4.066666666666667e-05, |
|
"loss": 1.0708, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 132.72, |
|
"learning_rate": 4.0444444444444444e-05, |
|
"loss": 1.072, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 133.14, |
|
"learning_rate": 4.022222222222222e-05, |
|
"loss": 1.0724, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 133.56, |
|
"learning_rate": 4e-05, |
|
"loss": 1.0711, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 133.97, |
|
"learning_rate": 3.977777777777778e-05, |
|
"loss": 1.0713, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 134.39, |
|
"learning_rate": 3.9555555555555556e-05, |
|
"loss": 1.0692, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 134.81, |
|
"learning_rate": 3.933333333333333e-05, |
|
"loss": 1.0701, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 135.23, |
|
"learning_rate": 3.9111111111111115e-05, |
|
"loss": 1.0701, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 135.64, |
|
"learning_rate": 3.888888888888889e-05, |
|
"loss": 1.0709, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 135.64, |
|
"eval_loss": 1.0541090965270996, |
|
"eval_runtime": 125.3801, |
|
"eval_samples_per_second": 543.46, |
|
"eval_steps_per_second": 4.251, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 136.06, |
|
"learning_rate": 3.866666666666667e-05, |
|
"loss": 1.0692, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 136.48, |
|
"learning_rate": 3.844444444444444e-05, |
|
"loss": 1.0683, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 136.89, |
|
"learning_rate": 3.8222222222222226e-05, |
|
"loss": 1.0682, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 137.31, |
|
"learning_rate": 3.8e-05, |
|
"loss": 1.069, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 137.73, |
|
"learning_rate": 3.777777777777778e-05, |
|
"loss": 1.0683, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 138.15, |
|
"learning_rate": 3.7555555555555554e-05, |
|
"loss": 1.0677, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 138.56, |
|
"learning_rate": 3.733333333333334e-05, |
|
"loss": 1.0666, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 138.98, |
|
"learning_rate": 3.7111111111111113e-05, |
|
"loss": 1.0656, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 139.4, |
|
"learning_rate": 3.688888888888889e-05, |
|
"loss": 1.0646, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 139.82, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 1.0667, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 140.23, |
|
"learning_rate": 3.644444444444445e-05, |
|
"loss": 1.0646, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 140.65, |
|
"learning_rate": 3.6222222222222225e-05, |
|
"loss": 1.0635, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 141.07, |
|
"learning_rate": 3.6e-05, |
|
"loss": 1.066, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 141.49, |
|
"learning_rate": 3.577777777777778e-05, |
|
"loss": 1.0618, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 141.9, |
|
"learning_rate": 3.555555555555556e-05, |
|
"loss": 1.0641, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 142.32, |
|
"learning_rate": 3.5333333333333336e-05, |
|
"loss": 1.0626, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 142.74, |
|
"learning_rate": 3.511111111111111e-05, |
|
"loss": 1.0622, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 143.16, |
|
"learning_rate": 3.4888888888888895e-05, |
|
"loss": 1.0623, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 143.57, |
|
"learning_rate": 3.466666666666667e-05, |
|
"loss": 1.0636, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 143.99, |
|
"learning_rate": 3.444444444444445e-05, |
|
"loss": 1.0624, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 144.41, |
|
"learning_rate": 3.4222222222222224e-05, |
|
"loss": 1.0625, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 144.82, |
|
"learning_rate": 3.4000000000000007e-05, |
|
"loss": 1.061, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 145.24, |
|
"learning_rate": 3.377777777777778e-05, |
|
"loss": 1.061, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 145.66, |
|
"learning_rate": 3.355555555555556e-05, |
|
"loss": 1.0604, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 146.08, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 1.0612, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 146.08, |
|
"eval_loss": 1.0464940071105957, |
|
"eval_runtime": 125.4785, |
|
"eval_samples_per_second": 543.033, |
|
"eval_steps_per_second": 4.248, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 146.49, |
|
"learning_rate": 3.311111111111112e-05, |
|
"loss": 1.0602, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 146.91, |
|
"learning_rate": 3.2888888888888894e-05, |
|
"loss": 1.0588, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 147.33, |
|
"learning_rate": 3.266666666666667e-05, |
|
"loss": 1.0601, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 147.75, |
|
"learning_rate": 3.2444444444444446e-05, |
|
"loss": 1.0575, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 148.16, |
|
"learning_rate": 3.222222222222223e-05, |
|
"loss": 1.0583, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 148.58, |
|
"learning_rate": 3.2000000000000005e-05, |
|
"loss": 1.0574, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 149.0, |
|
"learning_rate": 3.177777777777778e-05, |
|
"loss": 1.0572, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 149.42, |
|
"learning_rate": 3.155555555555556e-05, |
|
"loss": 1.0574, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 149.83, |
|
"learning_rate": 3.1333333333333334e-05, |
|
"loss": 1.0568, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 150.25, |
|
"learning_rate": 3.111111111111111e-05, |
|
"loss": 1.057, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 150.67, |
|
"learning_rate": 3.088888888888889e-05, |
|
"loss": 1.0571, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 151.09, |
|
"learning_rate": 3.066666666666667e-05, |
|
"loss": 1.0571, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 151.5, |
|
"learning_rate": 3.044444444444445e-05, |
|
"loss": 1.0555, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 151.92, |
|
"learning_rate": 3.0222222222222225e-05, |
|
"loss": 1.0558, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 152.34, |
|
"learning_rate": 3e-05, |
|
"loss": 1.0538, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 152.75, |
|
"learning_rate": 2.9777777777777777e-05, |
|
"loss": 1.0547, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 153.17, |
|
"learning_rate": 2.955555555555556e-05, |
|
"loss": 1.0545, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 153.59, |
|
"learning_rate": 2.9333333333333336e-05, |
|
"loss": 1.0544, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 154.01, |
|
"learning_rate": 2.9111111111111112e-05, |
|
"loss": 1.0543, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 154.42, |
|
"learning_rate": 2.8888888888888888e-05, |
|
"loss": 1.0527, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 154.84, |
|
"learning_rate": 2.8666666666666668e-05, |
|
"loss": 1.0528, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 155.26, |
|
"learning_rate": 2.8444444444444447e-05, |
|
"loss": 1.0527, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 155.68, |
|
"learning_rate": 2.8222222222222223e-05, |
|
"loss": 1.0527, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 156.09, |
|
"learning_rate": 2.8000000000000003e-05, |
|
"loss": 1.0523, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 156.51, |
|
"learning_rate": 2.777777777777778e-05, |
|
"loss": 1.0519, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 156.51, |
|
"eval_loss": 1.0381312370300293, |
|
"eval_runtime": 125.1331, |
|
"eval_samples_per_second": 544.532, |
|
"eval_steps_per_second": 4.259, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 156.93, |
|
"learning_rate": 2.7555555555555555e-05, |
|
"loss": 1.0498, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 157.35, |
|
"learning_rate": 2.733333333333333e-05, |
|
"loss": 1.0497, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 157.76, |
|
"learning_rate": 2.7111111111111114e-05, |
|
"loss": 1.0521, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 158.18, |
|
"learning_rate": 2.688888888888889e-05, |
|
"loss": 1.0511, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 158.6, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 1.0495, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 159.02, |
|
"learning_rate": 2.6444444444444443e-05, |
|
"loss": 1.0499, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 159.43, |
|
"learning_rate": 2.6222222222222226e-05, |
|
"loss": 1.0472, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 159.85, |
|
"learning_rate": 2.6000000000000002e-05, |
|
"loss": 1.0499, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 160.27, |
|
"learning_rate": 2.5777777777777778e-05, |
|
"loss": 1.0485, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 160.68, |
|
"learning_rate": 2.5555555555555554e-05, |
|
"loss": 1.0483, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 161.1, |
|
"learning_rate": 2.5333333333333337e-05, |
|
"loss": 1.0494, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 161.52, |
|
"learning_rate": 2.5111111111111113e-05, |
|
"loss": 1.0469, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 161.94, |
|
"learning_rate": 2.488888888888889e-05, |
|
"loss": 1.0496, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 162.35, |
|
"learning_rate": 2.466666666666667e-05, |
|
"loss": 1.0466, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 162.77, |
|
"learning_rate": 2.4444444444444445e-05, |
|
"loss": 1.0475, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 163.19, |
|
"learning_rate": 2.4222222222222224e-05, |
|
"loss": 1.0472, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 163.61, |
|
"learning_rate": 2.4e-05, |
|
"loss": 1.0462, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 164.02, |
|
"learning_rate": 2.377777777777778e-05, |
|
"loss": 1.0459, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 164.44, |
|
"learning_rate": 2.3555555555555556e-05, |
|
"loss": 1.0456, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 164.86, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 1.0451, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 165.28, |
|
"learning_rate": 2.3111111111111112e-05, |
|
"loss": 1.0464, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 165.69, |
|
"learning_rate": 2.288888888888889e-05, |
|
"loss": 1.0441, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 166.11, |
|
"learning_rate": 2.2666666666666668e-05, |
|
"loss": 1.0442, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 166.53, |
|
"learning_rate": 2.2444444444444447e-05, |
|
"loss": 1.0459, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 166.94, |
|
"learning_rate": 2.2222222222222223e-05, |
|
"loss": 1.0434, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 166.94, |
|
"eval_loss": 1.0337976217269897, |
|
"eval_runtime": 125.3173, |
|
"eval_samples_per_second": 543.732, |
|
"eval_steps_per_second": 4.253, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 167.36, |
|
"learning_rate": 2.2000000000000003e-05, |
|
"loss": 1.0442, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 167.78, |
|
"learning_rate": 2.177777777777778e-05, |
|
"loss": 1.0451, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 168.2, |
|
"learning_rate": 2.1555555555555555e-05, |
|
"loss": 1.0426, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 168.61, |
|
"learning_rate": 2.1333333333333335e-05, |
|
"loss": 1.0439, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 169.03, |
|
"learning_rate": 2.111111111111111e-05, |
|
"loss": 1.0437, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 169.45, |
|
"learning_rate": 2.088888888888889e-05, |
|
"loss": 1.0437, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 169.87, |
|
"learning_rate": 2.0666666666666666e-05, |
|
"loss": 1.043, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 170.28, |
|
"learning_rate": 2.0444444444444446e-05, |
|
"loss": 1.0435, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 170.7, |
|
"learning_rate": 2.0222222222222222e-05, |
|
"loss": 1.0418, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 171.12, |
|
"learning_rate": 2e-05, |
|
"loss": 1.0424, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 171.54, |
|
"learning_rate": 1.9777777777777778e-05, |
|
"loss": 1.0412, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 171.95, |
|
"learning_rate": 1.9555555555555557e-05, |
|
"loss": 1.0415, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 172.37, |
|
"learning_rate": 1.9333333333333333e-05, |
|
"loss": 1.0423, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 172.79, |
|
"learning_rate": 1.9111111111111113e-05, |
|
"loss": 1.0414, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 173.21, |
|
"learning_rate": 1.888888888888889e-05, |
|
"loss": 1.0395, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 173.62, |
|
"learning_rate": 1.866666666666667e-05, |
|
"loss": 1.0386, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 174.04, |
|
"learning_rate": 1.8444444444444445e-05, |
|
"loss": 1.0417, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 174.46, |
|
"learning_rate": 1.8222222222222224e-05, |
|
"loss": 1.0396, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 174.87, |
|
"learning_rate": 1.8e-05, |
|
"loss": 1.0387, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 175.29, |
|
"learning_rate": 1.777777777777778e-05, |
|
"loss": 1.0375, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 175.71, |
|
"learning_rate": 1.7555555555555556e-05, |
|
"loss": 1.0386, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 176.13, |
|
"learning_rate": 1.7333333333333336e-05, |
|
"loss": 1.0392, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 176.54, |
|
"learning_rate": 1.7111111111111112e-05, |
|
"loss": 1.0373, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 176.96, |
|
"learning_rate": 1.688888888888889e-05, |
|
"loss": 1.0375, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 177.38, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 1.0368, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 177.38, |
|
"eval_loss": 1.0263112783432007, |
|
"eval_runtime": 125.2736, |
|
"eval_samples_per_second": 543.921, |
|
"eval_steps_per_second": 4.255, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 177.8, |
|
"learning_rate": 1.6444444444444447e-05, |
|
"loss": 1.0362, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 178.21, |
|
"learning_rate": 1.6222222222222223e-05, |
|
"loss": 1.0365, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 178.63, |
|
"learning_rate": 1.6000000000000003e-05, |
|
"loss": 1.0369, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 179.05, |
|
"learning_rate": 1.577777777777778e-05, |
|
"loss": 1.0358, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 179.47, |
|
"learning_rate": 1.5555555555555555e-05, |
|
"loss": 1.0361, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 179.88, |
|
"learning_rate": 1.5333333333333334e-05, |
|
"loss": 1.0374, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 180.3, |
|
"learning_rate": 1.5111111111111112e-05, |
|
"loss": 1.0342, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 180.72, |
|
"learning_rate": 1.4888888888888888e-05, |
|
"loss": 1.0363, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 181.14, |
|
"learning_rate": 1.4666666666666668e-05, |
|
"loss": 1.0366, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 181.55, |
|
"learning_rate": 1.4444444444444444e-05, |
|
"loss": 1.0354, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 181.97, |
|
"learning_rate": 1.4222222222222224e-05, |
|
"loss": 1.0331, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 182.39, |
|
"learning_rate": 1.4000000000000001e-05, |
|
"loss": 1.0354, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 182.8, |
|
"learning_rate": 1.3777777777777778e-05, |
|
"loss": 1.0331, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 183.22, |
|
"learning_rate": 1.3555555555555557e-05, |
|
"loss": 1.0326, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 183.64, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 1.0337, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 184.06, |
|
"learning_rate": 1.3111111111111113e-05, |
|
"loss": 1.0347, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 184.47, |
|
"learning_rate": 1.2888888888888889e-05, |
|
"loss": 1.0331, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 184.89, |
|
"learning_rate": 1.2666666666666668e-05, |
|
"loss": 1.034, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 185.31, |
|
"learning_rate": 1.2444444444444445e-05, |
|
"loss": 1.0324, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 185.73, |
|
"learning_rate": 1.2222222222222222e-05, |
|
"loss": 1.0333, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 186.14, |
|
"learning_rate": 1.2e-05, |
|
"loss": 1.0326, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 186.56, |
|
"learning_rate": 1.1777777777777778e-05, |
|
"loss": 1.0329, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 186.98, |
|
"learning_rate": 1.1555555555555556e-05, |
|
"loss": 1.0334, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 187.4, |
|
"learning_rate": 1.1333333333333334e-05, |
|
"loss": 1.0301, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 187.81, |
|
"learning_rate": 1.1111111111111112e-05, |
|
"loss": 1.032, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 187.81, |
|
"eval_loss": 1.0231534242630005, |
|
"eval_runtime": 125.0456, |
|
"eval_samples_per_second": 544.913, |
|
"eval_steps_per_second": 4.262, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 188.23, |
|
"learning_rate": 1.088888888888889e-05, |
|
"loss": 1.0321, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 188.65, |
|
"learning_rate": 1.0666666666666667e-05, |
|
"loss": 1.0316, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 189.07, |
|
"learning_rate": 1.0444444444444445e-05, |
|
"loss": 1.03, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 189.48, |
|
"learning_rate": 1.0222222222222223e-05, |
|
"loss": 1.0279, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 189.9, |
|
"learning_rate": 1e-05, |
|
"loss": 1.0311, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 190.32, |
|
"learning_rate": 9.777777777777779e-06, |
|
"loss": 1.0294, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 190.73, |
|
"learning_rate": 9.555555555555556e-06, |
|
"loss": 1.0284, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 191.15, |
|
"learning_rate": 9.333333333333334e-06, |
|
"loss": 1.0309, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 191.57, |
|
"learning_rate": 9.111111111111112e-06, |
|
"loss": 1.0288, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 191.99, |
|
"learning_rate": 8.88888888888889e-06, |
|
"loss": 1.0301, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 192.4, |
|
"learning_rate": 8.666666666666668e-06, |
|
"loss": 1.0304, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 192.82, |
|
"learning_rate": 8.444444444444446e-06, |
|
"loss": 1.0292, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 193.24, |
|
"learning_rate": 8.222222222222223e-06, |
|
"loss": 1.0288, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 193.66, |
|
"learning_rate": 8.000000000000001e-06, |
|
"loss": 1.0285, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 194.07, |
|
"learning_rate": 7.777777777777777e-06, |
|
"loss": 1.0281, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 194.49, |
|
"learning_rate": 7.555555555555556e-06, |
|
"loss": 1.0294, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 194.91, |
|
"learning_rate": 7.333333333333334e-06, |
|
"loss": 1.0297, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 195.33, |
|
"learning_rate": 7.111111111111112e-06, |
|
"loss": 1.0298, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 195.74, |
|
"learning_rate": 6.888888888888889e-06, |
|
"loss": 1.0302, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 196.16, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.0281, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 196.58, |
|
"learning_rate": 6.4444444444444445e-06, |
|
"loss": 1.0273, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 196.99, |
|
"learning_rate": 6.222222222222222e-06, |
|
"loss": 1.0261, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 197.41, |
|
"learning_rate": 6e-06, |
|
"loss": 1.0259, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 197.83, |
|
"learning_rate": 5.777777777777778e-06, |
|
"loss": 1.0277, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 198.25, |
|
"learning_rate": 5.555555555555556e-06, |
|
"loss": 1.0271, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 198.25, |
|
"eval_loss": 1.0194706916809082, |
|
"eval_runtime": 125.2786, |
|
"eval_samples_per_second": 543.9, |
|
"eval_steps_per_second": 4.255, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 198.66, |
|
"learning_rate": 5.333333333333334e-06, |
|
"loss": 1.0263, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 199.08, |
|
"learning_rate": 5.1111111111111115e-06, |
|
"loss": 1.0266, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 199.5, |
|
"learning_rate": 4.888888888888889e-06, |
|
"loss": 1.0254, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 199.92, |
|
"learning_rate": 4.666666666666667e-06, |
|
"loss": 1.0272, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 200.33, |
|
"learning_rate": 4.444444444444445e-06, |
|
"loss": 1.0253, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 200.75, |
|
"learning_rate": 4.222222222222223e-06, |
|
"loss": 1.026, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 201.17, |
|
"learning_rate": 4.000000000000001e-06, |
|
"loss": 1.0251, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 201.59, |
|
"learning_rate": 3.777777777777778e-06, |
|
"loss": 1.0251, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 202.0, |
|
"learning_rate": 3.555555555555556e-06, |
|
"loss": 1.0258, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 202.42, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.0253, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 202.84, |
|
"learning_rate": 3.111111111111111e-06, |
|
"loss": 1.0243, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 203.26, |
|
"learning_rate": 2.888888888888889e-06, |
|
"loss": 1.0256, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 203.67, |
|
"learning_rate": 2.666666666666667e-06, |
|
"loss": 1.025, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 204.09, |
|
"learning_rate": 2.4444444444444447e-06, |
|
"loss": 1.0229, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 204.51, |
|
"learning_rate": 2.2222222222222225e-06, |
|
"loss": 1.0246, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 204.92, |
|
"learning_rate": 2.0000000000000003e-06, |
|
"loss": 1.0247, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 205.34, |
|
"learning_rate": 1.777777777777778e-06, |
|
"loss": 1.0237, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 205.76, |
|
"learning_rate": 1.5555555555555556e-06, |
|
"loss": 1.0237, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 206.18, |
|
"learning_rate": 1.3333333333333334e-06, |
|
"loss": 1.0217, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 206.59, |
|
"learning_rate": 1.1111111111111112e-06, |
|
"loss": 1.0232, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 207.01, |
|
"learning_rate": 8.88888888888889e-07, |
|
"loss": 1.0246, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 207.43, |
|
"learning_rate": 6.666666666666667e-07, |
|
"loss": 1.0225, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 207.85, |
|
"learning_rate": 4.444444444444445e-07, |
|
"loss": 1.0238, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 208.26, |
|
"learning_rate": 2.2222222222222224e-07, |
|
"loss": 1.023, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 208.68, |
|
"learning_rate": 0.0, |
|
"loss": 1.0216, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 208.68, |
|
"eval_loss": 1.0167958736419678, |
|
"eval_runtime": 125.3036, |
|
"eval_samples_per_second": 543.791, |
|
"eval_steps_per_second": 4.254, |
|
"step": 250000 |
|
} |
|
], |
|
"max_steps": 250000, |
|
"num_train_epochs": 209, |
|
"total_flos": 4.212247412736e+18, |
|
"trial_name": null, |
|
"trial_params": null |
|
}