{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9568377501956837,
  "global_step": 105000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01, "learning_rate": 5.73025856044724e-06, "loss": 11.2906, "step": 500 },
    { "epoch": 0.02, "learning_rate": 1.1553692056836711e-05, "loss": 7.0039, "step": 1000 },
    { "epoch": 0.03, "learning_rate": 1.7377125553226185e-05, "loss": 6.1153, "step": 1500 },
    { "epoch": 0.04, "learning_rate": 2.3200559049615657e-05, "loss": 5.7832, "step": 2000 },
    { "epoch": 0.05, "learning_rate": 2.9023992546005126e-05, "loss": 5.6146, "step": 2500 },
    { "epoch": 0.05, "eval_loss": 5.421965599060059, "eval_runtime": 411.32, "eval_samples_per_second": 41.389, "eval_steps_per_second": 5.174, "eval_wer": 0.9239681855833133, "step": 2500 },
    { "epoch": 0.06, "learning_rate": 3.48474260423946e-05, "loss": 5.5165, "step": 3000 },
    { "epoch": 0.07, "learning_rate": 4.067085953878407e-05, "loss": 5.446, "step": 3500 },
    { "epoch": 0.07, "learning_rate": 4.649429303517354e-05, "loss": 5.3865, "step": 4000 },
    { "epoch": 0.08, "learning_rate": 5.2317726531563014e-05, "loss": 5.3516, "step": 4500 },
    { "epoch": 0.09, "learning_rate": 5.814116002795248e-05, "loss": 5.3348, "step": 5000 },
    { "epoch": 0.09, "eval_loss": 5.211235046386719, "eval_runtime": 409.8837, "eval_samples_per_second": 41.534, "eval_steps_per_second": 5.192, "eval_wer": 0.9147464109993301, "step": 5000 },
    { "epoch": 0.1, "learning_rate": 6.396459352434195e-05, "loss": 5.2944, "step": 5500 },
    { "epoch": 0.11, "learning_rate": 6.978802702073143e-05, "loss": 5.2642, "step": 6000 },
    { "epoch": 0.12, "learning_rate": 7.56114605171209e-05, "loss": 5.2581, "step": 6500 },
    { "epoch": 0.13, "learning_rate": 8.143489401351036e-05, "loss": 5.2314, "step": 7000 },
    { "epoch": 0.14, "learning_rate": 8.725832750989984e-05, "loss": 5.2119, "step": 7500 },
    { "epoch": 0.14, "eval_loss": 5.13909912109375, "eval_runtime": 409.6938, "eval_samples_per_second": 41.553, "eval_steps_per_second": 5.194, "eval_wer": 0.9114111424157333, "step": 7500 },
    { "epoch": 0.15, "learning_rate": 9.308176100628931e-05, "loss": 5.1866, "step": 8000 },
    { "epoch": 0.16, "learning_rate": 9.890519450267879e-05, "loss": 5.1807, "step": 8500 },
    { "epoch": 0.17, "learning_rate": 9.958877747391876e-05, "loss": 5.1441, "step": 9000 },
    { "epoch": 0.18, "learning_rate": 9.908234579155272e-05, "loss": 5.1273, "step": 9500 },
    { "epoch": 0.19, "learning_rate": 9.857591410918667e-05, "loss": 5.1076, "step": 10000 },
    { "epoch": 0.19, "eval_loss": 5.045482158660889, "eval_runtime": 412.0168, "eval_samples_per_second": 41.319, "eval_steps_per_second": 5.165, "eval_wer": 0.9043837403957815, "step": 10000 },
    { "epoch": 0.2, "learning_rate": 9.806948242682062e-05, "loss": 5.0971, "step": 10500 },
    { "epoch": 0.21, "learning_rate": 9.756305074445458e-05, "loss": 5.0834, "step": 11000 },
    { "epoch": 0.21, "learning_rate": 9.705661906208853e-05, "loss": 5.0678, "step": 11500 },
    { "epoch": 0.22, "learning_rate": 9.655018737972249e-05, "loss": 5.0522, "step": 12000 },
    { "epoch": 0.23, "learning_rate": 9.604375569735643e-05, "loss": 5.0557, "step": 12500 },
    { "epoch": 0.23, "eval_loss": 4.983465671539307, "eval_runtime": 410.6819, "eval_samples_per_second": 41.453, "eval_steps_per_second": 5.182, "eval_wer": 0.8979804271846081, "step": 12500 },
    { "epoch": 0.24, "learning_rate": 9.553732401499038e-05, "loss": 5.0332, "step": 13000 },
    { "epoch": 0.25, "learning_rate": 9.503089233262434e-05, "loss": 5.0225, "step": 13500 },
    { "epoch": 0.26, "learning_rate": 9.452446065025829e-05, "loss": 5.0201, "step": 14000 },
    { "epoch": 0.27, "learning_rate": 9.401802896789223e-05, "loss": 5.0077, "step": 14500 },
    { "epoch": 0.28, "learning_rate": 9.35115972855262e-05, "loss": 4.9969, "step": 15000 },
    { "epoch": 0.28, "eval_loss": 4.943966388702393, "eval_runtime": 411.257, "eval_samples_per_second": 41.395, "eval_steps_per_second": 5.174, "eval_wer": 0.896159514799699, "step": 15000 },
    { "epoch": 0.29, "learning_rate": 9.300516560316014e-05, "loss": 4.9911, "step": 15500 },
    { "epoch": 0.3, "learning_rate": 9.249873392079409e-05, "loss": 4.9935, "step": 16000 },
    { "epoch": 0.31, "learning_rate": 9.199230223842805e-05, "loss": 4.9755, "step": 16500 },
    { "epoch": 0.32, "learning_rate": 9.1485870556062e-05, "loss": 4.9771, "step": 17000 },
    { "epoch": 0.33, "learning_rate": 9.097943887369594e-05, "loss": 4.9577, "step": 17500 },
    { "epoch": 0.33, "eval_loss": 4.906991481781006, "eval_runtime": 410.7324, "eval_samples_per_second": 41.448, "eval_steps_per_second": 5.181, "eval_wer": 0.8942902538261701, "step": 17500 },
    { "epoch": 0.34, "learning_rate": 9.04730071913299e-05, "loss": 4.953, "step": 18000 },
    { "epoch": 0.34, "learning_rate": 8.996657550896385e-05, "loss": 4.9515, "step": 18500 },
    { "epoch": 0.35, "learning_rate": 8.94601438265978e-05, "loss": 4.9314, "step": 19000 },
    { "epoch": 0.36, "learning_rate": 8.895371214423176e-05, "loss": 4.9529, "step": 19500 },
    { "epoch": 0.37, "learning_rate": 8.84472804618657e-05, "loss": 4.9298, "step": 20000 },
    { "epoch": 0.37, "eval_loss": 4.876424312591553, "eval_runtime": 410.0694, "eval_samples_per_second": 41.515, "eval_steps_per_second": 5.189, "eval_wer": 0.8924795338464294, "step": 20000 },
    { "epoch": 0.38, "learning_rate": 8.794084877949965e-05, "loss": 4.9117, "step": 20500 },
    { "epoch": 0.39, "learning_rate": 8.743542996049834e-05, "loss": 4.9273, "step": 21000 },
    { "epoch": 0.4, "learning_rate": 8.693001114149701e-05, "loss": 4.9357, "step": 21500 },
    { "epoch": 0.41, "learning_rate": 8.642357945913097e-05, "loss": 4.9211, "step": 22000 },
    { "epoch": 0.42, "learning_rate": 8.591714777676492e-05, "loss": 4.9096, "step": 22500 },
    { "epoch": 0.42, "eval_loss": 4.8509111404418945, "eval_runtime": 411.3063, "eval_samples_per_second": 41.39, "eval_steps_per_second": 5.174, "eval_wer": 0.8897156933115739, "step": 22500 },
    { "epoch": 0.43, "learning_rate": 8.541071609439886e-05, "loss": 4.9029, "step": 23000 },
    { "epoch": 0.44, "learning_rate": 8.490428441203282e-05, "loss": 4.9012, "step": 23500 },
    { "epoch": 0.45, "learning_rate": 8.439785272966677e-05, "loss": 4.8984, "step": 24000 },
    { "epoch": 0.46, "learning_rate": 8.389142104730072e-05, "loss": 4.8971, "step": 24500 },
    { "epoch": 0.47, "learning_rate": 8.338498936493468e-05, "loss": 4.896, "step": 25000 },
    { "epoch": 0.47, "eval_loss": 4.833607196807861, "eval_runtime": 411.6309, "eval_samples_per_second": 41.357, "eval_steps_per_second": 5.17, "eval_wer": 0.8886487575654107, "step": 25000 },
    { "epoch": 0.48, "learning_rate": 8.287855768256862e-05, "loss": 4.8776, "step": 25500 },
    { "epoch": 0.48, "learning_rate": 8.237415172693204e-05, "loss": 4.8729, "step": 26000 },
    { "epoch": 0.49, "learning_rate": 8.186772004456598e-05, "loss": 4.8781, "step": 26500 },
    { "epoch": 0.5, "learning_rate": 8.136128836219995e-05, "loss": 4.8726, "step": 27000 },
    { "epoch": 0.51, "learning_rate": 8.085485667983389e-05, "loss": 4.8724, "step": 27500 },
    { "epoch": 0.51, "eval_loss": 4.81581449508667, "eval_runtime": 411.4914, "eval_samples_per_second": 41.371, "eval_steps_per_second": 5.171, "eval_wer": 0.8873070188952817, "step": 27500 },
    { "epoch": 0.52, "learning_rate": 8.034943786083258e-05, "loss": 4.8579, "step": 28000 },
    { "epoch": 0.53, "learning_rate": 7.984300617846653e-05, "loss": 4.8618, "step": 28500 },
    { "epoch": 0.54, "learning_rate": 7.933657449610049e-05, "loss": 4.8674, "step": 29000 },
    { "epoch": 0.55, "learning_rate": 7.883014281373443e-05, "loss": 4.8535, "step": 29500 },
    { "epoch": 0.56, "learning_rate": 7.832573685809785e-05, "loss": 4.8488, "step": 30000 },
    { "epoch": 0.56, "eval_loss": 4.8011603355407715, "eval_runtime": 410.5634, "eval_samples_per_second": 41.465, "eval_steps_per_second": 5.183, "eval_wer": 0.8869696241498325, "step": 30000 },
    { "epoch": 0.57, "learning_rate": 7.78193051757318e-05, "loss": 4.8533, "step": 30500 },
    { "epoch": 0.58, "learning_rate": 7.731287349336574e-05, "loss": 4.8563, "step": 31000 },
    { "epoch": 0.59, "learning_rate": 7.68064418109997e-05, "loss": 4.852, "step": 31500 },
    { "epoch": 0.6, "learning_rate": 7.630001012863365e-05, "loss": 4.8502, "step": 32000 },
    { "epoch": 0.61, "learning_rate": 7.57935784462676e-05, "loss": 4.829, "step": 32500 },
    { "epoch": 0.61, "eval_loss": 4.784905910491943, "eval_runtime": 412.2149, "eval_samples_per_second": 41.299, "eval_steps_per_second": 5.162, "eval_wer": 0.8847121370768731, "step": 32500 },
    { "epoch": 0.62, "learning_rate": 7.528714676390155e-05, "loss": 4.8315, "step": 33000 },
    { "epoch": 0.62, "learning_rate": 7.478172794490024e-05, "loss": 4.8269, "step": 33500 },
    { "epoch": 0.63, "learning_rate": 7.427529626253419e-05, "loss": 4.823, "step": 34000 },
    { "epoch": 0.64, "learning_rate": 7.376886458016814e-05, "loss": 4.8282, "step": 34500 },
    { "epoch": 0.65, "learning_rate": 7.32624328978021e-05, "loss": 4.8415, "step": 35000 },
    { "epoch": 0.65, "eval_loss": 4.774867057800293, "eval_runtime": 411.0732, "eval_samples_per_second": 41.414, "eval_steps_per_second": 5.177, "eval_wer": 0.8846529427237791, "step": 35000 },
    { "epoch": 0.66, "learning_rate": 7.275600121543604e-05, "loss": 4.827, "step": 35500 },
    { "epoch": 0.67, "learning_rate": 7.225058239643473e-05, "loss": 4.8117, "step": 36000 },
    { "epoch": 0.68, "learning_rate": 7.174415071406868e-05, "loss": 4.8178, "step": 36500 },
    { "epoch": 0.69, "learning_rate": 7.123771903170264e-05, "loss": 4.8036, "step": 37000 },
    { "epoch": 0.7, "learning_rate": 7.073128734933657e-05, "loss": 4.7941, "step": 37500 },
    { "epoch": 0.7, "eval_loss": 4.765619277954102, "eval_runtime": 410.9966, "eval_samples_per_second": 41.421, "eval_steps_per_second": 5.178, "eval_wer": 0.8833543257678246, "step": 37500 },
    { "epoch": 0.71, "learning_rate": 7.022586853033526e-05, "loss": 4.8074, "step": 38000 },
    { "epoch": 0.72, "learning_rate": 6.971943684796922e-05, "loss": 4.8097, "step": 38500 },
    { "epoch": 0.73, "learning_rate": 6.921401802896789e-05, "loss": 4.8156, "step": 39000 },
    { "epoch": 0.74, "learning_rate": 6.870758634660185e-05, "loss": 4.7991, "step": 39500 },
    { "epoch": 0.75, "learning_rate": 6.82011546642358e-05, "loss": 4.7913, "step": 40000 },
    { "epoch": 0.75, "eval_loss": 4.773669719696045, "eval_runtime": 410.7775, "eval_samples_per_second": 41.443, "eval_steps_per_second": 5.18, "eval_wer": 0.8835782973378091, "step": 40000 },
    { "epoch": 0.75, "learning_rate": 6.769472298186974e-05, "loss": 4.7988, "step": 40500 },
    { "epoch": 0.76, "learning_rate": 6.71882912995037e-05, "loss": 4.7993, "step": 41000 },
    { "epoch": 0.77, "learning_rate": 6.668185961713765e-05, "loss": 4.8082, "step": 41500 },
    { "epoch": 0.78, "learning_rate": 6.617542793477161e-05, "loss": 4.797, "step": 42000 },
    { "epoch": 0.79, "learning_rate": 6.566899625240556e-05, "loss": 4.7943, "step": 42500 },
    { "epoch": 0.79, "eval_loss": 4.763143539428711, "eval_runtime": 411.3255, "eval_samples_per_second": 41.388, "eval_steps_per_second": 5.174, "eval_wer": 0.8824108749304336, "step": 42500 },
    { "epoch": 0.8, "learning_rate": 6.51625645700395e-05, "loss": 4.7916, "step": 43000 },
    { "epoch": 0.81, "learning_rate": 6.465613288767347e-05, "loss": 4.8082, "step": 43500 },
    { "epoch": 0.82, "learning_rate": 6.414970120530741e-05, "loss": 4.7677, "step": 44000 },
    { "epoch": 0.83, "learning_rate": 6.364326952294136e-05, "loss": 4.7947, "step": 44500 },
    { "epoch": 0.84, "learning_rate": 6.313683784057532e-05, "loss": 4.7802, "step": 45000 },
    { "epoch": 0.84, "eval_loss": 4.7554545402526855, "eval_runtime": 410.6533, "eval_samples_per_second": 41.456, "eval_steps_per_second": 5.182, "eval_wer": 0.8823350852509756, "step": 45000 },
    { "epoch": 0.85, "learning_rate": 6.2631419021574e-05, "loss": 4.79, "step": 45500 },
    { "epoch": 0.86, "learning_rate": 6.212498733920794e-05, "loss": 4.7812, "step": 46000 },
    { "epoch": 0.87, "learning_rate": 6.161956852020663e-05, "loss": 4.7822, "step": 46500 },
    { "epoch": 0.88, "learning_rate": 6.111313683784057e-05, "loss": 4.7653, "step": 47000 },
    { "epoch": 0.89, "learning_rate": 6.060670515547453e-05, "loss": 4.7809, "step": 47500 },
    { "epoch": 0.89, "eval_loss": 4.742018222808838, "eval_runtime": 411.2108, "eval_samples_per_second": 41.4, "eval_steps_per_second": 5.175, "eval_wer": 0.8812212383066652, "step": 47500 },
    { "epoch": 0.89, "learning_rate": 6.010027347310848e-05, "loss": 4.78, "step": 48000 },
    { "epoch": 0.9, "learning_rate": 5.9593841790742435e-05, "loss": 4.7755, "step": 48500 },
    { "epoch": 0.91, "learning_rate": 5.908741010837638e-05, "loss": 4.7692, "step": 49000 },
    { "epoch": 0.92, "learning_rate": 5.8580978426010335e-05, "loss": 4.7752, "step": 49500 },
    { "epoch": 0.93, "learning_rate": 5.807454674364429e-05, "loss": 4.7655, "step": 50000 },
    { "epoch": 0.93, "eval_loss": 4.738577842712402, "eval_runtime": 410.9533, "eval_samples_per_second": 41.426, "eval_steps_per_second": 5.178, "eval_wer": 0.8806589572882034, "step": 50000 },
    { "epoch": 0.94, "learning_rate": 5.7568115061278236e-05, "loss": 4.7728, "step": 50500 },
    { "epoch": 0.95, "learning_rate": 5.706168337891219e-05, "loss": 4.7684, "step": 51000 },
    { "epoch": 0.96, "learning_rate": 5.655525169654614e-05, "loss": 4.7651, "step": 51500 },
    { "epoch": 0.97, "learning_rate": 5.604882001418009e-05, "loss": 4.7577, "step": 52000 },
    { "epoch": 0.98, "learning_rate": 5.554238833181404e-05, "loss": 4.7753, "step": 52500 },
    { "epoch": 0.98, "eval_loss": 4.731768608093262, "eval_runtime": 411.2946, "eval_samples_per_second": 41.391, "eval_steps_per_second": 5.174, "eval_wer": 0.880302876466612, "step": 52500 },
    { "epoch": 0.99, "learning_rate": 5.5035956649448e-05, "loss": 4.7567, "step": 53000 },
    { "epoch": 1.0, "learning_rate": 5.4529524967081944e-05, "loss": 4.7649, "step": 53500 },
    { "epoch": 1.01, "learning_rate": 5.40230932847159e-05, "loss": 4.7431, "step": 54000 },
    { "epoch": 1.02, "learning_rate": 5.351666160234985e-05, "loss": 4.7517, "step": 54500 },
    { "epoch": 1.03, "learning_rate": 5.3011242783348524e-05, "loss": 4.7622, "step": 55000 },
    { "epoch": 1.03, "eval_loss": 4.7267656326293945, "eval_runtime": 410.5389, "eval_samples_per_second": 41.467, "eval_steps_per_second": 5.183, "eval_wer": 0.8803810182395704, "step": 55000 },
    { "epoch": 1.03, "learning_rate": 5.250481110098248e-05, "loss": 4.7505, "step": 55500 },
    { "epoch": 1.04, "learning_rate": 5.1998379418616425e-05, "loss": 4.7562, "step": 56000 },
    { "epoch": 1.05, "learning_rate": 5.149194773625038e-05, "loss": 4.7565, "step": 56500 },
    { "epoch": 1.06, "learning_rate": 5.0986528917249065e-05, "loss": 4.7486, "step": 57000 },
    { "epoch": 1.07, "learning_rate": 5.048009723488302e-05, "loss": 4.7511, "step": 57500 },
    { "epoch": 1.07, "eval_loss": 4.725167751312256, "eval_runtime": 411.131, "eval_samples_per_second": 41.408, "eval_steps_per_second": 5.176, "eval_wer": 0.8796747368301385, "step": 57500 },
    { "epoch": 1.08, "learning_rate": 4.9973665552516966e-05, "loss": 4.7501, "step": 58000 },
    { "epoch": 1.09, "learning_rate": 4.946723387015092e-05, "loss": 4.7481, "step": 58500 },
    { "epoch": 1.1, "learning_rate": 4.896080218778487e-05, "loss": 4.7376, "step": 59000 },
    { "epoch": 1.11, "learning_rate": 4.845538336878355e-05, "loss": 4.7516, "step": 59500 },
    { "epoch": 1.12, "learning_rate": 4.7948951686417507e-05, "loss": 4.7508, "step": 60000 },
    { "epoch": 1.12, "eval_loss": 4.714947700500488, "eval_runtime": 410.7042, "eval_samples_per_second": 41.451, "eval_steps_per_second": 5.181, "eval_wer": 0.878954342859704, "step": 60000 },
    { "epoch": 1.13, "learning_rate": 4.744252000405145e-05, "loss": 4.7489, "step": 60500 },
    { "epoch": 1.14, "learning_rate": 4.693608832168541e-05, "loss": 4.7324, "step": 61000 },
    { "epoch": 1.15, "learning_rate": 4.643066950268409e-05, "loss": 4.7485, "step": 61500 },
    { "epoch": 1.16, "learning_rate": 4.592423782031804e-05, "loss": 4.7337, "step": 62000 },
    { "epoch": 1.16, "learning_rate": 4.541780613795199e-05, "loss": 4.7299, "step": 62500 },
    { "epoch": 1.16, "eval_loss": 4.713499069213867, "eval_runtime": 410.0852, "eval_samples_per_second": 41.513, "eval_steps_per_second": 5.189, "eval_wer": 0.8791197734359003, "step": 62500 },
    { "epoch": 1.17, "learning_rate": 4.4912387318950675e-05, "loss": 4.7385, "step": 63000 },
    { "epoch": 1.18, "learning_rate": 4.440595563658463e-05, "loss": 4.7339, "step": 63500 },
    { "epoch": 1.19, "learning_rate": 4.389952395421858e-05, "loss": 4.7351, "step": 64000 },
    { "epoch": 1.2, "learning_rate": 4.339309227185253e-05, "loss": 4.7472, "step": 64500 },
    { "epoch": 1.21, "learning_rate": 4.288666058948648e-05, "loss": 4.7382, "step": 65000 },
    { "epoch": 1.21, "eval_loss": 4.706689357757568, "eval_runtime": 411.4243, "eval_samples_per_second": 41.378, "eval_steps_per_second": 5.172, "eval_wer": 0.8785633726511897, "step": 65000 },
    { "epoch": 1.22, "learning_rate": 4.2380228907120436e-05, "loss": 4.7372, "step": 65500 },
    { "epoch": 1.23, "learning_rate": 4.187379722475438e-05, "loss": 4.7344, "step": 66000 },
    { "epoch": 1.24, "learning_rate": 4.136736554238833e-05, "loss": 4.7225, "step": 66500 },
    { "epoch": 1.25, "learning_rate": 4.086093386002228e-05, "loss": 4.7339, "step": 67000 },
    { "epoch": 1.26, "learning_rate": 4.0354502177656236e-05, "loss": 4.723, "step": 67500 },
    { "epoch": 1.26, "eval_loss": 4.703372001647949, "eval_runtime": 411.6733, "eval_samples_per_second": 41.353, "eval_steps_per_second": 5.169, "eval_wer": 0.8783083011782551, "step": 67500 },
    { "epoch": 1.27, "learning_rate": 3.984807049529018e-05, "loss": 4.7498, "step": 68000 },
    { "epoch": 1.28, "learning_rate": 3.934163881292414e-05, "loss": 4.7296, "step": 68500 },
    { "epoch": 1.29, "learning_rate": 3.8836219993922824e-05, "loss": 4.7201, "step": 69000 },
    { "epoch": 1.3, "learning_rate": 3.832978831155678e-05, "loss": 4.7328, "step": 69500 },
    { "epoch": 1.3, "learning_rate": 3.782436949255546e-05, "loss": 4.7321, "step": 70000 },
    { "epoch": 1.3, "eval_loss": 4.700106143951416, "eval_runtime": 411.0597, "eval_samples_per_second": 41.415, "eval_steps_per_second": 5.177, "eval_wer": 0.8781452226955593, "step": 70000 },
    { "epoch": 1.31, "learning_rate": 3.7317937810189405e-05, "loss": 4.7226, "step": 70500 },
    { "epoch": 1.32, "learning_rate": 3.681150612782336e-05, "loss": 4.7271, "step": 71000 },
    { "epoch": 1.33, "learning_rate": 3.630507444545731e-05, "loss": 4.7245, "step": 71500 },
    { "epoch": 1.34, "learning_rate": 3.579864276309126e-05, "loss": 4.717, "step": 72000 },
    { "epoch": 1.35, "learning_rate": 3.529221108072521e-05, "loss": 4.7261, "step": 72500 },
    { "epoch": 1.35, "eval_loss": 4.695710182189941, "eval_runtime": 410.4939, "eval_samples_per_second": 41.472, "eval_steps_per_second": 5.184, "eval_wer": 0.8775924807229604, "step": 72500 },
    { "epoch": 1.36, "learning_rate": 3.4785779398359166e-05, "loss": 4.7225, "step": 73000 },
    { "epoch": 1.37, "learning_rate": 3.427934771599311e-05, "loss": 4.7112, "step": 73500 },
    { "epoch": 1.38, "learning_rate": 3.3772916033627066e-05, "loss": 4.7178, "step": 74000 },
    { "epoch": 1.39, "learning_rate": 3.326648435126102e-05, "loss": 4.7221, "step": 74500 },
    { "epoch": 1.4, "learning_rate": 3.27610655322597e-05, "loss": 4.6915, "step": 75000 },
    { "epoch": 1.4, "eval_loss": 4.690956115722656, "eval_runtime": 411.7522, "eval_samples_per_second": 41.345, "eval_steps_per_second": 5.168, "eval_wer": 0.8773312676725525, "step": 75000 },
    { "epoch": 1.41, "learning_rate": 3.2254633849893653e-05, "loss": 4.7246, "step": 75500 },
    { "epoch": 1.42, "learning_rate": 3.17482021675276e-05, "loss": 4.7043, "step": 76000 },
    { "epoch": 1.43, "learning_rate": 3.1241770485161554e-05, "loss": 4.7311, "step": 76500 },
    { "epoch": 1.44, "learning_rate": 3.073533880279551e-05, "loss": 4.7179, "step": 77000 },
    { "epoch": 1.44, "learning_rate": 3.0228907120429458e-05, "loss": 4.7233, "step": 77500 },
    { "epoch": 1.44, "eval_loss": 4.687117576599121, "eval_runtime": 411.0114, "eval_samples_per_second": 41.42, "eval_steps_per_second": 5.177, "eval_wer": 0.8769715280388493, "step": 77500 },
    { "epoch": 1.45, "learning_rate": 2.9723488301428138e-05, "loss": 4.7019, "step": 78000 },
    { "epoch": 1.46, "learning_rate": 2.9217056619062088e-05, "loss": 4.7193, "step": 78500 },
    { "epoch": 1.47, "learning_rate": 2.871062493669604e-05, "loss": 4.7011, "step": 79000 },
    { "epoch": 1.48, "learning_rate": 2.8204193254329992e-05, "loss": 4.7246, "step": 79500 },
    { "epoch": 1.49, "learning_rate": 2.769877443532868e-05, "loss": 4.7245, "step": 80000 },
    { "epoch": 1.49, "eval_loss": 4.684831619262695, "eval_runtime": 411.3597, "eval_samples_per_second": 41.385, "eval_steps_per_second": 5.173, "eval_wer": 0.8767522606558656, "step": 80000 },
    { "epoch": 1.5, "learning_rate": 2.7192342752962626e-05, "loss": 4.7072, "step": 80500 },
    { "epoch": 1.51, "learning_rate": 2.6685911070596576e-05, "loss": 4.7038, "step": 81000 },
    { "epoch": 1.52, "learning_rate": 2.6179479388230526e-05, "loss": 4.7019, "step": 81500 },
    { "epoch": 1.53, "learning_rate": 2.5674060569229213e-05, "loss": 4.7023, "step": 82000 },
    { "epoch": 1.54, "learning_rate": 2.5167628886863163e-05, "loss": 4.6995, "step": 82500 },
    { "epoch": 1.54, "eval_loss": 4.683341979980469, "eval_runtime": 411.2621, "eval_samples_per_second": 41.395, "eval_steps_per_second": 5.174, "eval_wer": 0.8766509899634863, "step": 82500 },
    { "epoch": 1.55, "learning_rate": 2.4661197204497113e-05, "loss": 4.7139, "step": 83000 },
    { "epoch": 1.56, "learning_rate": 2.4154765522131064e-05, "loss": 4.6981, "step": 83500 },
    { "epoch": 1.57, "learning_rate": 2.3648333839765017e-05, "loss": 4.7053, "step": 84000 },
    { "epoch": 1.57, "learning_rate": 2.31429150207637e-05, "loss": 4.7152, "step": 84500 },
    { "epoch": 1.58, "learning_rate": 2.263648333839765e-05, "loss": 4.7084, "step": 85000 },
    { "epoch": 1.58, "eval_loss": 4.678108215332031, "eval_runtime": 411.2113, "eval_samples_per_second": 41.4, "eval_steps_per_second": 5.175, "eval_wer": 0.8766087829523399, "step": 85000 },
    { "epoch": 1.59, "learning_rate": 2.2130051656031605e-05, "loss": 4.6985, "step": 85500 },
    { "epoch": 1.6, "learning_rate": 2.1623619973665555e-05, "loss": 4.7024, "step": 86000 },
    { "epoch": 1.61, "learning_rate": 2.1117188291299505e-05, "loss": 4.7015, "step": 86500 },
    { "epoch": 1.62, "learning_rate": 2.0611769472298185e-05, "loss": 4.6919, "step": 87000 },
    { "epoch": 1.63, "learning_rate": 2.010533778993214e-05, "loss": 4.7115, "step": 87500 },
    { "epoch": 1.63, "eval_loss": 4.676379680633545, "eval_runtime": 410.905, "eval_samples_per_second": 41.431, "eval_steps_per_second": 5.179, "eval_wer": 0.8762864154709211, "step": 87500 },
    { "epoch": 1.64, "learning_rate": 1.959890610756609e-05, "loss": 4.7144, "step": 88000 },
    { "epoch": 1.65, "learning_rate": 1.9092474425200043e-05, "loss": 4.6893, "step": 88500 },
    { "epoch": 1.66, "learning_rate": 1.8587055606198726e-05, "loss": 4.6977, "step": 89000 },
    { "epoch": 1.67, "learning_rate": 1.808163678719741e-05, "loss": 4.6987, "step": 89500 },
    { "epoch": 1.68, "learning_rate": 1.757520510483136e-05, "loss": 4.7072, "step": 90000 },
    { "epoch": 1.68, "eval_loss": 4.674571514129639, "eval_runtime": 410.8023, "eval_samples_per_second": 41.441, "eval_steps_per_second": 5.18, "eval_wer": 0.8756789219885069, "step": 90000 },
    { "epoch": 1.69, "learning_rate": 1.706877342246531e-05, "loss": 4.7077, "step": 90500 },
    { "epoch": 1.7, "learning_rate": 1.656234174009926e-05, "loss": 4.7049, "step": 91000 },
    { "epoch": 1.71, "learning_rate": 1.605591005773321e-05, "loss": 4.7023, "step": 91500 },
    { "epoch": 1.71, "learning_rate": 1.5549478375367164e-05, "loss": 4.695, "step": 92000 },
    { "epoch": 1.72, "learning_rate": 1.5043046693001114e-05, "loss": 4.7036, "step": 92500 },
    { "epoch": 1.72, "eval_loss": 4.672886848449707, "eval_runtime": 411.4353, "eval_samples_per_second": 41.377, "eval_steps_per_second": 5.172, "eval_wer": 0.8758804179983762, "step": 92500 },
    { "epoch": 1.73, "learning_rate": 1.4536615010635066e-05, "loss": 4.7079, "step": 93000 },
    { "epoch": 1.74, "learning_rate": 1.4030183328269016e-05, "loss": 4.7002, "step": 93500 },
    { "epoch": 1.75, "learning_rate": 1.3523751645902968e-05, "loss": 4.6982, "step": 94000 },
    { "epoch": 1.76, "learning_rate": 1.301731996353692e-05, "loss": 4.6883, "step": 94500 },
    { "epoch": 1.77, "learning_rate": 1.2511901144535604e-05, "loss": 4.6907, "step": 95000 },
    { "epoch": 1.77, "eval_loss": 4.670739650726318, "eval_runtime": 410.8726, "eval_samples_per_second": 41.434, "eval_steps_per_second": 5.179, "eval_wer": 0.8756105806051335, "step": 95000 },
    { "epoch": 1.78, "learning_rate": 1.2005469462169554e-05, "loss": 4.7006, "step": 95500 },
    { "epoch": 1.79, "learning_rate": 1.1499037779803506e-05, "loss": 4.6984, "step": 96000 },
    { "epoch": 1.8, "learning_rate": 1.0992606097437456e-05, "loss": 4.6942, "step": 96500 },
    { "epoch": 1.81, "learning_rate": 1.048718727843614e-05, "loss": 4.7016, "step": 97000 },
    { "epoch": 1.82, "learning_rate": 9.98075559607009e-06, "loss": 4.681, "step": 97500 },
    { "epoch": 1.82, "eval_loss": 4.668375492095947, "eval_runtime": 411.3728, "eval_samples_per_second": 41.383, "eval_steps_per_second": 5.173, "eval_wer": 0.875492453242668, "step": 97500 },
    { "epoch": 1.83, "learning_rate": 9.474323913704042e-06, "loss": 4.6988, "step": 98000 },
    { "epoch": 1.84, "learning_rate": 8.967892231337994e-06, "loss": 4.7063, "step": 98500 },
    { "epoch": 1.85, "learning_rate": 8.462473412336676e-06, "loss": 4.689, "step": 99000 },
    { "epoch": 1.85, "learning_rate": 7.956041729970628e-06, "loss": 4.6833, "step": 99500 },
    { "epoch": 1.86, "learning_rate": 7.449610047604579e-06, "loss": 4.6905, "step": 100000 },
    { "epoch": 1.86, "eval_loss": 4.668146133422852, "eval_runtime": 410.5824, "eval_samples_per_second": 41.463, "eval_steps_per_second": 5.183, "eval_wer": 0.8755806567489337, "step": 100000 },
    { "epoch": 1.87, "learning_rate": 6.94317836523853e-06, "loss": 4.6848, "step": 100500 },
    { "epoch": 1.88, "learning_rate": 6.4367466828724815e-06, "loss": 4.6882, "step": 101000 },
    { "epoch": 1.89, "learning_rate": 5.930315000506432e-06, "loss": 4.6821, "step": 101500 },
    { "epoch": 1.9, "learning_rate": 5.4238833181403835e-06, "loss": 4.6908, "step": 102000 },
    { "epoch": 1.91, "learning_rate": 4.917451635774335e-06, "loss": 4.6975, "step": 102500 },
    { "epoch": 1.91, "eval_loss": 4.664699554443359, "eval_runtime": 411.5323, "eval_samples_per_second": 41.367, "eval_steps_per_second": 5.171, "eval_wer": 0.8751601546998029, "step": 102500 },
    { "epoch": 1.92, "learning_rate": 4.411019953408286e-06, "loss": 4.6878, "step": 103000 },
    { "epoch": 1.93, "learning_rate": 3.9056011344069684e-06, "loss": 4.6829, "step": 103500 },
    { "epoch": 1.94, "learning_rate": 3.400182315405652e-06, "loss": 4.6882, "step": 104000 },
    { "epoch": 1.95, "learning_rate": 2.893750633039603e-06, "loss": 4.6905, "step": 104500 },
    { "epoch": 1.96, "learning_rate": 2.387318950673554e-06, "loss": 4.6849, "step": 105000 },
    { "epoch": 1.96, "eval_loss": 4.664451599121094, "eval_runtime": 411.3214, "eval_samples_per_second": 41.389, "eval_steps_per_second": 5.174, "eval_wer": 0.8751350657024651, "step": 105000 }
  ],
  "max_steps": 107316,
  "num_train_epochs": 2,
  "total_flos": 1.0233532461203251e+18,
  "trial_name": null,
  "trial_params": null
}