{
  "best_metric": 94.92795389048992,
  "best_model_checkpoint": "./whisper-tiny-en/checkpoint-2000",
  "epoch": 48.78048780487805,
  "eval_steps": 1000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.6097560975609756,
      "grad_norm": 76.21949768066406,
      "learning_rate": 4.2000000000000006e-07,
      "loss": 7.5702,
      "step": 25
    },
    {
      "epoch": 1.2195121951219512,
      "grad_norm": 72.27240753173828,
      "learning_rate": 9.200000000000001e-07,
      "loss": 7.1361,
      "step": 50
    },
    {
      "epoch": 1.8292682926829267,
      "grad_norm": 27.645973205566406,
      "learning_rate": 1.42e-06,
      "loss": 6.4445,
      "step": 75
    },
    {
      "epoch": 2.4390243902439024,
      "grad_norm": 21.712514877319336,
      "learning_rate": 1.9200000000000003e-06,
      "loss": 5.8148,
      "step": 100
    },
    {
      "epoch": 3.048780487804878,
      "grad_norm": 19.066011428833008,
      "learning_rate": 2.42e-06,
      "loss": 5.1377,
      "step": 125
    },
    {
      "epoch": 3.658536585365854,
      "grad_norm": 14.726506233215332,
      "learning_rate": 2.92e-06,
      "loss": 4.6,
      "step": 150
    },
    {
      "epoch": 4.2682926829268295,
      "grad_norm": 13.597981452941895,
      "learning_rate": 3.4200000000000007e-06,
      "loss": 4.3595,
      "step": 175
    },
    {
      "epoch": 4.878048780487805,
      "grad_norm": 16.399568557739258,
      "learning_rate": 3.920000000000001e-06,
      "loss": 4.171,
      "step": 200
    },
    {
      "epoch": 5.487804878048781,
      "grad_norm": 15.264267921447754,
      "learning_rate": 4.42e-06,
      "loss": 3.9727,
      "step": 225
    },
    {
      "epoch": 6.097560975609756,
      "grad_norm": 14.83893871307373,
      "learning_rate": 4.92e-06,
      "loss": 3.816,
      "step": 250
    },
    {
      "epoch": 6.7073170731707314,
      "grad_norm": 14.460186004638672,
      "learning_rate": 5.420000000000001e-06,
      "loss": 3.6683,
      "step": 275
    },
    {
      "epoch": 7.317073170731708,
      "grad_norm": 13.663064002990723,
      "learning_rate": 5.92e-06,
      "loss": 3.5392,
      "step": 300
    },
    {
      "epoch": 7.926829268292683,
      "grad_norm": 14.359073638916016,
      "learning_rate": 6.42e-06,
      "loss": 3.4807,
      "step": 325
    },
    {
      "epoch": 8.536585365853659,
      "grad_norm": 12.90373706817627,
      "learning_rate": 6.92e-06,
      "loss": 3.335,
      "step": 350
    },
    {
      "epoch": 9.146341463414634,
      "grad_norm": 15.809270858764648,
      "learning_rate": 7.420000000000001e-06,
      "loss": 3.2196,
      "step": 375
    },
    {
      "epoch": 9.75609756097561,
      "grad_norm": 14.483990669250488,
      "learning_rate": 7.92e-06,
      "loss": 3.1224,
      "step": 400
    },
    {
      "epoch": 10.365853658536585,
      "grad_norm": 15.841504096984863,
      "learning_rate": 8.42e-06,
      "loss": 2.9858,
      "step": 425
    },
    {
      "epoch": 10.975609756097562,
      "grad_norm": 16.420425415039062,
      "learning_rate": 8.920000000000001e-06,
      "loss": 2.9395,
      "step": 450
    },
    {
      "epoch": 11.585365853658537,
      "grad_norm": 14.952699661254883,
      "learning_rate": 9.42e-06,
      "loss": 2.7501,
      "step": 475
    },
    {
      "epoch": 12.195121951219512,
      "grad_norm": 15.765605926513672,
      "learning_rate": 9.920000000000002e-06,
      "loss": 2.7014,
      "step": 500
    },
    {
      "epoch": 12.804878048780488,
      "grad_norm": 17.627931594848633,
      "learning_rate": 9.967692307692308e-06,
      "loss": 2.601,
      "step": 525
    },
    {
      "epoch": 13.414634146341463,
      "grad_norm": 16.36449432373047,
      "learning_rate": 9.92923076923077e-06,
      "loss": 2.4699,
      "step": 550
    },
    {
      "epoch": 14.024390243902438,
      "grad_norm": 15.8140869140625,
      "learning_rate": 9.890769230769231e-06,
      "loss": 2.3859,
      "step": 575
    },
    {
      "epoch": 14.634146341463415,
      "grad_norm": 15.983495712280273,
      "learning_rate": 9.852307692307693e-06,
      "loss": 2.2235,
      "step": 600
    },
    {
      "epoch": 15.24390243902439,
      "grad_norm": 15.25550651550293,
      "learning_rate": 9.813846153846155e-06,
      "loss": 2.1583,
      "step": 625
    },
    {
      "epoch": 15.853658536585366,
      "grad_norm": 18.167879104614258,
      "learning_rate": 9.775384615384616e-06,
      "loss": 2.0671,
      "step": 650
    },
    {
      "epoch": 16.463414634146343,
      "grad_norm": 16.86029624938965,
      "learning_rate": 9.736923076923078e-06,
      "loss": 1.9547,
      "step": 675
    },
    {
      "epoch": 17.073170731707318,
      "grad_norm": 16.195270538330078,
      "learning_rate": 9.698461538461539e-06,
      "loss": 1.8877,
      "step": 700
    },
    {
      "epoch": 17.682926829268293,
      "grad_norm": 17.331113815307617,
      "learning_rate": 9.66e-06,
      "loss": 1.7336,
      "step": 725
    },
    {
      "epoch": 18.29268292682927,
      "grad_norm": 18.00438690185547,
      "learning_rate": 9.621538461538463e-06,
      "loss": 1.6845,
      "step": 750
    },
    {
      "epoch": 18.902439024390244,
      "grad_norm": 17.1490478515625,
      "learning_rate": 9.583076923076923e-06,
      "loss": 1.6027,
      "step": 775
    },
    {
      "epoch": 19.51219512195122,
      "grad_norm": 17.924156188964844,
      "learning_rate": 9.544615384615385e-06,
      "loss": 1.4647,
      "step": 800
    },
    {
      "epoch": 20.121951219512194,
      "grad_norm": 17.920028686523438,
      "learning_rate": 9.506153846153848e-06,
      "loss": 1.4291,
      "step": 825
    },
    {
      "epoch": 20.73170731707317,
      "grad_norm": 17.502954483032227,
      "learning_rate": 9.467692307692308e-06,
      "loss": 1.3113,
      "step": 850
    },
    {
      "epoch": 21.341463414634145,
      "grad_norm": 18.125986099243164,
      "learning_rate": 9.42923076923077e-06,
      "loss": 1.232,
      "step": 875
    },
    {
      "epoch": 21.951219512195124,
      "grad_norm": 16.026338577270508,
      "learning_rate": 9.39076923076923e-06,
      "loss": 1.1903,
      "step": 900
    },
    {
      "epoch": 22.5609756097561,
      "grad_norm": 16.795467376708984,
      "learning_rate": 9.352307692307693e-06,
      "loss": 1.0742,
      "step": 925
    },
    {
      "epoch": 23.170731707317074,
      "grad_norm": 19.86144256591797,
      "learning_rate": 9.313846153846155e-06,
      "loss": 1.0346,
      "step": 950
    },
    {
      "epoch": 23.78048780487805,
      "grad_norm": 17.94236183166504,
      "learning_rate": 9.275384615384616e-06,
      "loss": 0.9589,
      "step": 975
    },
    {
      "epoch": 24.390243902439025,
      "grad_norm": 17.72712516784668,
      "learning_rate": 9.236923076923078e-06,
      "loss": 0.8757,
      "step": 1000
    },
    {
      "epoch": 24.390243902439025,
      "eval_loss": 4.123460292816162,
      "eval_runtime": 64.6053,
      "eval_samples_per_second": 2.492,
      "eval_steps_per_second": 0.325,
      "eval_wer": 97.89625360230548,
      "step": 1000
    },
    {
      "epoch": 25.0,
      "grad_norm": 31.632688522338867,
      "learning_rate": 9.19846153846154e-06,
      "loss": 0.8421,
      "step": 1025
    },
    {
      "epoch": 25.609756097560975,
      "grad_norm": 18.91576385498047,
      "learning_rate": 9.16e-06,
      "loss": 0.7294,
      "step": 1050
    },
    {
      "epoch": 26.21951219512195,
      "grad_norm": 20.973003387451172,
      "learning_rate": 9.121538461538463e-06,
      "loss": 0.7139,
      "step": 1075
    },
    {
      "epoch": 26.829268292682926,
      "grad_norm": 18.86784553527832,
      "learning_rate": 9.083076923076923e-06,
      "loss": 0.6638,
      "step": 1100
    },
    {
      "epoch": 27.4390243902439,
      "grad_norm": 14.628901481628418,
      "learning_rate": 9.044615384615385e-06,
      "loss": 0.605,
      "step": 1125
    },
    {
      "epoch": 28.048780487804876,
      "grad_norm": 13.873974800109863,
      "learning_rate": 9.006153846153847e-06,
      "loss": 0.5613,
      "step": 1150
    },
    {
      "epoch": 28.658536585365855,
      "grad_norm": 15.853562355041504,
      "learning_rate": 8.967692307692308e-06,
      "loss": 0.4886,
      "step": 1175
    },
    {
      "epoch": 29.26829268292683,
      "grad_norm": 12.60745906829834,
      "learning_rate": 8.92923076923077e-06,
      "loss": 0.4652,
      "step": 1200
    },
    {
      "epoch": 29.878048780487806,
      "grad_norm": 17.16914176940918,
      "learning_rate": 8.890769230769232e-06,
      "loss": 0.4324,
      "step": 1225
    },
    {
      "epoch": 30.48780487804878,
      "grad_norm": 11.67048168182373,
      "learning_rate": 8.852307692307693e-06,
      "loss": 0.3793,
      "step": 1250
    },
    {
      "epoch": 31.097560975609756,
      "grad_norm": 10.136366844177246,
      "learning_rate": 8.813846153846155e-06,
      "loss": 0.3689,
      "step": 1275
    },
    {
      "epoch": 31.70731707317073,
      "grad_norm": 15.549346923828125,
      "learning_rate": 8.775384615384615e-06,
      "loss": 0.3165,
      "step": 1300
    },
    {
      "epoch": 32.31707317073171,
      "grad_norm": 10.75722599029541,
      "learning_rate": 8.736923076923077e-06,
      "loss": 0.2891,
      "step": 1325
    },
    {
      "epoch": 32.926829268292686,
      "grad_norm": 10.687921524047852,
      "learning_rate": 8.69846153846154e-06,
      "loss": 0.2786,
      "step": 1350
    },
    {
      "epoch": 33.53658536585366,
      "grad_norm": 15.220375061035156,
      "learning_rate": 8.66e-06,
      "loss": 0.2349,
      "step": 1375
    },
    {
      "epoch": 34.146341463414636,
      "grad_norm": 9.226957321166992,
      "learning_rate": 8.621538461538462e-06,
      "loss": 0.2279,
      "step": 1400
    },
    {
      "epoch": 34.75609756097561,
      "grad_norm": 10.428820610046387,
      "learning_rate": 8.583076923076924e-06,
      "loss": 0.2022,
      "step": 1425
    },
    {
      "epoch": 35.36585365853659,
      "grad_norm": 10.70584487915039,
      "learning_rate": 8.544615384615385e-06,
      "loss": 0.186,
      "step": 1450
    },
    {
      "epoch": 35.97560975609756,
      "grad_norm": 11.436528205871582,
      "learning_rate": 8.506153846153847e-06,
      "loss": 0.1763,
      "step": 1475
    },
    {
      "epoch": 36.58536585365854,
      "grad_norm": 8.337956428527832,
      "learning_rate": 8.467692307692308e-06,
      "loss": 0.1482,
      "step": 1500
    },
    {
      "epoch": 37.19512195121951,
      "grad_norm": 8.083637237548828,
      "learning_rate": 8.42923076923077e-06,
      "loss": 0.1515,
      "step": 1525
    },
    {
      "epoch": 37.80487804878049,
      "grad_norm": 8.621879577636719,
      "learning_rate": 8.390769230769232e-06,
      "loss": 0.1345,
      "step": 1550
    },
    {
      "epoch": 38.41463414634146,
      "grad_norm": 8.559672355651855,
      "learning_rate": 8.352307692307692e-06,
      "loss": 0.1204,
      "step": 1575
    },
    {
      "epoch": 39.02439024390244,
      "grad_norm": 6.850173473358154,
      "learning_rate": 8.313846153846155e-06,
      "loss": 0.1208,
      "step": 1600
    },
    {
      "epoch": 39.63414634146341,
      "grad_norm": 8.43338680267334,
      "learning_rate": 8.275384615384617e-06,
      "loss": 0.1007,
      "step": 1625
    },
    {
      "epoch": 40.24390243902439,
      "grad_norm": 9.348348617553711,
      "learning_rate": 8.236923076923077e-06,
      "loss": 0.0995,
      "step": 1650
    },
    {
      "epoch": 40.853658536585364,
      "grad_norm": 6.383693695068359,
      "learning_rate": 8.19846153846154e-06,
      "loss": 0.0956,
      "step": 1675
    },
    {
      "epoch": 41.46341463414634,
      "grad_norm": 6.797173976898193,
      "learning_rate": 8.16e-06,
      "loss": 0.0863,
      "step": 1700
    },
    {
      "epoch": 42.073170731707314,
      "grad_norm": 5.753118515014648,
      "learning_rate": 8.121538461538462e-06,
      "loss": 0.0874,
      "step": 1725
    },
    {
      "epoch": 42.68292682926829,
      "grad_norm": 8.442388534545898,
      "learning_rate": 8.083076923076924e-06,
      "loss": 0.0737,
      "step": 1750
    },
    {
      "epoch": 43.292682926829265,
      "grad_norm": 4.439198017120361,
      "learning_rate": 8.044615384615385e-06,
      "loss": 0.0733,
      "step": 1775
    },
    {
      "epoch": 43.90243902439025,
      "grad_norm": 6.787204742431641,
      "learning_rate": 8.006153846153847e-06,
      "loss": 0.0714,
      "step": 1800
    },
    {
      "epoch": 44.51219512195122,
      "grad_norm": 5.47157621383667,
      "learning_rate": 7.967692307692309e-06,
      "loss": 0.0627,
      "step": 1825
    },
    {
      "epoch": 45.1219512195122,
      "grad_norm": 5.718803882598877,
      "learning_rate": 7.92923076923077e-06,
      "loss": 0.0666,
      "step": 1850
    },
    {
      "epoch": 45.73170731707317,
      "grad_norm": 4.490105628967285,
      "learning_rate": 7.890769230769232e-06,
      "loss": 0.055,
      "step": 1875
    },
    {
      "epoch": 46.34146341463415,
      "grad_norm": 5.873692035675049,
      "learning_rate": 7.852307692307692e-06,
      "loss": 0.0569,
      "step": 1900
    },
    {
      "epoch": 46.951219512195124,
      "grad_norm": 10.3510103225708,
      "learning_rate": 7.813846153846154e-06,
      "loss": 0.0601,
      "step": 1925
    },
    {
      "epoch": 47.5609756097561,
      "grad_norm": 5.004169464111328,
      "learning_rate": 7.775384615384616e-06,
      "loss": 0.0524,
      "step": 1950
    },
    {
      "epoch": 48.170731707317074,
      "grad_norm": 3.3900997638702393,
      "learning_rate": 7.736923076923077e-06,
      "loss": 0.0508,
      "step": 1975
    },
    {
      "epoch": 48.78048780487805,
      "grad_norm": 6.310654640197754,
      "learning_rate": 7.698461538461539e-06,
      "loss": 0.0518,
      "step": 2000
    },
    {
      "epoch": 48.78048780487805,
      "eval_loss": 4.87410306930542,
      "eval_runtime": 62.3697,
      "eval_samples_per_second": 2.581,
      "eval_steps_per_second": 0.337,
      "eval_wer": 94.92795389048992,
      "step": 2000
    }
  ],
  "logging_steps": 25,
  "max_steps": 7000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 171,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 7.7244213952512e+17,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
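
This is the `trainer_state.json` that the Hugging Face `Trainer` writes into each checkpoint directory. Entries in `log_history` with `eval_*` keys come from evaluation passes (here at steps 1000 and 2000, per `eval_steps`); the rest are training logs emitted every `logging_steps` (25) steps. A minimal sketch for reading the state back and summarizing it, assuming the object above is saved as `trainer_state.json` under the checkpoint directory (the path below is illustrative):

```python
import json

# Illustrative path; adjust to wherever the checkpoint lives.
with open("whisper-tiny-en/checkpoint-2000/trainer_state.json") as f:
    state = json.load(f)

# Training entries carry a "loss" key; evaluation entries carry "eval_*" keys.
train_log = [e for e in state["log_history"] if "loss" in e]
eval_log = [e for e in state["log_history"] if "eval_wer" in e]

print(f"best WER: {state['best_metric']:.2f} ({state['best_model_checkpoint']})")
print(f"last train loss: {train_log[-1]['loss']} at step {train_log[-1]['step']}")
for e in eval_log:
    print(f"step {e['step']}: eval_loss {e['eval_loss']:.3f}, WER {e['eval_wer']:.2f}")
```

Run against this state, the loop would report eval_loss 4.123 / WER 97.90 at step 1000 and eval_loss 4.874 / WER 94.93 at step 2000, matching the `best_metric` recorded at the top of the file.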