{ |
|
"best_metric": 91.78674351585015, |
|
"best_model_checkpoint": "./whisper-tiny-en/checkpoint-7000", |
|
"epoch": 170.73170731707316, |
|
"eval_steps": 1000, |
|
"global_step": 7000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.6097560975609756, |
|
"grad_norm": 76.21949768066406, |
|
"learning_rate": 4.2000000000000006e-07, |
|
"loss": 7.5702, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 1.2195121951219512, |
|
"grad_norm": 72.27240753173828, |
|
"learning_rate": 9.200000000000001e-07, |
|
"loss": 7.1361, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 1.8292682926829267, |
|
"grad_norm": 27.645973205566406, |
|
"learning_rate": 1.42e-06, |
|
"loss": 6.4445, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 2.4390243902439024, |
|
"grad_norm": 21.712514877319336, |
|
"learning_rate": 1.9200000000000003e-06, |
|
"loss": 5.8148, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 3.048780487804878, |
|
"grad_norm": 19.066011428833008, |
|
"learning_rate": 2.42e-06, |
|
"loss": 5.1377, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 3.658536585365854, |
|
"grad_norm": 14.726506233215332, |
|
"learning_rate": 2.92e-06, |
|
"loss": 4.6, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 4.2682926829268295, |
|
"grad_norm": 13.597981452941895, |
|
"learning_rate": 3.4200000000000007e-06, |
|
"loss": 4.3595, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 4.878048780487805, |
|
"grad_norm": 16.399568557739258, |
|
"learning_rate": 3.920000000000001e-06, |
|
"loss": 4.171, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 5.487804878048781, |
|
"grad_norm": 15.264267921447754, |
|
"learning_rate": 4.42e-06, |
|
"loss": 3.9727, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 6.097560975609756, |
|
"grad_norm": 14.83893871307373, |
|
"learning_rate": 4.92e-06, |
|
"loss": 3.816, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 6.7073170731707314, |
|
"grad_norm": 14.460186004638672, |
|
"learning_rate": 5.420000000000001e-06, |
|
"loss": 3.6683, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 7.317073170731708, |
|
"grad_norm": 13.663064002990723, |
|
"learning_rate": 5.92e-06, |
|
"loss": 3.5392, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 7.926829268292683, |
|
"grad_norm": 14.359073638916016, |
|
"learning_rate": 6.42e-06, |
|
"loss": 3.4807, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 8.536585365853659, |
|
"grad_norm": 12.90373706817627, |
|
"learning_rate": 6.92e-06, |
|
"loss": 3.335, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 9.146341463414634, |
|
"grad_norm": 15.809270858764648, |
|
"learning_rate": 7.420000000000001e-06, |
|
"loss": 3.2196, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 9.75609756097561, |
|
"grad_norm": 14.483990669250488, |
|
"learning_rate": 7.92e-06, |
|
"loss": 3.1224, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 10.365853658536585, |
|
"grad_norm": 15.841504096984863, |
|
"learning_rate": 8.42e-06, |
|
"loss": 2.9858, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 10.975609756097562, |
|
"grad_norm": 16.420425415039062, |
|
"learning_rate": 8.920000000000001e-06, |
|
"loss": 2.9395, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 11.585365853658537, |
|
"grad_norm": 14.952699661254883, |
|
"learning_rate": 9.42e-06, |
|
"loss": 2.7501, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 12.195121951219512, |
|
"grad_norm": 15.765605926513672, |
|
"learning_rate": 9.920000000000002e-06, |
|
"loss": 2.7014, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 12.804878048780488, |
|
"grad_norm": 17.627931594848633, |
|
"learning_rate": 9.967692307692308e-06, |
|
"loss": 2.601, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 13.414634146341463, |
|
"grad_norm": 16.36449432373047, |
|
"learning_rate": 9.92923076923077e-06, |
|
"loss": 2.4699, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 14.024390243902438, |
|
"grad_norm": 15.8140869140625, |
|
"learning_rate": 9.890769230769231e-06, |
|
"loss": 2.3859, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 14.634146341463415, |
|
"grad_norm": 15.983495712280273, |
|
"learning_rate": 9.852307692307693e-06, |
|
"loss": 2.2235, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 15.24390243902439, |
|
"grad_norm": 15.25550651550293, |
|
"learning_rate": 9.813846153846155e-06, |
|
"loss": 2.1583, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 15.853658536585366, |
|
"grad_norm": 18.167879104614258, |
|
"learning_rate": 9.775384615384616e-06, |
|
"loss": 2.0671, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 16.463414634146343, |
|
"grad_norm": 16.86029624938965, |
|
"learning_rate": 9.736923076923078e-06, |
|
"loss": 1.9547, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 17.073170731707318, |
|
"grad_norm": 16.195270538330078, |
|
"learning_rate": 9.698461538461539e-06, |
|
"loss": 1.8877, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 17.682926829268293, |
|
"grad_norm": 17.331113815307617, |
|
"learning_rate": 9.66e-06, |
|
"loss": 1.7336, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 18.29268292682927, |
|
"grad_norm": 18.00438690185547, |
|
"learning_rate": 9.621538461538463e-06, |
|
"loss": 1.6845, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 18.902439024390244, |
|
"grad_norm": 17.1490478515625, |
|
"learning_rate": 9.583076923076923e-06, |
|
"loss": 1.6027, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 19.51219512195122, |
|
"grad_norm": 17.924156188964844, |
|
"learning_rate": 9.544615384615385e-06, |
|
"loss": 1.4647, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 20.121951219512194, |
|
"grad_norm": 17.920028686523438, |
|
"learning_rate": 9.506153846153848e-06, |
|
"loss": 1.4291, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 20.73170731707317, |
|
"grad_norm": 17.502954483032227, |
|
"learning_rate": 9.467692307692308e-06, |
|
"loss": 1.3113, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 21.341463414634145, |
|
"grad_norm": 18.125986099243164, |
|
"learning_rate": 9.42923076923077e-06, |
|
"loss": 1.232, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 21.951219512195124, |
|
"grad_norm": 16.026338577270508, |
|
"learning_rate": 9.39076923076923e-06, |
|
"loss": 1.1903, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 22.5609756097561, |
|
"grad_norm": 16.795467376708984, |
|
"learning_rate": 9.352307692307693e-06, |
|
"loss": 1.0742, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 23.170731707317074, |
|
"grad_norm": 19.86144256591797, |
|
"learning_rate": 9.313846153846155e-06, |
|
"loss": 1.0346, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 23.78048780487805, |
|
"grad_norm": 17.94236183166504, |
|
"learning_rate": 9.275384615384616e-06, |
|
"loss": 0.9589, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 24.390243902439025, |
|
"grad_norm": 17.72712516784668, |
|
"learning_rate": 9.236923076923078e-06, |
|
"loss": 0.8757, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 24.390243902439025, |
|
"eval_loss": 4.123460292816162, |
|
"eval_runtime": 64.6053, |
|
"eval_samples_per_second": 2.492, |
|
"eval_steps_per_second": 0.325, |
|
"eval_wer": 97.89625360230548, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"grad_norm": 31.632688522338867, |
|
"learning_rate": 9.19846153846154e-06, |
|
"loss": 0.8421, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 25.609756097560975, |
|
"grad_norm": 18.91576385498047, |
|
"learning_rate": 9.16e-06, |
|
"loss": 0.7294, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 26.21951219512195, |
|
"grad_norm": 20.973003387451172, |
|
"learning_rate": 9.121538461538463e-06, |
|
"loss": 0.7139, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 26.829268292682926, |
|
"grad_norm": 18.86784553527832, |
|
"learning_rate": 9.083076923076923e-06, |
|
"loss": 0.6638, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 27.4390243902439, |
|
"grad_norm": 14.628901481628418, |
|
"learning_rate": 9.044615384615385e-06, |
|
"loss": 0.605, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 28.048780487804876, |
|
"grad_norm": 13.873974800109863, |
|
"learning_rate": 9.006153846153847e-06, |
|
"loss": 0.5613, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 28.658536585365855, |
|
"grad_norm": 15.853562355041504, |
|
"learning_rate": 8.967692307692308e-06, |
|
"loss": 0.4886, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 29.26829268292683, |
|
"grad_norm": 12.60745906829834, |
|
"learning_rate": 8.92923076923077e-06, |
|
"loss": 0.4652, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 29.878048780487806, |
|
"grad_norm": 17.16914176940918, |
|
"learning_rate": 8.890769230769232e-06, |
|
"loss": 0.4324, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 30.48780487804878, |
|
"grad_norm": 11.67048168182373, |
|
"learning_rate": 8.852307692307693e-06, |
|
"loss": 0.3793, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 31.097560975609756, |
|
"grad_norm": 10.136366844177246, |
|
"learning_rate": 8.813846153846155e-06, |
|
"loss": 0.3689, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 31.70731707317073, |
|
"grad_norm": 15.549346923828125, |
|
"learning_rate": 8.775384615384615e-06, |
|
"loss": 0.3165, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 32.31707317073171, |
|
"grad_norm": 10.75722599029541, |
|
"learning_rate": 8.736923076923077e-06, |
|
"loss": 0.2891, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 32.926829268292686, |
|
"grad_norm": 10.687921524047852, |
|
"learning_rate": 8.69846153846154e-06, |
|
"loss": 0.2786, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 33.53658536585366, |
|
"grad_norm": 15.220375061035156, |
|
"learning_rate": 8.66e-06, |
|
"loss": 0.2349, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 34.146341463414636, |
|
"grad_norm": 9.226957321166992, |
|
"learning_rate": 8.621538461538462e-06, |
|
"loss": 0.2279, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 34.75609756097561, |
|
"grad_norm": 10.428820610046387, |
|
"learning_rate": 8.583076923076924e-06, |
|
"loss": 0.2022, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 35.36585365853659, |
|
"grad_norm": 10.70584487915039, |
|
"learning_rate": 8.544615384615385e-06, |
|
"loss": 0.186, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 35.97560975609756, |
|
"grad_norm": 11.436528205871582, |
|
"learning_rate": 8.506153846153847e-06, |
|
"loss": 0.1763, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 36.58536585365854, |
|
"grad_norm": 8.337956428527832, |
|
"learning_rate": 8.467692307692308e-06, |
|
"loss": 0.1482, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 37.19512195121951, |
|
"grad_norm": 8.083637237548828, |
|
"learning_rate": 8.42923076923077e-06, |
|
"loss": 0.1515, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 37.80487804878049, |
|
"grad_norm": 8.621879577636719, |
|
"learning_rate": 8.390769230769232e-06, |
|
"loss": 0.1345, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 38.41463414634146, |
|
"grad_norm": 8.559672355651855, |
|
"learning_rate": 8.352307692307692e-06, |
|
"loss": 0.1204, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 39.02439024390244, |
|
"grad_norm": 6.850173473358154, |
|
"learning_rate": 8.313846153846155e-06, |
|
"loss": 0.1208, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 39.63414634146341, |
|
"grad_norm": 8.43338680267334, |
|
"learning_rate": 8.275384615384617e-06, |
|
"loss": 0.1007, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 40.24390243902439, |
|
"grad_norm": 9.348348617553711, |
|
"learning_rate": 8.236923076923077e-06, |
|
"loss": 0.0995, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 40.853658536585364, |
|
"grad_norm": 6.383693695068359, |
|
"learning_rate": 8.19846153846154e-06, |
|
"loss": 0.0956, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 41.46341463414634, |
|
"grad_norm": 6.797173976898193, |
|
"learning_rate": 8.16e-06, |
|
"loss": 0.0863, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 42.073170731707314, |
|
"grad_norm": 5.753118515014648, |
|
"learning_rate": 8.121538461538462e-06, |
|
"loss": 0.0874, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 42.68292682926829, |
|
"grad_norm": 8.442388534545898, |
|
"learning_rate": 8.083076923076924e-06, |
|
"loss": 0.0737, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 43.292682926829265, |
|
"grad_norm": 4.439198017120361, |
|
"learning_rate": 8.044615384615385e-06, |
|
"loss": 0.0733, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 43.90243902439025, |
|
"grad_norm": 6.787204742431641, |
|
"learning_rate": 8.006153846153847e-06, |
|
"loss": 0.0714, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 44.51219512195122, |
|
"grad_norm": 5.47157621383667, |
|
"learning_rate": 7.967692307692309e-06, |
|
"loss": 0.0627, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 45.1219512195122, |
|
"grad_norm": 5.718803882598877, |
|
"learning_rate": 7.92923076923077e-06, |
|
"loss": 0.0666, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 45.73170731707317, |
|
"grad_norm": 4.490105628967285, |
|
"learning_rate": 7.890769230769232e-06, |
|
"loss": 0.055, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 46.34146341463415, |
|
"grad_norm": 5.873692035675049, |
|
"learning_rate": 7.852307692307692e-06, |
|
"loss": 0.0569, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 46.951219512195124, |
|
"grad_norm": 10.3510103225708, |
|
"learning_rate": 7.813846153846154e-06, |
|
"loss": 0.0601, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 47.5609756097561, |
|
"grad_norm": 5.004169464111328, |
|
"learning_rate": 7.775384615384616e-06, |
|
"loss": 0.0524, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 48.170731707317074, |
|
"grad_norm": 3.3900997638702393, |
|
"learning_rate": 7.736923076923077e-06, |
|
"loss": 0.0508, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 48.78048780487805, |
|
"grad_norm": 6.310654640197754, |
|
"learning_rate": 7.698461538461539e-06, |
|
"loss": 0.0518, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 48.78048780487805, |
|
"eval_loss": 4.87410306930542, |
|
"eval_runtime": 62.3697, |
|
"eval_samples_per_second": 2.581, |
|
"eval_steps_per_second": 0.337, |
|
"eval_wer": 94.92795389048992, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 49.390243902439025, |
|
"grad_norm": 5.030487060546875, |
|
"learning_rate": 7.660000000000001e-06, |
|
"loss": 0.0447, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 50.0, |
|
"grad_norm": 9.628267288208008, |
|
"learning_rate": 7.6215384615384625e-06, |
|
"loss": 0.048, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 50.609756097560975, |
|
"grad_norm": 9.77782917022705, |
|
"learning_rate": 7.583076923076924e-06, |
|
"loss": 0.0428, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 51.21951219512195, |
|
"grad_norm": 5.163978099822998, |
|
"learning_rate": 7.544615384615386e-06, |
|
"loss": 0.0444, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 51.829268292682926, |
|
"grad_norm": 3.368098497390747, |
|
"learning_rate": 7.5061538461538465e-06, |
|
"loss": 0.0421, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 52.4390243902439, |
|
"grad_norm": 2.6525158882141113, |
|
"learning_rate": 7.467692307692308e-06, |
|
"loss": 0.0381, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 53.048780487804876, |
|
"grad_norm": 5.2876410484313965, |
|
"learning_rate": 7.42923076923077e-06, |
|
"loss": 0.0421, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 53.65853658536585, |
|
"grad_norm": 2.5063223838806152, |
|
"learning_rate": 7.390769230769231e-06, |
|
"loss": 0.0379, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 54.26829268292683, |
|
"grad_norm": 7.786949157714844, |
|
"learning_rate": 7.3523076923076935e-06, |
|
"loss": 0.0365, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 54.8780487804878, |
|
"grad_norm": 4.044300556182861, |
|
"learning_rate": 7.313846153846155e-06, |
|
"loss": 0.0359, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 55.48780487804878, |
|
"grad_norm": 4.029970169067383, |
|
"learning_rate": 7.275384615384616e-06, |
|
"loss": 0.0348, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 56.09756097560975, |
|
"grad_norm": 2.0436642169952393, |
|
"learning_rate": 7.236923076923078e-06, |
|
"loss": 0.0366, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 56.707317073170735, |
|
"grad_norm": 5.075358867645264, |
|
"learning_rate": 7.198461538461539e-06, |
|
"loss": 0.0345, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 57.31707317073171, |
|
"grad_norm": 1.7547271251678467, |
|
"learning_rate": 7.16e-06, |
|
"loss": 0.0304, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 57.926829268292686, |
|
"grad_norm": 3.1869213581085205, |
|
"learning_rate": 7.121538461538462e-06, |
|
"loss": 0.0354, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 58.53658536585366, |
|
"grad_norm": 2.8294670581817627, |
|
"learning_rate": 7.0830769230769235e-06, |
|
"loss": 0.0281, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 59.146341463414636, |
|
"grad_norm": 2.4291458129882812, |
|
"learning_rate": 7.044615384615386e-06, |
|
"loss": 0.0334, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 59.75609756097561, |
|
"grad_norm": 3.106226921081543, |
|
"learning_rate": 7.006153846153847e-06, |
|
"loss": 0.0306, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 60.36585365853659, |
|
"grad_norm": 2.7606558799743652, |
|
"learning_rate": 6.967692307692308e-06, |
|
"loss": 0.0311, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 60.97560975609756, |
|
"grad_norm": 3.0400702953338623, |
|
"learning_rate": 6.9292307692307705e-06, |
|
"loss": 0.0305, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 61.58536585365854, |
|
"grad_norm": 3.5321948528289795, |
|
"learning_rate": 6.890769230769231e-06, |
|
"loss": 0.0275, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 62.19512195121951, |
|
"grad_norm": 1.7394859790802002, |
|
"learning_rate": 6.852307692307692e-06, |
|
"loss": 0.0325, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 62.80487804878049, |
|
"grad_norm": 3.530212163925171, |
|
"learning_rate": 6.8138461538461545e-06, |
|
"loss": 0.0285, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 63.41463414634146, |
|
"grad_norm": 1.9453763961791992, |
|
"learning_rate": 6.775384615384616e-06, |
|
"loss": 0.0255, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 64.02439024390245, |
|
"grad_norm": 2.1866273880004883, |
|
"learning_rate": 6.736923076923078e-06, |
|
"loss": 0.0311, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 64.63414634146342, |
|
"grad_norm": 2.9744958877563477, |
|
"learning_rate": 6.698461538461539e-06, |
|
"loss": 0.0234, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 65.2439024390244, |
|
"grad_norm": 1.6761668920516968, |
|
"learning_rate": 6.660000000000001e-06, |
|
"loss": 0.027, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 65.85365853658537, |
|
"grad_norm": 1.5864598751068115, |
|
"learning_rate": 6.621538461538463e-06, |
|
"loss": 0.0238, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 66.46341463414635, |
|
"grad_norm": 2.605698347091675, |
|
"learning_rate": 6.583076923076923e-06, |
|
"loss": 0.0236, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 67.07317073170732, |
|
"grad_norm": 1.8222204446792603, |
|
"learning_rate": 6.5446153846153846e-06, |
|
"loss": 0.0299, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 67.6829268292683, |
|
"grad_norm": 2.6547422409057617, |
|
"learning_rate": 6.506153846153847e-06, |
|
"loss": 0.0203, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 68.29268292682927, |
|
"grad_norm": 1.494598627090454, |
|
"learning_rate": 6.467692307692308e-06, |
|
"loss": 0.0268, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 68.90243902439025, |
|
"grad_norm": 1.973913311958313, |
|
"learning_rate": 6.42923076923077e-06, |
|
"loss": 0.0235, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 69.51219512195122, |
|
"grad_norm": 1.9251242876052856, |
|
"learning_rate": 6.3907692307692315e-06, |
|
"loss": 0.0232, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 70.1219512195122, |
|
"grad_norm": 2.214128017425537, |
|
"learning_rate": 6.352307692307693e-06, |
|
"loss": 0.0239, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 70.73170731707317, |
|
"grad_norm": 2.427274703979492, |
|
"learning_rate": 6.313846153846155e-06, |
|
"loss": 0.0223, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 71.34146341463415, |
|
"grad_norm": 2.7526907920837402, |
|
"learning_rate": 6.2753846153846155e-06, |
|
"loss": 0.0215, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 71.95121951219512, |
|
"grad_norm": 2.739189386367798, |
|
"learning_rate": 6.236923076923077e-06, |
|
"loss": 0.0243, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 72.5609756097561, |
|
"grad_norm": 1.61655592918396, |
|
"learning_rate": 6.198461538461539e-06, |
|
"loss": 0.0188, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 73.17073170731707, |
|
"grad_norm": 2.1026246547698975, |
|
"learning_rate": 6.16e-06, |
|
"loss": 0.0234, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 73.17073170731707, |
|
"eval_loss": 5.154399871826172, |
|
"eval_runtime": 55.0306, |
|
"eval_samples_per_second": 2.926, |
|
"eval_steps_per_second": 0.382, |
|
"eval_wer": 93.11239193083574, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 73.78048780487805, |
|
"grad_norm": 2.770653247833252, |
|
"learning_rate": 6.1215384615384625e-06, |
|
"loss": 0.0206, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 74.39024390243902, |
|
"grad_norm": 1.6438905000686646, |
|
"learning_rate": 6.083076923076924e-06, |
|
"loss": 0.019, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 75.0, |
|
"grad_norm": 1.086746096611023, |
|
"learning_rate": 6.044615384615385e-06, |
|
"loss": 0.0229, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 75.60975609756098, |
|
"grad_norm": 2.3001186847686768, |
|
"learning_rate": 6.006153846153847e-06, |
|
"loss": 0.0188, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 76.21951219512195, |
|
"grad_norm": 1.9896184206008911, |
|
"learning_rate": 5.967692307692308e-06, |
|
"loss": 0.0208, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 76.82926829268293, |
|
"grad_norm": 1.5976531505584717, |
|
"learning_rate": 5.929230769230769e-06, |
|
"loss": 0.0206, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 77.4390243902439, |
|
"grad_norm": 2.1733193397521973, |
|
"learning_rate": 5.890769230769231e-06, |
|
"loss": 0.0194, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 78.04878048780488, |
|
"grad_norm": 1.85785710811615, |
|
"learning_rate": 5.8523076923076926e-06, |
|
"loss": 0.0206, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 78.65853658536585, |
|
"grad_norm": 3.4088730812072754, |
|
"learning_rate": 5.813846153846155e-06, |
|
"loss": 0.0174, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 79.26829268292683, |
|
"grad_norm": 2.442415237426758, |
|
"learning_rate": 5.775384615384616e-06, |
|
"loss": 0.0203, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 79.8780487804878, |
|
"grad_norm": 2.473065137863159, |
|
"learning_rate": 5.736923076923077e-06, |
|
"loss": 0.0196, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 80.48780487804878, |
|
"grad_norm": 1.7116321325302124, |
|
"learning_rate": 5.6984615384615395e-06, |
|
"loss": 0.0184, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 81.09756097560975, |
|
"grad_norm": 0.6662659645080566, |
|
"learning_rate": 5.66e-06, |
|
"loss": 0.0191, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 81.70731707317073, |
|
"grad_norm": 1.450646162033081, |
|
"learning_rate": 5.621538461538461e-06, |
|
"loss": 0.0173, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 82.3170731707317, |
|
"grad_norm": 1.3656872510910034, |
|
"learning_rate": 5.5830769230769235e-06, |
|
"loss": 0.0161, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 82.92682926829268, |
|
"grad_norm": 2.8566460609436035, |
|
"learning_rate": 5.544615384615385e-06, |
|
"loss": 0.0217, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 83.53658536585365, |
|
"grad_norm": 2.3349201679229736, |
|
"learning_rate": 5.506153846153847e-06, |
|
"loss": 0.0153, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 84.14634146341463, |
|
"grad_norm": 1.6854404211044312, |
|
"learning_rate": 5.467692307692308e-06, |
|
"loss": 0.0189, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 84.7560975609756, |
|
"grad_norm": 2.3685340881347656, |
|
"learning_rate": 5.42923076923077e-06, |
|
"loss": 0.0188, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 85.36585365853658, |
|
"grad_norm": 1.909439206123352, |
|
"learning_rate": 5.390769230769232e-06, |
|
"loss": 0.0163, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 85.97560975609755, |
|
"grad_norm": 2.5704216957092285, |
|
"learning_rate": 5.352307692307692e-06, |
|
"loss": 0.0186, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 86.58536585365853, |
|
"grad_norm": 2.1618387699127197, |
|
"learning_rate": 5.313846153846154e-06, |
|
"loss": 0.015, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 87.1951219512195, |
|
"grad_norm": 1.4021707773208618, |
|
"learning_rate": 5.275384615384616e-06, |
|
"loss": 0.0191, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 87.8048780487805, |
|
"grad_norm": 1.4248768091201782, |
|
"learning_rate": 5.236923076923077e-06, |
|
"loss": 0.0181, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 88.41463414634147, |
|
"grad_norm": 1.5336748361587524, |
|
"learning_rate": 5.198461538461539e-06, |
|
"loss": 0.0152, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 89.02439024390245, |
|
"grad_norm": 1.369664192199707, |
|
"learning_rate": 5.1600000000000006e-06, |
|
"loss": 0.0193, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 89.63414634146342, |
|
"grad_norm": 2.177635669708252, |
|
"learning_rate": 5.121538461538462e-06, |
|
"loss": 0.0155, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 90.2439024390244, |
|
"grad_norm": 1.5026379823684692, |
|
"learning_rate": 5.083076923076924e-06, |
|
"loss": 0.017, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 90.85365853658537, |
|
"grad_norm": 1.1004050970077515, |
|
"learning_rate": 5.044615384615385e-06, |
|
"loss": 0.0157, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 91.46341463414635, |
|
"grad_norm": 1.5270745754241943, |
|
"learning_rate": 5.006153846153846e-06, |
|
"loss": 0.0148, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 92.07317073170732, |
|
"grad_norm": 2.0228114128112793, |
|
"learning_rate": 4.967692307692308e-06, |
|
"loss": 0.0198, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 92.6829268292683, |
|
"grad_norm": 2.0614633560180664, |
|
"learning_rate": 4.929230769230769e-06, |
|
"loss": 0.0136, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 93.29268292682927, |
|
"grad_norm": 0.9733468294143677, |
|
"learning_rate": 4.8907692307692315e-06, |
|
"loss": 0.0161, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 93.90243902439025, |
|
"grad_norm": 2.1101596355438232, |
|
"learning_rate": 4.852307692307693e-06, |
|
"loss": 0.0165, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 94.51219512195122, |
|
"grad_norm": 1.4769028425216675, |
|
"learning_rate": 4.813846153846154e-06, |
|
"loss": 0.015, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 95.1219512195122, |
|
"grad_norm": 2.2220544815063477, |
|
"learning_rate": 4.7753846153846155e-06, |
|
"loss": 0.019, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 95.73170731707317, |
|
"grad_norm": 1.2263191938400269, |
|
"learning_rate": 4.736923076923078e-06, |
|
"loss": 0.0145, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 96.34146341463415, |
|
"grad_norm": 1.1551388502120972, |
|
"learning_rate": 4.698461538461539e-06, |
|
"loss": 0.0146, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 96.95121951219512, |
|
"grad_norm": 1.4800796508789062, |
|
"learning_rate": 4.66e-06, |
|
"loss": 0.0167, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 97.5609756097561, |
|
"grad_norm": 1.9003641605377197, |
|
"learning_rate": 4.621538461538462e-06, |
|
"loss": 0.0148, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 97.5609756097561, |
|
"eval_loss": 5.350304126739502, |
|
"eval_runtime": 47.6459, |
|
"eval_samples_per_second": 3.379, |
|
"eval_steps_per_second": 0.441, |
|
"eval_wer": 93.42939481268012, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 98.17073170731707, |
|
"grad_norm": 1.1765137910842896, |
|
"learning_rate": 4.583076923076924e-06, |
|
"loss": 0.0162, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 98.78048780487805, |
|
"grad_norm": 2.274876594543457, |
|
"learning_rate": 4.544615384615385e-06, |
|
"loss": 0.0152, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 99.39024390243902, |
|
"grad_norm": 1.4429962635040283, |
|
"learning_rate": 4.506153846153846e-06, |
|
"loss": 0.0141, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 100.0, |
|
"grad_norm": 0.26069772243499756, |
|
"learning_rate": 4.467692307692308e-06, |
|
"loss": 0.0161, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 100.60975609756098, |
|
"grad_norm": 2.214935302734375, |
|
"learning_rate": 4.42923076923077e-06, |
|
"loss": 0.0129, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 101.21951219512195, |
|
"grad_norm": 0.8107747435569763, |
|
"learning_rate": 4.390769230769231e-06, |
|
"loss": 0.0156, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 101.82926829268293, |
|
"grad_norm": 2.0557289123535156, |
|
"learning_rate": 4.3523076923076925e-06, |
|
"loss": 0.0148, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 102.4390243902439, |
|
"grad_norm": 1.3531146049499512, |
|
"learning_rate": 4.313846153846154e-06, |
|
"loss": 0.0153, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 103.04878048780488, |
|
"grad_norm": 0.9748263359069824, |
|
"learning_rate": 4.275384615384616e-06, |
|
"loss": 0.0169, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 103.65853658536585, |
|
"grad_norm": 2.0307469367980957, |
|
"learning_rate": 4.236923076923077e-06, |
|
"loss": 0.0134, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 104.26829268292683, |
|
"grad_norm": 1.2394931316375732, |
|
"learning_rate": 4.198461538461539e-06, |
|
"loss": 0.0153, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 104.8780487804878, |
|
"grad_norm": 1.6185051202774048, |
|
"learning_rate": 4.16e-06, |
|
"loss": 0.0149, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 105.48780487804878, |
|
"grad_norm": 1.6088656187057495, |
|
"learning_rate": 4.121538461538462e-06, |
|
"loss": 0.0145, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 106.09756097560975, |
|
"grad_norm": 1.5969892740249634, |
|
"learning_rate": 4.0830769230769235e-06, |
|
"loss": 0.014, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 106.70731707317073, |
|
"grad_norm": 2.1902647018432617, |
|
"learning_rate": 4.044615384615385e-06, |
|
"loss": 0.014, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 107.3170731707317, |
|
"grad_norm": 1.7784631252288818, |
|
"learning_rate": 4.006153846153846e-06, |
|
"loss": 0.0137, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 107.92682926829268, |
|
"grad_norm": 1.5935354232788086, |
|
"learning_rate": 3.967692307692308e-06, |
|
"loss": 0.0144, |
|
"step": 4425 |
|
}, |
|
{ |
|
"epoch": 108.53658536585365, |
|
"grad_norm": 1.9656915664672852, |
|
"learning_rate": 3.92923076923077e-06, |
|
"loss": 0.0146, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 109.14634146341463, |
|
"grad_norm": 1.8089748620986938, |
|
"learning_rate": 3.890769230769231e-06, |
|
"loss": 0.0153, |
|
"step": 4475 |
|
}, |
|
{ |
|
"epoch": 109.7560975609756, |
|
"grad_norm": 1.1903936862945557, |
|
"learning_rate": 3.852307692307692e-06, |
|
"loss": 0.0125, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 110.36585365853658, |
|
"grad_norm": 1.5767234563827515, |
|
"learning_rate": 3.813846153846154e-06, |
|
"loss": 0.0131, |
|
"step": 4525 |
|
}, |
|
{ |
|
"epoch": 110.97560975609755, |
|
"grad_norm": 1.7390503883361816, |
|
"learning_rate": 3.7753846153846157e-06, |
|
"loss": 0.0142, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 111.58536585365853, |
|
"grad_norm": 1.5686002969741821, |
|
"learning_rate": 3.7369230769230775e-06, |
|
"loss": 0.0101, |
|
"step": 4575 |
|
}, |
|
{ |
|
"epoch": 112.1951219512195, |
|
"grad_norm": 1.2499815225601196, |
|
"learning_rate": 3.6984615384615384e-06, |
|
"loss": 0.0154, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 112.8048780487805, |
|
"grad_norm": 1.759077548980713, |
|
"learning_rate": 3.66e-06, |
|
"loss": 0.013, |
|
"step": 4625 |
|
}, |
|
{ |
|
"epoch": 113.41463414634147, |
|
"grad_norm": 1.3114948272705078, |
|
"learning_rate": 3.621538461538462e-06, |
|
"loss": 0.0143, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 114.02439024390245, |
|
"grad_norm": 1.6688133478164673, |
|
"learning_rate": 3.5830769230769236e-06, |
|
"loss": 0.0149, |
|
"step": 4675 |
|
}, |
|
{ |
|
"epoch": 114.63414634146342, |
|
"grad_norm": 1.8932459354400635, |
|
"learning_rate": 3.5446153846153845e-06, |
|
"loss": 0.0132, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 115.2439024390244, |
|
"grad_norm": 1.6125946044921875, |
|
"learning_rate": 3.5061538461538462e-06, |
|
"loss": 0.013, |
|
"step": 4725 |
|
}, |
|
{ |
|
"epoch": 115.85365853658537, |
|
"grad_norm": 1.8814420700073242, |
|
"learning_rate": 3.467692307692308e-06, |
|
"loss": 0.0131, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 116.46341463414635, |
|
"grad_norm": 0.7727325558662415, |
|
"learning_rate": 3.4292307692307697e-06, |
|
"loss": 0.0122, |
|
"step": 4775 |
|
}, |
|
{ |
|
"epoch": 117.07317073170732, |
|
"grad_norm": 1.364339828491211, |
|
"learning_rate": 3.3907692307692306e-06, |
|
"loss": 0.0129, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 117.6829268292683, |
|
"grad_norm": 1.7498785257339478, |
|
"learning_rate": 3.3523076923076924e-06, |
|
"loss": 0.0117, |
|
"step": 4825 |
|
}, |
|
{ |
|
"epoch": 118.29268292682927, |
|
"grad_norm": 1.0430307388305664, |
|
"learning_rate": 3.313846153846154e-06, |
|
"loss": 0.0145, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 118.90243902439025, |
|
"grad_norm": 1.16744863986969, |
|
"learning_rate": 3.275384615384616e-06, |
|
"loss": 0.0129, |
|
"step": 4875 |
|
}, |
|
{ |
|
"epoch": 119.51219512195122, |
|
"grad_norm": 1.569304347038269, |
|
"learning_rate": 3.2369230769230768e-06, |
|
"loss": 0.012, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 120.1219512195122, |
|
"grad_norm": 0.540378749370575, |
|
"learning_rate": 3.1984615384615385e-06, |
|
"loss": 0.0136, |
|
"step": 4925 |
|
}, |
|
{ |
|
"epoch": 120.73170731707317, |
|
"grad_norm": 1.8407570123672485, |
|
"learning_rate": 3.1600000000000002e-06, |
|
"loss": 0.0128, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 121.34146341463415, |
|
"grad_norm": 0.7160439491271973, |
|
"learning_rate": 3.121538461538462e-06, |
|
"loss": 0.0127, |
|
"step": 4975 |
|
}, |
|
{ |
|
"epoch": 121.95121951219512, |
|
"grad_norm": 1.7198063135147095, |
|
"learning_rate": 3.083076923076923e-06, |
|
"loss": 0.0141, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 121.95121951219512, |
|
"eval_loss": 5.409893035888672, |
|
"eval_runtime": 49.6915, |
|
"eval_samples_per_second": 3.24, |
|
"eval_steps_per_second": 0.423, |
|
"eval_wer": 92.36311239193084, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 122.5609756097561, |
|
"grad_norm": 1.909258246421814, |
|
"learning_rate": 3.0446153846153846e-06, |
|
"loss": 0.0119, |
|
"step": 5025 |
|
}, |
|
{ |
|
"epoch": 123.17073170731707, |
|
"grad_norm": 2.1651320457458496, |
|
"learning_rate": 3.0061538461538464e-06, |
|
"loss": 0.0119, |
|
"step": 5050 |
|
}, |
|
{ |
|
"epoch": 123.78048780487805, |
|
"grad_norm": 0.9315907955169678, |
|
"learning_rate": 2.967692307692308e-06, |
|
"loss": 0.0126, |
|
"step": 5075 |
|
}, |
|
{ |
|
"epoch": 124.39024390243902, |
|
"grad_norm": 1.071990728378296, |
|
"learning_rate": 2.929230769230769e-06, |
|
"loss": 0.01, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 125.0, |
|
"grad_norm": 4.327281475067139, |
|
"learning_rate": 2.8907692307692308e-06, |
|
"loss": 0.0145, |
|
"step": 5125 |
|
}, |
|
{ |
|
"epoch": 125.60975609756098, |
|
"grad_norm": 1.3406645059585571, |
|
"learning_rate": 2.8523076923076925e-06, |
|
"loss": 0.0112, |
|
"step": 5150 |
|
}, |
|
{ |
|
"epoch": 126.21951219512195, |
|
"grad_norm": 1.3894404172897339, |
|
"learning_rate": 2.8138461538461542e-06, |
|
"loss": 0.0131, |
|
"step": 5175 |
|
}, |
|
{ |
|
"epoch": 126.82926829268293, |
|
"grad_norm": 1.4942810535430908, |
|
"learning_rate": 2.775384615384615e-06, |
|
"loss": 0.0126, |
|
"step": 5200 |
|
}, |
|
{ |
|
"epoch": 127.4390243902439, |
|
"grad_norm": 1.401851773262024, |
|
"learning_rate": 2.736923076923077e-06, |
|
"loss": 0.0113, |
|
"step": 5225 |
|
}, |
|
{ |
|
"epoch": 128.0487804878049, |
|
"grad_norm": 1.1281338930130005, |
|
"learning_rate": 2.6984615384615386e-06, |
|
"loss": 0.0126, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 128.65853658536585, |
|
"grad_norm": 1.547263264656067, |
|
"learning_rate": 2.6600000000000004e-06, |
|
"loss": 0.0114, |
|
"step": 5275 |
|
}, |
|
{ |
|
"epoch": 129.26829268292684, |
|
"grad_norm": 0.9242371916770935, |
|
"learning_rate": 2.6215384615384617e-06, |
|
"loss": 0.0119, |
|
"step": 5300 |
|
}, |
|
{ |
|
"epoch": 129.8780487804878, |
|
"grad_norm": 1.8874177932739258, |
|
"learning_rate": 2.583076923076923e-06, |
|
"loss": 0.012, |
|
"step": 5325 |
|
}, |
|
{ |
|
"epoch": 130.4878048780488, |
|
"grad_norm": 1.1711006164550781, |
|
"learning_rate": 2.5446153846153848e-06, |
|
"loss": 0.0135, |
|
"step": 5350 |
|
}, |
|
{ |
|
"epoch": 131.09756097560975, |
|
"grad_norm": 1.4041301012039185, |
|
"learning_rate": 2.5061538461538465e-06, |
|
"loss": 0.0117, |
|
"step": 5375 |
|
}, |
|
{ |
|
"epoch": 131.70731707317074, |
|
"grad_norm": 1.607003092765808, |
|
"learning_rate": 2.467692307692308e-06, |
|
"loss": 0.0108, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 132.3170731707317, |
|
"grad_norm": 1.199080467224121, |
|
"learning_rate": 2.4292307692307696e-06, |
|
"loss": 0.0131, |
|
"step": 5425 |
|
}, |
|
{ |
|
"epoch": 132.9268292682927, |
|
"grad_norm": 1.4485121965408325, |
|
"learning_rate": 2.390769230769231e-06, |
|
"loss": 0.0114, |
|
"step": 5450 |
|
}, |
|
{ |
|
"epoch": 133.53658536585365, |
|
"grad_norm": 0.8223636150360107, |
|
"learning_rate": 2.3523076923076926e-06, |
|
"loss": 0.0104, |
|
"step": 5475 |
|
}, |
|
{ |
|
"epoch": 134.14634146341464, |
|
"grad_norm": 1.429047703742981, |
|
"learning_rate": 2.313846153846154e-06, |
|
"loss": 0.0122, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 134.7560975609756, |
|
"grad_norm": 1.8713955879211426, |
|
"learning_rate": 2.2753846153846157e-06, |
|
"loss": 0.0106, |
|
"step": 5525 |
|
}, |
|
{ |
|
"epoch": 135.3658536585366, |
|
"grad_norm": 1.4633824825286865, |
|
"learning_rate": 2.236923076923077e-06, |
|
"loss": 0.0135, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 135.97560975609755, |
|
"grad_norm": 1.2472089529037476, |
|
"learning_rate": 2.1984615384615388e-06, |
|
"loss": 0.0116, |
|
"step": 5575 |
|
}, |
|
{ |
|
"epoch": 136.58536585365854, |
|
"grad_norm": 2.140740156173706, |
|
"learning_rate": 2.16e-06, |
|
"loss": 0.0112, |
|
"step": 5600 |
|
}, |
|
{ |
|
"epoch": 137.1951219512195, |
|
"grad_norm": 0.7846082448959351, |
|
"learning_rate": 2.121538461538462e-06, |
|
"loss": 0.0123, |
|
"step": 5625 |
|
}, |
|
{ |
|
"epoch": 137.8048780487805, |
|
"grad_norm": 1.6665152311325073, |
|
"learning_rate": 2.083076923076923e-06, |
|
"loss": 0.0116, |
|
"step": 5650 |
|
}, |
|
{ |
|
"epoch": 138.41463414634146, |
|
"grad_norm": 1.8274633884429932, |
|
"learning_rate": 2.044615384615385e-06, |
|
"loss": 0.0109, |
|
"step": 5675 |
|
}, |
|
{ |
|
"epoch": 139.02439024390245, |
|
"grad_norm": 1.6559218168258667, |
|
"learning_rate": 2.006153846153846e-06, |
|
"loss": 0.0115, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 139.6341463414634, |
|
"grad_norm": 1.2243770360946655, |
|
"learning_rate": 1.967692307692308e-06, |
|
"loss": 0.0099, |
|
"step": 5725 |
|
}, |
|
{ |
|
"epoch": 140.2439024390244, |
|
"grad_norm": 1.5521693229675293, |
|
"learning_rate": 1.9292307692307693e-06, |
|
"loss": 0.0116, |
|
"step": 5750 |
|
}, |
|
{ |
|
"epoch": 140.85365853658536, |
|
"grad_norm": 1.4749764204025269, |
|
"learning_rate": 1.890769230769231e-06, |
|
"loss": 0.0103, |
|
"step": 5775 |
|
}, |
|
{ |
|
"epoch": 141.46341463414635, |
|
"grad_norm": 1.0785146951675415, |
|
"learning_rate": 1.8523076923076923e-06, |
|
"loss": 0.0117, |
|
"step": 5800 |
|
}, |
|
{ |
|
"epoch": 142.0731707317073, |
|
"grad_norm": 1.3806939125061035, |
|
"learning_rate": 1.813846153846154e-06, |
|
"loss": 0.0125, |
|
"step": 5825 |
|
}, |
|
{ |
|
"epoch": 142.6829268292683, |
|
"grad_norm": 1.8959143161773682, |
|
"learning_rate": 1.7753846153846154e-06, |
|
"loss": 0.0101, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 143.29268292682926, |
|
"grad_norm": 1.7386155128479004, |
|
"learning_rate": 1.7369230769230771e-06, |
|
"loss": 0.0112, |
|
"step": 5875 |
|
}, |
|
{ |
|
"epoch": 143.90243902439025, |
|
"grad_norm": 1.7782413959503174, |
|
"learning_rate": 1.6984615384615385e-06, |
|
"loss": 0.0122, |
|
"step": 5900 |
|
}, |
|
{ |
|
"epoch": 144.5121951219512, |
|
"grad_norm": 1.167022466659546, |
|
"learning_rate": 1.6600000000000002e-06, |
|
"loss": 0.0096, |
|
"step": 5925 |
|
}, |
|
{ |
|
"epoch": 145.1219512195122, |
|
"grad_norm": 1.1919440031051636, |
|
"learning_rate": 1.6215384615384615e-06, |
|
"loss": 0.0113, |
|
"step": 5950 |
|
}, |
|
{ |
|
"epoch": 145.73170731707316, |
|
"grad_norm": 1.8381915092468262, |
|
"learning_rate": 1.5830769230769233e-06, |
|
"loss": 0.0112, |
|
"step": 5975 |
|
}, |
|
{ |
|
"epoch": 146.34146341463415, |
|
"grad_norm": 1.8982946872711182, |
|
"learning_rate": 1.5446153846153846e-06, |
|
"loss": 0.0112, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 146.34146341463415, |
|
"eval_loss": 5.48370361328125, |
|
"eval_runtime": 58.2862, |
|
"eval_samples_per_second": 2.762, |
|
"eval_steps_per_second": 0.36, |
|
"eval_wer": 92.44956772334294, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 146.9512195121951, |
|
"grad_norm": 1.5436760187149048, |
|
"learning_rate": 1.5061538461538463e-06, |
|
"loss": 0.0107, |
|
"step": 6025 |
|
}, |
|
{ |
|
"epoch": 147.5609756097561, |
|
"grad_norm": 1.2570878267288208, |
|
"learning_rate": 1.4676923076923076e-06, |
|
"loss": 0.0104, |
|
"step": 6050 |
|
}, |
|
{ |
|
"epoch": 148.17073170731706, |
|
"grad_norm": 0.9468504190444946, |
|
"learning_rate": 1.4292307692307694e-06, |
|
"loss": 0.0117, |
|
"step": 6075 |
|
}, |
|
{ |
|
"epoch": 148.78048780487805, |
|
"grad_norm": 1.4775031805038452, |
|
"learning_rate": 1.3907692307692307e-06, |
|
"loss": 0.0106, |
|
"step": 6100 |
|
}, |
|
{ |
|
"epoch": 149.390243902439, |
|
"grad_norm": 0.8422410488128662, |
|
"learning_rate": 1.3523076923076925e-06, |
|
"loss": 0.0099, |
|
"step": 6125 |
|
}, |
|
{ |
|
"epoch": 150.0, |
|
"grad_norm": 5.131112575531006, |
|
"learning_rate": 1.3138461538461538e-06, |
|
"loss": 0.0119, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 150.609756097561, |
|
"grad_norm": 1.6066230535507202, |
|
"learning_rate": 1.2753846153846155e-06, |
|
"loss": 0.0098, |
|
"step": 6175 |
|
}, |
|
{ |
|
"epoch": 151.21951219512195, |
|
"grad_norm": 0.9405758380889893, |
|
"learning_rate": 1.236923076923077e-06, |
|
"loss": 0.0114, |
|
"step": 6200 |
|
}, |
|
{ |
|
"epoch": 151.82926829268294, |
|
"grad_norm": 1.5420202016830444, |
|
"learning_rate": 1.1984615384615386e-06, |
|
"loss": 0.0104, |
|
"step": 6225 |
|
}, |
|
{ |
|
"epoch": 152.4390243902439, |
|
"grad_norm": 1.5503705739974976, |
|
"learning_rate": 1.1600000000000001e-06, |
|
"loss": 0.0111, |
|
"step": 6250 |
|
}, |
|
{ |
|
"epoch": 153.0487804878049, |
|
"grad_norm": 1.3805664777755737, |
|
"learning_rate": 1.1215384615384616e-06, |
|
"loss": 0.0095, |
|
"step": 6275 |
|
}, |
|
{ |
|
"epoch": 153.65853658536585, |
|
"grad_norm": 1.4250017404556274, |
|
"learning_rate": 1.0830769230769232e-06, |
|
"loss": 0.0101, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 154.26829268292684, |
|
"grad_norm": 0.9687316417694092, |
|
"learning_rate": 1.0446153846153847e-06, |
|
"loss": 0.0104, |
|
"step": 6325 |
|
}, |
|
{ |
|
"epoch": 154.8780487804878, |
|
"grad_norm": 1.2869887351989746, |
|
"learning_rate": 1.0061538461538462e-06, |
|
"loss": 0.0105, |
|
"step": 6350 |
|
}, |
|
{ |
|
"epoch": 155.4878048780488, |
|
"grad_norm": 1.079960584640503, |
|
"learning_rate": 9.676923076923078e-07, |
|
"loss": 0.0105, |
|
"step": 6375 |
|
}, |
|
{ |
|
"epoch": 156.09756097560975, |
|
"grad_norm": 0.6645700931549072, |
|
"learning_rate": 9.292307692307693e-07, |
|
"loss": 0.0089, |
|
"step": 6400 |
|
}, |
|
{ |
|
"epoch": 156.70731707317074, |
|
"grad_norm": 0.04561324417591095, |
|
"learning_rate": 8.907692307692308e-07, |
|
"loss": 0.0103, |
|
"step": 6425 |
|
}, |
|
{ |
|
"epoch": 157.3170731707317, |
|
"grad_norm": 0.04663435369729996, |
|
"learning_rate": 8.523076923076924e-07, |
|
"loss": 0.0091, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 157.9268292682927, |
|
"grad_norm": 1.2721596956253052, |
|
"learning_rate": 8.138461538461539e-07, |
|
"loss": 0.0112, |
|
"step": 6475 |
|
}, |
|
{ |
|
"epoch": 158.53658536585365, |
|
"grad_norm": 1.4760843515396118, |
|
"learning_rate": 7.753846153846154e-07, |
|
"loss": 0.0107, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 159.14634146341464, |
|
"grad_norm": 1.8404792547225952, |
|
"learning_rate": 7.36923076923077e-07, |
|
"loss": 0.0099, |
|
"step": 6525 |
|
}, |
|
{ |
|
"epoch": 159.7560975609756, |
|
"grad_norm": 1.6511716842651367, |
|
"learning_rate": 6.984615384615385e-07, |
|
"loss": 0.0099, |
|
"step": 6550 |
|
}, |
|
{ |
|
"epoch": 160.3658536585366, |
|
"grad_norm": 1.2903897762298584, |
|
"learning_rate": 6.6e-07, |
|
"loss": 0.0094, |
|
"step": 6575 |
|
}, |
|
{ |
|
"epoch": 160.97560975609755, |
|
"grad_norm": 1.1578794717788696, |
|
"learning_rate": 6.215384615384616e-07, |
|
"loss": 0.0102, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 161.58536585365854, |
|
"grad_norm": 0.9990088939666748, |
|
"learning_rate": 5.830769230769232e-07, |
|
"loss": 0.0098, |
|
"step": 6625 |
|
}, |
|
{ |
|
"epoch": 162.1951219512195, |
|
"grad_norm": 0.8064598441123962, |
|
"learning_rate": 5.446153846153847e-07, |
|
"loss": 0.0091, |
|
"step": 6650 |
|
}, |
|
{ |
|
"epoch": 162.8048780487805, |
|
"grad_norm": 1.5212252140045166, |
|
"learning_rate": 5.061538461538463e-07, |
|
"loss": 0.0098, |
|
"step": 6675 |
|
}, |
|
{ |
|
"epoch": 163.41463414634146, |
|
"grad_norm": 1.7828702926635742, |
|
"learning_rate": 4.6769230769230775e-07, |
|
"loss": 0.0097, |
|
"step": 6700 |
|
}, |
|
{ |
|
"epoch": 164.02439024390245, |
|
"grad_norm": 1.631642460823059, |
|
"learning_rate": 4.292307692307693e-07, |
|
"loss": 0.01, |
|
"step": 6725 |
|
}, |
|
{ |
|
"epoch": 164.6341463414634, |
|
"grad_norm": 1.3908400535583496, |
|
"learning_rate": 3.907692307692308e-07, |
|
"loss": 0.0098, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 165.2439024390244, |
|
"grad_norm": 1.3206273317337036, |
|
"learning_rate": 3.5230769230769234e-07, |
|
"loss": 0.0092, |
|
"step": 6775 |
|
}, |
|
{ |
|
"epoch": 165.85365853658536, |
|
"grad_norm": 1.2768199443817139, |
|
"learning_rate": 3.138461538461539e-07, |
|
"loss": 0.0101, |
|
"step": 6800 |
|
}, |
|
{ |
|
"epoch": 166.46341463414635, |
|
"grad_norm": 0.04299966245889664, |
|
"learning_rate": 2.753846153846154e-07, |
|
"loss": 0.01, |
|
"step": 6825 |
|
}, |
|
{ |
|
"epoch": 167.0731707317073, |
|
"grad_norm": 1.616281270980835, |
|
"learning_rate": 2.3692307692307694e-07, |
|
"loss": 0.0097, |
|
"step": 6850 |
|
}, |
|
{ |
|
"epoch": 167.6829268292683, |
|
"grad_norm": 2.0464248657226562, |
|
"learning_rate": 1.9846153846153847e-07, |
|
"loss": 0.01, |
|
"step": 6875 |
|
}, |
|
{ |
|
"epoch": 168.29268292682926, |
|
"grad_norm": 1.8667324781417847, |
|
"learning_rate": 1.6e-07, |
|
"loss": 0.0098, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 168.90243902439025, |
|
"grad_norm": 1.6423460245132446, |
|
"learning_rate": 1.2153846153846156e-07, |
|
"loss": 0.0087, |
|
"step": 6925 |
|
}, |
|
{ |
|
"epoch": 169.5121951219512, |
|
"grad_norm": 1.0765767097473145, |
|
"learning_rate": 8.307692307692308e-08, |
|
"loss": 0.0096, |
|
"step": 6950 |
|
}, |
|
{ |
|
"epoch": 170.1219512195122, |
|
"grad_norm": 0.8501229882240295, |
|
"learning_rate": 4.461538461538461e-08, |
|
"loss": 0.0093, |
|
"step": 6975 |
|
}, |
|
{ |
|
"epoch": 170.73170731707316, |
|
"grad_norm": 1.408791184425354, |
|
"learning_rate": 6.153846153846155e-09, |
|
"loss": 0.0104, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 170.73170731707316, |
|
"eval_loss": 5.508469581604004, |
|
"eval_runtime": 57.2472, |
|
"eval_samples_per_second": 2.812, |
|
"eval_steps_per_second": 0.367, |
|
"eval_wer": 91.78674351585015, |
|
"step": 7000 |
|
} |
|
], |
|
"logging_steps": 25, |
|
"max_steps": 7000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 171, |
|
"save_steps": 1000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.7029073973248e+18, |
|
"train_batch_size": 16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |