{
  "best_metric": 102.80748663101605,
  "best_model_checkpoint": "./whisper-large-v2-amet/checkpoint-3000",
  "epoch": 5000.0,
  "global_step": 5000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 25.0,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 2.7655,
      "step": 25
    },
    {
      "epoch": 50.0,
      "learning_rate": 9.000000000000001e-07,
      "loss": 1.7532,
      "step": 50
    },
    {
      "epoch": 75.0,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.3058,
      "step": 75
    },
    {
      "epoch": 100.0,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0006,
      "step": 100
    },
    {
      "epoch": 125.0,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0001,
      "step": 125
    },
    {
      "epoch": 150.0,
      "learning_rate": 2.9e-06,
      "loss": 0.0001,
      "step": 150
    },
    {
      "epoch": 175.0,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0001,
      "step": 175
    },
    {
      "epoch": 200.0,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0001,
      "step": 200
    },
    {
      "epoch": 225.0,
      "learning_rate": 4.4e-06,
      "loss": 0.0001,
      "step": 225
    },
    {
      "epoch": 250.0,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0001,
      "step": 250
    },
    {
      "epoch": 275.0,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.0001,
      "step": 275
    },
    {
      "epoch": 300.0,
      "learning_rate": 5.9e-06,
      "loss": 0.0001,
      "step": 300
    },
    {
      "epoch": 325.0,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.0001,
      "step": 325
    },
    {
      "epoch": 350.0,
      "learning_rate": 6.9e-06,
      "loss": 0.0001,
      "step": 350
    },
    {
      "epoch": 375.0,
      "learning_rate": 7.4e-06,
      "loss": 0.0001,
      "step": 375
    },
    {
      "epoch": 400.0,
      "learning_rate": 7.9e-06,
      "loss": 0.0001,
      "step": 400
    },
    {
      "epoch": 425.0,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.0001,
      "step": 425
    },
    {
      "epoch": 450.0,
      "learning_rate": 8.900000000000001e-06,
      "loss": 0.0001,
      "step": 450
    },
    {
      "epoch": 475.0,
      "learning_rate": 9.4e-06,
      "loss": 0.0001,
      "step": 475
    },
    {
      "epoch": 500.0,
      "learning_rate": 9.9e-06,
      "loss": 0.0001,
      "step": 500
    },
    {
      "epoch": 525.0,
      "learning_rate": 9.955555555555556e-06,
      "loss": 0.0001,
      "step": 525
    },
    {
      "epoch": 550.0,
      "learning_rate": 9.9e-06,
      "loss": 0.0,
      "step": 550
    },
    {
      "epoch": 575.0,
      "learning_rate": 9.844444444444446e-06,
      "loss": 0.0,
      "step": 575
    },
    {
      "epoch": 600.0,
      "learning_rate": 9.78888888888889e-06,
      "loss": 0.0,
      "step": 600
    },
    {
      "epoch": 625.0,
      "learning_rate": 9.733333333333334e-06,
      "loss": 0.0001,
      "step": 625
    },
    {
      "epoch": 650.0,
      "learning_rate": 9.677777777777778e-06,
      "loss": 0.0001,
      "step": 650
    },
    {
      "epoch": 675.0,
      "learning_rate": 9.622222222222222e-06,
      "loss": 0.0001,
      "step": 675
    },
    {
      "epoch": 700.0,
      "learning_rate": 9.566666666666668e-06,
      "loss": 0.0001,
      "step": 700
    },
    {
      "epoch": 725.0,
      "learning_rate": 9.511111111111112e-06,
      "loss": 0.0,
      "step": 725
    },
    {
      "epoch": 750.0,
      "learning_rate": 9.455555555555557e-06,
      "loss": 0.0,
      "step": 750
    },
    {
      "epoch": 775.0,
      "learning_rate": 9.4e-06,
      "loss": 0.0,
      "step": 775
    },
    {
      "epoch": 800.0,
      "learning_rate": 9.344444444444446e-06,
      "loss": 0.0,
      "step": 800
    },
    {
      "epoch": 825.0,
      "learning_rate": 9.28888888888889e-06,
      "loss": 0.0,
      "step": 825
    },
    {
      "epoch": 850.0,
      "learning_rate": 9.233333333333334e-06,
      "loss": 0.0,
      "step": 850
    },
    {
      "epoch": 875.0,
      "learning_rate": 9.17777777777778e-06,
      "loss": 0.0,
      "step": 875
    },
    {
      "epoch": 900.0,
      "learning_rate": 9.122222222222223e-06,
      "loss": 0.0,
      "step": 900
    },
    {
      "epoch": 925.0,
      "learning_rate": 9.066666666666667e-06,
      "loss": 0.0,
      "step": 925
    },
    {
      "epoch": 950.0,
      "learning_rate": 9.011111111111111e-06,
      "loss": 0.0,
      "step": 950
    },
    {
      "epoch": 975.0,
      "learning_rate": 8.955555555555555e-06,
      "loss": 0.0,
      "step": 975
    },
    {
      "epoch": 1000.0,
      "learning_rate": 8.900000000000001e-06,
      "loss": 0.0,
      "step": 1000
    },
    {
      "epoch": 1000.0,
      "eval_loss": 8.382182121276855,
      "eval_runtime": 384.7534,
      "eval_samples_per_second": 0.13,
      "eval_steps_per_second": 0.003,
      "eval_wer": 156.01604278074865,
      "step": 1000
    },
    {
      "epoch": 1025.0,
      "learning_rate": 8.844444444444445e-06,
      "loss": 0.0,
      "step": 1025
    },
    {
      "epoch": 1050.0,
      "learning_rate": 8.788888888888891e-06,
      "loss": 0.0,
      "step": 1050
    },
    {
      "epoch": 1075.0,
      "learning_rate": 8.733333333333333e-06,
      "loss": 0.0,
      "step": 1075
    },
    {
      "epoch": 1100.0,
      "learning_rate": 8.677777777777779e-06,
      "loss": 0.0,
      "step": 1100
    },
    {
      "epoch": 1125.0,
      "learning_rate": 8.622222222222223e-06,
      "loss": 0.0,
      "step": 1125
    },
    {
      "epoch": 1150.0,
      "learning_rate": 8.566666666666667e-06,
      "loss": 0.0,
      "step": 1150
    },
    {
      "epoch": 1175.0,
      "learning_rate": 8.511111111111113e-06,
      "loss": 0.0,
      "step": 1175
    },
    {
      "epoch": 1200.0,
      "learning_rate": 8.455555555555555e-06,
      "loss": 0.0,
      "step": 1200
    },
    {
      "epoch": 1225.0,
      "learning_rate": 8.400000000000001e-06,
      "loss": 0.0,
      "step": 1225
    },
    {
      "epoch": 1250.0,
      "learning_rate": 8.344444444444445e-06,
      "loss": 0.0,
      "step": 1250
    },
    {
      "epoch": 1275.0,
      "learning_rate": 8.288888888888889e-06,
      "loss": 0.0,
      "step": 1275
    },
    {
      "epoch": 1300.0,
      "learning_rate": 8.233333333333335e-06,
      "loss": 0.0,
      "step": 1300
    },
    {
      "epoch": 1325.0,
      "learning_rate": 8.177777777777779e-06,
      "loss": 0.0,
      "step": 1325
    },
    {
      "epoch": 1350.0,
      "learning_rate": 8.122222222222223e-06,
      "loss": 0.0,
      "step": 1350
    },
    {
      "epoch": 1375.0,
      "learning_rate": 8.066666666666667e-06,
      "loss": 0.0,
      "step": 1375
    },
    {
      "epoch": 1400.0,
      "learning_rate": 8.011111111111113e-06,
      "loss": 0.0,
      "step": 1400
    },
    {
      "epoch": 1425.0,
      "learning_rate": 7.955555555555557e-06,
      "loss": 0.0001,
      "step": 1425
    },
    {
      "epoch": 1450.0,
      "learning_rate": 7.9e-06,
      "loss": 0.0,
      "step": 1450
    },
    {
      "epoch": 1475.0,
      "learning_rate": 7.844444444444446e-06,
      "loss": 0.0,
      "step": 1475
    },
    {
      "epoch": 1500.0,
      "learning_rate": 7.788888888888889e-06,
      "loss": 0.0,
      "step": 1500
    },
    {
      "epoch": 1525.0,
      "learning_rate": 7.733333333333334e-06,
      "loss": 0.0001,
      "step": 1525
    },
    {
      "epoch": 1550.0,
      "learning_rate": 7.677777777777778e-06,
      "loss": 0.0,
      "step": 1550
    },
    {
      "epoch": 1575.0,
      "learning_rate": 7.622222222222223e-06,
      "loss": 0.0,
      "step": 1575
    },
    {
      "epoch": 1600.0,
      "learning_rate": 7.566666666666667e-06,
      "loss": 0.0,
      "step": 1600
    },
    {
      "epoch": 1625.0,
      "learning_rate": 7.511111111111111e-06,
      "loss": 0.0,
      "step": 1625
    },
    {
      "epoch": 1650.0,
      "learning_rate": 7.455555555555556e-06,
      "loss": 0.0,
      "step": 1650
    },
    {
      "epoch": 1675.0,
      "learning_rate": 7.4e-06,
      "loss": 0.0,
      "step": 1675
    },
    {
      "epoch": 1700.0,
      "learning_rate": 7.344444444444445e-06,
      "loss": 0.0,
      "step": 1700
    },
    {
      "epoch": 1725.0,
      "learning_rate": 7.28888888888889e-06,
      "loss": 0.0,
      "step": 1725
    },
    {
      "epoch": 1750.0,
      "learning_rate": 7.233333333333334e-06,
      "loss": 0.0,
      "step": 1750
    },
    {
      "epoch": 1775.0,
      "learning_rate": 7.177777777777778e-06,
      "loss": 0.0,
      "step": 1775
    },
    {
      "epoch": 1800.0,
      "learning_rate": 7.122222222222222e-06,
      "loss": 0.0,
      "step": 1800
    },
    {
      "epoch": 1825.0,
      "learning_rate": 7.066666666666667e-06,
      "loss": 0.0,
      "step": 1825
    },
    {
      "epoch": 1850.0,
      "learning_rate": 7.011111111111112e-06,
      "loss": 0.0,
      "step": 1850
    },
    {
      "epoch": 1875.0,
      "learning_rate": 6.955555555555557e-06,
      "loss": 0.0,
      "step": 1875
    },
    {
      "epoch": 1900.0,
      "learning_rate": 6.9e-06,
      "loss": 0.0,
      "step": 1900
    },
    {
      "epoch": 1925.0,
      "learning_rate": 6.844444444444445e-06,
      "loss": 0.0,
      "step": 1925
    },
    {
      "epoch": 1950.0,
      "learning_rate": 6.788888888888889e-06,
      "loss": 0.0,
      "step": 1950
    },
    {
      "epoch": 1975.0,
      "learning_rate": 6.733333333333334e-06,
      "loss": 0.0,
      "step": 1975
    },
    {
      "epoch": 2000.0,
      "learning_rate": 6.677777777777779e-06,
      "loss": 0.0,
      "step": 2000
    },
    {
      "epoch": 2000.0,
      "eval_loss": 9.796119689941406,
      "eval_runtime": 384.86,
      "eval_samples_per_second": 0.13,
      "eval_steps_per_second": 0.003,
      "eval_wer": 110.42780748663101,
      "step": 2000
    },
    {
      "epoch": 2025.0,
      "learning_rate": 6.6222222222222236e-06,
      "loss": 0.0,
      "step": 2025
    },
    {
      "epoch": 2050.0,
      "learning_rate": 6.566666666666667e-06,
      "loss": 0.0,
      "step": 2050
    },
    {
      "epoch": 2075.0,
      "learning_rate": 6.511111111111112e-06,
      "loss": 0.0,
      "step": 2075
    },
    {
      "epoch": 2100.0,
      "learning_rate": 6.455555555555556e-06,
      "loss": 0.0,
      "step": 2100
    },
    {
      "epoch": 2125.0,
      "learning_rate": 6.4000000000000006e-06,
      "loss": 0.0,
      "step": 2125
    },
    {
      "epoch": 2150.0,
      "learning_rate": 6.3444444444444454e-06,
      "loss": 0.0,
      "step": 2150
    },
    {
      "epoch": 2175.0,
      "learning_rate": 6.28888888888889e-06,
      "loss": 0.0,
      "step": 2175
    },
    {
      "epoch": 2200.0,
      "learning_rate": 6.2333333333333335e-06,
      "loss": 0.0,
      "step": 2200
    },
    {
      "epoch": 2225.0,
      "learning_rate": 6.177777777777778e-06,
      "loss": 0.0,
      "step": 2225
    },
    {
      "epoch": 2250.0,
      "learning_rate": 6.1222222222222224e-06,
      "loss": 0.0,
      "step": 2250
    },
    {
      "epoch": 2275.0,
      "learning_rate": 6.066666666666667e-06,
      "loss": 0.0,
      "step": 2275
    },
    {
      "epoch": 2300.0,
      "learning_rate": 6.011111111111112e-06,
      "loss": 0.0,
      "step": 2300
    },
    {
      "epoch": 2325.0,
      "learning_rate": 5.955555555555555e-06,
      "loss": 0.0,
      "step": 2325
    },
    {
      "epoch": 2350.0,
      "learning_rate": 5.9e-06,
      "loss": 0.0,
      "step": 2350
    },
    {
      "epoch": 2375.0,
      "learning_rate": 5.844444444444445e-06,
      "loss": 0.0,
      "step": 2375
    },
    {
      "epoch": 2400.0,
      "learning_rate": 5.788888888888889e-06,
      "loss": 0.0,
      "step": 2400
    },
    {
      "epoch": 2425.0,
      "learning_rate": 5.733333333333334e-06,
      "loss": 0.0,
      "step": 2425
    },
    {
      "epoch": 2450.0,
      "learning_rate": 5.677777777777779e-06,
      "loss": 0.0,
      "step": 2450
    },
    {
      "epoch": 2475.0,
      "learning_rate": 5.622222222222222e-06,
      "loss": 0.0,
      "step": 2475
    },
    {
      "epoch": 2500.0,
      "learning_rate": 5.566666666666667e-06,
      "loss": 0.0,
      "step": 2500
    },
    {
      "epoch": 2525.0,
      "learning_rate": 5.511111111111112e-06,
      "loss": 0.0,
      "step": 2525
    },
    {
      "epoch": 2550.0,
      "learning_rate": 5.455555555555556e-06,
      "loss": 0.0,
      "step": 2550
    },
    {
      "epoch": 2575.0,
      "learning_rate": 5.400000000000001e-06,
      "loss": 0.0,
      "step": 2575
    },
    {
      "epoch": 2600.0,
      "learning_rate": 5.344444444444446e-06,
      "loss": 0.0,
      "step": 2600
    },
    {
      "epoch": 2625.0,
      "learning_rate": 5.288888888888889e-06,
      "loss": 0.0,
      "step": 2625
    },
    {
      "epoch": 2650.0,
      "learning_rate": 5.233333333333334e-06,
      "loss": 0.0,
      "step": 2650
    },
    {
      "epoch": 2675.0,
      "learning_rate": 5.177777777777779e-06,
      "loss": 0.0,
      "step": 2675
    },
    {
      "epoch": 2700.0,
      "learning_rate": 5.122222222222223e-06,
      "loss": 0.0,
      "step": 2700
    },
    {
      "epoch": 2725.0,
      "learning_rate": 5.0666666666666676e-06,
      "loss": 0.0,
      "step": 2725
    },
    {
      "epoch": 2750.0,
      "learning_rate": 5.011111111111111e-06,
      "loss": 0.0,
      "step": 2750
    },
    {
      "epoch": 2775.0,
      "learning_rate": 4.9555555555555565e-06,
      "loss": 0.0,
      "step": 2775
    },
    {
      "epoch": 2800.0,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0,
      "step": 2800
    },
    {
      "epoch": 2825.0,
      "learning_rate": 4.8444444444444446e-06,
      "loss": 0.0,
      "step": 2825
    },
    {
      "epoch": 2850.0,
      "learning_rate": 4.7888888888888894e-06,
      "loss": 0.0,
      "step": 2850
    },
    {
      "epoch": 2875.0,
      "learning_rate": 4.7333333333333335e-06,
      "loss": 0.0,
      "step": 2875
    },
    {
      "epoch": 2900.0,
      "learning_rate": 4.677777777777778e-06,
      "loss": 0.0,
      "step": 2900
    },
    {
      "epoch": 2925.0,
      "learning_rate": 4.622222222222222e-06,
      "loss": 0.0,
      "step": 2925
    },
    {
      "epoch": 2950.0,
      "learning_rate": 4.566666666666667e-06,
      "loss": 0.0,
      "step": 2950
    },
    {
      "epoch": 2975.0,
      "learning_rate": 4.511111111111111e-06,
      "loss": 0.0,
      "step": 2975
    },
    {
      "epoch": 3000.0,
      "learning_rate": 4.455555555555555e-06,
      "loss": 0.0,
      "step": 3000
    },
    {
      "epoch": 3000.0,
      "eval_loss": 12.001350402832031,
      "eval_runtime": 384.6328,
      "eval_samples_per_second": 0.13,
      "eval_steps_per_second": 0.003,
      "eval_wer": 102.80748663101605,
      "step": 3000
    },
    {
      "epoch": 3025.0,
      "learning_rate": 4.4e-06,
      "loss": 0.0,
      "step": 3025
    },
    {
      "epoch": 3050.0,
      "learning_rate": 4.344444444444445e-06,
      "loss": 0.0,
      "step": 3050
    },
    {
      "epoch": 3075.0,
      "learning_rate": 4.288888888888889e-06,
      "loss": 0.0,
      "step": 3075
    },
    {
      "epoch": 3100.0,
      "learning_rate": 4.233333333333334e-06,
      "loss": 0.0,
      "step": 3100
    },
    {
      "epoch": 3125.0,
      "learning_rate": 4.177777777777778e-06,
      "loss": 0.0,
      "step": 3125
    },
    {
      "epoch": 3150.0,
      "learning_rate": 4.122222222222222e-06,
      "loss": 0.0,
      "step": 3150
    },
    {
      "epoch": 3175.0,
      "learning_rate": 4.066666666666667e-06,
      "loss": 0.0,
      "step": 3175
    },
    {
      "epoch": 3200.0,
      "learning_rate": 4.011111111111111e-06,
      "loss": 0.0,
      "step": 3200
    },
    {
      "epoch": 3225.0,
      "learning_rate": 3.955555555555556e-06,
      "loss": 0.0,
      "step": 3225
    },
    {
      "epoch": 3250.0,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0,
      "step": 3250
    },
    {
      "epoch": 3275.0,
      "learning_rate": 3.844444444444445e-06,
      "loss": 0.0,
      "step": 3275
    },
    {
      "epoch": 3300.0,
      "learning_rate": 3.7888888888888893e-06,
      "loss": 0.0,
      "step": 3300
    },
    {
      "epoch": 3325.0,
      "learning_rate": 3.7333333333333337e-06,
      "loss": 0.0,
      "step": 3325
    },
    {
      "epoch": 3350.0,
      "learning_rate": 3.6777777777777778e-06,
      "loss": 0.0,
      "step": 3350
    },
    {
      "epoch": 3375.0,
      "learning_rate": 3.6222222222222226e-06,
      "loss": 0.0,
      "step": 3375
    },
    {
      "epoch": 3400.0,
      "learning_rate": 3.566666666666667e-06,
      "loss": 0.0,
      "step": 3400
    },
    {
      "epoch": 3425.0,
      "learning_rate": 3.511111111111111e-06,
      "loss": 0.0,
      "step": 3425
    },
    {
      "epoch": 3450.0,
      "learning_rate": 3.455555555555556e-06,
      "loss": 0.0,
      "step": 3450
    },
    {
      "epoch": 3475.0,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0,
      "step": 3475
    },
    {
      "epoch": 3500.0,
      "learning_rate": 3.3444444444444445e-06,
      "loss": 0.0,
      "step": 3500
    },
    {
      "epoch": 3525.0,
      "learning_rate": 3.2888888888888894e-06,
      "loss": 0.0,
      "step": 3525
    },
    {
      "epoch": 3550.0,
      "learning_rate": 3.2333333333333334e-06,
      "loss": 0.0,
      "step": 3550
    },
    {
      "epoch": 3575.0,
      "learning_rate": 3.177777777777778e-06,
      "loss": 0.0,
      "step": 3575
    },
    {
      "epoch": 3600.0,
      "learning_rate": 3.1222222222222228e-06,
      "loss": 0.0,
      "step": 3600
    },
    {
      "epoch": 3625.0,
      "learning_rate": 3.066666666666667e-06,
      "loss": 0.0,
      "step": 3625
    },
    {
      "epoch": 3650.0,
      "learning_rate": 3.0111111111111113e-06,
      "loss": 0.0,
      "step": 3650
    },
    {
      "epoch": 3675.0,
      "learning_rate": 2.955555555555556e-06,
      "loss": 0.0,
      "step": 3675
    },
    {
      "epoch": 3700.0,
      "learning_rate": 2.9e-06,
      "loss": 0.0,
      "step": 3700
    },
    {
      "epoch": 3725.0,
      "learning_rate": 2.8444444444444446e-06,
      "loss": 0.0,
      "step": 3725
    },
    {
      "epoch": 3750.0,
      "learning_rate": 2.788888888888889e-06,
      "loss": 0.0,
      "step": 3750
    },
    {
      "epoch": 3775.0,
      "learning_rate": 2.7333333333333336e-06,
      "loss": 0.0,
      "step": 3775
    },
    {
      "epoch": 3800.0,
      "learning_rate": 2.677777777777778e-06,
      "loss": 0.0,
      "step": 3800
    },
    {
      "epoch": 3825.0,
      "learning_rate": 2.6222222222222225e-06,
      "loss": 0.0,
      "step": 3825
    },
    {
      "epoch": 3850.0,
      "learning_rate": 2.566666666666667e-06,
      "loss": 0.0,
      "step": 3850
    },
    {
      "epoch": 3875.0,
      "learning_rate": 2.5111111111111114e-06,
      "loss": 0.0,
      "step": 3875
    },
    {
      "epoch": 3900.0,
      "learning_rate": 2.455555555555556e-06,
      "loss": 0.0,
      "step": 3900
    },
    {
      "epoch": 3925.0,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.0,
      "step": 3925
    },
    {
      "epoch": 3950.0,
      "learning_rate": 2.3444444444444448e-06,
      "loss": 0.0,
      "step": 3950
    },
    {
      "epoch": 3975.0,
      "learning_rate": 2.2888888888888892e-06,
      "loss": 0.0,
      "step": 3975
    },
    {
      "epoch": 4000.0,
      "learning_rate": 2.2333333333333333e-06,
      "loss": 0.0,
      "step": 4000
    },
    {
      "epoch": 4000.0,
      "eval_loss": 12.263284683227539,
      "eval_runtime": 384.6578,
      "eval_samples_per_second": 0.13,
      "eval_steps_per_second": 0.003,
      "eval_wer": 103.34224598930481,
      "step": 4000
    },
    {
      "epoch": 4025.0,
      "learning_rate": 2.1777777777777777e-06,
      "loss": 0.0,
      "step": 4025
    },
    {
      "epoch": 4050.0,
      "learning_rate": 2.1222222222222226e-06,
      "loss": 0.0,
      "step": 4050
    },
    {
      "epoch": 4075.0,
      "learning_rate": 2.0666666666666666e-06,
      "loss": 0.0,
      "step": 4075
    },
    {
      "epoch": 4100.0,
      "learning_rate": 2.011111111111111e-06,
      "loss": 0.0,
      "step": 4100
    },
    {
      "epoch": 4125.0,
      "learning_rate": 1.955555555555556e-06,
      "loss": 0.0,
      "step": 4125
    },
    {
      "epoch": 4150.0,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 0.0,
      "step": 4150
    },
    {
      "epoch": 4175.0,
      "learning_rate": 1.8444444444444445e-06,
      "loss": 0.0,
      "step": 4175
    },
    {
      "epoch": 4200.0,
      "learning_rate": 1.788888888888889e-06,
      "loss": 0.0,
      "step": 4200
    },
    {
      "epoch": 4225.0,
      "learning_rate": 1.7333333333333336e-06,
      "loss": 0.0,
      "step": 4225
    },
    {
      "epoch": 4250.0,
      "learning_rate": 1.6777777777777779e-06,
      "loss": 0.0,
      "step": 4250
    },
    {
      "epoch": 4275.0,
      "learning_rate": 1.6222222222222223e-06,
      "loss": 0.0,
      "step": 4275
    },
    {
      "epoch": 4300.0,
      "learning_rate": 1.566666666666667e-06,
      "loss": 0.0,
      "step": 4300
    },
    {
      "epoch": 4325.0,
      "learning_rate": 1.5111111111111112e-06,
      "loss": 0.0,
      "step": 4325
    },
    {
      "epoch": 4350.0,
      "learning_rate": 1.4555555555555557e-06,
      "loss": 0.0,
      "step": 4350
    },
    {
      "epoch": 4375.0,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 0.0,
      "step": 4375
    },
    {
      "epoch": 4400.0,
      "learning_rate": 1.3444444444444446e-06,
      "loss": 0.0,
      "step": 4400
    },
    {
      "epoch": 4425.0,
      "learning_rate": 1.288888888888889e-06,
      "loss": 0.0,
      "step": 4425
    },
    {
      "epoch": 4450.0,
      "learning_rate": 1.2333333333333335e-06,
      "loss": 0.0,
      "step": 4450
    },
    {
      "epoch": 4475.0,
      "learning_rate": 1.1777777777777778e-06,
      "loss": 0.0,
      "step": 4475
    },
    {
      "epoch": 4500.0,
      "learning_rate": 1.1222222222222222e-06,
      "loss": 0.0,
      "step": 4500
    },
    {
      "epoch": 4525.0,
      "learning_rate": 1.066666666666667e-06,
      "loss": 0.0,
      "step": 4525
    },
    {
      "epoch": 4550.0,
      "learning_rate": 1.0111111111111111e-06,
      "loss": 0.0,
      "step": 4550
    },
    {
      "epoch": 4575.0,
      "learning_rate": 9.555555555555556e-07,
      "loss": 0.0,
      "step": 4575
    },
    {
      "epoch": 4600.0,
      "learning_rate": 9.000000000000001e-07,
      "loss": 0.0,
      "step": 4600
    },
    {
      "epoch": 4625.0,
      "learning_rate": 8.444444444444445e-07,
      "loss": 0.0,
      "step": 4625
    },
    {
      "epoch": 4650.0,
      "learning_rate": 7.888888888888889e-07,
      "loss": 0.0,
      "step": 4650
    },
    {
      "epoch": 4675.0,
      "learning_rate": 7.333333333333334e-07,
      "loss": 0.0,
      "step": 4675
    },
    {
      "epoch": 4700.0,
      "learning_rate": 6.777777777777779e-07,
      "loss": 0.0,
      "step": 4700
    },
    {
      "epoch": 4725.0,
      "learning_rate": 6.222222222222223e-07,
      "loss": 0.0,
      "step": 4725
    },
    {
      "epoch": 4750.0,
      "learning_rate": 5.666666666666667e-07,
      "loss": 0.0,
      "step": 4750
    },
    {
      "epoch": 4775.0,
      "learning_rate": 5.111111111111112e-07,
      "loss": 0.0,
      "step": 4775
    },
    {
      "epoch": 4800.0,
      "learning_rate": 4.5555555555555563e-07,
      "loss": 0.0,
      "step": 4800
    },
    {
      "epoch": 4825.0,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 0.0,
      "step": 4825
    },
    {
      "epoch": 4850.0,
      "learning_rate": 3.444444444444445e-07,
      "loss": 0.0,
      "step": 4850
    },
    {
      "epoch": 4875.0,
      "learning_rate": 2.888888888888889e-07,
      "loss": 0.0,
      "step": 4875
    },
    {
      "epoch": 4900.0,
      "learning_rate": 2.3333333333333336e-07,
      "loss": 0.0,
      "step": 4900
    },
    {
      "epoch": 4925.0,
      "learning_rate": 1.777777777777778e-07,
      "loss": 0.0,
      "step": 4925
    },
    {
      "epoch": 4950.0,
      "learning_rate": 1.2222222222222225e-07,
      "loss": 0.0,
      "step": 4950
    },
    {
      "epoch": 4975.0,
      "learning_rate": 6.666666666666668e-08,
      "loss": 0.0,
      "step": 4975
    },
    {
      "epoch": 5000.0,
      "learning_rate": 1.1111111111111112e-08,
      "loss": 0.0,
      "step": 5000
    },
    {
      "epoch": 5000.0,
      "eval_loss": 12.240767478942871,
      "eval_runtime": 384.5003,
      "eval_samples_per_second": 0.13,
      "eval_steps_per_second": 0.003,
      "eval_wer": 102.94117647058823,
      "step": 5000
    },
    {
      "epoch": 5000.0,
      "step": 5000,
      "total_flos": 1.0615855104e+19,
      "train_loss": 0.024147475125084566,
      "train_runtime": 5167.5516,
      "train_samples_per_second": 123.85,
      "train_steps_per_second": 0.968
    }
  ],
  "max_steps": 5000,
  "num_train_epochs": 5000,
  "total_flos": 1.0615855104e+19,
  "trial_name": null,
  "trial_params": null
}