{
"best_metric": 0.9882186616399623,
"best_model_checkpoint": "convnextv2-large-1k-224-finetuned-BreastCancer-Classification-BreakHis-AH-60-20-20-Shuffled/checkpoint-2388",
"epoch": 14.0,
"global_step": 2786,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 9.96810207336523e-08, |
|
"loss": 0.7048, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.993620414673046e-07, |
|
"loss": 0.6895, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 2.990430622009569e-07, |
|
"loss": 0.6896, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 3.987240829346092e-07, |
|
"loss": 0.6916, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 4.984051036682616e-07, |
|
"loss": 0.6916, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 5.980861244019139e-07, |
|
"loss": 0.6938, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 6.977671451355662e-07, |
|
"loss": 0.6968, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 7.974481658692184e-07, |
|
"loss": 0.6813, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 8.971291866028709e-07, |
|
"loss": 0.6911, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.968102073365232e-07, |
|
"loss": 0.6874, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.0964912280701754e-06, |
|
"loss": 0.6877, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.1961722488038277e-06, |
|
"loss": 0.6815, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.2958532695374803e-06, |
|
"loss": 0.6833, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.3955342902711324e-06, |
|
"loss": 0.6782, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4952153110047847e-06, |
|
"loss": 0.6742, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.5948963317384369e-06, |
|
"loss": 0.6763, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.6945773524720894e-06, |
|
"loss": 0.6771, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.7942583732057418e-06, |
|
"loss": 0.6638, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.8939393939393941e-06, |
|
"loss": 0.665, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.9936204146730465e-06, |
|
"loss": 0.6631, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 2.093301435406699e-06, |
|
"loss": 0.6522, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 2.1929824561403507e-06, |
|
"loss": 0.6581, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 2.292663476874003e-06, |
|
"loss": 0.6432, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 2.3923444976076554e-06, |
|
"loss": 0.6576, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 2.492025518341308e-06, |
|
"loss": 0.6368, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 2.5917065390749605e-06, |
|
"loss": 0.6257, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 2.691387559808613e-06, |
|
"loss": 0.624, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 2.7910685805422648e-06, |
|
"loss": 0.6115, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 2.890749601275917e-06, |
|
"loss": 0.6151, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.9904306220095695e-06, |
|
"loss": 0.6045, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 3.090111642743222e-06, |
|
"loss": 0.5888, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 3.1897926634768737e-06, |
|
"loss": 0.5753, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 3.2894736842105265e-06, |
|
"loss": 0.5607, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 3.389154704944179e-06, |
|
"loss": 0.5578, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 3.488835725677831e-06, |
|
"loss": 0.5461, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 3.5885167464114835e-06, |
|
"loss": 0.5403, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 3.688197767145136e-06, |
|
"loss": 0.5343, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 3.7878787878787882e-06, |
|
"loss": 0.5206, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.88755980861244e-06, |
|
"loss": 0.5059, |
|
"step": 195 |
|
}, |
|
{
"epoch": 1.0,
"eval_accuracy": 0.9000942507068803,
"eval_loss": 0.4826333522796631,
"eval_runtime": 1914.453,
"eval_samples_per_second": 1.108,
"eval_steps_per_second": 0.069,
"step": 199
},
|
{ |
|
"epoch": 1.01, |
|
"learning_rate": 3.987240829346093e-06, |
|
"loss": 0.4989, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.03, |
|
"learning_rate": 4.086921850079746e-06, |
|
"loss": 0.4963, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 4.186602870813398e-06, |
|
"loss": 0.4905, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 4.2862838915470495e-06, |
|
"loss": 0.4311, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 4.3859649122807014e-06, |
|
"loss": 0.4353, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 4.485645933014354e-06, |
|
"loss": 0.4574, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.16, |
|
"learning_rate": 4.585326953748006e-06, |
|
"loss": 0.4508, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.18, |
|
"learning_rate": 4.685007974481659e-06, |
|
"loss": 0.4541, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.21, |
|
"learning_rate": 4.784688995215311e-06, |
|
"loss": 0.4089, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.23, |
|
"learning_rate": 4.8843700159489636e-06, |
|
"loss": 0.4251, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 4.984051036682616e-06, |
|
"loss": 0.4276, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 5.083732057416268e-06, |
|
"loss": 0.4303, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 5.183413078149921e-06, |
|
"loss": 0.4144, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 5.283094098883573e-06, |
|
"loss": 0.3988, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 5.382775119617226e-06, |
|
"loss": 0.3718, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 5.482456140350877e-06, |
|
"loss": 0.3631, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 5.5821371610845296e-06, |
|
"loss": 0.3524, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 5.681818181818182e-06, |
|
"loss": 0.3498, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 5.781499202551834e-06, |
|
"loss": 0.3501, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 5.881180223285487e-06, |
|
"loss": 0.3949, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 5.980861244019139e-06, |
|
"loss": 0.3294, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 6.080542264752792e-06, |
|
"loss": 0.3177, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 6.180223285486444e-06, |
|
"loss": 0.3063, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 6.2799043062200955e-06, |
|
"loss": 0.3212, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 6.3795853269537475e-06, |
|
"loss": 0.2946, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 6.4792663476874e-06, |
|
"loss": 0.2695, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 6.578947368421053e-06, |
|
"loss": 0.2869, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 6.678628389154705e-06, |
|
"loss": 0.3035, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 6.778309409888358e-06, |
|
"loss": 0.2946, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 6.87799043062201e-06, |
|
"loss": 0.3297, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 6.977671451355662e-06, |
|
"loss": 0.2265, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.077352472089314e-06, |
|
"loss": 0.292, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.177033492822967e-06, |
|
"loss": 0.3683, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.276714513556619e-06, |
|
"loss": 0.286, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 7.376395534290272e-06, |
|
"loss": 0.2633, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 7.476076555023924e-06, |
|
"loss": 0.2371, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 7.5757575757575764e-06, |
|
"loss": 0.2734, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 7.675438596491228e-06, |
|
"loss": 0.2586, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 7.77511961722488e-06, |
|
"loss": 0.245, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 7.874800637958534e-06, |
|
"loss": 0.2533, |
|
"step": 395 |
|
}, |
|
{
"epoch": 2.0,
"eval_accuracy": 0.9514608859566447,
"eval_loss": 0.2124292403459549,
"eval_runtime": 70.7108,
"eval_samples_per_second": 30.01,
"eval_steps_per_second": 1.881,
"step": 398
},
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 7.974481658692186e-06, |
|
"loss": 0.2251, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 8.074162679425838e-06, |
|
"loss": 0.2319, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 8.173843700159491e-06, |
|
"loss": 0.2642, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 8.273524720893143e-06, |
|
"loss": 0.2336, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 8.373205741626795e-06, |
|
"loss": 0.2232, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 8.472886762360447e-06, |
|
"loss": 0.2451, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 8.572567783094099e-06, |
|
"loss": 0.2953, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 8.672248803827751e-06, |
|
"loss": 0.2671, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.771929824561403e-06, |
|
"loss": 0.245, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 8.871610845295056e-06, |
|
"loss": 0.2079, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 8.971291866028708e-06, |
|
"loss": 0.1964, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 9.07097288676236e-06, |
|
"loss": 0.1823, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 9.170653907496012e-06, |
|
"loss": 0.2291, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 9.270334928229666e-06, |
|
"loss": 0.2359, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 9.370015948963318e-06, |
|
"loss": 0.2404, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 9.46969696969697e-06, |
|
"loss": 0.2503, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 9.569377990430622e-06, |
|
"loss": 0.2626, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 9.669059011164275e-06, |
|
"loss": 0.2264, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 9.768740031897927e-06, |
|
"loss": 0.1944, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 9.868421052631579e-06, |
|
"loss": 0.2319, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 9.968102073365233e-06, |
|
"loss": 0.273, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.0067783094098885e-05, |
|
"loss": 0.1638, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.0167464114832537e-05, |
|
"loss": 0.2288, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 1.0267145135566188e-05, |
|
"loss": 0.1798, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 1.0366826156299842e-05, |
|
"loss": 0.1836, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 1.0466507177033494e-05, |
|
"loss": 0.3112, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 1.0566188197767146e-05, |
|
"loss": 0.1693, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 1.0665869218500798e-05, |
|
"loss": 0.2139, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 1.0765550239234451e-05, |
|
"loss": 0.2522, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 1.0865231259968102e-05, |
|
"loss": 0.2047, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 1.0964912280701754e-05, |
|
"loss": 0.2797, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 1.1064593301435407e-05, |
|
"loss": 0.1701, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 1.1164274322169059e-05, |
|
"loss": 0.2006, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.1263955342902711e-05, |
|
"loss": 0.2385, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1363636363636365e-05, |
|
"loss": 0.1963, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 1.1463317384370017e-05, |
|
"loss": 0.1463, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 1.1562998405103668e-05, |
|
"loss": 0.2504, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.166267942583732e-05, |
|
"loss": 0.1663, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.1762360446570974e-05, |
|
"loss": 0.1742, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.1862041467304626e-05, |
|
"loss": 0.2358, |
|
"step": 595 |
|
}, |
|
{
"epoch": 3.0,
"eval_accuracy": 0.9538171536286523,
"eval_loss": 0.15427447855472565,
"eval_runtime": 70.7075,
"eval_samples_per_second": 30.011,
"eval_steps_per_second": 1.881,
"step": 597
},
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 1.1961722488038278e-05, |
|
"loss": 0.173, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 3.04, |
|
"learning_rate": 1.206140350877193e-05, |
|
"loss": 0.1766, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 1.2161084529505583e-05, |
|
"loss": 0.182, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 1.2260765550239235e-05, |
|
"loss": 0.2666, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 1.2360446570972887e-05, |
|
"loss": 0.126, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 1.246012759170654e-05, |
|
"loss": 0.1112, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 3.17, |
|
"learning_rate": 1.2559808612440191e-05, |
|
"loss": 0.1267, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 3.19, |
|
"learning_rate": 1.2659489633173843e-05, |
|
"loss": 0.2112, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 3.22, |
|
"learning_rate": 1.2759170653907495e-05, |
|
"loss": 0.1375, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 1.2858851674641149e-05, |
|
"loss": 0.2749, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 1.29585326953748e-05, |
|
"loss": 0.2136, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 1.3058213716108452e-05, |
|
"loss": 0.1766, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 1.3157894736842106e-05, |
|
"loss": 0.1892, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 1.3257575757575758e-05, |
|
"loss": 0.1417, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 3.37, |
|
"learning_rate": 1.335725677830941e-05, |
|
"loss": 0.2264, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 1.3456937799043062e-05, |
|
"loss": 0.1516, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 1.3556618819776715e-05, |
|
"loss": 0.206, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 3.44, |
|
"learning_rate": 1.3656299840510367e-05, |
|
"loss": 0.1868, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 1.375598086124402e-05, |
|
"loss": 0.2041, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 3.49, |
|
"learning_rate": 1.3855661881977671e-05, |
|
"loss": 0.1884, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 1.3955342902711325e-05, |
|
"loss": 0.3096, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 1.4055023923444977e-05, |
|
"loss": 0.1395, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 1.4154704944178629e-05, |
|
"loss": 0.167, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 1.425438596491228e-05, |
|
"loss": 0.1371, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 3.62, |
|
"learning_rate": 1.4354066985645934e-05, |
|
"loss": 0.1149, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 3.64, |
|
"learning_rate": 1.4453748006379586e-05, |
|
"loss": 0.1051, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 3.67, |
|
"learning_rate": 1.4553429027113238e-05, |
|
"loss": 0.1705, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 1.4653110047846892e-05, |
|
"loss": 0.1174, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 1.4752791068580543e-05, |
|
"loss": 0.1356, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 1.4852472089314195e-05, |
|
"loss": 0.3099, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 1.4952153110047847e-05, |
|
"loss": 0.2023, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 1.5051834130781501e-05, |
|
"loss": 0.1293, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 3.82, |
|
"learning_rate": 1.5151515151515153e-05, |
|
"loss": 0.2193, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 1.5251196172248805e-05, |
|
"loss": 0.1946, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 1.5350877192982457e-05, |
|
"loss": 0.3177, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 3.89, |
|
"learning_rate": 1.545055821371611e-05, |
|
"loss": 0.3677, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 1.555023923444976e-05, |
|
"loss": 0.1895, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 3.94, |
|
"learning_rate": 1.5649920255183416e-05, |
|
"loss": 0.2612, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 1.5749601275917068e-05, |
|
"loss": 0.1511, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 3.99, |
|
"learning_rate": 1.584928229665072e-05, |
|
"loss": 0.2584, |
|
"step": 795 |
|
}, |
|
{
"epoch": 4.0,
"eval_accuracy": 0.9641847313854854,
"eval_loss": 0.11363683640956879,
"eval_runtime": 70.6659,
"eval_samples_per_second": 30.029,
"eval_steps_per_second": 1.882,
"step": 796
},
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 1.594896331738437e-05, |
|
"loss": 0.204, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 1.6048644338118024e-05, |
|
"loss": 0.3212, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 4.07, |
|
"learning_rate": 1.6148325358851675e-05, |
|
"loss": 0.2214, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 1.6248006379585327e-05, |
|
"loss": 0.1679, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 4.12, |
|
"learning_rate": 1.6347687400318983e-05, |
|
"loss": 0.1288, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 1.6447368421052635e-05, |
|
"loss": 0.1769, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 4.17, |
|
"learning_rate": 1.6547049441786287e-05, |
|
"loss": 0.1685, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 1.664673046251994e-05, |
|
"loss": 0.147, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 1.674641148325359e-05, |
|
"loss": 0.279, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 4.25, |
|
"learning_rate": 1.684609250398724e-05, |
|
"loss": 0.3511, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 1.6945773524720894e-05, |
|
"loss": 0.3512, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 4.3, |
|
"learning_rate": 1.7045454545454546e-05, |
|
"loss": 0.1552, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 1.7145135566188198e-05, |
|
"loss": 0.1517, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 4.35, |
|
"learning_rate": 1.724481658692185e-05, |
|
"loss": 0.0997, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 1.7344497607655502e-05, |
|
"loss": 0.1326, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 1.7444178628389154e-05, |
|
"loss": 0.1793, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 1.7543859649122806e-05, |
|
"loss": 0.1459, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 1.7643540669856458e-05, |
|
"loss": 0.1259, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 1.7743221690590113e-05, |
|
"loss": 0.1866, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 1.7842902711323765e-05, |
|
"loss": 0.2281, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 4.52, |
|
"learning_rate": 1.7942583732057417e-05, |
|
"loss": 0.1234, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 1.804226475279107e-05, |
|
"loss": 0.1683, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 4.57, |
|
"learning_rate": 1.814194577352472e-05, |
|
"loss": 0.1124, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 1.8241626794258373e-05, |
|
"loss": 0.1016, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 4.62, |
|
"learning_rate": 1.8341307814992024e-05, |
|
"loss": 0.1383, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 1.844098883572568e-05, |
|
"loss": 0.2442, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 1.8540669856459332e-05, |
|
"loss": 0.1057, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 1.8640350877192984e-05, |
|
"loss": 0.1856, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 1.8740031897926636e-05, |
|
"loss": 0.1272, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 4.75, |
|
"learning_rate": 1.8839712918660287e-05, |
|
"loss": 0.1384, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 1.893939393939394e-05, |
|
"loss": 0.2314, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 4.8, |
|
"learning_rate": 1.903907496012759e-05, |
|
"loss": 0.1623, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 1.9138755980861243e-05, |
|
"loss": 0.2701, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 1.92384370015949e-05, |
|
"loss": 0.1866, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 4.87, |
|
"learning_rate": 1.933811802232855e-05, |
|
"loss": 0.2126, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 1.9437799043062202e-05, |
|
"loss": 0.1549, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 1.9537480063795854e-05, |
|
"loss": 0.133, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 1.9637161084529506e-05, |
|
"loss": 0.0753, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 1.9736842105263158e-05, |
|
"loss": 0.0942, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 1.983652312599681e-05, |
|
"loss": 0.1085, |
|
"step": 995 |
|
}, |
|
{
"epoch": 5.0,
"eval_accuracy": 0.9745523091423186,
"eval_loss": 0.08914583176374435,
"eval_runtime": 70.8532,
"eval_samples_per_second": 29.949,
"eval_steps_per_second": 1.877,
"step": 995
},
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 1.9936204146730465e-05, |
|
"loss": 0.1358, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 2.0035885167464117e-05, |
|
"loss": 0.1491, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 2.013556618819777e-05, |
|
"loss": 0.148, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 2.023524720893142e-05, |
|
"loss": 0.0951, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 5.13, |
|
"learning_rate": 2.0334928229665073e-05, |
|
"loss": 0.0913, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 2.0434609250398725e-05, |
|
"loss": 0.1532, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 2.0534290271132377e-05, |
|
"loss": 0.1276, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 5.2, |
|
"learning_rate": 2.0633971291866032e-05, |
|
"loss": 0.169, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 2.0733652312599684e-05, |
|
"loss": 0.1091, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 5.25, |
|
"learning_rate": 2.0833333333333336e-05, |
|
"loss": 0.0604, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 2.0933014354066988e-05, |
|
"loss": 0.2305, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 2.103269537480064e-05, |
|
"loss": 0.1552, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 2.1132376395534292e-05, |
|
"loss": 0.0952, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 2.1232057416267944e-05, |
|
"loss": 0.1021, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 5.38, |
|
"learning_rate": 2.1331738437001596e-05, |
|
"loss": 0.0702, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 2.143141945773525e-05, |
|
"loss": 0.1879, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 5.43, |
|
"learning_rate": 2.1531100478468903e-05, |
|
"loss": 0.1929, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 2.1630781499202555e-05, |
|
"loss": 0.1982, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 5.48, |
|
"learning_rate": 2.1730462519936203e-05, |
|
"loss": 0.1917, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 2.1830143540669855e-05, |
|
"loss": 0.3343, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 2.1929824561403507e-05, |
|
"loss": 0.1096, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 2.2029505582137162e-05, |
|
"loss": 0.1774, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 2.2129186602870814e-05, |
|
"loss": 0.1189, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 2.2228867623604466e-05, |
|
"loss": 0.1268, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 2.2328548644338118e-05, |
|
"loss": 0.0761, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 5.65, |
|
"learning_rate": 2.242822966507177e-05, |
|
"loss": 0.0904, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 2.2527910685805422e-05, |
|
"loss": 0.088, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 5.7, |
|
"learning_rate": 2.2627591706539074e-05, |
|
"loss": 0.0607, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 2.272727272727273e-05, |
|
"loss": 0.1923, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 5.75, |
|
"learning_rate": 2.282695374800638e-05, |
|
"loss": 0.1361, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 2.2926634768740033e-05, |
|
"loss": 0.1227, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 2.3026315789473685e-05, |
|
"loss": 0.1345, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 5.83, |
|
"learning_rate": 2.3125996810207337e-05, |
|
"loss": 0.179, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 2.322567783094099e-05, |
|
"loss": 0.1282, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 5.88, |
|
"learning_rate": 2.332535885167464e-05, |
|
"loss": 0.1503, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 2.3425039872408293e-05, |
|
"loss": 0.0902, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 5.93, |
|
"learning_rate": 2.3524720893141948e-05, |
|
"loss": 0.1091, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 2.36244019138756e-05, |
|
"loss": 0.0839, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 2.3724082934609252e-05, |
|
"loss": 0.1007, |
|
"step": 1190 |
|
}, |
|
{
"epoch": 6.0,
"eval_accuracy": 0.9769085768143261,
"eval_loss": 0.07253596186637878,
"eval_runtime": 70.8263,
"eval_samples_per_second": 29.961,
"eval_steps_per_second": 1.878,
"step": 1194
},
|
{ |
|
"epoch": 6.01, |
|
"learning_rate": 2.3823763955342904e-05, |
|
"loss": 0.0863, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 2.3923444976076556e-05, |
|
"loss": 0.088, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 6.06, |
|
"learning_rate": 2.4023125996810208e-05, |
|
"loss": 0.0649, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 2.412280701754386e-05, |
|
"loss": 0.0992, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 6.11, |
|
"learning_rate": 2.4222488038277515e-05, |
|
"loss": 0.223, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 2.4322169059011167e-05, |
|
"loss": 0.1472, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 2.442185007974482e-05, |
|
"loss": 0.1692, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 6.18, |
|
"learning_rate": 2.452153110047847e-05, |
|
"loss": 0.109, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 2.4621212121212123e-05, |
|
"loss": 0.0773, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 2.4720893141945774e-05, |
|
"loss": 0.0989, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 2.4820574162679426e-05, |
|
"loss": 0.0715, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 2.492025518341308e-05, |
|
"loss": 0.0389, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 2.5019936204146734e-05, |
|
"loss": 0.1711, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 6.33, |
|
"learning_rate": 2.5119617224880382e-05, |
|
"loss": 0.0931, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 6.36, |
|
"learning_rate": 2.5219298245614037e-05, |
|
"loss": 0.031, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 6.38, |
|
"learning_rate": 2.5318979266347686e-05, |
|
"loss": 0.1442, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 2.541866028708134e-05, |
|
"loss": 0.1046, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 2.551834130781499e-05, |
|
"loss": 0.0885, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 2.5618022328548645e-05, |
|
"loss": 0.1875, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 2.5717703349282297e-05, |
|
"loss": 0.0669, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 6.51, |
|
"learning_rate": 2.5817384370015952e-05, |
|
"loss": 0.1147, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 2.59170653907496e-05, |
|
"loss": 0.233, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 6.56, |
|
"learning_rate": 2.6016746411483256e-05, |
|
"loss": 0.2635, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 2.6116427432216905e-05, |
|
"loss": 0.1609, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 2.621610845295056e-05, |
|
"loss": 0.1306, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 6.63, |
|
"learning_rate": 2.6315789473684212e-05, |
|
"loss": 0.2663, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 2.6415470494417864e-05, |
|
"loss": 0.1074, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 2.6515151515151516e-05, |
|
"loss": 0.0561, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 2.661483253588517e-05, |
|
"loss": 0.043, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 2.671451355661882e-05, |
|
"loss": 0.1899, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 2.6814194577352475e-05, |
|
"loss": 0.0969, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 6.78, |
|
"learning_rate": 2.6913875598086123e-05, |
|
"loss": 0.2774, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 6.81, |
|
"learning_rate": 2.701355661881978e-05, |
|
"loss": 0.1214, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 6.83, |
|
"learning_rate": 2.711323763955343e-05, |
|
"loss": 0.2666, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 2.7212918660287086e-05, |
|
"loss": 0.1761, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 2.7312599681020735e-05, |
|
"loss": 0.0554, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 2.741228070175439e-05, |
|
"loss": 0.0588, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 2.751196172248804e-05, |
|
"loss": 0.0842, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 6.96, |
|
"learning_rate": 2.7611642743221694e-05, |
|
"loss": 0.1822, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 2.7711323763955342e-05, |
|
"loss": 0.1463, |
|
"step": 1390 |
|
}, |
|
{
"epoch": 7.0,
"eval_accuracy": 0.9839773798303487,
"eval_loss": 0.05405208095908165,
"eval_runtime": 70.9415,
"eval_samples_per_second": 29.912,
"eval_steps_per_second": 1.875,
"step": 1393
},
|
{ |
|
"epoch": 7.01, |
|
"learning_rate": 2.7811004784688998e-05, |
|
"loss": 0.0536, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 2.791068580542265e-05, |
|
"loss": 0.1287, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 7.06, |
|
"learning_rate": 2.8010366826156305e-05, |
|
"loss": 0.0914, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 2.8110047846889953e-05, |
|
"loss": 0.0428, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 2.820972886762361e-05, |
|
"loss": 0.213, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 7.14, |
|
"learning_rate": 2.8309409888357257e-05, |
|
"loss": 0.0962, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 2.8409090909090912e-05, |
|
"loss": 0.0824, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 7.19, |
|
"learning_rate": 2.850877192982456e-05, |
|
"loss": 0.1157, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 2.8608452950558216e-05, |
|
"loss": 0.1519, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 7.24, |
|
"learning_rate": 2.8708133971291868e-05, |
|
"loss": 0.2063, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 7.26, |
|
"learning_rate": 2.8807814992025524e-05, |
|
"loss": 0.1612, |
|
"step": 1445 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 2.8907496012759172e-05, |
|
"loss": 0.0942, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 2.900717703349282e-05, |
|
"loss": 0.0405, |
|
"step": 1455 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 2.9106858054226476e-05, |
|
"loss": 0.1686, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 2.9206539074960128e-05, |
|
"loss": 0.2167, |
|
"step": 1465 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 2.9306220095693783e-05, |
|
"loss": 0.0604, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 7.41, |
|
"learning_rate": 2.940590111642743e-05, |
|
"loss": 0.1009, |
|
"step": 1475 |
|
}, |
|
{ |
|
"epoch": 7.44, |
|
"learning_rate": 2.9505582137161087e-05, |
|
"loss": 0.1279, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 7.46, |
|
"learning_rate": 2.9605263157894735e-05, |
|
"loss": 0.1413, |
|
"step": 1485 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 2.970494417862839e-05, |
|
"loss": 0.1586, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 7.51, |
|
"learning_rate": 2.980462519936204e-05, |
|
"loss": 0.1487, |
|
"step": 1495 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 2.9904306220095695e-05, |
|
"loss": 0.0528, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 3.0003987240829347e-05, |
|
"loss": 0.1796, |
|
"step": 1505 |
|
}, |
|
{ |
|
"epoch": 7.59, |
|
"learning_rate": 3.0103668261563002e-05, |
|
"loss": 0.1781, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 3.020334928229665e-05, |
|
"loss": 0.1745, |
|
"step": 1515 |
|
}, |
|
{ |
|
"epoch": 7.64, |
|
"learning_rate": 3.0303030303030306e-05, |
|
"loss": 0.0827, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 3.0402711323763954e-05, |
|
"loss": 0.1123, |
|
"step": 1525 |
|
}, |
|
{ |
|
"epoch": 7.69, |
|
"learning_rate": 3.050239234449761e-05, |
|
"loss": 0.0751, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 7.71, |
|
"learning_rate": 3.060207336523126e-05, |
|
"loss": 0.1033, |
|
"step": 1535 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 3.0701754385964913e-05, |
|
"loss": 0.0861, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 7.76, |
|
"learning_rate": 3.080143540669856e-05, |
|
"loss": 0.0744, |
|
"step": 1545 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 3.090111642743222e-05, |
|
"loss": 0.1511, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 3.100079744816587e-05, |
|
"loss": 0.1331, |
|
"step": 1555 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 3.110047846889952e-05, |
|
"loss": 0.04, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 3.1200159489633176e-05, |
|
"loss": 0.0549, |
|
"step": 1565 |
|
}, |
|
{ |
|
"epoch": 7.89, |
|
"learning_rate": 3.129984051036683e-05, |
|
"loss": 0.1301, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 7.91, |
|
"learning_rate": 3.139952153110048e-05, |
|
"loss": 0.0605, |
|
"step": 1575 |
|
}, |
|
{ |
|
"epoch": 7.94, |
|
"learning_rate": 3.1499202551834136e-05, |
|
"loss": 0.1689, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 7.96, |
|
"learning_rate": 3.1598883572567784e-05, |
|
"loss": 0.4188, |
|
"step": 1585 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 3.169856459330144e-05, |
|
"loss": 0.3564, |
|
"step": 1590 |
|
}, |
|
{
"epoch": 8.0,
"eval_accuracy": 0.9802073515551367,
"eval_loss": 0.08797016739845276,
"eval_runtime": 70.691,
"eval_samples_per_second": 30.018,
"eval_steps_per_second": 1.881,
"step": 1592
},
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 3.179824561403509e-05, |
|
"loss": 0.1236, |
|
"step": 1595 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 3.189792663476874e-05, |
|
"loss": 0.0615, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 8.07, |
|
"learning_rate": 3.199760765550239e-05, |
|
"loss": 0.2747, |
|
"step": 1605 |
|
}, |
|
{ |
|
"epoch": 8.09, |
|
"learning_rate": 3.209728867623605e-05, |
|
"loss": 0.1151, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 8.12, |
|
"learning_rate": 3.2196969696969696e-05, |
|
"loss": 0.1025, |
|
"step": 1615 |
|
}, |
|
{ |
|
"epoch": 8.14, |
|
"learning_rate": 3.229665071770335e-05, |
|
"loss": 0.2233, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 3.2396331738437e-05, |
|
"loss": 0.0602, |
|
"step": 1625 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 3.2496012759170655e-05, |
|
"loss": 0.0924, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 3.259569377990431e-05, |
|
"loss": 0.1035, |
|
"step": 1635 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 3.2695374800637965e-05, |
|
"loss": 0.1069, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 8.27, |
|
"learning_rate": 3.2795055821371614e-05, |
|
"loss": 0.2335, |
|
"step": 1645 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 3.289473684210527e-05, |
|
"loss": 0.1592, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 8.32, |
|
"learning_rate": 3.299441786283892e-05, |
|
"loss": 0.0557, |
|
"step": 1655 |
|
}, |
|
{ |
|
"epoch": 8.34, |
|
"learning_rate": 3.309409888357257e-05, |
|
"loss": 0.0985, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 3.319377990430622e-05, |
|
"loss": 0.0447, |
|
"step": 1665 |
|
}, |
|
{ |
|
"epoch": 8.39, |
|
"learning_rate": 3.329346092503988e-05, |
|
"loss": 0.2291, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 3.3393141945773525e-05, |
|
"loss": 0.1829, |
|
"step": 1675 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 3.349282296650718e-05, |
|
"loss": 0.1253, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 3.359250398724083e-05, |
|
"loss": 0.0124, |
|
"step": 1685 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 3.369218500797448e-05, |
|
"loss": 0.0723, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 8.52, |
|
"learning_rate": 3.379186602870813e-05, |
|
"loss": 0.1938, |
|
"step": 1695 |
|
}, |
|
{ |
|
"epoch": 8.54, |
|
"learning_rate": 3.389154704944179e-05, |
|
"loss": 0.2344, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 8.57, |
|
"learning_rate": 3.3991228070175444e-05, |
|
"loss": 0.2282, |
|
"step": 1705 |
|
}, |
|
{ |
|
"epoch": 8.59, |
|
"learning_rate": 3.409090909090909e-05, |
|
"loss": 0.1385, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 3.419059011164275e-05, |
|
"loss": 0.2466, |
|
"step": 1715 |
|
}, |
|
{ |
|
"epoch": 8.64, |
|
"learning_rate": 3.4290271132376396e-05, |
|
"loss": 0.1208, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 3.438995215311005e-05, |
|
"loss": 0.091, |
|
"step": 1725 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 3.44896331738437e-05, |
|
"loss": 0.0724, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 8.72, |
|
"learning_rate": 3.4589314194577355e-05, |
|
"loss": 0.1488, |
|
"step": 1735 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 3.4688995215311004e-05, |
|
"loss": 0.0948, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 8.77, |
|
"learning_rate": 3.478867623604466e-05, |
|
"loss": 0.0663, |
|
"step": 1745 |
|
}, |
|
{ |
|
"epoch": 8.79, |
|
"learning_rate": 3.488835725677831e-05, |
|
"loss": 0.0654, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 8.82, |
|
"learning_rate": 3.498803827751196e-05, |
|
"loss": 0.036, |
|
"step": 1755 |
|
}, |
|
{ |
|
"epoch": 8.84, |
|
"learning_rate": 3.508771929824561e-05, |
|
"loss": 0.1179, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 3.518740031897927e-05, |
|
"loss": 0.1061, |
|
"step": 1765 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 3.5287081339712915e-05, |
|
"loss": 0.0765, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 3.538676236044657e-05, |
|
"loss": 0.0466, |
|
"step": 1775 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 3.5486443381180226e-05, |
|
"loss": 0.113, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 8.97, |
|
"learning_rate": 3.558612440191388e-05, |
|
"loss": 0.1163, |
|
"step": 1785 |
|
}, |
|
{ |
|
"epoch": 8.99, |
|
"learning_rate": 3.568580542264753e-05, |
|
"loss": 0.0957, |
|
"step": 1790 |
|
}, |
|
{
"epoch": 9.0,
"eval_accuracy": 0.96559849198869,
"eval_loss": 0.13746494054794312,
"eval_runtime": 70.9091,
"eval_samples_per_second": 29.926,
"eval_steps_per_second": 1.876,
"step": 1791
},
|
{ |
|
"epoch": 9.02, |
|
"learning_rate": 3.5785486443381185e-05, |
|
"loss": 0.1195, |
|
"step": 1795 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 3.5885167464114834e-05, |
|
"loss": 0.1754, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 9.07, |
|
"learning_rate": 3.598484848484849e-05, |
|
"loss": 0.1201, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 3.608452950558214e-05, |
|
"loss": 0.1016, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 9.12, |
|
"learning_rate": 3.618421052631579e-05, |
|
"loss": 0.2312, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 9.15, |
|
"learning_rate": 3.628389154704944e-05, |
|
"loss": 0.0625, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 9.17, |
|
"learning_rate": 3.6383572567783097e-05, |
|
"loss": 0.0684, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 9.2, |
|
"learning_rate": 3.6483253588516745e-05, |
|
"loss": 0.0445, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 9.22, |
|
"learning_rate": 3.65829346092504e-05, |
|
"loss": 0.0355, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 9.25, |
|
"learning_rate": 3.668261562998405e-05, |
|
"loss": 0.216, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 9.27, |
|
"learning_rate": 3.6782296650717704e-05, |
|
"loss": 0.1667, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 9.3, |
|
"learning_rate": 3.688197767145136e-05, |
|
"loss": 0.1845, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 3.6981658692185015e-05, |
|
"loss": 0.0561, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 9.35, |
|
"learning_rate": 3.7081339712918663e-05, |
|
"loss": 0.2596, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 3.718102073365232e-05, |
|
"loss": 0.3591, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 9.4, |
|
"learning_rate": 3.728070175438597e-05, |
|
"loss": 0.1171, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 9.42, |
|
"learning_rate": 3.738038277511962e-05, |
|
"loss": 0.0377, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 9.45, |
|
"learning_rate": 3.748006379585327e-05, |
|
"loss": 0.1234, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 9.47, |
|
"learning_rate": 3.7579744816586926e-05, |
|
"loss": 0.1602, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 3.7679425837320575e-05, |
|
"loss": 0.0583, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 9.52, |
|
"learning_rate": 3.777910685805423e-05, |
|
"loss": 0.1777, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 3.787878787878788e-05, |
|
"loss": 0.1032, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 9.57, |
|
"learning_rate": 3.7978468899521534e-05, |
|
"loss": 0.1959, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 9.6, |
|
"learning_rate": 3.807814992025518e-05, |
|
"loss": 0.1256, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 9.62, |
|
"learning_rate": 3.817783094098884e-05, |
|
"loss": 0.1693, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 9.65, |
|
"learning_rate": 3.8277511961722486e-05, |
|
"loss": 0.1142, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 9.67, |
|
"learning_rate": 3.837719298245614e-05, |
|
"loss": 0.1055, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 9.7, |
|
"learning_rate": 3.84768740031898e-05, |
|
"loss": 0.0944, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 9.72, |
|
"learning_rate": 3.8576555023923446e-05, |
|
"loss": 0.081, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 9.75, |
|
"learning_rate": 3.86762360446571e-05, |
|
"loss": 0.1426, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 3.877591706539075e-05, |
|
"loss": 0.1158, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 9.8, |
|
"learning_rate": 3.8875598086124405e-05, |
|
"loss": 0.0763, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 3.897527910685805e-05, |
|
"loss": 0.0932, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 9.85, |
|
"learning_rate": 3.907496012759171e-05, |
|
"loss": 0.0565, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 9.87, |
|
"learning_rate": 3.917464114832536e-05, |
|
"loss": 0.0578, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 9.9, |
|
"learning_rate": 3.927432216905901e-05, |
|
"loss": 0.0449, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 9.92, |
|
"learning_rate": 3.937400318979266e-05, |
|
"loss": 0.0792, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 3.9473684210526316e-05, |
|
"loss": 0.0463, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 9.97, |
|
"learning_rate": 3.9573365231259965e-05, |
|
"loss": 0.2275, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 3.967304625199362e-05, |
|
"loss": 0.1481, |
|
"step": 1990 |
|
}, |
|
{
"epoch": 10.0,
"eval_accuracy": 0.9872761545711592,
"eval_loss": 0.051094409078359604,
"eval_runtime": 70.5765,
"eval_samples_per_second": 30.067,
"eval_steps_per_second": 1.884,
"step": 1990
},
|
{ |
|
"epoch": 10.03, |
|
"learning_rate": 3.9772727272727275e-05, |
|
"loss": 0.0831, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 10.05, |
|
"learning_rate": 3.987240829346093e-05, |
|
"loss": 0.1587, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 10.08, |
|
"learning_rate": 3.997208931419458e-05, |
|
"loss": 0.0135, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 10.1, |
|
"learning_rate": 4.0071770334928235e-05, |
|
"loss": 0.1272, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 10.13, |
|
"learning_rate": 4.017145135566188e-05, |
|
"loss": 0.0556, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 10.15, |
|
"learning_rate": 4.027113237639554e-05, |
|
"loss": 0.1079, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 10.18, |
|
"learning_rate": 4.037081339712919e-05, |
|
"loss": 0.1884, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 10.2, |
|
"learning_rate": 4.047049441786284e-05, |
|
"loss": 0.0644, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 10.23, |
|
"learning_rate": 4.057017543859649e-05, |
|
"loss": 0.073, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 10.25, |
|
"learning_rate": 4.0669856459330146e-05, |
|
"loss": 0.0946, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 10.28, |
|
"learning_rate": 4.0769537480063795e-05, |
|
"loss": 0.0888, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 10.3, |
|
"learning_rate": 4.086921850079745e-05, |
|
"loss": 0.0212, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 10.33, |
|
"learning_rate": 4.09688995215311e-05, |
|
"loss": 0.0737, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 10.35, |
|
"learning_rate": 4.1068580542264754e-05, |
|
"loss": 0.0286, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 10.38, |
|
"learning_rate": 4.116826156299841e-05, |
|
"loss": 0.0866, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 10.4, |
|
"learning_rate": 4.1267942583732064e-05, |
|
"loss": 0.1419, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 10.43, |
|
"learning_rate": 4.136762360446571e-05, |
|
"loss": 0.1377, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 10.45, |
|
"learning_rate": 4.146730462519937e-05, |
|
"loss": 0.0808, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 10.48, |
|
"learning_rate": 4.156698564593302e-05, |
|
"loss": 0.0938, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 10.5, |
|
"learning_rate": 4.166666666666667e-05, |
|
"loss": 0.1383, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 10.53, |
|
"learning_rate": 4.176634768740032e-05, |
|
"loss": 0.0873, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 10.55, |
|
"learning_rate": 4.1866028708133976e-05, |
|
"loss": 0.0451, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 10.58, |
|
"learning_rate": 4.1965709728867624e-05, |
|
"loss": 0.0879, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 10.6, |
|
"learning_rate": 4.206539074960128e-05, |
|
"loss": 0.0927, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 10.63, |
|
"learning_rate": 4.216507177033493e-05, |
|
"loss": 0.0236, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 10.65, |
|
"learning_rate": 4.2264752791068584e-05, |
|
"loss": 0.0729, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 10.68, |
|
"learning_rate": 4.236443381180223e-05, |
|
"loss": 0.1569, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 10.7, |
|
"learning_rate": 4.246411483253589e-05, |
|
"loss": 0.2289, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 10.73, |
|
"learning_rate": 4.2563795853269536e-05, |
|
"loss": 0.3105, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 10.75, |
|
"learning_rate": 4.266347687400319e-05, |
|
"loss": 0.1285, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 10.78, |
|
"learning_rate": 4.2763157894736847e-05, |
|
"loss": 0.1607, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 10.8, |
|
"learning_rate": 4.28628389154705e-05, |
|
"loss": 0.1358, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 10.83, |
|
"learning_rate": 4.296251993620415e-05, |
|
"loss": 0.1026, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 10.85, |
|
"learning_rate": 4.3062200956937806e-05, |
|
"loss": 0.0586, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 10.88, |
|
"learning_rate": 4.3161881977671454e-05, |
|
"loss": 0.0435, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 10.9, |
|
"learning_rate": 4.326156299840511e-05, |
|
"loss": 0.2147, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 10.93, |
|
"learning_rate": 4.336124401913876e-05, |
|
"loss": 0.162, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 10.95, |
|
"learning_rate": 4.346092503987241e-05, |
|
"loss": 0.2294, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 10.98, |
|
"learning_rate": 4.356060606060606e-05, |
|
"loss": 0.1536, |
|
"step": 2185 |
|
}, |
|
{
"epoch": 11.0,
"eval_accuracy": 0.971253534401508,
"eval_loss": 0.08268017321825027,
"eval_runtime": 70.5616,
"eval_samples_per_second": 30.073,
"eval_steps_per_second": 1.885,
"step": 2189
},
|
{ |
|
"epoch": 11.01, |
|
"learning_rate": 4.366028708133971e-05, |
|
"loss": 0.131, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 11.03, |
|
"learning_rate": 4.3759968102073366e-05, |
|
"loss": 0.0911, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 11.06, |
|
"learning_rate": 4.3859649122807014e-05, |
|
"loss": 0.0458, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 11.08, |
|
"learning_rate": 4.395933014354067e-05, |
|
"loss": 0.1948, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 11.11, |
|
"learning_rate": 4.4059011164274325e-05, |
|
"loss": 0.0882, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 11.13, |
|
"learning_rate": 4.415869218500798e-05, |
|
"loss": 0.0359, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 11.16, |
|
"learning_rate": 4.425837320574163e-05, |
|
"loss": 0.0299, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 11.18, |
|
"learning_rate": 4.4358054226475284e-05, |
|
"loss": 0.1302, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 11.21, |
|
"learning_rate": 4.445773524720893e-05, |
|
"loss": 0.0896, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 11.23, |
|
"learning_rate": 4.455741626794259e-05, |
|
"loss": 0.1446, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 11.26, |
|
"learning_rate": 4.4657097288676236e-05, |
|
"loss": 0.0728, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 11.28, |
|
"learning_rate": 4.475677830940989e-05, |
|
"loss": 0.0577, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 11.31, |
|
"learning_rate": 4.485645933014354e-05, |
|
"loss": 0.0469, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 11.33, |
|
"learning_rate": 4.4956140350877196e-05, |
|
"loss": 0.1188, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 11.36, |
|
"learning_rate": 4.5055821371610844e-05, |
|
"loss": 0.0319, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 11.38, |
|
"learning_rate": 4.51555023923445e-05, |
|
"loss": 0.0795, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 11.41, |
|
"learning_rate": 4.525518341307815e-05, |
|
"loss": 0.0812, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 11.43, |
|
"learning_rate": 4.53548644338118e-05, |
|
"loss": 0.0215, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 11.46, |
|
"learning_rate": 4.545454545454546e-05, |
|
"loss": 0.0851, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 11.48, |
|
"learning_rate": 4.555422647527911e-05, |
|
"loss": 0.0818, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 11.51, |
|
"learning_rate": 4.565390749601276e-05, |
|
"loss": 0.0753, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 11.53, |
|
"learning_rate": 4.575358851674642e-05, |
|
"loss": 0.0488, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 11.56, |
|
"learning_rate": 4.5853269537480066e-05, |
|
"loss": 0.1423, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 11.58, |
|
"learning_rate": 4.595295055821372e-05, |
|
"loss": 0.0852, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 11.61, |
|
"learning_rate": 4.605263157894737e-05, |
|
"loss": 0.1462, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 11.63, |
|
"learning_rate": 4.6152312599681025e-05, |
|
"loss": 0.1058, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 11.66, |
|
"learning_rate": 4.6251993620414674e-05, |
|
"loss": 0.0474, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 11.68, |
|
"learning_rate": 4.635167464114833e-05, |
|
"loss": 0.0254, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 11.71, |
|
"learning_rate": 4.645135566188198e-05, |
|
"loss": 0.2376, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 11.73, |
|
"learning_rate": 4.655103668261563e-05, |
|
"loss": 0.1412, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 11.76, |
|
"learning_rate": 4.665071770334928e-05, |
|
"loss": 0.1011, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 11.78, |
|
"learning_rate": 4.675039872408294e-05, |
|
"loss": 0.1552, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 11.81, |
|
"learning_rate": 4.6850079744816585e-05, |
|
"loss": 0.0418, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 11.83, |
|
"learning_rate": 4.694976076555024e-05, |
|
"loss": 0.1005, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 11.86, |
|
"learning_rate": 4.7049441786283896e-05, |
|
"loss": 0.274, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 11.88, |
|
"learning_rate": 4.714912280701755e-05, |
|
"loss": 0.137, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 11.91, |
|
"learning_rate": 4.72488038277512e-05, |
|
"loss": 0.122, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 11.93, |
|
"learning_rate": 4.7348484848484855e-05, |
|
"loss": 0.0886, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 11.96, |
|
"learning_rate": 4.7448165869218504e-05, |
|
"loss": 0.1217, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 11.98, |
|
"learning_rate": 4.754784688995216e-05, |
|
"loss": 0.0458, |
|
"step": 2385 |
|
}, |
|
{
"epoch": 12.0,
"eval_accuracy": 0.9882186616399623,
"eval_loss": 0.03981461003422737,
"eval_runtime": 70.8311,
"eval_samples_per_second": 29.959,
"eval_steps_per_second": 1.878,
"step": 2388
},
|
{ |
|
"epoch": 12.01, |
|
"learning_rate": 4.764752791068581e-05, |
|
"loss": 0.0762, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 12.04, |
|
"learning_rate": 4.774720893141946e-05, |
|
"loss": 0.1132, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 12.06, |
|
"learning_rate": 4.784688995215311e-05, |
|
"loss": 0.1384, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 12.09, |
|
"learning_rate": 4.794657097288677e-05, |
|
"loss": 0.1139, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 12.11, |
|
"learning_rate": 4.8046251993620415e-05, |
|
"loss": 0.0659, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 12.14, |
|
"learning_rate": 4.8145933014354064e-05, |
|
"loss": 0.175, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 12.16, |
|
"learning_rate": 4.824561403508772e-05, |
|
"loss": 0.14, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 12.19, |
|
"learning_rate": 4.8345295055821374e-05, |
|
"loss": 0.0635, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 12.21, |
|
"learning_rate": 4.844497607655503e-05, |
|
"loss": 0.101, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 12.24, |
|
"learning_rate": 4.854465709728868e-05, |
|
"loss": 0.1797, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 12.26, |
|
"learning_rate": 4.8644338118022334e-05, |
|
"loss": 0.1087, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 12.29, |
|
"learning_rate": 4.874401913875598e-05, |
|
"loss": 0.0672, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 12.31, |
|
"learning_rate": 4.884370015948964e-05, |
|
"loss": 0.0629, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 12.34, |
|
"learning_rate": 4.8943381180223286e-05, |
|
"loss": 0.0354, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 12.36, |
|
"learning_rate": 4.904306220095694e-05, |
|
"loss": 0.0454, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 12.39, |
|
"learning_rate": 4.914274322169059e-05, |
|
"loss": 0.0085, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 12.41, |
|
"learning_rate": 4.9242424242424245e-05, |
|
"loss": 0.0789, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 12.44, |
|
"learning_rate": 4.9342105263157894e-05, |
|
"loss": 0.0784, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 12.46, |
|
"learning_rate": 4.944178628389155e-05, |
|
"loss": 0.1032, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 12.49, |
|
"learning_rate": 4.95414673046252e-05, |
|
"loss": 0.2055, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 12.51, |
|
"learning_rate": 4.964114832535885e-05, |
|
"loss": 0.1465, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 12.54, |
|
"learning_rate": 4.97408293460925e-05, |
|
"loss": 0.1464, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 12.56, |
|
"learning_rate": 4.984051036682616e-05, |
|
"loss": 0.0842, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 12.59, |
|
"learning_rate": 4.994019138755981e-05, |
|
"loss": 0.0296, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 12.61, |
|
"learning_rate": 4.964028776978418e-05, |
|
"loss": 0.0736, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 12.64, |
|
"learning_rate": 4.874100719424461e-05, |
|
"loss": 0.2944, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 12.66, |
|
"learning_rate": 4.784172661870504e-05, |
|
"loss": 0.6141, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 12.69, |
|
"learning_rate": 4.6942446043165475e-05, |
|
"loss": 0.7013, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 12.71, |
|
"learning_rate": 4.60431654676259e-05, |
|
"loss": 0.5952, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 12.74, |
|
"learning_rate": 4.5143884892086334e-05, |
|
"loss": 0.6078, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 12.76, |
|
"learning_rate": 4.424460431654677e-05, |
|
"loss": 0.6268, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 12.79, |
|
"learning_rate": 4.33453237410072e-05, |
|
"loss": 0.6536, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 12.81, |
|
"learning_rate": 4.244604316546763e-05, |
|
"loss": 0.5829, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 12.84, |
|
"learning_rate": 4.154676258992806e-05, |
|
"loss": 0.4209, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 12.86, |
|
"learning_rate": 4.064748201438849e-05, |
|
"loss": 0.448, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 12.89, |
|
"learning_rate": 3.9748201438848925e-05, |
|
"loss": 0.5086, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 12.91, |
|
"learning_rate": 3.884892086330936e-05, |
|
"loss": 0.5025, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 12.94, |
|
"learning_rate": 3.794964028776979e-05, |
|
"loss": 0.484, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 12.96, |
|
"learning_rate": 3.7050359712230217e-05, |
|
"loss": 0.3999, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 12.99, |
|
"learning_rate": 3.615107913669065e-05, |
|
"loss": 0.4956, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 13.0, |
|
"eval_accuracy": 0.8642789820923656, |
|
"eval_loss": 0.3473888337612152, |
|
"eval_runtime": 70.415, |
|
"eval_samples_per_second": 30.136, |
|
"eval_steps_per_second": 1.889, |
|
"step": 2587 |
|
}, |
|
{ |
|
"epoch": 13.02, |
|
"learning_rate": 3.5251798561151075e-05, |
|
"loss": 0.5233, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 13.04, |
|
"learning_rate": 3.435251798561151e-05, |
|
"loss": 0.3388, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 13.07, |
|
"learning_rate": 3.345323741007194e-05, |
|
"loss": 0.4695, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 13.09, |
|
"learning_rate": 3.2553956834532374e-05, |
|
"loss": 0.4958, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 13.12, |
|
"learning_rate": 3.165467625899281e-05, |
|
"loss": 0.3863, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 13.14, |
|
"learning_rate": 3.075539568345324e-05, |
|
"loss": 0.2537, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 13.17, |
|
"learning_rate": 2.985611510791367e-05, |
|
"loss": 0.3639, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 13.19, |
|
"learning_rate": 2.89568345323741e-05, |
|
"loss": 0.2409, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 13.22, |
|
"learning_rate": 2.805755395683453e-05, |
|
"loss": 0.2559, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 13.24, |
|
"learning_rate": 2.7158273381294964e-05, |
|
"loss": 0.2671, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 13.27, |
|
"learning_rate": 2.6258992805755394e-05, |
|
"loss": 0.3325, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 13.29, |
|
"learning_rate": 2.5359712230215827e-05, |
|
"loss": 0.158, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 13.32, |
|
"learning_rate": 2.446043165467626e-05, |
|
"loss": 0.2713, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 13.34, |
|
"learning_rate": 2.3561151079136692e-05, |
|
"loss": 0.1543, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 13.37, |
|
"learning_rate": 2.2661870503597125e-05, |
|
"loss": 0.0994, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 13.39, |
|
"learning_rate": 2.1762589928057555e-05, |
|
"loss": 0.1104, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 13.42, |
|
"learning_rate": 2.0863309352517988e-05, |
|
"loss": 0.1673, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 13.44, |
|
"learning_rate": 1.996402877697842e-05, |
|
"loss": 0.1764, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 13.47, |
|
"learning_rate": 1.906474820143885e-05, |
|
"loss": 0.2138, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 13.49, |
|
"learning_rate": 1.8165467625899283e-05, |
|
"loss": 0.1848, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 13.52, |
|
"learning_rate": 1.7266187050359716e-05, |
|
"loss": 0.152, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 13.54, |
|
"learning_rate": 1.6366906474820145e-05, |
|
"loss": 0.261, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 13.57, |
|
"learning_rate": 1.5467625899280578e-05, |
|
"loss": 0.1587, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 13.59, |
|
"learning_rate": 1.4568345323741009e-05, |
|
"loss": 0.1373, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 13.62, |
|
"learning_rate": 1.366906474820144e-05, |
|
"loss": 0.1088, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 13.64, |
|
"learning_rate": 1.2769784172661871e-05, |
|
"loss": 0.0728, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 13.67, |
|
"learning_rate": 1.1870503597122303e-05, |
|
"loss": 0.0996, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 13.69, |
|
"learning_rate": 1.0971223021582734e-05, |
|
"loss": 0.1201, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 13.72, |
|
"learning_rate": 1.0071942446043167e-05, |
|
"loss": 0.1087, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 13.74, |
|
"learning_rate": 9.172661870503598e-06, |
|
"loss": 0.1906, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 13.77, |
|
"learning_rate": 8.273381294964029e-06, |
|
"loss": 0.1458, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 13.79, |
|
"learning_rate": 7.374100719424461e-06, |
|
"loss": 0.2096, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 13.82, |
|
"learning_rate": 6.474820143884892e-06, |
|
"loss": 0.2497, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 13.84, |
|
"learning_rate": 5.575539568345324e-06, |
|
"loss": 0.167, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 13.87, |
|
"learning_rate": 4.676258992805756e-06, |
|
"loss": 0.0807, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 13.89, |
|
"learning_rate": 3.7769784172661873e-06, |
|
"loss": 0.0646, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 13.92, |
|
"learning_rate": 2.877697841726619e-06, |
|
"loss": 0.1084, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 13.94, |
|
"learning_rate": 1.9784172661870504e-06, |
|
"loss": 0.0622, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 13.97, |
|
"learning_rate": 1.0791366906474822e-06, |
|
"loss": 0.066, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 13.99, |
|
"learning_rate": 1.7985611510791368e-07, |
|
"loss": 0.0801, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"eval_accuracy": 0.9797360980207351, |
|
"eval_loss": 0.0849713608622551, |
|
"eval_runtime": 70.6282, |
|
"eval_samples_per_second": 30.045, |
|
"eval_steps_per_second": 1.883, |
|
"step": 2786 |
|
}, |
|
{ |
|
"epoch": 14.0, |
|
"step": 2786, |
|
"total_flos": 1.58314339385421e+19, |
|
"train_loss": 0.05687390304887928, |
|
"train_runtime": 3560.0954, |
|
"train_samples_per_second": 25.026, |
|
"train_steps_per_second": 0.783 |
|
} |
|
], |
|
"max_steps": 2786, |
|
"num_train_epochs": 14, |
|
"total_flos": 1.58314339385421e+19, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |