{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 4992,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004006410256410256,
      "grad_norm": 1.243200421333313,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.5025,
      "step": 10
    },
    {
      "epoch": 0.008012820512820512,
      "grad_norm": 0.40575382113456726,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.3957,
      "step": 20
    },
    {
      "epoch": 0.01201923076923077,
      "grad_norm": 0.2619800269603729,
      "learning_rate": 2e-05,
      "loss": 0.3277,
      "step": 30
    },
    {
      "epoch": 0.016025641025641024,
      "grad_norm": 0.2629871964454651,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.319,
      "step": 40
    },
    {
      "epoch": 0.020032051282051284,
      "grad_norm": 0.2376326471567154,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.2829,
      "step": 50
    },
    {
      "epoch": 0.02403846153846154,
      "grad_norm": 0.25014540553092957,
      "learning_rate": 4e-05,
      "loss": 0.2777,
      "step": 60
    },
    {
      "epoch": 0.028044871794871796,
      "grad_norm": 0.30097439885139465,
      "learning_rate": 4.666666666666667e-05,
      "loss": 0.2688,
      "step": 70
    },
    {
      "epoch": 0.03205128205128205,
      "grad_norm": 0.29906877875328064,
      "learning_rate": 5.333333333333333e-05,
      "loss": 0.272,
      "step": 80
    },
    {
      "epoch": 0.036057692307692304,
      "grad_norm": 0.31522494554519653,
      "learning_rate": 6e-05,
      "loss": 0.2573,
      "step": 90
    },
    {
      "epoch": 0.04006410256410257,
      "grad_norm": 0.31278184056282043,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.2548,
      "step": 100
    },
    {
      "epoch": 0.044070512820512824,
      "grad_norm": 0.31608548760414124,
      "learning_rate": 7.333333333333333e-05,
      "loss": 0.2496,
      "step": 110
    },
    {
      "epoch": 0.04807692307692308,
      "grad_norm": 0.29328644275665283,
      "learning_rate": 8e-05,
      "loss": 0.2475,
      "step": 120
    },
    {
      "epoch": 0.052083333333333336,
      "grad_norm": 0.33972907066345215,
      "learning_rate": 8.666666666666667e-05,
      "loss": 0.2512,
      "step": 130
    },
    {
      "epoch": 0.05608974358974359,
      "grad_norm": 0.32710641622543335,
      "learning_rate": 9.333333333333334e-05,
      "loss": 0.2382,
      "step": 140
    },
    {
      "epoch": 0.06009615384615385,
      "grad_norm": 0.2911882996559143,
      "learning_rate": 0.0001,
      "loss": 0.2373,
      "step": 150
    },
    {
      "epoch": 0.0641025641025641,
      "grad_norm": 0.29396378993988037,
      "learning_rate": 9.999894758104792e-05,
      "loss": 0.2433,
      "step": 160
    },
    {
      "epoch": 0.06810897435897435,
      "grad_norm": 0.33355265855789185,
      "learning_rate": 9.999579036849509e-05,
      "loss": 0.2435,
      "step": 170
    },
    {
      "epoch": 0.07211538461538461,
      "grad_norm": 0.29101336002349854,
      "learning_rate": 9.999052849524992e-05,
      "loss": 0.2376,
      "step": 180
    },
    {
      "epoch": 0.07612179487179487,
      "grad_norm": 0.27655455470085144,
      "learning_rate": 9.998316218282024e-05,
      "loss": 0.2365,
      "step": 190
    },
    {
      "epoch": 0.08012820512820513,
      "grad_norm": 0.2681509554386139,
      "learning_rate": 9.997369174130391e-05,
      "loss": 0.2425,
      "step": 200
    },
    {
      "epoch": 0.08413461538461539,
      "grad_norm": 0.26265430450439453,
      "learning_rate": 9.996211756937579e-05,
      "loss": 0.2389,
      "step": 210
    },
    {
      "epoch": 0.08814102564102565,
      "grad_norm": 0.2830798625946045,
      "learning_rate": 9.994844015427102e-05,
      "loss": 0.2304,
      "step": 220
    },
    {
      "epoch": 0.0921474358974359,
      "grad_norm": 0.2548621892929077,
      "learning_rate": 9.993266007176445e-05,
      "loss": 0.2308,
      "step": 230
    },
    {
      "epoch": 0.09615384615384616,
      "grad_norm": 0.27960866689682007,
      "learning_rate": 9.991477798614638e-05,
      "loss": 0.2277,
      "step": 240
    },
    {
      "epoch": 0.10016025641025642,
      "grad_norm": 0.2278411090373993,
      "learning_rate": 9.989479465019462e-05,
      "loss": 0.2314,
      "step": 250
    },
    {
      "epoch": 0.10416666666666667,
      "grad_norm": 0.2582249045372009,
      "learning_rate": 9.987271090514288e-05,
      "loss": 0.2355,
      "step": 260
    },
    {
      "epoch": 0.10817307692307693,
      "grad_norm": 0.28811654448509216,
      "learning_rate": 9.984852768064516e-05,
      "loss": 0.2285,
      "step": 270
    },
    {
      "epoch": 0.11217948717948718,
      "grad_norm": 0.241895392537117,
      "learning_rate": 9.982224599473689e-05,
      "loss": 0.2344,
      "step": 280
    },
    {
      "epoch": 0.11618589743589744,
      "grad_norm": 0.2796310484409332,
      "learning_rate": 9.97938669537918e-05,
      "loss": 0.2246,
      "step": 290
    },
    {
      "epoch": 0.1201923076923077,
      "grad_norm": 0.24658632278442383,
      "learning_rate": 9.976339175247551e-05,
      "loss": 0.2187,
      "step": 300
    },
    {
      "epoch": 0.12419871794871795,
      "grad_norm": 0.23214490711688995,
      "learning_rate": 9.973082167369521e-05,
      "loss": 0.225,
      "step": 310
    },
    {
      "epoch": 0.1282051282051282,
      "grad_norm": 0.25273966789245605,
      "learning_rate": 9.969615808854562e-05,
      "loss": 0.2291,
      "step": 320
    },
    {
      "epoch": 0.13221153846153846,
      "grad_norm": 0.25187525153160095,
      "learning_rate": 9.965940245625131e-05,
      "loss": 0.2227,
      "step": 330
    },
    {
      "epoch": 0.1362179487179487,
      "grad_norm": 0.296224445104599,
      "learning_rate": 9.962055632410523e-05,
      "loss": 0.2185,
      "step": 340
    },
    {
      "epoch": 0.14022435897435898,
      "grad_norm": 0.2560974955558777,
      "learning_rate": 9.95796213274036e-05,
      "loss": 0.2251,
      "step": 350
    },
    {
      "epoch": 0.14423076923076922,
      "grad_norm": 0.24337032437324524,
      "learning_rate": 9.95365991893771e-05,
      "loss": 0.2212,
      "step": 360
    },
    {
      "epoch": 0.1482371794871795,
      "grad_norm": 0.2607037127017975,
      "learning_rate": 9.949149172111825e-05,
      "loss": 0.2229,
      "step": 370
    },
    {
      "epoch": 0.15224358974358973,
      "grad_norm": 0.2829968333244324,
      "learning_rate": 9.944430082150522e-05,
      "loss": 0.2276,
      "step": 380
    },
    {
      "epoch": 0.15625,
      "grad_norm": 0.24074934422969818,
      "learning_rate": 9.93950284771219e-05,
      "loss": 0.2241,
      "step": 390
    },
    {
      "epoch": 0.16025641025641027,
      "grad_norm": 0.27647778391838074,
      "learning_rate": 9.934367676217427e-05,
      "loss": 0.2071,
      "step": 400
    },
    {
      "epoch": 0.1642628205128205,
      "grad_norm": 0.2719784677028656,
      "learning_rate": 9.929024783840302e-05,
      "loss": 0.2217,
      "step": 410
    },
    {
      "epoch": 0.16826923076923078,
      "grad_norm": 0.23359382152557373,
      "learning_rate": 9.923474395499265e-05,
      "loss": 0.2222,
      "step": 420
    },
    {
      "epoch": 0.17227564102564102,
      "grad_norm": 0.27376872301101685,
      "learning_rate": 9.917716744847673e-05,
      "loss": 0.222,
      "step": 430
    },
    {
      "epoch": 0.1762820512820513,
      "grad_norm": 0.2602153718471527,
      "learning_rate": 9.911752074263949e-05,
      "loss": 0.2232,
      "step": 440
    },
    {
      "epoch": 0.18028846153846154,
      "grad_norm": 0.29065001010894775,
      "learning_rate": 9.905580634841388e-05,
      "loss": 0.2212,
      "step": 450
    },
    {
      "epoch": 0.1842948717948718,
      "grad_norm": 0.24715092778205872,
      "learning_rate": 9.899202686377584e-05,
      "loss": 0.2246,
      "step": 460
    },
    {
      "epoch": 0.18830128205128205,
      "grad_norm": 0.2442002296447754,
      "learning_rate": 9.89261849736349e-05,
      "loss": 0.2171,
      "step": 470
    },
    {
      "epoch": 0.19230769230769232,
      "grad_norm": 0.22722263634204865,
      "learning_rate": 9.88582834497212e-05,
      "loss": 0.2176,
      "step": 480
    },
    {
      "epoch": 0.19631410256410256,
      "grad_norm": 0.22697150707244873,
      "learning_rate": 9.878832515046871e-05,
      "loss": 0.2179,
      "step": 490
    },
    {
      "epoch": 0.20032051282051283,
      "grad_norm": 0.23478808999061584,
      "learning_rate": 9.87163130208951e-05,
      "loss": 0.2114,
      "step": 500
    },
    {
      "epoch": 0.20432692307692307,
      "grad_norm": 0.26591965556144714,
      "learning_rate": 9.864225009247751e-05,
      "loss": 0.2165,
      "step": 510
    },
    {
      "epoch": 0.20833333333333334,
      "grad_norm": 0.23949167132377625,
      "learning_rate": 9.856613948302518e-05,
      "loss": 0.2118,
      "step": 520
    },
    {
      "epoch": 0.21233974358974358,
      "grad_norm": 0.22695676982402802,
      "learning_rate": 9.848798439654797e-05,
      "loss": 0.2256,
      "step": 530
    },
    {
      "epoch": 0.21634615384615385,
      "grad_norm": 0.2355981171131134,
      "learning_rate": 9.840778812312165e-05,
      "loss": 0.2123,
      "step": 540
    },
    {
      "epoch": 0.2203525641025641,
      "grad_norm": 0.2660483419895172,
      "learning_rate": 9.832555403874937e-05,
      "loss": 0.2207,
      "step": 550
    },
    {
      "epoch": 0.22435897435897437,
      "grad_norm": 0.2517046630382538,
      "learning_rate": 9.824128560521948e-05,
      "loss": 0.2144,
      "step": 560
    },
    {
      "epoch": 0.2283653846153846,
      "grad_norm": 0.23771142959594727,
      "learning_rate": 9.815498636995983e-05,
      "loss": 0.2061,
      "step": 570
    },
    {
      "epoch": 0.23237179487179488,
      "grad_norm": 0.23048646748065948,
      "learning_rate": 9.806665996588844e-05,
      "loss": 0.2151,
      "step": 580
    },
    {
      "epoch": 0.23637820512820512,
      "grad_norm": 0.26294463872909546,
      "learning_rate": 9.79763101112606e-05,
      "loss": 0.2129,
      "step": 590
    },
    {
      "epoch": 0.2403846153846154,
      "grad_norm": 0.2391149401664734,
      "learning_rate": 9.788394060951229e-05,
      "loss": 0.2134,
      "step": 600
    },
    {
      "epoch": 0.24439102564102563,
      "grad_norm": 0.21405121684074402,
      "learning_rate": 9.778955534910003e-05,
      "loss": 0.2204,
      "step": 610
    },
    {
      "epoch": 0.2483974358974359,
      "grad_norm": 0.21098735928535461,
      "learning_rate": 9.769315830333735e-05,
      "loss": 0.212,
      "step": 620
    },
    {
      "epoch": 0.25240384615384615,
      "grad_norm": 0.24548964202404022,
      "learning_rate": 9.759475353022734e-05,
      "loss": 0.2091,
      "step": 630
    },
    {
      "epoch": 0.2564102564102564,
      "grad_norm": 0.23211954534053802,
      "learning_rate": 9.749434517229191e-05,
      "loss": 0.2074,
      "step": 640
    },
    {
      "epoch": 0.2604166666666667,
      "grad_norm": 0.24367853999137878,
      "learning_rate": 9.739193745639745e-05,
      "loss": 0.2,
      "step": 650
    },
    {
      "epoch": 0.2644230769230769,
      "grad_norm": 0.22633159160614014,
      "learning_rate": 9.728753469357677e-05,
      "loss": 0.2144,
      "step": 660
    },
    {
      "epoch": 0.26842948717948717,
      "grad_norm": 0.23814593255519867,
      "learning_rate": 9.718114127884774e-05,
      "loss": 0.2105,
      "step": 670
    },
    {
      "epoch": 0.2724358974358974,
      "grad_norm": 0.23060369491577148,
      "learning_rate": 9.70727616910282e-05,
      "loss": 0.2095,
      "step": 680
    },
    {
      "epoch": 0.2764423076923077,
      "grad_norm": 0.24806129932403564,
      "learning_rate": 9.696240049254743e-05,
      "loss": 0.2023,
      "step": 690
    },
    {
      "epoch": 0.28044871794871795,
      "grad_norm": 0.2317938357591629,
      "learning_rate": 9.685006232925413e-05,
      "loss": 0.2069,
      "step": 700
    },
    {
      "epoch": 0.2844551282051282,
      "grad_norm": 0.2503266930580139,
      "learning_rate": 9.673575193022073e-05,
      "loss": 0.2131,
      "step": 710
    },
    {
      "epoch": 0.28846153846153844,
      "grad_norm": 0.24147148430347443,
      "learning_rate": 9.661947410754449e-05,
      "loss": 0.2024,
      "step": 720
    },
    {
      "epoch": 0.29246794871794873,
      "grad_norm": 0.2683294117450714,
      "learning_rate": 9.650123375614477e-05,
      "loss": 0.2088,
      "step": 730
    },
    {
      "epoch": 0.296474358974359,
      "grad_norm": 0.23034317791461945,
      "learning_rate": 9.638103585355705e-05,
      "loss": 0.2103,
      "step": 740
    },
    {
      "epoch": 0.3004807692307692,
      "grad_norm": 0.23943544924259186,
      "learning_rate": 9.625888545972332e-05,
      "loss": 0.211,
      "step": 750
    },
    {
      "epoch": 0.30448717948717946,
      "grad_norm": 0.24953238666057587,
      "learning_rate": 9.613478771677921e-05,
      "loss": 0.217,
      "step": 760
    },
    {
      "epoch": 0.30849358974358976,
      "grad_norm": 0.25620102882385254,
      "learning_rate": 9.600874784883736e-05,
      "loss": 0.208,
      "step": 770
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.22348496317863464,
      "learning_rate": 9.588077116176756e-05,
      "loss": 0.2048,
      "step": 780
    },
    {
      "epoch": 0.31650641025641024,
      "grad_norm": 0.21532465517520905,
      "learning_rate": 9.575086304297351e-05,
      "loss": 0.1997,
      "step": 790
    },
    {
      "epoch": 0.32051282051282054,
      "grad_norm": 0.23346661031246185,
      "learning_rate": 9.561902896116583e-05,
      "loss": 0.2085,
      "step": 800
    },
    {
      "epoch": 0.3245192307692308,
      "grad_norm": 0.23158910870552063,
      "learning_rate": 9.548527446613198e-05,
      "loss": 0.2005,
      "step": 810
    },
    {
      "epoch": 0.328525641025641,
      "grad_norm": 0.2302473783493042,
      "learning_rate": 9.534960518850257e-05,
      "loss": 0.1993,
      "step": 820
    },
    {
      "epoch": 0.33253205128205127,
      "grad_norm": 0.21067394316196442,
      "learning_rate": 9.521202683951436e-05,
      "loss": 0.1902,
      "step": 830
    },
    {
      "epoch": 0.33653846153846156,
      "grad_norm": 0.22453920543193817,
      "learning_rate": 9.507254521076982e-05,
      "loss": 0.1907,
      "step": 840
    },
    {
      "epoch": 0.3405448717948718,
      "grad_norm": 0.23678337037563324,
      "learning_rate": 9.493116617399334e-05,
      "loss": 0.2007,
      "step": 850
    },
    {
      "epoch": 0.34455128205128205,
      "grad_norm": 0.2377959042787552,
      "learning_rate": 9.478789568078403e-05,
      "loss": 0.2077,
      "step": 860
    },
    {
      "epoch": 0.3485576923076923,
      "grad_norm": 0.23520340025424957,
      "learning_rate": 9.464273976236517e-05,
      "loss": 0.2079,
      "step": 870
    },
    {
      "epoch": 0.3525641025641026,
      "grad_norm": 0.21978124976158142,
      "learning_rate": 9.449570452933038e-05,
      "loss": 0.2076,
      "step": 880
    },
    {
      "epoch": 0.35657051282051283,
      "grad_norm": 0.21525253355503082,
      "learning_rate": 9.434679617138625e-05,
      "loss": 0.207,
      "step": 890
    },
    {
      "epoch": 0.3605769230769231,
      "grad_norm": 0.23039253056049347,
      "learning_rate": 9.419602095709193e-05,
      "loss": 0.1985,
      "step": 900
    },
    {
      "epoch": 0.3645833333333333,
      "grad_norm": 0.2329927235841751,
      "learning_rate": 9.40433852335951e-05,
      "loss": 0.2136,
      "step": 910
    },
    {
      "epoch": 0.3685897435897436,
      "grad_norm": 0.24603788554668427,
      "learning_rate": 9.388889542636495e-05,
      "loss": 0.2106,
      "step": 920
    },
    {
      "epoch": 0.37259615384615385,
      "grad_norm": 0.2563282549381256,
      "learning_rate": 9.373255803892149e-05,
      "loss": 0.2052,
      "step": 930
    },
    {
      "epoch": 0.3766025641025641,
      "grad_norm": 0.2391922026872635,
      "learning_rate": 9.357437965256188e-05,
      "loss": 0.1965,
      "step": 940
    },
    {
      "epoch": 0.38060897435897434,
      "grad_norm": 0.22608546912670135,
      "learning_rate": 9.341436692608341e-05,
      "loss": 0.1995,
      "step": 950
    },
    {
      "epoch": 0.38461538461538464,
      "grad_norm": 0.2254156768321991,
      "learning_rate": 9.325252659550309e-05,
      "loss": 0.2023,
      "step": 960
    },
    {
      "epoch": 0.3886217948717949,
      "grad_norm": 0.2172781527042389,
      "learning_rate": 9.30888654737742e-05,
      "loss": 0.1935,
      "step": 970
    },
    {
      "epoch": 0.3926282051282051,
      "grad_norm": 0.24400238692760468,
      "learning_rate": 9.292339045049938e-05,
      "loss": 0.2046,
      "step": 980
    },
    {
      "epoch": 0.39663461538461536,
      "grad_norm": 0.23503102362155914,
      "learning_rate": 9.275610849164064e-05,
      "loss": 0.1896,
      "step": 990
    },
    {
      "epoch": 0.40064102564102566,
      "grad_norm": 0.23404181003570557,
      "learning_rate": 9.258702663922614e-05,
      "loss": 0.2034,
      "step": 1000
    },
    {
      "epoch": 0.4046474358974359,
      "grad_norm": 0.24212764203548431,
      "learning_rate": 9.241615201105372e-05,
      "loss": 0.2027,
      "step": 1010
    },
    {
      "epoch": 0.40865384615384615,
      "grad_norm": 0.2275129109621048,
      "learning_rate": 9.224349180039124e-05,
      "loss": 0.195,
      "step": 1020
    },
    {
      "epoch": 0.4126602564102564,
      "grad_norm": 0.2557786703109741,
      "learning_rate": 9.206905327567386e-05,
      "loss": 0.2029,
      "step": 1030
    },
    {
      "epoch": 0.4166666666666667,
      "grad_norm": 0.23480971157550812,
      "learning_rate": 9.189284378019794e-05,
      "loss": 0.1896,
      "step": 1040
    },
    {
      "epoch": 0.4206730769230769,
      "grad_norm": 0.20550891757011414,
      "learning_rate": 9.171487073181198e-05,
      "loss": 0.2012,
      "step": 1050
    },
    {
      "epoch": 0.42467948717948717,
      "grad_norm": 0.23816007375717163,
      "learning_rate": 9.153514162260432e-05,
      "loss": 0.2038,
      "step": 1060
    },
    {
      "epoch": 0.4286858974358974,
      "grad_norm": 0.22253037989139557,
      "learning_rate": 9.135366401858783e-05,
      "loss": 0.2038,
      "step": 1070
    },
    {
      "epoch": 0.4326923076923077,
      "grad_norm": 0.2550757825374603,
      "learning_rate": 9.117044555938128e-05,
      "loss": 0.192,
      "step": 1080
    },
    {
      "epoch": 0.43669871794871795,
      "grad_norm": 0.24329575896263123,
      "learning_rate": 9.098549395788784e-05,
      "loss": 0.2033,
      "step": 1090
    },
    {
      "epoch": 0.4407051282051282,
      "grad_norm": 0.23914198577404022,
      "learning_rate": 9.079881699997033e-05,
      "loss": 0.203,
      "step": 1100
    },
    {
      "epoch": 0.44471153846153844,
      "grad_norm": 0.22172652184963226,
      "learning_rate": 9.061042254412346e-05,
      "loss": 0.1974,
      "step": 1110
    },
    {
      "epoch": 0.44871794871794873,
      "grad_norm": 0.24050387740135193,
      "learning_rate": 9.042031852114311e-05,
      "loss": 0.2068,
      "step": 1120
    },
    {
      "epoch": 0.452724358974359,
      "grad_norm": 0.25631067156791687,
      "learning_rate": 9.022851293379231e-05,
      "loss": 0.1932,
      "step": 1130
    },
    {
      "epoch": 0.4567307692307692,
      "grad_norm": 0.24415907263755798,
      "learning_rate": 9.003501385646449e-05,
      "loss": 0.195,
      "step": 1140
    },
    {
      "epoch": 0.46073717948717946,
      "grad_norm": 0.2140500694513321,
      "learning_rate": 8.983982943484349e-05,
      "loss": 0.197,
      "step": 1150
    },
    {
      "epoch": 0.46474358974358976,
      "grad_norm": 0.2267265021800995,
      "learning_rate": 8.964296788556067e-05,
      "loss": 0.1956,
      "step": 1160
    },
    {
      "epoch": 0.46875,
      "grad_norm": 0.2181784212589264,
      "learning_rate": 8.944443749584907e-05,
      "loss": 0.2025,
      "step": 1170
    },
    {
      "epoch": 0.47275641025641024,
      "grad_norm": 0.22020593285560608,
      "learning_rate": 8.924424662319448e-05,
      "loss": 0.1965,
      "step": 1180
    },
    {
      "epoch": 0.47676282051282054,
      "grad_norm": 0.21737538278102875,
      "learning_rate": 8.904240369498362e-05,
      "loss": 0.1989,
      "step": 1190
    },
    {
      "epoch": 0.4807692307692308,
      "grad_norm": 0.21464964747428894,
      "learning_rate": 8.883891720814942e-05,
      "loss": 0.1845,
      "step": 1200
    },
    {
      "epoch": 0.484775641025641,
      "grad_norm": 0.26550692319869995,
      "learning_rate": 8.863379572881329e-05,
      "loss": 0.1888,
      "step": 1210
    },
    {
      "epoch": 0.48878205128205127,
      "grad_norm": 0.24705030024051666,
      "learning_rate": 8.84270478919245e-05,
      "loss": 0.1912,
      "step": 1220
    },
    {
      "epoch": 0.49278846153846156,
      "grad_norm": 0.22611679136753082,
      "learning_rate": 8.821868240089676e-05,
      "loss": 0.1998,
      "step": 1230
    },
    {
      "epoch": 0.4967948717948718,
      "grad_norm": 0.21941202878952026,
      "learning_rate": 8.800870802724175e-05,
      "loss": 0.1981,
      "step": 1240
    },
    {
      "epoch": 0.500801282051282,
      "grad_norm": 0.22026574611663818,
      "learning_rate": 8.779713361019982e-05,
      "loss": 0.1915,
      "step": 1250
    },
    {
      "epoch": 0.5048076923076923,
      "grad_norm": 0.2143823504447937,
      "learning_rate": 8.758396805636806e-05,
      "loss": 0.1952,
      "step": 1260
    },
    {
      "epoch": 0.5088141025641025,
      "grad_norm": 0.22727568447589874,
      "learning_rate": 8.736922033932522e-05,
      "loss": 0.1942,
      "step": 1270
    },
    {
      "epoch": 0.5128205128205128,
      "grad_norm": 0.226875901222229,
      "learning_rate": 8.715289949925401e-05,
      "loss": 0.1945,
      "step": 1280
    },
    {
      "epoch": 0.5168269230769231,
      "grad_norm": 0.1967984139919281,
      "learning_rate": 8.693501464256046e-05,
      "loss": 0.1888,
      "step": 1290
    },
    {
      "epoch": 0.5208333333333334,
      "grad_norm": 0.2363027185201645,
      "learning_rate": 8.67155749414907e-05,
      "loss": 0.1925,
      "step": 1300
    },
    {
      "epoch": 0.5248397435897436,
      "grad_norm": 0.23106949031352997,
      "learning_rate": 8.649458963374473e-05,
      "loss": 0.1927,
      "step": 1310
    },
    {
      "epoch": 0.5288461538461539,
      "grad_norm": 0.20555250346660614,
      "learning_rate": 8.62720680220876e-05,
      "loss": 0.2296,
      "step": 1320
    },
    {
      "epoch": 0.5328525641025641,
      "grad_norm": 0.21878638863563538,
      "learning_rate": 8.604801947395777e-05,
      "loss": 0.1993,
      "step": 1330
    },
    {
      "epoch": 0.5368589743589743,
      "grad_norm": 0.22356002032756805,
      "learning_rate": 8.582245342107273e-05,
      "loss": 0.1911,
      "step": 1340
    },
    {
      "epoch": 0.5408653846153846,
      "grad_norm": 0.23506873846054077,
      "learning_rate": 8.55953793590321e-05,
      "loss": 0.1879,
      "step": 1350
    },
    {
      "epoch": 0.5448717948717948,
      "grad_norm": 0.23467378318309784,
      "learning_rate": 8.53668068469177e-05,
      "loss": 0.1949,
      "step": 1360
    },
    {
      "epoch": 0.5488782051282052,
      "grad_norm": 0.24210813641548157,
      "learning_rate": 8.513674550689128e-05,
      "loss": 0.1842,
      "step": 1370
    },
    {
      "epoch": 0.5528846153846154,
      "grad_norm": 0.2602980434894562,
      "learning_rate": 8.49052050237894e-05,
      "loss": 0.192,
      "step": 1380
    },
    {
      "epoch": 0.5568910256410257,
      "grad_norm": 0.23115062713623047,
      "learning_rate": 8.467219514471581e-05,
      "loss": 0.2008,
      "step": 1390
    },
    {
      "epoch": 0.5608974358974359,
      "grad_norm": 0.23633736371994019,
      "learning_rate": 8.443772567863097e-05,
      "loss": 0.1893,
      "step": 1400
    },
    {
      "epoch": 0.5649038461538461,
      "grad_norm": 0.21265539526939392,
      "learning_rate": 8.420180649593929e-05,
      "loss": 0.2016,
      "step": 1410
    },
    {
      "epoch": 0.5689102564102564,
      "grad_norm": 0.21362069249153137,
      "learning_rate": 8.396444752807357e-05,
      "loss": 0.1861,
      "step": 1420
    },
    {
      "epoch": 0.5729166666666666,
      "grad_norm": 0.2480146735906601,
      "learning_rate": 8.37256587670768e-05,
      "loss": 0.1948,
      "step": 1430
    },
    {
      "epoch": 0.5769230769230769,
      "grad_norm": 0.24672983586788177,
      "learning_rate": 8.348545026518171e-05,
      "loss": 0.193,
      "step": 1440
    },
    {
      "epoch": 0.5809294871794872,
      "grad_norm": 0.23038969933986664,
      "learning_rate": 8.32438321343875e-05,
      "loss": 0.1865,
      "step": 1450
    },
    {
      "epoch": 0.5849358974358975,
      "grad_norm": 0.23314690589904785,
      "learning_rate": 8.300081454603415e-05,
      "loss": 0.1863,
      "step": 1460
    },
    {
      "epoch": 0.5889423076923077,
      "grad_norm": 0.2449977695941925,
      "learning_rate": 8.27564077303743e-05,
      "loss": 0.199,
      "step": 1470
    },
    {
      "epoch": 0.592948717948718,
      "grad_norm": 0.22973774373531342,
      "learning_rate": 8.251062197614257e-05,
      "loss": 0.1818,
      "step": 1480
    },
    {
      "epoch": 0.5969551282051282,
      "grad_norm": 0.2545977830886841,
      "learning_rate": 8.226346763012234e-05,
      "loss": 0.1963,
      "step": 1490
    },
    {
      "epoch": 0.6009615384615384,
      "grad_norm": 0.23034323751926422,
      "learning_rate": 8.201495509671037e-05,
      "loss": 0.1791,
      "step": 1500
    },
    {
      "epoch": 0.6049679487179487,
      "grad_norm": 0.22426968812942505,
      "learning_rate": 8.176509483747862e-05,
      "loss": 0.1941,
      "step": 1510
    },
    {
      "epoch": 0.6089743589743589,
      "grad_norm": 0.2258072942495346,
      "learning_rate": 8.151389737073403e-05,
      "loss": 0.195,
      "step": 1520
    },
    {
      "epoch": 0.6129807692307693,
      "grad_norm": 0.22930273413658142,
      "learning_rate": 8.126137327107554e-05,
      "loss": 0.191,
      "step": 1530
    },
    {
      "epoch": 0.6169871794871795,
      "grad_norm": 0.239192396402359,
      "learning_rate": 8.100753316894913e-05,
      "loss": 0.2073,
      "step": 1540
    },
    {
      "epoch": 0.6209935897435898,
      "grad_norm": 0.21774733066558838,
      "learning_rate": 8.075238775020011e-05,
      "loss": 0.1875,
      "step": 1550
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.21093542873859406,
      "learning_rate": 8.04959477556235e-05,
      "loss": 0.1942,
      "step": 1560
    },
    {
      "epoch": 0.6290064102564102,
      "grad_norm": 0.21208257973194122,
      "learning_rate": 8.023822398051168e-05,
      "loss": 0.1928,
      "step": 1570
    },
    {
      "epoch": 0.6330128205128205,
      "grad_norm": 0.20604068040847778,
      "learning_rate": 7.997922727420011e-05,
      "loss": 0.1891,
      "step": 1580
    },
    {
      "epoch": 0.6370192307692307,
      "grad_norm": 0.209623783826828,
      "learning_rate": 7.971896853961042e-05,
      "loss": 0.1962,
      "step": 1590
    },
    {
      "epoch": 0.6410256410256411,
      "grad_norm": 0.22957240045070648,
      "learning_rate": 7.945745873279164e-05,
      "loss": 0.1908,
      "step": 1600
    },
    {
      "epoch": 0.6450320512820513,
      "grad_norm": 0.2278134673833847,
      "learning_rate": 7.919470886245886e-05,
      "loss": 0.186,
      "step": 1610
    },
    {
      "epoch": 0.6490384615384616,
      "grad_norm": 0.2224593311548233,
      "learning_rate": 7.893072998952975e-05,
      "loss": 0.1837,
      "step": 1620
    },
    {
      "epoch": 0.6530448717948718,
      "grad_norm": 0.2841613292694092,
      "learning_rate": 7.866553322665911e-05,
      "loss": 0.1961,
      "step": 1630
    },
    {
      "epoch": 0.657051282051282,
      "grad_norm": 0.2239413857460022,
      "learning_rate": 7.839912973777088e-05,
      "loss": 0.1864,
      "step": 1640
    },
    {
      "epoch": 0.6610576923076923,
      "grad_norm": 0.24558769166469574,
      "learning_rate": 7.813153073758832e-05,
      "loss": 0.1902,
      "step": 1650
    },
    {
      "epoch": 0.6650641025641025,
      "grad_norm": 0.22548821568489075,
      "learning_rate": 7.786274749116178e-05,
      "loss": 0.1927,
      "step": 1660
    },
    {
      "epoch": 0.6690705128205128,
      "grad_norm": 0.24735094606876373,
      "learning_rate": 7.759279131339454e-05,
      "loss": 0.1822,
      "step": 1670
    },
    {
      "epoch": 0.6730769230769231,
      "grad_norm": 0.23317532241344452,
      "learning_rate": 7.732167356856655e-05,
      "loss": 0.1928,
      "step": 1680
    },
    {
      "epoch": 0.6770833333333334,
      "grad_norm": 0.23926283419132233,
      "learning_rate": 7.704940566985592e-05,
      "loss": 0.1875,
      "step": 1690
    },
    {
      "epoch": 0.6810897435897436,
      "grad_norm": 0.18621069192886353,
      "learning_rate": 7.677599907885851e-05,
      "loss": 0.1967,
      "step": 1700
    },
    {
      "epoch": 0.6850961538461539,
      "grad_norm": 0.2330184280872345,
      "learning_rate": 7.650146530510542e-05,
      "loss": 0.188,
      "step": 1710
    },
    {
      "epoch": 0.6891025641025641,
      "grad_norm": 0.2638700008392334,
      "learning_rate": 7.622581590557853e-05,
      "loss": 0.1815,
      "step": 1720
    },
    {
      "epoch": 0.6931089743589743,
      "grad_norm": 0.22845664620399475,
      "learning_rate": 7.594906248422392e-05,
      "loss": 0.1838,
      "step": 1730
    },
    {
      "epoch": 0.6971153846153846,
      "grad_norm": 0.20396199822425842,
      "learning_rate": 7.567121669146343e-05,
      "loss": 0.181,
      "step": 1740
    },
    {
      "epoch": 0.7011217948717948,
      "grad_norm": 0.24452732503414154,
      "learning_rate": 7.539229022370417e-05,
      "loss": 0.1869,
      "step": 1750
    },
    {
      "epoch": 0.7051282051282052,
      "grad_norm": 0.21035702526569366,
      "learning_rate": 7.511229482284619e-05,
      "loss": 0.1912,
      "step": 1760
    },
    {
      "epoch": 0.7091346153846154,
      "grad_norm": 0.25418221950531006,
      "learning_rate": 7.483124227578811e-05,
      "loss": 0.1842,
      "step": 1770
    },
    {
      "epoch": 0.7131410256410257,
      "grad_norm": 0.20252016186714172,
      "learning_rate": 7.454914441393104e-05,
      "loss": 0.1799,
      "step": 1780
    },
    {
      "epoch": 0.7171474358974359,
      "grad_norm": 0.22078120708465576,
      "learning_rate": 7.426601311268043e-05,
      "loss": 0.1864,
      "step": 1790
    },
    {
      "epoch": 0.7211538461538461,
      "grad_norm": 0.22004160284996033,
      "learning_rate": 7.398186029094617e-05,
      "loss": 0.1868,
      "step": 1800
    },
    {
      "epoch": 0.7251602564102564,
      "grad_norm": 0.23584477603435516,
      "learning_rate": 7.369669791064084e-05,
      "loss": 0.1972,
      "step": 1810
    },
    {
      "epoch": 0.7291666666666666,
      "grad_norm": 0.23261332511901855,
      "learning_rate": 7.341053797617621e-05,
      "loss": 0.188,
      "step": 1820
    },
    {
      "epoch": 0.7331730769230769,
      "grad_norm": 0.23437419533729553,
      "learning_rate": 7.312339253395778e-05,
      "loss": 0.1912,
      "step": 1830
    },
    {
      "epoch": 0.7371794871794872,
      "grad_norm": 0.22874341905117035,
      "learning_rate": 7.283527367187778e-05,
      "loss": 0.1826,
      "step": 1840
    },
    {
      "epoch": 0.7411858974358975,
      "grad_norm": 0.21897387504577637,
      "learning_rate": 7.254619351880625e-05,
      "loss": 0.1765,
      "step": 1850
    },
    {
      "epoch": 0.7451923076923077,
      "grad_norm": 0.24442484974861145,
      "learning_rate": 7.225616424408045e-05,
      "loss": 0.1903,
      "step": 1860
    },
    {
      "epoch": 0.749198717948718,
      "grad_norm": 0.22574163973331451,
      "learning_rate": 7.19651980569926e-05,
      "loss": 0.1861,
      "step": 1870
    },
    {
      "epoch": 0.7532051282051282,
      "grad_norm": 0.2647426724433899,
      "learning_rate": 7.167330720627591e-05,
      "loss": 0.1838,
      "step": 1880
    },
    {
      "epoch": 0.7572115384615384,
      "grad_norm": 0.19669461250305176,
      "learning_rate": 7.138050397958888e-05,
      "loss": 0.178,
      "step": 1890
    },
    {
      "epoch": 0.7612179487179487,
      "grad_norm": 0.22219333052635193,
      "learning_rate": 7.108680070299813e-05,
      "loss": 0.1813,
      "step": 1900
    },
    {
      "epoch": 0.7652243589743589,
      "grad_norm": 0.24694056808948517,
      "learning_rate": 7.079220974045941e-05,
      "loss": 0.186,
      "step": 1910
    },
    {
      "epoch": 0.7692307692307693,
      "grad_norm": 0.2198621928691864,
      "learning_rate": 7.049674349329724e-05,
      "loss": 0.1883,
      "step": 1920
    },
    {
      "epoch": 0.7732371794871795,
      "grad_norm": 0.22908103466033936,
      "learning_rate": 7.020041439968274e-05,
      "loss": 0.1925,
      "step": 1930
    },
    {
      "epoch": 0.7772435897435898,
      "grad_norm": 0.2108490765094757,
      "learning_rate": 6.990323493411006e-05,
      "loss": 0.1832,
      "step": 1940
    },
    {
      "epoch": 0.78125,
      "grad_norm": 0.23686829209327698,
      "learning_rate": 6.960521760687129e-05,
      "loss": 0.1814,
      "step": 1950
    },
    {
      "epoch": 0.7852564102564102,
      "grad_norm": 0.21755458414554596,
      "learning_rate": 6.930637496352975e-05,
      "loss": 0.1869,
      "step": 1960
    },
    {
      "epoch": 0.7892628205128205,
      "grad_norm": 0.19528847932815552,
      "learning_rate": 6.90067195843919e-05,
      "loss": 0.181,
      "step": 1970
    },
    {
      "epoch": 0.7932692307692307,
      "grad_norm": 0.2116439789533615,
      "learning_rate": 6.870626408397775e-05,
      "loss": 0.1878,
      "step": 1980
    },
    {
      "epoch": 0.7972756410256411,
      "grad_norm": 0.21799945831298828,
      "learning_rate": 6.840502111048981e-05,
      "loss": 0.1774,
      "step": 1990
    },
    {
      "epoch": 0.8012820512820513,
      "grad_norm": 0.21898455917835236,
      "learning_rate": 6.810300334528069e-05,
      "loss": 0.1873,
      "step": 2000
    },
    {
      "epoch": 0.8052884615384616,
      "grad_norm": 0.23046636581420898,
      "learning_rate": 6.780022350231912e-05,
      "loss": 0.1817,
      "step": 2010
    },
    {
      "epoch": 0.8092948717948718,
      "grad_norm": 0.2145206779241562,
      "learning_rate": 6.749669432765496e-05,
      "loss": 0.1818,
      "step": 2020
    },
    {
      "epoch": 0.813301282051282,
      "grad_norm": 0.21499067544937134,
      "learning_rate": 6.719242859888244e-05,
      "loss": 0.1819,
      "step": 2030
    },
    {
      "epoch": 0.8173076923076923,
      "grad_norm": 0.23782077431678772,
      "learning_rate": 6.688743912460229e-05,
      "loss": 0.1813,
      "step": 2040
    },
    {
      "epoch": 0.8213141025641025,
      "grad_norm": 0.2248157560825348,
      "learning_rate": 6.65817387438827e-05,
      "loss": 0.1879,
      "step": 2050
    },
    {
      "epoch": 0.8253205128205128,
      "grad_norm": 0.25905272364616394,
      "learning_rate": 6.627534032571859e-05,
      "loss": 0.1895,
      "step": 2060
    },
    {
      "epoch": 0.8293269230769231,
      "grad_norm": 0.21141692996025085,
      "learning_rate": 6.596825676849006e-05,
      "loss": 0.1812,
      "step": 2070
    },
    {
      "epoch": 0.8333333333333334,
      "grad_norm": 0.2148115485906601,
      "learning_rate": 6.566050099941934e-05,
      "loss": 0.1766,
      "step": 2080
    },
    {
      "epoch": 0.8373397435897436,
      "grad_norm": 0.24514557421207428,
      "learning_rate": 6.535208597402658e-05,
      "loss": 0.1805,
      "step": 2090
    },
    {
      "epoch": 0.8413461538461539,
      "grad_norm": 0.20855912566184998,
      "learning_rate": 6.50430246755845e-05,
      "loss": 0.1908,
      "step": 2100
    },
    {
      "epoch": 0.8453525641025641,
      "grad_norm": 0.21720488369464874,
      "learning_rate": 6.47333301145718e-05,
      "loss": 0.2313,
      "step": 2110
    },
    {
      "epoch": 0.8493589743589743,
      "grad_norm": 0.22862492501735687,
      "learning_rate": 6.442301532812552e-05,
      "loss": 0.1867,
      "step": 2120
    },
    {
      "epoch": 0.8533653846153846,
      "grad_norm": 0.23019257187843323,
      "learning_rate": 6.411209337949214e-05,
      "loss": 0.1857,
      "step": 2130
    },
    {
      "epoch": 0.8573717948717948,
      "grad_norm": 0.21849659085273743,
      "learning_rate": 6.380057735747768e-05,
      "loss": 0.1795,
      "step": 2140
    },
    {
      "epoch": 0.8613782051282052,
      "grad_norm": 0.22794140875339508,
      "learning_rate": 6.348848037589683e-05,
      "loss": 0.1796,
      "step": 2150
    },
    {
      "epoch": 0.8653846153846154,
      "grad_norm": 0.22126458585262299,
      "learning_rate": 6.317581557302066e-05,
      "loss": 0.1854,
      "step": 2160
    },
    {
      "epoch": 0.8693910256410257,
      "grad_norm": 0.24870412051677704,
      "learning_rate": 6.286259611102377e-05,
      "loss": 0.1771,
      "step": 2170
    },
    {
      "epoch": 0.8733974358974359,
      "grad_norm": 0.226875439286232,
      "learning_rate": 6.254883517543006e-05,
      "loss": 0.1769,
      "step": 2180
    },
    {
      "epoch": 0.8774038461538461,
      "grad_norm": 0.2393677532672882,
      "learning_rate": 6.223454597455776e-05,
      "loss": 0.1878,
      "step": 2190
    },
    {
      "epoch": 0.8814102564102564,
      "grad_norm": 0.23450277745723724,
      "learning_rate": 6.191974173896329e-05,
      "loss": 0.1795,
      "step": 2200
    },
    {
      "epoch": 0.8854166666666666,
      "grad_norm": 0.23429277539253235,
      "learning_rate": 6.160443572088444e-05,
      "loss": 0.1809,
      "step": 2210
    },
    {
      "epoch": 0.8894230769230769,
      "grad_norm": 0.2641499638557434,
      "learning_rate": 6.128864119368234e-05,
      "loss": 0.1908,
      "step": 2220
    },
    {
      "epoch": 0.8934294871794872,
      "grad_norm": 0.25299501419067383,
      "learning_rate": 6.0972371451282817e-05,
      "loss": 0.1935,
      "step": 2230
    },
    {
      "epoch": 0.8974358974358975,
      "grad_norm": 0.21204617619514465,
      "learning_rate": 6.0655639807616705e-05,
      "loss": 0.1871,
      "step": 2240
    },
    {
      "epoch": 0.9014423076923077,
      "grad_norm": 0.20044928789138794,
      "learning_rate": 6.0338459596059395e-05,
      "loss": 0.185,
      "step": 2250
    },
    {
      "epoch": 0.905448717948718,
      "grad_norm": 0.22311033308506012,
      "learning_rate": 6.0020844168869496e-05,
      "loss": 0.1809,
      "step": 2260
    },
    {
      "epoch": 0.9094551282051282,
      "grad_norm": 0.2073046863079071,
      "learning_rate": 5.970280689662685e-05,
      "loss": 0.1822,
      "step": 2270
    },
    {
      "epoch": 0.9134615384615384,
      "grad_norm": 0.23054327070713043,
      "learning_rate": 5.938436116766954e-05,
      "loss": 0.1862,
      "step": 2280
    },
    {
      "epoch": 0.9174679487179487,
      "grad_norm": 0.2098427712917328,
      "learning_rate": 5.9065520387530394e-05,
      "loss": 0.1766,
      "step": 2290
    },
    {
      "epoch": 0.9214743589743589,
      "grad_norm": 0.2541731595993042,
      "learning_rate": 5.874629797837259e-05,
      "loss": 0.1779,
      "step": 2300
    },
    {
      "epoch": 0.9254807692307693,
      "grad_norm": 0.22261784970760345,
      "learning_rate": 5.8426707378424675e-05,
      "loss": 0.1885,
      "step": 2310
    },
    {
      "epoch": 0.9294871794871795,
      "grad_norm": 0.23702876269817352,
      "learning_rate": 5.8106762041414795e-05,
      "loss": 0.1808,
      "step": 2320
    },
    {
      "epoch": 0.9334935897435898,
      "grad_norm": 0.22513940930366516,
      "learning_rate": 5.778647543600443e-05,
      "loss": 0.1861,
      "step": 2330
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.2372877299785614,
      "learning_rate": 5.7465861045221295e-05,
      "loss": 0.1831,
      "step": 2340
    },
    {
      "epoch": 0.9415064102564102,
      "grad_norm": 0.2384011596441269,
      "learning_rate": 5.714493236589187e-05,
      "loss": 0.1835,
      "step": 2350
    },
    {
      "epoch": 0.9455128205128205,
      "grad_norm": 0.22022205591201782,
      "learning_rate": 5.6823702908073104e-05,
      "loss": 0.1836,
      "step": 2360
    },
    {
      "epoch": 0.9495192307692307,
      "grad_norm": 0.2236844301223755,
      "learning_rate": 5.6502186194483783e-05,
      "loss": 0.1797,
      "step": 2370
    },
    {
      "epoch": 0.9535256410256411,
      "grad_norm": 0.24858827888965607,
      "learning_rate": 5.6180395759935234e-05,
      "loss": 0.1947,
      "step": 2380
    },
    {
      "epoch": 0.9575320512820513,
      "grad_norm": 0.19967874884605408,
      "learning_rate": 5.5858345150761514e-05,
      "loss": 0.1831,
      "step": 2390
    },
    {
      "epoch": 0.9615384615384616,
      "grad_norm": 0.25792399048805237,
      "learning_rate": 5.553604792424922e-05,
      "loss": 0.1875,
      "step": 2400
    },
    {
      "epoch": 0.9655448717948718,
      "grad_norm": 0.22758032381534576,
      "learning_rate": 5.521351764806671e-05,
      "loss": 0.1851,
      "step": 2410
    },
    {
      "epoch": 0.969551282051282,
      "grad_norm": 0.20984916388988495,
      "learning_rate": 5.4890767899693015e-05,
      "loss": 0.1811,
      "step": 2420
    },
    {
      "epoch": 0.9735576923076923,
      "grad_norm": 0.2236003577709198,
      "learning_rate": 5.4567812265846206e-05,
      "loss": 0.1824,
      "step": 2430
    },
    {
      "epoch": 0.9775641025641025,
      "grad_norm": 0.2495725303888321,
      "learning_rate": 5.424466434191146e-05,
      "loss": 0.1838,
      "step": 2440
    },
    {
      "epoch": 0.9815705128205128,
      "grad_norm": 0.21274955570697784,
      "learning_rate": 5.392133773136877e-05,
      "loss": 0.1818,
      "step": 2450
    },
    {
      "epoch": 0.9855769230769231,
      "grad_norm": 0.24202273786067963,
      "learning_rate": 5.359784604522023e-05,
      "loss": 0.1821,
      "step": 2460
    },
    {
      "epoch": 0.9895833333333334,
      "grad_norm": 0.21269406378269196,
      "learning_rate": 5.3274202901417115e-05,
      "loss": 0.1853,
      "step": 2470
    },
    {
      "epoch": 0.9935897435897436,
      "grad_norm": 0.2370026856660843,
      "learning_rate": 5.2950421924286544e-05,
      "loss": 0.167,
      "step": 2480
    },
    {
      "epoch": 0.9975961538461539,
      "grad_norm": 0.1984049528837204,
      "learning_rate": 5.262651674395799e-05,
      "loss": 0.1715,
      "step": 2490
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.18003696203231812,
      "eval_runtime": 312.955,
      "eval_samples_per_second": 26.863,
      "eval_steps_per_second": 3.358,
      "step": 2496
    },
    {
      "epoch": 1.001602564102564,
      "grad_norm": 0.2148449867963791,
      "learning_rate": 5.230250099578945e-05,
      "loss": 0.1671,
      "step": 2500
    },
    {
      "epoch": 1.0056089743589745,
      "grad_norm": 0.23484507203102112,
      "learning_rate": 5.197838831979351e-05,
      "loss": 0.1478,
      "step": 2510
    },
    {
      "epoch": 1.0096153846153846,
      "grad_norm": 0.23629869520664215,
      "learning_rate": 5.165419236006308e-05,
      "loss": 0.1466,
      "step": 2520
    },
    {
      "epoch": 1.013621794871795,
      "grad_norm": 0.2193659245967865,
      "learning_rate": 5.132992676419703e-05,
      "loss": 0.1487,
      "step": 2530
    },
    {
      "epoch": 1.017628205128205,
      "grad_norm": 0.23937292397022247,
      "learning_rate": 5.100560518272573e-05,
      "loss": 0.1453,
      "step": 2540
    },
    {
      "epoch": 1.0216346153846154,
      "grad_norm": 0.22235561907291412,
      "learning_rate": 5.068124126853633e-05,
      "loss": 0.1497,
      "step": 2550
    },
    {
      "epoch": 1.0256410256410255,
      "grad_norm": 0.22830457985401154,
      "learning_rate": 5.035684867629803e-05,
      "loss": 0.1532,
      "step": 2560
    },
    {
      "epoch": 1.029647435897436,
      "grad_norm": 0.23195677995681763,
      "learning_rate": 5.003244106188735e-05,
      "loss": 0.1531,
      "step": 2570
    },
    {
      "epoch": 1.0336538461538463,
      "grad_norm": 0.24194592237472534,
      "learning_rate": 4.9708032081813144e-05,
      "loss": 0.1543,
      "step": 2580
    },
    {
      "epoch": 1.0376602564102564,
      "grad_norm": 0.24159836769104004,
      "learning_rate": 4.938363539264175e-05,
      "loss": 0.1569,
      "step": 2590
    },
    {
      "epoch": 1.0416666666666667,
      "grad_norm": 0.24700869619846344,
      "learning_rate": 4.90592646504221e-05,
      "loss": 0.1502,
      "step": 2600
    },
    {
      "epoch": 1.0456730769230769,
      "grad_norm": 0.24695070087909698,
      "learning_rate": 4.8734933510110905e-05,
      "loss": 0.1555,
      "step": 2610
    },
    {
      "epoch": 1.0496794871794872,
      "grad_norm": 0.21207590401172638,
      "learning_rate": 4.841065562499769e-05,
      "loss": 0.1495,
      "step": 2620
    },
    {
      "epoch": 1.0536858974358974,
      "grad_norm": 0.22604689002037048,
      "learning_rate": 4.8086444646130144e-05,
      "loss": 0.1505,
      "step": 2630
    },
    {
      "epoch": 1.0576923076923077,
      "grad_norm": 0.23026780784130096,
      "learning_rate": 4.7762314221739416e-05,
      "loss": 0.1414,
      "step": 2640
    },
    {
      "epoch": 1.061698717948718,
      "grad_norm": 0.2474188357591629,
      "learning_rate": 4.7438277996665546e-05,
      "loss": 0.1563,
      "step": 2650
    },
    {
      "epoch": 1.0657051282051282,
      "grad_norm": 0.2669748067855835,
      "learning_rate": 4.7114349611783124e-05,
      "loss": 0.1446,
      "step": 2660
    },
    {
      "epoch": 1.0697115384615385,
      "grad_norm": 0.2164994478225708,
      "learning_rate": 4.679054270342703e-05,
      "loss": 0.1557,
      "step": 2670
    },
    {
      "epoch": 1.0737179487179487,
      "grad_norm": 0.2127254158258438,
      "learning_rate": 4.646687090281832e-05,
      "loss": 0.1489,
      "step": 2680
    },
    {
      "epoch": 1.077724358974359,
      "grad_norm": 0.25909334421157837,
      "learning_rate": 4.614334783549049e-05,
      "loss": 0.1517,
      "step": 2690
    },
    {
      "epoch": 1.0817307692307692,
      "grad_norm": 0.20796196162700653,
      "learning_rate": 4.581998712071583e-05,
      "loss": 0.1459,
      "step": 2700
    },
    {
      "epoch": 1.0857371794871795,
      "grad_norm": 0.21044594049453735,
      "learning_rate": 4.549680237093215e-05,
      "loss": 0.1508,
      "step": 2710
    },
    {
      "epoch": 1.0897435897435896,
      "grad_norm": 0.22035610675811768,
      "learning_rate": 4.517380719116966e-05,
      "loss": 0.1518,
      "step": 2720
    },
    {
      "epoch": 1.09375,
      "grad_norm": 0.2175433337688446,
      "learning_rate": 4.485101517847831e-05,
      "loss": 0.1532,
      "step": 2730
    },
    {
      "epoch": 1.0977564102564104,
      "grad_norm": 0.23213978111743927,
      "learning_rate": 4.452843992135536e-05,
      "loss": 0.1503,
      "step": 2740
    },
    {
      "epoch": 1.1017628205128205,
      "grad_norm": 0.22997038066387177,
      "learning_rate": 4.420609499917337e-05,
      "loss": 0.1532,
      "step": 2750
    },
    {
      "epoch": 1.1057692307692308,
      "grad_norm": 0.24814608693122864,
      "learning_rate": 4.3883993981608576e-05,
      "loss": 0.1541,
      "step": 2760
    },
    {
      "epoch": 1.109775641025641,
      "grad_norm": 0.24005189538002014,
      "learning_rate": 4.3562150428069565e-05,
      "loss": 0.1493,
      "step": 2770
    },
    {
      "epoch": 1.1137820512820513,
      "grad_norm": 0.24364899098873138,
      "learning_rate": 4.3240577887126565e-05,
      "loss": 0.1507,
      "step": 2780
    },
    {
      "epoch": 1.1177884615384615,
      "grad_norm": 0.25002944469451904,
      "learning_rate": 4.291928989594101e-05,
      "loss": 0.1493,
      "step": 2790
    },
    {
      "epoch": 1.1217948717948718,
      "grad_norm": 0.24269698560237885,
      "learning_rate": 4.2598299979695795e-05,
      "loss": 0.146,
      "step": 2800
    },
    {
      "epoch": 1.125801282051282,
      "grad_norm": 0.2566970884799957,
      "learning_rate": 4.2277621651025736e-05,
      "loss": 0.1498,
      "step": 2810
    },
    {
      "epoch": 1.1298076923076923,
      "grad_norm": 0.24423649907112122,
      "learning_rate": 4.195726840944886e-05,
      "loss": 0.1562,
      "step": 2820
    },
    {
      "epoch": 1.1338141025641026,
      "grad_norm": 0.2457042932510376,
      "learning_rate": 4.1637253740798075e-05,
      "loss": 0.1532,
      "step": 2830
    },
    {
      "epoch": 1.1378205128205128,
      "grad_norm": 0.2585982382297516,
      "learning_rate": 4.131759111665349e-05,
      "loss": 0.1487,
      "step": 2840
    },
    {
      "epoch": 1.1418269230769231,
      "grad_norm": 0.2370419055223465,
      "learning_rate": 4.0998293993775237e-05,
      "loss": 0.1442,
      "step": 2850
    },
    {
      "epoch": 1.1458333333333333,
      "grad_norm": 0.25113531947135925,
      "learning_rate": 4.067937581353708e-05,
      "loss": 0.1459,
      "step": 2860
    },
    {
      "epoch": 1.1498397435897436,
      "grad_norm": 0.22391347587108612,
      "learning_rate": 4.036085000136046e-05,
      "loss": 0.149,
      "step": 2870
    },
    {
      "epoch": 1.1538461538461537,
      "grad_norm": 0.2450188249349594,
      "learning_rate": 4.004272996614948e-05,
      "loss": 0.1402,
      "step": 2880
    },
    {
      "epoch": 1.157852564102564,
      "grad_norm": 0.2271914780139923,
      "learning_rate": 3.972502909972629e-05,
      "loss": 0.1488,
      "step": 2890
    },
    {
      "epoch": 1.1618589743589745,
      "grad_norm": 0.2330724447965622,
      "learning_rate": 3.940776077626739e-05,
      "loss": 0.1517,
      "step": 2900
    },
    {
      "epoch": 1.1658653846153846,
      "grad_norm": 0.23422060906887054,
      "learning_rate": 3.909093835174066e-05,
      "loss": 0.1475,
      "step": 2910
    },
    {
      "epoch": 1.169871794871795,
      "grad_norm": 0.2604200541973114,
      "learning_rate": 3.8774575163343065e-05,
      "loss": 0.1531,
      "step": 2920
    },
    {
      "epoch": 1.173878205128205,
      "grad_norm": 0.23886078596115112,
      "learning_rate": 3.8458684528939185e-05,
      "loss": 0.142,
      "step": 2930
    },
    {
      "epoch": 1.1778846153846154,
      "grad_norm": 0.24594782292842865,
      "learning_rate": 3.814327974650067e-05,
      "loss": 0.1477,
      "step": 2940
    },
    {
      "epoch": 1.1818910256410255,
      "grad_norm": 0.247484028339386,
      "learning_rate": 3.7828374093546306e-05,
      "loss": 0.1516,
      "step": 2950
    },
    {
      "epoch": 1.185897435897436,
      "grad_norm": 0.22961559891700745,
      "learning_rate": 3.751398082658322e-05,
      "loss": 0.1508,
      "step": 2960
    },
    {
      "epoch": 1.1899038461538463,
      "grad_norm": 0.2492343634366989,
      "learning_rate": 3.720011318054871e-05,
      "loss": 0.1412,
      "step": 2970
    },
    {
      "epoch": 1.1939102564102564,
      "grad_norm": 0.24057136476039886,
      "learning_rate": 3.688678436825313e-05,
      "loss": 0.1476,
      "step": 2980
    },
    {
      "epoch": 1.1979166666666667,
      "grad_norm": 0.2430902123451233,
      "learning_rate": 3.657400757982366e-05,
      "loss": 0.1506,
      "step": 2990
    },
    {
      "epoch": 1.2019230769230769,
      "grad_norm": 0.23617245256900787,
      "learning_rate": 3.626179598214917e-05,
      "loss": 0.1454,
      "step": 3000
    },
    {
      "epoch": 1.2059294871794872,
      "grad_norm": 0.26724281907081604,
      "learning_rate": 3.595016271832572e-05,
      "loss": 0.1435,
      "step": 3010
    },
    {
      "epoch": 1.2099358974358974,
      "grad_norm": 0.24820230901241302,
      "learning_rate": 3.5639120907103416e-05,
      "loss": 0.1435,
      "step": 3020
    },
    {
      "epoch": 1.2139423076923077,
      "grad_norm": 0.2463800460100174,
      "learning_rate": 3.532868364233416e-05,
      "loss": 0.1529,
      "step": 3030
    },
    {
      "epoch": 1.217948717948718,
      "grad_norm": 0.25976675748825073,
      "learning_rate": 3.501886399242038e-05,
      "loss": 0.15,
      "step": 3040
    },
    {
      "epoch": 1.2219551282051282,
      "grad_norm": 0.22920717298984528,
      "learning_rate": 3.470967499976492e-05,
      "loss": 0.1449,
      "step": 3050
    },
    {
      "epoch": 1.2259615384615385,
      "grad_norm": 0.2552580237388611,
      "learning_rate": 3.440112968022201e-05,
      "loss": 0.1453,
      "step": 3060
    },
    {
      "epoch": 1.2299679487179487,
      "grad_norm": 0.22986732423305511,
      "learning_rate": 3.409324102254932e-05,
      "loss": 0.1434,
      "step": 3070
    },
    {
      "epoch": 1.233974358974359,
      "grad_norm": 0.2583804130554199,
      "learning_rate": 3.37860219878612e-05,
      "loss": 0.1532,
      "step": 3080
    },
    {
      "epoch": 1.2379807692307692,
      "grad_norm": 0.24155107140541077,
      "learning_rate": 3.347948550908303e-05,
      "loss": 0.1482,
      "step": 3090
    },
    {
      "epoch": 1.2419871794871795,
      "grad_norm": 0.25099289417266846,
      "learning_rate": 3.31736444904068e-05,
      "loss": 0.1494,
      "step": 3100
    },
    {
      "epoch": 1.2459935897435896,
      "grad_norm": 0.26350319385528564,
      "learning_rate": 3.286851180674788e-05,
      "loss": 0.1441,
      "step": 3110
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.23903068900108337,
      "learning_rate": 3.2564100303203035e-05,
      "loss": 0.1455,
      "step": 3120
    },
    {
      "epoch": 1.2540064102564101,
      "grad_norm": 0.25470852851867676,
      "learning_rate": 3.22604227945097e-05,
      "loss": 0.149,
      "step": 3130
    },
    {
      "epoch": 1.2580128205128205,
      "grad_norm": 0.25256025791168213,
      "learning_rate": 3.195749206450649e-05,
      "loss": 0.1484,
      "step": 3140
    },
    {
      "epoch": 1.2620192307692308,
      "grad_norm": 0.2604450583457947,
      "learning_rate": 3.165532086559504e-05,
      "loss": 0.1438,
      "step": 3150
    },
    {
      "epoch": 1.266025641025641,
      "grad_norm": 0.2456076741218567,
      "learning_rate": 3.135392191820324e-05,
      "loss": 0.1475,
      "step": 3160
    },
    {
      "epoch": 1.2700320512820513,
      "grad_norm": 0.23290812969207764,
      "learning_rate": 3.1053307910249654e-05,
      "loss": 0.1473,
      "step": 3170
    },
    {
      "epoch": 1.2740384615384617,
      "grad_norm": 0.2368566393852234,
      "learning_rate": 3.0753491496609445e-05,
      "loss": 0.1468,
      "step": 3180
    },
    {
      "epoch": 1.2780448717948718,
      "grad_norm": 0.242570161819458,
      "learning_rate": 3.045448529858165e-05,
      "loss": 0.1489,
      "step": 3190
    },
    {
      "epoch": 1.282051282051282,
      "grad_norm": 0.23196978867053986,
      "learning_rate": 3.0156301903357864e-05,
      "loss": 0.1479,
      "step": 3200
    },
    {
      "epoch": 1.2860576923076923,
      "grad_norm": 0.2525078356266022,
      "learning_rate": 2.9858953863492334e-05,
      "loss": 0.1508,
      "step": 3210
    },
    {
      "epoch": 1.2900641025641026,
      "grad_norm": 0.23984560370445251,
      "learning_rate": 2.956245369637356e-05,
      "loss": 0.1428,
      "step": 3220
    },
    {
      "epoch": 1.2940705128205128,
      "grad_norm": 0.2553931176662445,
      "learning_rate": 2.9266813883697342e-05,
      "loss": 0.1542,
      "step": 3230
    },
    {
      "epoch": 1.2980769230769231,
      "grad_norm": 0.2719983458518982,
      "learning_rate": 2.897204687094136e-05,
      "loss": 0.1533,
      "step": 3240
    },
    {
      "epoch": 1.3020833333333333,
      "grad_norm": 0.26820674538612366,
      "learning_rate": 2.8678165066841257e-05,
      "loss": 0.144,
      "step": 3250
    },
    {
      "epoch": 1.3060897435897436,
      "grad_norm": 0.22624927759170532,
      "learning_rate": 2.8385180842868207e-05,
      "loss": 0.1453,
      "step": 3260
    },
    {
      "epoch": 1.3100961538461537,
      "grad_norm": 0.2596217095851898,
      "learning_rate": 2.8093106532708253e-05,
      "loss": 0.1365,
      "step": 3270
    },
    {
      "epoch": 1.314102564102564,
      "grad_norm": 0.24350501596927643,
      "learning_rate": 2.780195443174293e-05,
      "loss": 0.1458,
      "step": 3280
    },
    {
      "epoch": 1.3181089743589745,
      "grad_norm": 0.2782920300960541,
      "learning_rate": 2.7511736796531838e-05,
      "loss": 0.1437,
      "step": 3290
    },
    {
      "epoch": 1.3221153846153846,
      "grad_norm": 0.2512044310569763,
      "learning_rate": 2.722246584429652e-05,
      "loss": 0.1473,
      "step": 3300
    },
    {
      "epoch": 1.326121794871795,
      "grad_norm": 0.24395060539245605,
      "learning_rate": 2.6934153752406287e-05,
      "loss": 0.1437,
      "step": 3310
    },
    {
      "epoch": 1.330128205128205,
      "grad_norm": 0.24087293446063995,
      "learning_rate": 2.6646812657865557e-05,
      "loss": 0.1465,
      "step": 3320
    },
    {
      "epoch": 1.3341346153846154,
      "grad_norm": 0.23852036893367767,
      "learning_rate": 2.636045465680282e-05,
      "loss": 0.1386,
      "step": 3330
    },
    {
      "epoch": 1.3381410256410255,
      "grad_norm": 0.2611803114414215,
      "learning_rate": 2.6075091803961616e-05,
      "loss": 0.1436,
      "step": 3340
    },
    {
      "epoch": 1.342147435897436,
      "grad_norm": 0.2321190983057022,
      "learning_rate": 2.5790736112192894e-05,
      "loss": 0.1507,
      "step": 3350
    },
    {
      "epoch": 1.3461538461538463,
      "grad_norm": 0.2473578155040741,
      "learning_rate": 2.550739955194944e-05,
      "loss": 0.1422,
      "step": 3360
    },
    {
      "epoch": 1.3501602564102564,
      "grad_norm": 0.2508649528026581,
      "learning_rate": 2.5225094050781872e-05,
      "loss": 0.1432,
      "step": 3370
    },
    {
      "epoch": 1.3541666666666667,
      "grad_norm": 0.2570420205593109,
      "learning_rate": 2.4943831492836588e-05,
      "loss": 0.1531,
      "step": 3380
    },
    {
      "epoch": 1.3581730769230769,
      "grad_norm": 0.24803844094276428,
      "learning_rate": 2.4663623718355444e-05,
      "loss": 0.2464,
      "step": 3390
    },
    {
      "epoch": 1.3621794871794872,
      "grad_norm": 0.23173387348651886,
      "learning_rate": 2.438448252317736e-05,
      "loss": 0.1485,
      "step": 3400
    },
    {
      "epoch": 1.3661858974358974,
      "grad_norm": 0.23720696568489075,
      "learning_rate": 2.410641965824166e-05,
      "loss": 0.1428,
      "step": 3410
    },
    {
      "epoch": 1.3701923076923077,
      "grad_norm": 0.2676253318786621,
      "learning_rate": 2.3829446829093537e-05,
      "loss": 0.1471,
      "step": 3420
    },
    {
      "epoch": 1.374198717948718,
      "grad_norm": 0.2445019632577896,
      "learning_rate": 2.355357569539114e-05,
      "loss": 0.1392,
      "step": 3430
    },
    {
      "epoch": 1.3782051282051282,
      "grad_norm": 0.22741925716400146,
      "learning_rate": 2.3278817870414866e-05,
      "loss": 0.15,
      "step": 3440
    },
    {
      "epoch": 1.3822115384615383,
      "grad_norm": 0.24508029222488403,
      "learning_rate": 2.300518492057842e-05,
      "loss": 0.1492,
      "step": 3450
    },
    {
      "epoch": 1.3862179487179487,
      "grad_norm": 0.25865378975868225,
      "learning_rate": 2.2732688364941862e-05,
      "loss": 0.1494,
      "step": 3460
    },
    {
      "epoch": 1.390224358974359,
      "grad_norm": 0.2557166814804077,
      "learning_rate": 2.2461339674726806e-05,
      "loss": 0.1486,
      "step": 3470
    },
    {
      "epoch": 1.3942307692307692,
      "grad_norm": 0.25431498885154724,
      "learning_rate": 2.219115027283339e-05,
      "loss": 0.1443,
      "step": 3480
    },
    {
      "epoch": 1.3982371794871795,
      "grad_norm": 0.2517376244068146,
      "learning_rate": 2.192213153335953e-05,
      "loss": 0.1434,
      "step": 3490
    },
    {
      "epoch": 1.4022435897435899,
      "grad_norm": 0.24723024666309357,
      "learning_rate": 2.165429478112199e-05,
      "loss": 0.1405,
      "step": 3500
    },
    {
      "epoch": 1.40625,
      "grad_norm": 0.24276477098464966,
      "learning_rate": 2.138765129117977e-05,
      "loss": 0.1483,
      "step": 3510
    },
    {
      "epoch": 1.4102564102564101,
      "grad_norm": 0.23349566757678986,
      "learning_rate": 2.1122212288359335e-05,
      "loss": 0.1508,
      "step": 3520
    },
    {
      "epoch": 1.4142628205128205,
      "grad_norm": 0.2291381061077118,
      "learning_rate": 2.085798894678217e-05,
      "loss": 0.1502,
      "step": 3530
    },
    {
      "epoch": 1.4182692307692308,
      "grad_norm": 0.2317928522825241,
      "learning_rate": 2.0594992389394395e-05,
      "loss": 0.1408,
      "step": 3540
    },
    {
      "epoch": 1.422275641025641,
      "grad_norm": 0.2636735141277313,
      "learning_rate": 2.0333233687498433e-05,
      "loss": 0.1418,
      "step": 3550
    },
    {
      "epoch": 1.4262820512820513,
      "grad_norm": 0.2535155415534973,
      "learning_rate": 2.0072723860287056e-05,
      "loss": 0.1484,
      "step": 3560
    },
    {
      "epoch": 1.4302884615384617,
      "grad_norm": 0.2632095217704773,
      "learning_rate": 1.9813473874379395e-05,
      "loss": 0.147,
      "step": 3570
    },
    {
      "epoch": 1.4342948717948718,
      "grad_norm": 0.25649628043174744,
      "learning_rate": 1.9555494643359457e-05,
      "loss": 0.1464,
      "step": 3580
    },
    {
      "epoch": 1.438301282051282,
      "grad_norm": 0.2731545865535736,
      "learning_rate": 1.9298797027316474e-05,
      "loss": 0.1516,
      "step": 3590
    },
    {
      "epoch": 1.4423076923076923,
      "grad_norm": 0.22635942697525024,
      "learning_rate": 1.9043391832387914e-05,
      "loss": 0.1405,
      "step": 3600
    },
    {
      "epoch": 1.4463141025641026,
      "grad_norm": 0.24070985615253448,
      "learning_rate": 1.878928981030445e-05,
      "loss": 0.1427,
      "step": 3610
    },
    {
      "epoch": 1.4503205128205128,
      "grad_norm": 0.29021933674812317,
      "learning_rate": 1.853650165793747e-05,
      "loss": 0.1435,
      "step": 3620
    },
    {
      "epoch": 1.4543269230769231,
      "grad_norm": 0.2542175352573395,
      "learning_rate": 1.8285038016848636e-05,
      "loss": 0.1531,
      "step": 3630
    },
    {
      "epoch": 1.4583333333333333,
      "grad_norm": 0.24497313797473907,
      "learning_rate": 1.803490947284204e-05,
      "loss": 0.1451,
      "step": 3640
    },
    {
      "epoch": 1.4623397435897436,
      "grad_norm": 0.24667443335056305,
      "learning_rate": 1.7786126555518485e-05,
      "loss": 0.1417,
      "step": 3650
    },
    {
      "epoch": 1.4663461538461537,
      "grad_norm": 0.2563740015029907,
      "learning_rate": 1.7538699737832236e-05,
      "loss": 0.1524,
      "step": 3660
    },
    {
      "epoch": 1.470352564102564,
|
"grad_norm": 0.2556226849555969, |
|
"learning_rate": 1.7292639435650222e-05, |
|
"loss": 0.141, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 1.4743589743589745, |
|
"grad_norm": 0.23436102271080017, |
|
"learning_rate": 1.704795600731341e-05, |
|
"loss": 0.1477, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 1.4783653846153846, |
|
"grad_norm": 0.2800445556640625, |
|
"learning_rate": 1.6804659753200924e-05, |
|
"loss": 0.1429, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 1.482371794871795, |
|
"grad_norm": 0.28161168098449707, |
|
"learning_rate": 1.6562760915296295e-05, |
|
"loss": 0.1423, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 1.486378205128205, |
|
"grad_norm": 0.2788082957267761, |
|
"learning_rate": 1.63222696767564e-05, |
|
"loss": 0.1465, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 1.4903846153846154, |
|
"grad_norm": 0.2512800097465515, |
|
"learning_rate": 1.608319616148271e-05, |
|
"loss": 0.1354, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 1.4943910256410255, |
|
"grad_norm": 0.24340157210826874, |
|
"learning_rate": 1.5845550433695173e-05, |
|
"loss": 0.1461, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 1.498397435897436, |
|
"grad_norm": 0.25012919306755066, |
|
"learning_rate": 1.560934249750851e-05, |
|
"loss": 0.1493, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 1.5024038461538463, |
|
"grad_norm": 0.242750346660614, |
|
"learning_rate": 1.5374582296511053e-05, |
|
"loss": 0.1411, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 1.5064102564102564, |
|
"grad_norm": 0.23980247974395752, |
|
"learning_rate": 1.5141279713346208e-05, |
|
"loss": 0.1441, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 1.5104166666666665, |
|
"grad_norm": 0.2511241137981415, |
|
"learning_rate": 1.4909444569296333e-05, |
|
"loss": 0.1447, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 1.5144230769230769, |
|
"grad_norm": 0.24846923351287842, |
|
"learning_rate": 1.4679086623869464e-05, |
|
"loss": 0.1441, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 1.5184294871794872, |
|
"grad_norm": 0.24027830362319946, |
|
"learning_rate": 1.4450215574388265e-05, |
|
"loss": 0.1402, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 1.5224358974358974, |
|
"grad_norm": 0.2580472528934479, |
|
"learning_rate": 1.4222841055581964e-05, |
|
"loss": 0.1377, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 1.5264423076923077, |
|
"grad_norm": 0.23837298154830933, |
|
"learning_rate": 1.3996972639180644e-05, |
|
"loss": 0.142, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 1.530448717948718, |
|
"grad_norm": 0.22810089588165283, |
|
"learning_rate": 1.3772619833512412e-05, |
|
"loss": 0.1456, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 1.5344551282051282, |
|
"grad_norm": 0.22962678968906403, |
|
"learning_rate": 1.3549792083103036e-05, |
|
"loss": 0.1374, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 1.5384615384615383, |
|
"grad_norm": 0.2668929398059845, |
|
"learning_rate": 1.332849876827842e-05, |
|
"loss": 0.1499, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 1.5424679487179487, |
|
"grad_norm": 0.24207815527915955, |
|
"learning_rate": 1.3108749204769733e-05, |
|
"loss": 0.1369, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 1.546474358974359, |
|
"grad_norm": 0.2575761377811432, |
|
"learning_rate": 1.2890552643321146e-05, |
|
"loss": 0.1504, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 1.5504807692307692, |
|
"grad_norm": 0.23695258796215057, |
|
"learning_rate": 1.2673918269300556e-05, |
|
"loss": 0.1422, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 1.5544871794871795, |
|
"grad_norm": 0.2854330837726593, |
|
"learning_rate": 1.2458855202312775e-05, |
|
"loss": 0.1506, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 1.5584935897435899, |
|
"grad_norm": 0.2639913260936737, |
|
"learning_rate": 1.2245372495815726e-05, |
|
"loss": 0.1386, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 1.5625, |
|
"grad_norm": 0.26962000131607056, |
|
"learning_rate": 1.203347913673924e-05, |
|
"loss": 0.1469, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 1.5665064102564101, |
|
"grad_norm": 0.2668505907058716, |
|
"learning_rate": 1.1823184045106817e-05, |
|
"loss": 0.1381, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 1.5705128205128205, |
|
"grad_norm": 0.24999478459358215, |
|
"learning_rate": 1.1614496073660026e-05, |
|
"loss": 0.1488, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 1.5745192307692308, |
|
"grad_norm": 0.2618054151535034, |
|
"learning_rate": 1.1407424007485929e-05, |
|
"loss": 0.1365, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 1.578525641025641, |
|
"grad_norm": 0.25881797075271606, |
|
"learning_rate": 1.120197656364722e-05, |
|
"loss": 0.1424, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 1.5825320512820513, |
|
"grad_norm": 0.23021487891674042, |
|
"learning_rate": 1.0998162390815208e-05, |
|
"loss": 0.1344, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 1.5865384615384617, |
|
"grad_norm": 0.24012772738933563, |
|
"learning_rate": 1.0795990068905843e-05, |
|
"loss": 0.1511, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 1.5905448717948718, |
|
"grad_norm": 0.251249760389328, |
|
"learning_rate": 1.0595468108718449e-05, |
|
"loss": 0.1433, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 1.594551282051282, |
|
"grad_norm": 0.2358747273683548, |
|
"learning_rate": 1.0396604951577483e-05, |
|
"loss": 0.147, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 1.5985576923076923, |
|
"grad_norm": 0.25139543414115906, |
|
"learning_rate": 1.0199408968977136e-05, |
|
"loss": 0.1478, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 1.6025641025641026, |
|
"grad_norm": 0.25917524099349976, |
|
"learning_rate": 1.0003888462229011e-05, |
|
"loss": 0.1467, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 1.6065705128205128, |
|
"grad_norm": 0.2669861316680908, |
|
"learning_rate": 9.810051662112557e-06, |
|
"loss": 0.1348, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 1.6105769230769231, |
|
"grad_norm": 0.27329695224761963, |
|
"learning_rate": 9.61790672852868e-06, |
|
"loss": 0.1384, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 1.6145833333333335, |
|
"grad_norm": 0.2671654522418976, |
|
"learning_rate": 9.427461750156142e-06, |
|
"loss": 0.1431, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 1.6185897435897436, |
|
"grad_norm": 0.25406405329704285, |
|
"learning_rate": 9.238724744111138e-06, |
|
"loss": 0.1444, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 1.6225961538461537, |
|
"grad_norm": 0.25970324873924255, |
|
"learning_rate": 9.05170365560976e-06, |
|
"loss": 0.1396, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 1.626602564102564, |
|
"grad_norm": 0.2700863480567932, |
|
"learning_rate": 8.86640635763351e-06, |
|
"loss": 0.1484, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 1.6306089743589745, |
|
"grad_norm": 0.24397385120391846, |
|
"learning_rate": 8.68284065059794e-06, |
|
"loss": 0.1407, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 1.6346153846153846, |
|
"grad_norm": 0.2537296414375305, |
|
"learning_rate": 8.501014262024176e-06, |
|
"loss": 0.1468, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 1.6386217948717947, |
|
"grad_norm": 0.2614051103591919, |
|
"learning_rate": 8.320934846213746e-06, |
|
"loss": 0.1368, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 1.6426282051282053, |
|
"grad_norm": 0.2941496670246124, |
|
"learning_rate": 8.142609983926225e-06, |
|
"loss": 0.1434, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 1.6466346153846154, |
|
"grad_norm": 0.2491438090801239, |
|
"learning_rate": 7.966047182060226e-06, |
|
"loss": 0.1433, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 1.6506410256410255, |
|
"grad_norm": 0.267880916595459, |
|
"learning_rate": 7.791253873337278e-06, |
|
"loss": 0.1437, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 1.654647435897436, |
|
"grad_norm": 0.25333133339881897, |
|
"learning_rate": 7.618237415989032e-06, |
|
"loss": 0.1433, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 1.6586538461538463, |
|
"grad_norm": 0.25538668036460876, |
|
"learning_rate": 7.44700509344744e-06, |
|
"loss": 0.139, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 1.6626602564102564, |
|
"grad_norm": 0.25621935725212097, |
|
"learning_rate": 7.277564114038149e-06, |
|
"loss": 0.1347, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 1.6666666666666665, |
|
"grad_norm": 0.2569233775138855, |
|
"learning_rate": 7.109921610677078e-06, |
|
"loss": 0.1408, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 1.6706730769230769, |
|
"grad_norm": 0.23728112876415253, |
|
"learning_rate": 6.9440846405701424e-06, |
|
"loss": 0.1352, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 1.6746794871794872, |
|
"grad_norm": 0.23264577984809875, |
|
"learning_rate": 6.780060184916159e-06, |
|
"loss": 0.1463, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 1.6786858974358974, |
|
"grad_norm": 0.24841400980949402, |
|
"learning_rate": 6.6178551486129445e-06, |
|
"loss": 0.1438, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 1.6826923076923077, |
|
"grad_norm": 0.2776187062263489, |
|
"learning_rate": 6.4574763599666856e-06, |
|
"loss": 0.1438, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 1.686698717948718, |
|
"grad_norm": 0.2587837874889374, |
|
"learning_rate": 6.2989305704044325e-06, |
|
"loss": 0.1367, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 1.6907051282051282, |
|
"grad_norm": 0.2615884244441986, |
|
"learning_rate": 6.142224454189954e-06, |
|
"loss": 0.1726, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 1.6947115384615383, |
|
"grad_norm": 0.25619083642959595, |
|
"learning_rate": 5.987364608142693e-06, |
|
"loss": 0.151, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 1.6987179487179487, |
|
"grad_norm": 0.28548672795295715, |
|
"learning_rate": 5.834357551360132e-06, |
|
"loss": 0.1421, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 1.702724358974359, |
|
"grad_norm": 0.26761943101882935, |
|
"learning_rate": 5.683209724943345e-06, |
|
"loss": 0.1383, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 1.7067307692307692, |
|
"grad_norm": 0.2561447322368622, |
|
"learning_rate": 5.533927491725787e-06, |
|
"loss": 0.1421, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 1.7107371794871795, |
|
"grad_norm": 0.2707176208496094, |
|
"learning_rate": 5.3865171360055425e-06, |
|
"loss": 0.1519, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 1.7147435897435899, |
|
"grad_norm": 0.264593243598938, |
|
"learning_rate": 5.240984863280668e-06, |
|
"loss": 0.1464, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 1.71875, |
|
"grad_norm": 0.2739621102809906, |
|
"learning_rate": 5.097336799988067e-06, |
|
"loss": 0.1392, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 1.7227564102564101, |
|
"grad_norm": 0.23156005144119263, |
|
"learning_rate": 4.955578993245485e-06, |
|
"loss": 0.1377, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 1.7267628205128205, |
|
"grad_norm": 0.24819669127464294, |
|
"learning_rate": 4.815717410597043e-06, |
|
"loss": 0.138, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 1.7307692307692308, |
|
"grad_norm": 0.25722768902778625, |
|
"learning_rate": 4.6777579397619285e-06, |
|
"loss": 0.1434, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 1.734775641025641, |
|
"grad_norm": 0.22565537691116333, |
|
"learning_rate": 4.541706388386624e-06, |
|
"loss": 0.1409, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 1.7387820512820513, |
|
"grad_norm": 0.2539729177951813, |
|
"learning_rate": 4.4075684838003784e-06, |
|
"loss": 0.1447, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 1.7427884615384617, |
|
"grad_norm": 0.2426980435848236, |
|
"learning_rate": 4.275349872774098e-06, |
|
"loss": 0.1422, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 1.7467948717948718, |
|
"grad_norm": 0.26267197728157043, |
|
"learning_rate": 4.14505612128267e-06, |
|
"loss": 0.1381, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 1.750801282051282, |
|
"grad_norm": 0.23471184074878693, |
|
"learning_rate": 4.016692714270631e-06, |
|
"loss": 0.1367, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 1.7548076923076923, |
|
"grad_norm": 0.2703573703765869, |
|
"learning_rate": 3.890265055421283e-06, |
|
"loss": 0.144, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 1.7588141025641026, |
|
"grad_norm": 0.23946279287338257, |
|
"learning_rate": 3.7657784669291816e-06, |
|
"loss": 0.1397, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 1.7628205128205128, |
|
"grad_norm": 0.23393961787223816, |
|
"learning_rate": 3.643238189276138e-06, |
|
"loss": 0.1329, |
|
"step": 4400 |
|
}, |
|
{ |
|
"epoch": 1.7668269230769231, |
|
"grad_norm": 0.24087683856487274, |
|
"learning_rate": 3.522649381010562e-06, |
|
"loss": 0.1367, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 1.7708333333333335, |
|
"grad_norm": 0.2636178135871887, |
|
"learning_rate": 3.4040171185303616e-06, |
|
"loss": 0.1415, |
|
"step": 4420 |
|
}, |
|
{ |
|
"epoch": 1.7748397435897436, |
|
"grad_norm": 0.2364189773797989, |
|
"learning_rate": 3.2873463958691673e-06, |
|
"loss": 0.1445, |
|
"step": 4430 |
|
}, |
|
{ |
|
"epoch": 1.7788461538461537, |
|
"grad_norm": 0.26020485162734985, |
|
"learning_rate": 3.1726421244861917e-06, |
|
"loss": 0.1388, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 1.782852564102564, |
|
"grad_norm": 0.25162965059280396, |
|
"learning_rate": 3.0599091330593797e-06, |
|
"loss": 0.1427, |
|
"step": 4450 |
|
}, |
|
{ |
|
"epoch": 1.7868589743589745, |
|
"grad_norm": 0.24311134219169617, |
|
"learning_rate": 2.949152167282204e-06, |
|
"loss": 0.1313, |
|
"step": 4460 |
|
}, |
|
{ |
|
"epoch": 1.7908653846153846, |
|
"grad_norm": 0.249532088637352, |
|
"learning_rate": 2.840375889663871e-06, |
|
"loss": 0.1486, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 1.7948717948717947, |
|
"grad_norm": 0.2393971085548401, |
|
"learning_rate": 2.733584879333001e-06, |
|
"loss": 0.1467, |
|
"step": 4480 |
|
}, |
|
{ |
|
"epoch": 1.7988782051282053, |
|
"grad_norm": 0.25224366784095764, |
|
"learning_rate": 2.62878363184495e-06, |
|
"loss": 0.1446, |
|
"step": 4490 |
|
}, |
|
{ |
|
"epoch": 1.8028846153846154, |
|
"grad_norm": 0.2636267840862274, |
|
"learning_rate": 2.5259765589924544e-06, |
|
"loss": 0.1487, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 1.8068910256410255, |
|
"grad_norm": 0.25640687346458435, |
|
"learning_rate": 2.425167988620014e-06, |
|
"loss": 0.1358, |
|
"step": 4510 |
|
}, |
|
{ |
|
"epoch": 1.810897435897436, |
|
"grad_norm": 0.24088767170906067, |
|
"learning_rate": 2.3263621644416034e-06, |
|
"loss": 0.1404, |
|
"step": 4520 |
|
}, |
|
{ |
|
"epoch": 1.8149038461538463, |
|
"grad_norm": 0.2403586059808731, |
|
"learning_rate": 2.2295632458621095e-06, |
|
"loss": 0.1412, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 1.8189102564102564, |
|
"grad_norm": 0.23450630903244019, |
|
"learning_rate": 2.1347753078022005e-06, |
|
"loss": 0.1447, |
|
"step": 4540 |
|
}, |
|
{ |
|
"epoch": 1.8229166666666665, |
|
"grad_norm": 0.2330087125301361, |
|
"learning_rate": 2.0420023405267665e-06, |
|
"loss": 0.1438, |
|
"step": 4550 |
|
}, |
|
{ |
|
"epoch": 1.8269230769230769, |
|
"grad_norm": 0.208469420671463, |
|
"learning_rate": 1.9512482494769613e-06, |
|
"loss": 0.1363, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 1.8309294871794872, |
|
"grad_norm": 0.24098703265190125, |
|
"learning_rate": 1.8625168551058114e-06, |
|
"loss": 0.145, |
|
"step": 4570 |
|
}, |
|
{ |
|
"epoch": 1.8349358974358974, |
|
"grad_norm": 0.24833528697490692, |
|
"learning_rate": 1.7758118927173606e-06, |
|
"loss": 0.1417, |
|
"step": 4580 |
|
}, |
|
{ |
|
"epoch": 1.8389423076923077, |
|
"grad_norm": 0.2671266496181488, |
|
"learning_rate": 1.6911370123094239e-06, |
|
"loss": 0.141, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 1.842948717948718, |
|
"grad_norm": 0.25512388348579407, |
|
"learning_rate": 1.6084957784199662e-06, |
|
"loss": 0.1357, |
|
"step": 4600 |
|
}, |
|
{ |
|
"epoch": 1.8469551282051282, |
|
"grad_norm": 0.2621639668941498, |
|
"learning_rate": 1.5278916699770163e-06, |
|
"loss": 0.1446, |
|
"step": 4610 |
|
}, |
|
{ |
|
"epoch": 1.8509615384615383, |
|
"grad_norm": 0.2648192048072815, |
|
"learning_rate": 1.4493280801522347e-06, |
|
"loss": 0.1391, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 1.8549679487179487, |
|
"grad_norm": 0.26396840810775757, |
|
"learning_rate": 1.3728083162180382e-06, |
|
"loss": 0.1409, |
|
"step": 4630 |
|
}, |
|
{ |
|
"epoch": 1.858974358974359, |
|
"grad_norm": 0.24495120346546173, |
|
"learning_rate": 1.2983355994084401e-06, |
|
"loss": 0.1388, |
|
"step": 4640 |
|
}, |
|
{ |
|
"epoch": 1.8629807692307692, |
|
"grad_norm": 0.26375770568847656, |
|
"learning_rate": 1.2259130647833627e-06, |
|
"loss": 0.1376, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 1.8669871794871795, |
|
"grad_norm": 0.25506845116615295, |
|
"learning_rate": 1.155543761096739e-06, |
|
"loss": 0.1432, |
|
"step": 4660 |
|
}, |
|
{ |
|
"epoch": 1.8709935897435899, |
|
"grad_norm": 0.2563297152519226, |
|
"learning_rate": 1.0872306506681251e-06, |
|
"loss": 0.143, |
|
"step": 4670 |
|
}, |
|
{ |
|
"epoch": 1.875, |
|
"grad_norm": 0.23788757622241974, |
|
"learning_rate": 1.0209766092579897e-06, |
|
"loss": 0.1387, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 1.8790064102564101, |
|
"grad_norm": 0.2355082929134369, |
|
"learning_rate": 9.56784425946705e-07, |
|
"loss": 0.1403, |
|
"step": 4690 |
|
}, |
|
{ |
|
"epoch": 1.8830128205128205, |
|
"grad_norm": 0.26146796345710754, |
|
"learning_rate": 8.946568030170688e-07, |
|
"loss": 0.1413, |
|
"step": 4700 |
|
}, |
|
{ |
|
"epoch": 1.8870192307692308, |
|
"grad_norm": 0.2697630822658539, |
|
"learning_rate": 8.345963558406e-07, |
|
"loss": 0.147, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 1.891025641025641, |
|
"grad_norm": 0.26238909363746643, |
|
"learning_rate": 7.766056127674115e-07, |
|
"loss": 0.1458, |
|
"step": 4720 |
|
}, |
|
{ |
|
"epoch": 1.8950320512820513, |
|
"grad_norm": 0.2590833902359009, |
|
"learning_rate": 7.20687015019783e-07, |
|
"loss": 0.1396, |
|
"step": 4730 |
|
}, |
|
{ |
|
"epoch": 1.8990384615384617, |
|
"grad_norm": 0.2681121230125427, |
|
"learning_rate": 6.668429165893997e-07, |
|
"loss": 0.1356, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 1.9030448717948718, |
|
"grad_norm": 0.2643556296825409, |
|
"learning_rate": 6.150755841382527e-07, |
|
"loss": 0.1429, |
|
"step": 4750 |
|
}, |
|
{ |
|
"epoch": 1.907051282051282, |
|
"grad_norm": 0.2717660963535309, |
|
"learning_rate": 5.653871969032054e-07, |
|
"loss": 0.1409, |
|
"step": 4760 |
|
}, |
|
{ |
|
"epoch": 1.9110576923076923, |
|
"grad_norm": 0.24568721652030945, |
|
"learning_rate": 5.177798466042716e-07, |
|
"loss": 0.1444, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 1.9150641025641026, |
|
"grad_norm": 0.23634164035320282, |
|
"learning_rate": 4.722555373565751e-07, |
|
"loss": 0.1472, |
|
"step": 4780 |
|
}, |
|
{ |
|
"epoch": 1.9190705128205128, |
|
"grad_norm": 0.23092865943908691, |
|
"learning_rate": 4.288161855859285e-07, |
|
"loss": 0.141, |
|
"step": 4790 |
|
}, |
|
{ |
|
"epoch": 1.9230769230769231, |
|
"grad_norm": 0.26985424757003784, |
|
"learning_rate": 3.874636199482307e-07, |
|
"loss": 0.1446, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 1.9270833333333335, |
|
"grad_norm": 0.2507458031177521, |
|
"learning_rate": 3.481995812524286e-07, |
|
"loss": 0.1398, |
|
"step": 4810 |
|
}, |
|
{ |
|
"epoch": 1.9310897435897436, |
|
"grad_norm": 0.24660862982273102, |
|
"learning_rate": 3.110257223872592e-07, |
|
"loss": 0.1421, |
|
"step": 4820 |
|
}, |
|
{ |
|
"epoch": 1.9350961538461537, |
|
"grad_norm": 0.24846522510051727, |
|
"learning_rate": 2.7594360825166644e-07, |
|
"loss": 0.1439, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 1.939102564102564, |
|
"grad_norm": 0.25589925050735474, |
|
"learning_rate": 2.4295471568892005e-07, |
|
"loss": 0.136, |
|
"step": 4840 |
|
}, |
|
{ |
|
"epoch": 1.9431089743589745, |
|
"grad_norm": 0.23860958218574524, |
|
"learning_rate": 2.1206043342445469e-07, |
|
"loss": 0.1446, |
|
"step": 4850 |
|
}, |
|
{ |
|
"epoch": 1.9471153846153846, |
|
"grad_norm": 0.2679506540298462, |
|
"learning_rate": 1.8326206200739436e-07, |
|
"loss": 0.1399, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 1.9511217948717947, |
|
"grad_norm": 0.2246963083744049, |
|
"learning_rate": 1.5656081375581277e-07, |
|
"loss": 0.1435, |
|
"step": 4870 |
|
}, |
|
{ |
|
"epoch": 1.9551282051282053, |
|
"grad_norm": 0.23173221945762634, |
|
"learning_rate": 1.3195781270570196e-07, |
|
"loss": 0.1438, |
|
"step": 4880 |
|
}, |
|
{ |
|
"epoch": 1.9591346153846154, |
|
"grad_norm": 0.24970708787441254, |
|
"learning_rate": 1.0945409456364352e-07, |
|
"loss": 0.1429, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 1.9631410256410255, |
|
"grad_norm": 0.23272265493869781, |
|
"learning_rate": 8.90506066632102e-08, |
|
"loss": 0.14, |
|
"step": 4900 |
|
}, |
|
{ |
|
"epoch": 1.967147435897436, |
|
"grad_norm": 0.25013232231140137, |
|
"learning_rate": 7.074820792510317e-08, |
|
"loss": 0.143, |
|
"step": 4910 |
|
}, |
|
{ |
|
"epoch": 1.9711538461538463, |
|
"grad_norm": 0.2563280165195465, |
|
"learning_rate": 5.454766882097007e-08, |
|
"loss": 0.149, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 1.9751602564102564, |
|
"grad_norm": 0.25120726227760315, |
|
"learning_rate": 4.0449671340991866e-08, |
|
"loss": 0.1454, |
|
"step": 4930 |
|
}, |
|
{ |
|
"epoch": 1.9791666666666665, |
|
"grad_norm": 0.2662029564380646, |
|
"learning_rate": 2.8454808965155954e-08, |
|
"loss": 0.1461, |
|
"step": 4940 |
|
}, |
|
{ |
|
"epoch": 1.9831730769230769, |
|
"grad_norm": 0.24329765141010284, |
|
"learning_rate": 1.8563586638281615e-08, |
|
"loss": 0.1352, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 1.9871794871794872, |
|
"grad_norm": 0.24964918196201324, |
|
"learning_rate": 1.0776420748753734e-08, |
|
"loss": 0.1378, |
|
"step": 4960 |
|
}, |
|
{ |
|
"epoch": 1.9911858974358974, |
|
"grad_norm": 0.24900883436203003, |
|
"learning_rate": 5.093639111025672e-09, |
|
"loss": 0.1451, |
|
"step": 4970 |
|
}, |
|
{ |
|
"epoch": 1.9951923076923077, |
|
"grad_norm": 0.24761377274990082, |
|
"learning_rate": 1.5154809517747837e-09, |
|
"loss": 0.1333, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 1.999198717948718, |
|
"grad_norm": 0.2465759515762329, |
|
"learning_rate": 4.2096899854904284e-11, |
|
"loss": 0.1421, |
|
"step": 4990 |
|
} |
|
], |
|
"logging_steps": 10, |
|
"max_steps": 4992, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.3554923454948815e+19, |
|
"train_batch_size": 64, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|