|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 0.20619819130363204, |
|
"eval_steps": 100000000, |
|
"global_step": 25300, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 8.15012613848348e-06, |
|
"grad_norm": 3.4710192680358887, |
|
"learning_rate": 1.0000000000000001e-07, |
|
"loss": 11.2605, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0002445037841545044, |
|
"grad_norm": 5.464046955108643, |
|
"learning_rate": 3e-06, |
|
"loss": 10.9977, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.0004890075683090088, |
|
"grad_norm": 1.1376756429672241, |
|
"learning_rate": 6e-06, |
|
"loss": 9.4082, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.0007335113524635132, |
|
"grad_norm": 1.2878344058990479, |
|
"learning_rate": 9e-06, |
|
"loss": 8.549, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.0009780151366180175, |
|
"grad_norm": 1.2800588607788086, |
|
"learning_rate": 1.2e-05, |
|
"loss": 8.0361, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.001222518920772522, |
|
"grad_norm": 1.4315314292907715, |
|
"learning_rate": 1.5e-05, |
|
"loss": 7.653, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.0014670227049270264, |
|
"grad_norm": 1.381317377090454, |
|
"learning_rate": 1.8e-05, |
|
"loss": 7.4179, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.0017115264890815308, |
|
"grad_norm": 1.7989460229873657, |
|
"learning_rate": 2.1e-05, |
|
"loss": 7.2012, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.001956030273236035, |
|
"grad_norm": 1.3431414365768433, |
|
"learning_rate": 2.4e-05, |
|
"loss": 7.0383, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.0022005340573905395, |
|
"grad_norm": 1.028826117515564, |
|
"learning_rate": 2.7000000000000002e-05, |
|
"loss": 6.866, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.002445037841545044, |
|
"grad_norm": 1.201025128364563, |
|
"learning_rate": 3e-05, |
|
"loss": 6.717, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.0026895416256995483, |
|
"grad_norm": 1.1023098230361938, |
|
"learning_rate": 3.3e-05, |
|
"loss": 6.5535, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.0029340454098540527, |
|
"grad_norm": 1.2839674949645996, |
|
"learning_rate": 3.6e-05, |
|
"loss": 6.4022, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.003178549194008557, |
|
"grad_norm": 2.267265796661377, |
|
"learning_rate": 3.9000000000000006e-05, |
|
"loss": 6.2858, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.0034230529781630616, |
|
"grad_norm": 1.0635628700256348, |
|
"learning_rate": 4.2e-05, |
|
"loss": 6.1681, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.0036675567623175656, |
|
"grad_norm": 1.263838768005371, |
|
"learning_rate": 4.5e-05, |
|
"loss": 6.0728, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.00391206054647207, |
|
"grad_norm": 1.4611454010009766, |
|
"learning_rate": 4.8e-05, |
|
"loss": 5.972, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.0041565643306265745, |
|
"grad_norm": 1.0120561122894287, |
|
"learning_rate": 4.999999990869806e-05, |
|
"loss": 5.8619, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.004401068114781079, |
|
"grad_norm": 1.1349974870681763, |
|
"learning_rate": 4.999999853916893e-05, |
|
"loss": 5.785, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.004645571898935583, |
|
"grad_norm": 1.0840613842010498, |
|
"learning_rate": 4.9999995526204936e-05, |
|
"loss": 5.7071, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.004890075683090088, |
|
"grad_norm": 1.258074402809143, |
|
"learning_rate": 4.999999086980628e-05, |
|
"loss": 5.6199, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.005134579467244592, |
|
"grad_norm": 1.284726858139038, |
|
"learning_rate": 4.999998456997326e-05, |
|
"loss": 5.5465, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.005379083251399097, |
|
"grad_norm": 1.2079874277114868, |
|
"learning_rate": 4.999997662670628e-05, |
|
"loss": 5.4816, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.005623587035553601, |
|
"grad_norm": 1.3364052772521973, |
|
"learning_rate": 4.999996704000589e-05, |
|
"loss": 5.4079, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.0058680908197081055, |
|
"grad_norm": 0.9860705137252808, |
|
"learning_rate": 4.99999558098727e-05, |
|
"loss": 5.3598, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.00611259460386261, |
|
"grad_norm": 1.2071930170059204, |
|
"learning_rate": 4.9999942936307445e-05, |
|
"loss": 5.2884, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.006357098388017114, |
|
"grad_norm": 0.8959563970565796, |
|
"learning_rate": 4.9999928419310994e-05, |
|
"loss": 5.2391, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.006601602172171619, |
|
"grad_norm": 1.2356096506118774, |
|
"learning_rate": 4.999991225888427e-05, |
|
"loss": 5.1879, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.006846105956326123, |
|
"grad_norm": 0.9705113172531128, |
|
"learning_rate": 4.999989445502837e-05, |
|
"loss": 5.1424, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.007090609740480627, |
|
"grad_norm": 0.9504437446594238, |
|
"learning_rate": 4.9999875007744436e-05, |
|
"loss": 5.0966, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.007335113524635131, |
|
"grad_norm": 0.9488673806190491, |
|
"learning_rate": 4.9999853917033756e-05, |
|
"loss": 5.0424, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.007579617308789636, |
|
"grad_norm": 0.959373950958252, |
|
"learning_rate": 4.999983118289773e-05, |
|
"loss": 5.0387, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.00782412109294414, |
|
"grad_norm": 0.8465414643287659, |
|
"learning_rate": 4.999980680533782e-05, |
|
"loss": 4.9769, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.008068624877098645, |
|
"grad_norm": 0.8328993916511536, |
|
"learning_rate": 4.999978078435567e-05, |
|
"loss": 4.9335, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.008313128661253149, |
|
"grad_norm": 0.8107655644416809, |
|
"learning_rate": 4.999975311995295e-05, |
|
"loss": 4.9214, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.008557632445407654, |
|
"grad_norm": 0.8149654865264893, |
|
"learning_rate": 4.99997238121315e-05, |
|
"loss": 4.8651, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.008802136229562158, |
|
"grad_norm": 0.8837414979934692, |
|
"learning_rate": 4.999969286089325e-05, |
|
"loss": 4.8327, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.009046640013716663, |
|
"grad_norm": 1.1360137462615967, |
|
"learning_rate": 4.9999660266240235e-05, |
|
"loss": 4.7906, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.009291143797871167, |
|
"grad_norm": 0.7992026209831238, |
|
"learning_rate": 4.9999626028174585e-05, |
|
"loss": 4.7612, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.00953564758202567, |
|
"grad_norm": 0.8481825590133667, |
|
"learning_rate": 4.999959014669856e-05, |
|
"loss": 4.7106, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.009780151366180176, |
|
"grad_norm": 0.8183879256248474, |
|
"learning_rate": 4.9999552621814513e-05, |
|
"loss": 4.6993, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.010024655150334679, |
|
"grad_norm": 0.8460689187049866, |
|
"learning_rate": 4.9999513453524917e-05, |
|
"loss": 4.6664, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.010269158934489184, |
|
"grad_norm": 0.8723706007003784, |
|
"learning_rate": 4.9999472641832336e-05, |
|
"loss": 4.6371, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.010513662718643688, |
|
"grad_norm": 0.7682787179946899, |
|
"learning_rate": 4.999943018673946e-05, |
|
"loss": 4.6184, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.010758166502798193, |
|
"grad_norm": 0.9049955606460571, |
|
"learning_rate": 4.999938608824909e-05, |
|
"loss": 4.5968, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.011002670286952697, |
|
"grad_norm": 0.7860899567604065, |
|
"learning_rate": 4.999934034636411e-05, |
|
"loss": 4.5266, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.011247174071107202, |
|
"grad_norm": 0.7918768525123596, |
|
"learning_rate": 4.999929296108753e-05, |
|
"loss": 4.5069, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.011491677855261706, |
|
"grad_norm": 0.6713089942932129, |
|
"learning_rate": 4.9999243932422466e-05, |
|
"loss": 4.4663, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.011736181639416211, |
|
"grad_norm": 0.756101131439209, |
|
"learning_rate": 4.999919326037215e-05, |
|
"loss": 4.4782, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 0.011980685423570715, |
|
"grad_norm": 0.7067996263504028, |
|
"learning_rate": 4.99991409449399e-05, |
|
"loss": 4.4388, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 0.01222518920772522, |
|
"grad_norm": 0.7132194638252258, |
|
"learning_rate": 4.999908698612916e-05, |
|
"loss": 4.4135, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.012469692991879723, |
|
"grad_norm": 0.7282501459121704, |
|
"learning_rate": 4.9999031383943486e-05, |
|
"loss": 4.4057, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 0.012714196776034229, |
|
"grad_norm": 0.6851722598075867, |
|
"learning_rate": 4.999897413838651e-05, |
|
"loss": 4.3612, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 0.012958700560188732, |
|
"grad_norm": 0.7953110337257385, |
|
"learning_rate": 4.999891524946202e-05, |
|
"loss": 4.3268, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 0.013203204344343238, |
|
"grad_norm": 0.726719319820404, |
|
"learning_rate": 4.999885471717387e-05, |
|
"loss": 4.314, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 0.013447708128497741, |
|
"grad_norm": 0.7354792356491089, |
|
"learning_rate": 4.999879254152605e-05, |
|
"loss": 4.2807, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 0.013692211912652246, |
|
"grad_norm": 0.7072017788887024, |
|
"learning_rate": 4.999872872252265e-05, |
|
"loss": 4.282, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 0.01393671569680675, |
|
"grad_norm": 0.690112292766571, |
|
"learning_rate": 4.999866326016785e-05, |
|
"loss": 4.2686, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 0.014181219480961254, |
|
"grad_norm": 0.697634756565094, |
|
"learning_rate": 4.999859615446596e-05, |
|
"loss": 4.2662, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 0.014425723265115759, |
|
"grad_norm": 0.6999862790107727, |
|
"learning_rate": 4.99985274054214e-05, |
|
"loss": 4.2207, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 0.014670227049270262, |
|
"grad_norm": 0.6920334100723267, |
|
"learning_rate": 4.999845701303868e-05, |
|
"loss": 4.2163, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 0.014914730833424768, |
|
"grad_norm": 0.6884493827819824, |
|
"learning_rate": 4.999838497732243e-05, |
|
"loss": 4.209, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.015159234617579271, |
|
"grad_norm": 0.776447057723999, |
|
"learning_rate": 4.999831129827739e-05, |
|
"loss": 4.1856, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.015403738401733777, |
|
"grad_norm": 0.7332949042320251, |
|
"learning_rate": 4.9998235975908394e-05, |
|
"loss": 4.156, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.01564824218588828, |
|
"grad_norm": 0.6691558361053467, |
|
"learning_rate": 4.99981590102204e-05, |
|
"loss": 4.1339, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.015892745970042785, |
|
"grad_norm": 0.6789201498031616, |
|
"learning_rate": 4.9998080401218464e-05, |
|
"loss": 4.1344, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.01613724975419729, |
|
"grad_norm": 0.6654175519943237, |
|
"learning_rate": 4.999800014890777e-05, |
|
"loss": 4.1295, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.016381753538351793, |
|
"grad_norm": 0.6632519364356995, |
|
"learning_rate": 4.9997918253293555e-05, |
|
"loss": 4.1036, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.016626257322506298, |
|
"grad_norm": 0.6618740558624268, |
|
"learning_rate": 4.999783471438124e-05, |
|
"loss": 4.0857, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.016870761106660803, |
|
"grad_norm": 0.6384952068328857, |
|
"learning_rate": 4.999774953217631e-05, |
|
"loss": 4.0755, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.01711526489081531, |
|
"grad_norm": 0.6808215975761414, |
|
"learning_rate": 4.9997662706684345e-05, |
|
"loss": 4.0568, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.01735976867496981, |
|
"grad_norm": 0.6514068841934204, |
|
"learning_rate": 4.999757423791107e-05, |
|
"loss": 4.0474, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.017604272459124316, |
|
"grad_norm": 0.6691142320632935, |
|
"learning_rate": 4.9997484125862306e-05, |
|
"loss": 4.0467, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.01784877624327882, |
|
"grad_norm": 0.6240984201431274, |
|
"learning_rate": 4.999739237054395e-05, |
|
"loss": 4.0259, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.018093280027433326, |
|
"grad_norm": 0.6286123991012573, |
|
"learning_rate": 4.9997298971962065e-05, |
|
"loss": 4.0098, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.018337783811587828, |
|
"grad_norm": 0.6232910752296448, |
|
"learning_rate": 4.999720393012277e-05, |
|
"loss": 4.0203, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.018582287595742333, |
|
"grad_norm": 0.6792376041412354, |
|
"learning_rate": 4.999710724503233e-05, |
|
"loss": 3.9909, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.01882679137989684, |
|
"grad_norm": 0.6824718117713928, |
|
"learning_rate": 4.9997008916697075e-05, |
|
"loss": 3.9922, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.01907129516405134, |
|
"grad_norm": 0.6520410776138306, |
|
"learning_rate": 4.999690894512349e-05, |
|
"loss": 3.9533, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.019315798948205846, |
|
"grad_norm": 0.6467030048370361, |
|
"learning_rate": 4.999680733031814e-05, |
|
"loss": 3.9464, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.01956030273236035, |
|
"grad_norm": 0.6356080770492554, |
|
"learning_rate": 4.9996704072287716e-05, |
|
"loss": 3.9288, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.019804806516514856, |
|
"grad_norm": 0.6498362421989441, |
|
"learning_rate": 4.9996599171038984e-05, |
|
"loss": 3.9345, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.020049310300669358, |
|
"grad_norm": 0.6107562184333801, |
|
"learning_rate": 4.999649262657886e-05, |
|
"loss": 3.9368, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.020293814084823864, |
|
"grad_norm": 0.6478250622749329, |
|
"learning_rate": 4.999638443891434e-05, |
|
"loss": 3.9273, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.02053831786897837, |
|
"grad_norm": 0.631263256072998, |
|
"learning_rate": 4.999627460805253e-05, |
|
"loss": 3.9136, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.020782821653132874, |
|
"grad_norm": 0.6120920777320862, |
|
"learning_rate": 4.999616313400066e-05, |
|
"loss": 3.8834, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.021027325437287376, |
|
"grad_norm": 0.6151197552680969, |
|
"learning_rate": 4.999605001676605e-05, |
|
"loss": 3.8994, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.02127182922144188, |
|
"grad_norm": 0.623715877532959, |
|
"learning_rate": 4.9995935256356144e-05, |
|
"loss": 3.8929, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.021516333005596387, |
|
"grad_norm": 0.6688769459724426, |
|
"learning_rate": 4.9995818852778476e-05, |
|
"loss": 3.8499, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.021760836789750892, |
|
"grad_norm": 0.6272155046463013, |
|
"learning_rate": 4.999570080604071e-05, |
|
"loss": 3.8861, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.022005340573905394, |
|
"grad_norm": 0.597653329372406, |
|
"learning_rate": 4.99955811161506e-05, |
|
"loss": 3.8674, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.0222498443580599, |
|
"grad_norm": 0.5608483552932739, |
|
"learning_rate": 4.9995459783116004e-05, |
|
"loss": 3.8493, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.022494348142214404, |
|
"grad_norm": 0.5991063117980957, |
|
"learning_rate": 4.999533680694493e-05, |
|
"loss": 3.8454, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.02273885192636891, |
|
"grad_norm": 0.5738102793693542, |
|
"learning_rate": 4.9995212187645416e-05, |
|
"loss": 3.8395, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.02298335571052341, |
|
"grad_norm": 0.6145568490028381, |
|
"learning_rate": 4.9995085925225693e-05, |
|
"loss": 3.8313, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.023227859494677917, |
|
"grad_norm": 0.6019515991210938, |
|
"learning_rate": 4.999495801969404e-05, |
|
"loss": 3.8277, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.023472363278832422, |
|
"grad_norm": 0.6177758574485779, |
|
"learning_rate": 4.9994828471058876e-05, |
|
"loss": 3.8083, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.023716867062986924, |
|
"grad_norm": 0.5616528987884521, |
|
"learning_rate": 4.9994697279328714e-05, |
|
"loss": 3.8114, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.02396137084714143, |
|
"grad_norm": 0.6194447875022888, |
|
"learning_rate": 4.9994564444512176e-05, |
|
"loss": 3.8183, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.024205874631295934, |
|
"grad_norm": 0.578895092010498, |
|
"learning_rate": 4.9994429966618e-05, |
|
"loss": 3.7871, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.02445037841545044, |
|
"grad_norm": 0.6014060378074646, |
|
"learning_rate": 4.999429384565502e-05, |
|
"loss": 3.7711, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.02469488219960494, |
|
"grad_norm": 0.5589067339897156, |
|
"learning_rate": 4.999415608163217e-05, |
|
"loss": 3.7533, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.024939385983759447, |
|
"grad_norm": 0.5722873210906982, |
|
"learning_rate": 4.999401667455854e-05, |
|
"loss": 3.7585, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.025183889767913952, |
|
"grad_norm": 0.5734965801239014, |
|
"learning_rate": 4.9993875624443274e-05, |
|
"loss": 3.77, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.025428393552068457, |
|
"grad_norm": 0.5524207353591919, |
|
"learning_rate": 4.9993732931295646e-05, |
|
"loss": 3.718, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.02567289733622296, |
|
"grad_norm": 0.5776082277297974, |
|
"learning_rate": 4.999358859512503e-05, |
|
"loss": 3.7573, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.025917401120377465, |
|
"grad_norm": 0.5799595713615417, |
|
"learning_rate": 4.9993442615940936e-05, |
|
"loss": 3.7552, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.02616190490453197, |
|
"grad_norm": 0.5820346474647522, |
|
"learning_rate": 4.999329499375292e-05, |
|
"loss": 3.7394, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.026406408688686475, |
|
"grad_norm": 0.5450282096862793, |
|
"learning_rate": 4.999314572857074e-05, |
|
"loss": 3.7393, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.026650912472840977, |
|
"grad_norm": 0.6213249564170837, |
|
"learning_rate": 4.9992994820404174e-05, |
|
"loss": 3.7191, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.026895416256995482, |
|
"grad_norm": 0.5940688848495483, |
|
"learning_rate": 4.999284226926314e-05, |
|
"loss": 3.719, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.027139920041149988, |
|
"grad_norm": 0.5779993534088135, |
|
"learning_rate": 4.999268807515768e-05, |
|
"loss": 3.7092, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.027384423825304493, |
|
"grad_norm": 0.5899255871772766, |
|
"learning_rate": 4.999253223809792e-05, |
|
"loss": 3.6939, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.027628927609458995, |
|
"grad_norm": 0.6330375671386719, |
|
"learning_rate": 4.999237475809411e-05, |
|
"loss": 3.7102, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.0278734313936135, |
|
"grad_norm": 0.5771914720535278, |
|
"learning_rate": 4.99922156351566e-05, |
|
"loss": 3.7196, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.028117935177768005, |
|
"grad_norm": 0.5769143104553223, |
|
"learning_rate": 4.999205486929586e-05, |
|
"loss": 3.7127, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.028362438961922507, |
|
"grad_norm": 0.5581954121589661, |
|
"learning_rate": 4.999189246052245e-05, |
|
"loss": 3.689, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.028606942746077012, |
|
"grad_norm": 0.6041043400764465, |
|
"learning_rate": 4.999172840884704e-05, |
|
"loss": 3.6831, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.028851446530231518, |
|
"grad_norm": 0.5458335280418396, |
|
"learning_rate": 4.999156271428043e-05, |
|
"loss": 3.6694, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.029095950314386023, |
|
"grad_norm": 0.5470607280731201, |
|
"learning_rate": 4.9991395376833496e-05, |
|
"loss": 3.6702, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.029340454098540525, |
|
"grad_norm": 0.5878787040710449, |
|
"learning_rate": 4.999122639651725e-05, |
|
"loss": 3.6492, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.02958495788269503, |
|
"grad_norm": 0.5691691637039185, |
|
"learning_rate": 4.9991055773342795e-05, |
|
"loss": 3.6812, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.029829461666849535, |
|
"grad_norm": 0.5548356771469116, |
|
"learning_rate": 4.9990883507321354e-05, |
|
"loss": 3.645, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.03007396545100404, |
|
"grad_norm": 0.5686156153678894, |
|
"learning_rate": 4.999070959846424e-05, |
|
"loss": 3.6505, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.030318469235158543, |
|
"grad_norm": 0.5596534013748169, |
|
"learning_rate": 4.999053404678289e-05, |
|
"loss": 3.6532, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.030562973019313048, |
|
"grad_norm": 0.54421067237854, |
|
"learning_rate": 4.999035685228884e-05, |
|
"loss": 3.6238, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.030807476803467553, |
|
"grad_norm": 0.56732177734375, |
|
"learning_rate": 4.999017801499375e-05, |
|
"loss": 3.6449, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.03105198058762206, |
|
"grad_norm": 0.5608410239219666, |
|
"learning_rate": 4.998999753490937e-05, |
|
"loss": 3.6344, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.03129648437177656, |
|
"grad_norm": 0.5919491648674011, |
|
"learning_rate": 4.998981541204757e-05, |
|
"loss": 3.6213, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.031540988155931066, |
|
"grad_norm": 0.5795233249664307, |
|
"learning_rate": 4.998963164642031e-05, |
|
"loss": 3.6239, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.03178549194008557, |
|
"grad_norm": 0.5669205784797668, |
|
"learning_rate": 4.9989446238039676e-05, |
|
"loss": 3.5831, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.032029995724240076, |
|
"grad_norm": 0.5817368626594543, |
|
"learning_rate": 4.998925918691786e-05, |
|
"loss": 3.6074, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.03227449950839458, |
|
"grad_norm": 0.5662333369255066, |
|
"learning_rate": 4.998907049306715e-05, |
|
"loss": 3.5874, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.03251900329254909, |
|
"grad_norm": 0.5641735792160034, |
|
"learning_rate": 4.998888015649996e-05, |
|
"loss": 3.6056, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.032763507076703585, |
|
"grad_norm": 0.524918794631958, |
|
"learning_rate": 4.99886881772288e-05, |
|
"loss": 3.5963, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.03300801086085809, |
|
"grad_norm": 0.5625722408294678, |
|
"learning_rate": 4.998849455526628e-05, |
|
"loss": 3.5917, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.033252514645012596, |
|
"grad_norm": 0.5612478852272034, |
|
"learning_rate": 4.998829929062515e-05, |
|
"loss": 3.5792, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.0334970184291671, |
|
"grad_norm": 0.5586293935775757, |
|
"learning_rate": 4.998810238331822e-05, |
|
"loss": 3.5708, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.033741522213321606, |
|
"grad_norm": 0.53324955701828, |
|
"learning_rate": 4.998790383335845e-05, |
|
"loss": 3.5686, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.03398602599747611, |
|
"grad_norm": 0.5210742950439453, |
|
"learning_rate": 4.9987703640758894e-05, |
|
"loss": 3.575, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.03423052978163062, |
|
"grad_norm": 0.5591189861297607, |
|
"learning_rate": 4.99875018055327e-05, |
|
"loss": 3.5717, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.034475033565785115, |
|
"grad_norm": 0.5435970425605774, |
|
"learning_rate": 4.998729832769315e-05, |
|
"loss": 3.5638, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.03471953734993962, |
|
"grad_norm": 0.5489551424980164, |
|
"learning_rate": 4.998709320725361e-05, |
|
"loss": 3.574, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.034964041134094126, |
|
"grad_norm": 0.5275290012359619, |
|
"learning_rate": 4.998688644422756e-05, |
|
"loss": 3.5695, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.03520854491824863, |
|
"grad_norm": 0.583881139755249, |
|
"learning_rate": 4.998667803862861e-05, |
|
"loss": 3.5703, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.03545304870240314, |
|
"grad_norm": 0.5317121744155884, |
|
"learning_rate": 4.9986467990470445e-05, |
|
"loss": 3.5668, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.03569755248655764, |
|
"grad_norm": 0.5981696248054504, |
|
"learning_rate": 4.998625629976688e-05, |
|
"loss": 3.5411, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.03594205627071215, |
|
"grad_norm": 0.55133455991745, |
|
"learning_rate": 4.998604296653182e-05, |
|
"loss": 3.521, |
|
"step": 4410 |
|
}, |
|
{ |
|
"epoch": 0.03618656005486665, |
|
"grad_norm": 0.5481094717979431, |
|
"learning_rate": 4.99858279907793e-05, |
|
"loss": 3.5421, |
|
"step": 4440 |
|
}, |
|
{ |
|
"epoch": 0.03643106383902115, |
|
"grad_norm": 0.5218031406402588, |
|
"learning_rate": 4.998561137252346e-05, |
|
"loss": 3.5305, |
|
"step": 4470 |
|
}, |
|
{ |
|
"epoch": 0.036675567623175656, |
|
"grad_norm": 0.5458360910415649, |
|
"learning_rate": 4.9985393111778525e-05, |
|
"loss": 3.5332, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.03692007140733016, |
|
"grad_norm": 0.5501233339309692, |
|
"learning_rate": 4.998517320855884e-05, |
|
"loss": 3.5485, |
|
"step": 4530 |
|
}, |
|
{ |
|
"epoch": 0.03716457519148467, |
|
"grad_norm": 0.5359978675842285, |
|
"learning_rate": 4.998495166287887e-05, |
|
"loss": 3.534, |
|
"step": 4560 |
|
}, |
|
{ |
|
"epoch": 0.03740907897563917, |
|
"grad_norm": 0.5447133183479309, |
|
"learning_rate": 4.998472847475318e-05, |
|
"loss": 3.5176, |
|
"step": 4590 |
|
}, |
|
{ |
|
"epoch": 0.03765358275979368, |
|
"grad_norm": 0.5201069712638855, |
|
"learning_rate": 4.998450364419643e-05, |
|
"loss": 3.518, |
|
"step": 4620 |
|
}, |
|
{ |
|
"epoch": 0.03789808654394818, |
|
"grad_norm": 0.5251840353012085, |
|
"learning_rate": 4.998427717122342e-05, |
|
"loss": 3.5021, |
|
"step": 4650 |
|
}, |
|
{ |
|
"epoch": 0.03814259032810268, |
|
"grad_norm": 0.5204648375511169, |
|
"learning_rate": 4.9984049055849024e-05, |
|
"loss": 3.505, |
|
"step": 4680 |
|
}, |
|
{ |
|
"epoch": 0.038387094112257186, |
|
"grad_norm": 0.5137141942977905, |
|
"learning_rate": 4.9983819298088234e-05, |
|
"loss": 3.4997, |
|
"step": 4710 |
|
}, |
|
{ |
|
"epoch": 0.03863159789641169, |
|
"grad_norm": 0.5232805609703064, |
|
"learning_rate": 4.9983587897956166e-05, |
|
"loss": 3.5049, |
|
"step": 4740 |
|
}, |
|
{ |
|
"epoch": 0.0388761016805662, |
|
"grad_norm": 0.5415229201316833, |
|
"learning_rate": 4.998335485546802e-05, |
|
"loss": 3.5123, |
|
"step": 4770 |
|
}, |
|
{ |
|
"epoch": 0.0391206054647207, |
|
"grad_norm": 0.5097187161445618, |
|
"learning_rate": 4.998312017063912e-05, |
|
"loss": 3.4839, |
|
"step": 4800 |
|
}, |
|
{ |
|
"epoch": 0.03936510924887521, |
|
"grad_norm": 0.5637670755386353, |
|
"learning_rate": 4.9982883843484895e-05, |
|
"loss": 3.5084, |
|
"step": 4830 |
|
}, |
|
{ |
|
"epoch": 0.03960961303302971, |
|
"grad_norm": 0.5436129570007324, |
|
"learning_rate": 4.998264587402088e-05, |
|
"loss": 3.5184, |
|
"step": 4860 |
|
}, |
|
{ |
|
"epoch": 0.03985411681718422, |
|
"grad_norm": 0.5133230090141296, |
|
"learning_rate": 4.9982406262262726e-05, |
|
"loss": 3.48, |
|
"step": 4890 |
|
}, |
|
{ |
|
"epoch": 0.040098620601338716, |
|
"grad_norm": 0.5678510069847107, |
|
"learning_rate": 4.9982165008226175e-05, |
|
"loss": 3.4899, |
|
"step": 4920 |
|
}, |
|
{ |
|
"epoch": 0.04034312438549322, |
|
"grad_norm": 0.5121597647666931, |
|
"learning_rate": 4.998192211192708e-05, |
|
"loss": 3.4942, |
|
"step": 4950 |
|
}, |
|
{ |
|
"epoch": 0.04058762816964773, |
|
"grad_norm": 0.537929117679596, |
|
"learning_rate": 4.9981677573381423e-05, |
|
"loss": 3.4721, |
|
"step": 4980 |
|
}, |
|
{ |
|
"epoch": 0.04083213195380223, |
|
"grad_norm": 0.537289023399353, |
|
"learning_rate": 4.9981431392605274e-05, |
|
"loss": 3.4919, |
|
"step": 5010 |
|
}, |
|
{ |
|
"epoch": 0.04107663573795674, |
|
"grad_norm": 0.5594364404678345, |
|
"learning_rate": 4.998118356961481e-05, |
|
"loss": 3.4665, |
|
"step": 5040 |
|
}, |
|
{ |
|
"epoch": 0.04132113952211124, |
|
"grad_norm": 0.5009840726852417, |
|
"learning_rate": 4.998093410442632e-05, |
|
"loss": 3.4524, |
|
"step": 5070 |
|
}, |
|
{ |
|
"epoch": 0.04156564330626575, |
|
"grad_norm": 0.53801429271698, |
|
"learning_rate": 4.998068299705623e-05, |
|
"loss": 3.4629, |
|
"step": 5100 |
|
}, |
|
{ |
|
"epoch": 0.041810147090420254, |
|
"grad_norm": 0.5134426355361938, |
|
"learning_rate": 4.9980430247521014e-05, |
|
"loss": 3.4504, |
|
"step": 5130 |
|
}, |
|
{ |
|
"epoch": 0.04205465087457475, |
|
"grad_norm": 0.5095092058181763, |
|
"learning_rate": 4.998017585583731e-05, |
|
"loss": 3.4684, |
|
"step": 5160 |
|
}, |
|
{ |
|
"epoch": 0.04229915465872926, |
|
"grad_norm": 0.5233584642410278, |
|
"learning_rate": 4.9979919822021824e-05, |
|
"loss": 3.4726, |
|
"step": 5190 |
|
}, |
|
{ |
|
"epoch": 0.04254365844288376, |
|
"grad_norm": 0.5375773906707764, |
|
"learning_rate": 4.99796621460914e-05, |
|
"loss": 3.4502, |
|
"step": 5220 |
|
}, |
|
{ |
|
"epoch": 0.04278816222703827, |
|
"grad_norm": 0.5292348265647888, |
|
"learning_rate": 4.9979402828062963e-05, |
|
"loss": 3.4494, |
|
"step": 5250 |
|
}, |
|
{ |
|
"epoch": 0.04303266601119277, |
|
"grad_norm": 0.5170340538024902, |
|
"learning_rate": 4.997914186795358e-05, |
|
"loss": 3.4671, |
|
"step": 5280 |
|
}, |
|
{ |
|
"epoch": 0.04327716979534728, |
|
"grad_norm": 0.5027357339859009, |
|
"learning_rate": 4.9978879265780385e-05, |
|
"loss": 3.4424, |
|
"step": 5310 |
|
}, |
|
{ |
|
"epoch": 0.043521673579501784, |
|
"grad_norm": 0.4898991584777832, |
|
"learning_rate": 4.997861502156066e-05, |
|
"loss": 3.4515, |
|
"step": 5340 |
|
}, |
|
{ |
|
"epoch": 0.04376617736365628, |
|
"grad_norm": 0.5362656712532043, |
|
"learning_rate": 4.997834913531176e-05, |
|
"loss": 3.4209, |
|
"step": 5370 |
|
}, |
|
{ |
|
"epoch": 0.04401068114781079, |
|
"grad_norm": 0.5150277614593506, |
|
"learning_rate": 4.9978081607051176e-05, |
|
"loss": 3.4575, |
|
"step": 5400 |
|
}, |
|
{ |
|
"epoch": 0.04425518493196529, |
|
"grad_norm": 0.5086013674736023, |
|
"learning_rate": 4.997781243679648e-05, |
|
"loss": 3.4377, |
|
"step": 5430 |
|
}, |
|
{ |
|
"epoch": 0.0444996887161198, |
|
"grad_norm": 0.538194477558136, |
|
"learning_rate": 4.9977541624565374e-05, |
|
"loss": 3.4282, |
|
"step": 5460 |
|
}, |
|
{ |
|
"epoch": 0.0447441925002743, |
|
"grad_norm": 0.5045614838600159, |
|
"learning_rate": 4.9977269170375665e-05, |
|
"loss": 3.4109, |
|
"step": 5490 |
|
}, |
|
{ |
|
"epoch": 0.04498869628442881, |
|
"grad_norm": 0.5368480086326599, |
|
"learning_rate": 4.997699507424526e-05, |
|
"loss": 3.4341, |
|
"step": 5520 |
|
}, |
|
{ |
|
"epoch": 0.045233200068583314, |
|
"grad_norm": 0.6185830235481262, |
|
"learning_rate": 4.997671933619218e-05, |
|
"loss": 3.4203, |
|
"step": 5550 |
|
}, |
|
{ |
|
"epoch": 0.04547770385273782, |
|
"grad_norm": 0.4984918534755707, |
|
"learning_rate": 4.9976441956234546e-05, |
|
"loss": 3.4309, |
|
"step": 5580 |
|
}, |
|
{ |
|
"epoch": 0.04572220763689232, |
|
"grad_norm": 0.5066754221916199, |
|
"learning_rate": 4.99761629343906e-05, |
|
"loss": 3.3986, |
|
"step": 5610 |
|
}, |
|
{ |
|
"epoch": 0.04596671142104682, |
|
"grad_norm": 0.5132448673248291, |
|
"learning_rate": 4.9975882270678676e-05, |
|
"loss": 3.4126, |
|
"step": 5640 |
|
}, |
|
{ |
|
"epoch": 0.04621121520520133, |
|
"grad_norm": 0.5501627922058105, |
|
"learning_rate": 4.997559996511723e-05, |
|
"loss": 3.4057, |
|
"step": 5670 |
|
}, |
|
{ |
|
"epoch": 0.04645571898935583, |
|
"grad_norm": 0.4679185152053833, |
|
"learning_rate": 4.997531601772481e-05, |
|
"loss": 3.428, |
|
"step": 5700 |
|
}, |
|
{ |
|
"epoch": 0.04670022277351034, |
|
"grad_norm": 0.5020308494567871, |
|
"learning_rate": 4.9975030428520084e-05, |
|
"loss": 3.3977, |
|
"step": 5730 |
|
}, |
|
{ |
|
"epoch": 0.046944726557664844, |
|
"grad_norm": 0.5141638517379761, |
|
"learning_rate": 4.997474319752184e-05, |
|
"loss": 3.4044, |
|
"step": 5760 |
|
}, |
|
{ |
|
"epoch": 0.04718923034181935, |
|
"grad_norm": 0.5127893090248108, |
|
"learning_rate": 4.997445432474895e-05, |
|
"loss": 3.4071, |
|
"step": 5790 |
|
}, |
|
{ |
|
"epoch": 0.04743373412597385, |
|
"grad_norm": 0.5528402924537659, |
|
"learning_rate": 4.9974163810220406e-05, |
|
"loss": 3.3891, |
|
"step": 5820 |
|
}, |
|
{ |
|
"epoch": 0.04767823791012835, |
|
"grad_norm": 0.5092859864234924, |
|
"learning_rate": 4.99738716539553e-05, |
|
"loss": 3.4118, |
|
"step": 5850 |
|
}, |
|
{ |
|
"epoch": 0.04792274169428286, |
|
"grad_norm": 0.49626457691192627, |
|
"learning_rate": 4.997357785597284e-05, |
|
"loss": 3.3834, |
|
"step": 5880 |
|
}, |
|
{ |
|
"epoch": 0.048167245478437364, |
|
"grad_norm": 0.5132192969322205, |
|
"learning_rate": 4.997328241629234e-05, |
|
"loss": 3.3944, |
|
"step": 5910 |
|
}, |
|
{ |
|
"epoch": 0.04841174926259187, |
|
"grad_norm": 0.5104256868362427, |
|
"learning_rate": 4.997298533493323e-05, |
|
"loss": 3.4001, |
|
"step": 5940 |
|
}, |
|
{ |
|
"epoch": 0.048656253046746374, |
|
"grad_norm": 0.507205069065094, |
|
"learning_rate": 4.997268661191503e-05, |
|
"loss": 3.3867, |
|
"step": 5970 |
|
}, |
|
{ |
|
"epoch": 0.04890075683090088, |
|
"grad_norm": 0.6091153025627136, |
|
"learning_rate": 4.9972386247257385e-05, |
|
"loss": 3.3984, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.049145260615055385, |
|
"grad_norm": 0.4803309142589569, |
|
"learning_rate": 4.9972084240980025e-05, |
|
"loss": 3.3703, |
|
"step": 6030 |
|
}, |
|
{ |
|
"epoch": 0.04938976439920988, |
|
"grad_norm": 0.515164852142334, |
|
"learning_rate": 4.997178059310281e-05, |
|
"loss": 3.3733, |
|
"step": 6060 |
|
}, |
|
{ |
|
"epoch": 0.04963426818336439, |
|
"grad_norm": 0.515275776386261, |
|
"learning_rate": 4.997147530364571e-05, |
|
"loss": 3.3802, |
|
"step": 6090 |
|
}, |
|
{ |
|
"epoch": 0.049878771967518894, |
|
"grad_norm": 0.5258405208587646, |
|
"learning_rate": 4.9971168372628793e-05, |
|
"loss": 3.3705, |
|
"step": 6120 |
|
}, |
|
{ |
|
"epoch": 0.0501232757516734, |
|
"grad_norm": 0.49037066102027893, |
|
"learning_rate": 4.997085980007222e-05, |
|
"loss": 3.37, |
|
"step": 6150 |
|
}, |
|
{ |
|
"epoch": 0.050367779535827904, |
|
"grad_norm": 0.47182220220565796, |
|
"learning_rate": 4.99705495859963e-05, |
|
"loss": 3.3705, |
|
"step": 6180 |
|
}, |
|
{ |
|
"epoch": 0.05061228331998241, |
|
"grad_norm": 0.4967211186885834, |
|
"learning_rate": 4.99702377304214e-05, |
|
"loss": 3.3743, |
|
"step": 6210 |
|
}, |
|
{ |
|
"epoch": 0.050856787104136915, |
|
"grad_norm": 0.7469291090965271, |
|
"learning_rate": 4.9969924233368036e-05, |
|
"loss": 3.3732, |
|
"step": 6240 |
|
}, |
|
{ |
|
"epoch": 0.05110129088829142, |
|
"grad_norm": 0.5237293839454651, |
|
"learning_rate": 4.996960909485681e-05, |
|
"loss": 3.3646, |
|
"step": 6270 |
|
}, |
|
{ |
|
"epoch": 0.05134579467244592, |
|
"grad_norm": 0.48839274048805237, |
|
"learning_rate": 4.9969292314908446e-05, |
|
"loss": 3.3665, |
|
"step": 6300 |
|
}, |
|
{ |
|
"epoch": 0.051590298456600424, |
|
"grad_norm": 0.48733407258987427, |
|
"learning_rate": 4.996897389354376e-05, |
|
"loss": 3.3646, |
|
"step": 6330 |
|
}, |
|
{ |
|
"epoch": 0.05183480224075493, |
|
"grad_norm": 0.4938340187072754, |
|
"learning_rate": 4.9968653830783684e-05, |
|
"loss": 3.3542, |
|
"step": 6360 |
|
}, |
|
{ |
|
"epoch": 0.052079306024909434, |
|
"grad_norm": 0.5001193284988403, |
|
"learning_rate": 4.996833212664927e-05, |
|
"loss": 3.3583, |
|
"step": 6390 |
|
}, |
|
{ |
|
"epoch": 0.05232380980906394, |
|
"grad_norm": 0.4772029221057892, |
|
"learning_rate": 4.996800878116166e-05, |
|
"loss": 3.336, |
|
"step": 6420 |
|
}, |
|
{ |
|
"epoch": 0.052568313593218445, |
|
"grad_norm": 0.49077659845352173, |
|
"learning_rate": 4.99676837943421e-05, |
|
"loss": 3.3681, |
|
"step": 6450 |
|
}, |
|
{ |
|
"epoch": 0.05281281737737295, |
|
"grad_norm": 0.5102148056030273, |
|
"learning_rate": 4.996735716621196e-05, |
|
"loss": 3.3437, |
|
"step": 6480 |
|
}, |
|
{ |
|
"epoch": 0.05305732116152745, |
|
"grad_norm": 0.5012289881706238, |
|
"learning_rate": 4.996702889679272e-05, |
|
"loss": 3.3536, |
|
"step": 6510 |
|
}, |
|
{ |
|
"epoch": 0.053301824945681954, |
|
"grad_norm": 0.4870162308216095, |
|
"learning_rate": 4.996669898610595e-05, |
|
"loss": 3.3513, |
|
"step": 6540 |
|
}, |
|
{ |
|
"epoch": 0.05354632872983646, |
|
"grad_norm": 0.48452699184417725, |
|
"learning_rate": 4.996636743417334e-05, |
|
"loss": 3.3415, |
|
"step": 6570 |
|
}, |
|
{ |
|
"epoch": 0.053790832513990965, |
|
"grad_norm": 0.4973999559879303, |
|
"learning_rate": 4.996603424101669e-05, |
|
"loss": 3.3304, |
|
"step": 6600 |
|
}, |
|
{ |
|
"epoch": 0.05403533629814547, |
|
"grad_norm": 0.4833717942237854, |
|
"learning_rate": 4.996569940665789e-05, |
|
"loss": 3.3424, |
|
"step": 6630 |
|
}, |
|
{ |
|
"epoch": 0.054279840082299975, |
|
"grad_norm": 0.5074206590652466, |
|
"learning_rate": 4.996536293111896e-05, |
|
"loss": 3.3302, |
|
"step": 6660 |
|
}, |
|
{ |
|
"epoch": 0.05452434386645448, |
|
"grad_norm": 0.4932290315628052, |
|
"learning_rate": 4.996502481442202e-05, |
|
"loss": 3.3388, |
|
"step": 6690 |
|
}, |
|
{ |
|
"epoch": 0.054768847650608986, |
|
"grad_norm": 0.48740679025650024, |
|
"learning_rate": 4.9964685056589314e-05, |
|
"loss": 3.3182, |
|
"step": 6720 |
|
}, |
|
{ |
|
"epoch": 0.055013351434763484, |
|
"grad_norm": 0.48752760887145996, |
|
"learning_rate": 4.996434365764314e-05, |
|
"loss": 3.3065, |
|
"step": 6750 |
|
}, |
|
{ |
|
"epoch": 0.05525785521891799, |
|
"grad_norm": 0.50692218542099, |
|
"learning_rate": 4.996400061760597e-05, |
|
"loss": 3.3379, |
|
"step": 6780 |
|
}, |
|
{ |
|
"epoch": 0.055502359003072495, |
|
"grad_norm": 0.479159414768219, |
|
"learning_rate": 4.996365593650033e-05, |
|
"loss": 3.3317, |
|
"step": 6810 |
|
}, |
|
{ |
|
"epoch": 0.055746862787227, |
|
"grad_norm": 0.498662531375885, |
|
"learning_rate": 4.99633096143489e-05, |
|
"loss": 3.3306, |
|
"step": 6840 |
|
}, |
|
{ |
|
"epoch": 0.055991366571381505, |
|
"grad_norm": 1.4371449947357178, |
|
"learning_rate": 4.9962961651174436e-05, |
|
"loss": 3.3334, |
|
"step": 6870 |
|
}, |
|
{ |
|
"epoch": 0.05623587035553601, |
|
"grad_norm": 0.49862873554229736, |
|
"learning_rate": 4.9962612046999827e-05, |
|
"loss": 3.3142, |
|
"step": 6900 |
|
}, |
|
{ |
|
"epoch": 0.056480374139690516, |
|
"grad_norm": 0.4759610593318939, |
|
"learning_rate": 4.996226080184803e-05, |
|
"loss": 3.3238, |
|
"step": 6930 |
|
}, |
|
{ |
|
"epoch": 0.056724877923845014, |
|
"grad_norm": 0.4844242334365845, |
|
"learning_rate": 4.996190791574215e-05, |
|
"loss": 3.3197, |
|
"step": 6960 |
|
}, |
|
{ |
|
"epoch": 0.05696938170799952, |
|
"grad_norm": 0.46844130754470825, |
|
"learning_rate": 4.996155338870538e-05, |
|
"loss": 3.2949, |
|
"step": 6990 |
|
}, |
|
{ |
|
"epoch": 0.057213885492154025, |
|
"grad_norm": 0.4850478768348694, |
|
"learning_rate": 4.9961197220761035e-05, |
|
"loss": 3.3143, |
|
"step": 7020 |
|
}, |
|
{ |
|
"epoch": 0.05745838927630853, |
|
"grad_norm": 0.4838846027851105, |
|
"learning_rate": 4.996083941193252e-05, |
|
"loss": 3.3015, |
|
"step": 7050 |
|
}, |
|
{ |
|
"epoch": 0.057702893060463036, |
|
"grad_norm": 0.49992483854293823, |
|
"learning_rate": 4.9960479962243367e-05, |
|
"loss": 3.3099, |
|
"step": 7080 |
|
}, |
|
{ |
|
"epoch": 0.05794739684461754, |
|
"grad_norm": 0.49964553117752075, |
|
"learning_rate": 4.996011887171719e-05, |
|
"loss": 3.3046, |
|
"step": 7110 |
|
}, |
|
{ |
|
"epoch": 0.058191900628772046, |
|
"grad_norm": 0.4723115563392639, |
|
"learning_rate": 4.995975614037773e-05, |
|
"loss": 3.3009, |
|
"step": 7140 |
|
}, |
|
{ |
|
"epoch": 0.05843640441292655, |
|
"grad_norm": 0.48575958609580994, |
|
"learning_rate": 4.995939176824883e-05, |
|
"loss": 3.3018, |
|
"step": 7170 |
|
}, |
|
{ |
|
"epoch": 0.05868090819708105, |
|
"grad_norm": 0.5264491438865662, |
|
"learning_rate": 4.995902575535446e-05, |
|
"loss": 3.2877, |
|
"step": 7200 |
|
}, |
|
{ |
|
"epoch": 0.058925411981235555, |
|
"grad_norm": 0.4813016355037689, |
|
"learning_rate": 4.995865810171866e-05, |
|
"loss": 3.2933, |
|
"step": 7230 |
|
}, |
|
{ |
|
"epoch": 0.05916991576539006, |
|
"grad_norm": 0.47151580452919006, |
|
"learning_rate": 4.995828880736561e-05, |
|
"loss": 3.3143, |
|
"step": 7260 |
|
}, |
|
{ |
|
"epoch": 0.059414419549544566, |
|
"grad_norm": 0.4812193512916565, |
|
"learning_rate": 4.995791787231958e-05, |
|
"loss": 3.2914, |
|
"step": 7290 |
|
}, |
|
{ |
|
"epoch": 0.05965892333369907, |
|
"grad_norm": 0.4926256239414215, |
|
"learning_rate": 4.9957545296604965e-05, |
|
"loss": 3.2756, |
|
"step": 7320 |
|
}, |
|
{ |
|
"epoch": 0.059903427117853576, |
|
"grad_norm": 0.47530651092529297, |
|
"learning_rate": 4.9957171080246245e-05, |
|
"loss": 3.3075, |
|
"step": 7350 |
|
}, |
|
{ |
|
"epoch": 0.06014793090200808, |
|
"grad_norm": 0.4838476777076721, |
|
"learning_rate": 4.995679522326803e-05, |
|
"loss": 3.292, |
|
"step": 7380 |
|
}, |
|
{ |
|
"epoch": 0.06039243468616259, |
|
"grad_norm": 0.48683232069015503, |
|
"learning_rate": 4.995641772569502e-05, |
|
"loss": 3.269, |
|
"step": 7410 |
|
}, |
|
{ |
|
"epoch": 0.060636938470317085, |
|
"grad_norm": 0.4818269908428192, |
|
"learning_rate": 4.995603858755203e-05, |
|
"loss": 3.2798, |
|
"step": 7440 |
|
}, |
|
{ |
|
"epoch": 0.06088144225447159, |
|
"grad_norm": 0.46415388584136963, |
|
"learning_rate": 4.9955657808863985e-05, |
|
"loss": 3.2768, |
|
"step": 7470 |
|
}, |
|
{ |
|
"epoch": 0.061125946038626096, |
|
"grad_norm": 0.48808780312538147, |
|
"learning_rate": 4.995527538965593e-05, |
|
"loss": 3.2797, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.0613704498227806, |
|
"grad_norm": 0.4957239627838135, |
|
"learning_rate": 4.995489132995298e-05, |
|
"loss": 3.2912, |
|
"step": 7530 |
|
}, |
|
{ |
|
"epoch": 0.061614953606935106, |
|
"grad_norm": 0.4858773350715637, |
|
"learning_rate": 4.99545056297804e-05, |
|
"loss": 3.269, |
|
"step": 7560 |
|
}, |
|
{ |
|
"epoch": 0.06185945739108961, |
|
"grad_norm": 0.46054506301879883, |
|
"learning_rate": 4.995411828916354e-05, |
|
"loss": 3.2663, |
|
"step": 7590 |
|
}, |
|
{ |
|
"epoch": 0.06210396117524412, |
|
"grad_norm": 0.4704018831253052, |
|
"learning_rate": 4.9953729308127874e-05, |
|
"loss": 3.263, |
|
"step": 7620 |
|
}, |
|
{ |
|
"epoch": 0.062348464959398615, |
|
"grad_norm": 0.491974413394928, |
|
"learning_rate": 4.995333868669895e-05, |
|
"loss": 3.2709, |
|
"step": 7650 |
|
}, |
|
{ |
|
"epoch": 0.06259296874355312, |
|
"grad_norm": 0.46538054943084717, |
|
"learning_rate": 4.995294642490246e-05, |
|
"loss": 3.2818, |
|
"step": 7680 |
|
}, |
|
{ |
|
"epoch": 0.06283747252770763, |
|
"grad_norm": 0.49248039722442627, |
|
"learning_rate": 4.995255252276418e-05, |
|
"loss": 3.2581, |
|
"step": 7710 |
|
}, |
|
{ |
|
"epoch": 0.06308197631186213, |
|
"grad_norm": 0.45257478952407837, |
|
"learning_rate": 4.9952156980310016e-05, |
|
"loss": 3.2691, |
|
"step": 7740 |
|
}, |
|
{ |
|
"epoch": 0.06332648009601663, |
|
"grad_norm": 0.479942262172699, |
|
"learning_rate": 4.9951759797565965e-05, |
|
"loss": 3.276, |
|
"step": 7770 |
|
}, |
|
{ |
|
"epoch": 0.06357098388017114, |
|
"grad_norm": 0.48799383640289307, |
|
"learning_rate": 4.995136097455815e-05, |
|
"loss": 3.2668, |
|
"step": 7800 |
|
}, |
|
{ |
|
"epoch": 0.06381548766432564, |
|
"grad_norm": 0.46520474553108215, |
|
"learning_rate": 4.995096051131276e-05, |
|
"loss": 3.2509, |
|
"step": 7830 |
|
}, |
|
{ |
|
"epoch": 0.06405999144848015, |
|
"grad_norm": 0.4901852309703827, |
|
"learning_rate": 4.995055840785614e-05, |
|
"loss": 3.2383, |
|
"step": 7860 |
|
}, |
|
{ |
|
"epoch": 0.06430449523263465, |
|
"grad_norm": 0.46401068568229675, |
|
"learning_rate": 4.995015466421473e-05, |
|
"loss": 3.2626, |
|
"step": 7890 |
|
}, |
|
{ |
|
"epoch": 0.06454899901678916, |
|
"grad_norm": 0.46814706921577454, |
|
"learning_rate": 4.9949749280415056e-05, |
|
"loss": 3.2488, |
|
"step": 7920 |
|
}, |
|
{ |
|
"epoch": 0.06479350280094366, |
|
"grad_norm": 0.48936349153518677, |
|
"learning_rate": 4.9949342256483766e-05, |
|
"loss": 3.2368, |
|
"step": 7950 |
|
}, |
|
{ |
|
"epoch": 0.06503800658509817, |
|
"grad_norm": 0.4871247708797455, |
|
"learning_rate": 4.9948933592447636e-05, |
|
"loss": 3.2603, |
|
"step": 7980 |
|
}, |
|
{ |
|
"epoch": 0.06528251036925267, |
|
"grad_norm": 0.4689597487449646, |
|
"learning_rate": 4.9948523288333506e-05, |
|
"loss": 3.2437, |
|
"step": 8010 |
|
}, |
|
{ |
|
"epoch": 0.06552701415340717, |
|
"grad_norm": 0.45705971121788025, |
|
"learning_rate": 4.994811134416836e-05, |
|
"loss": 3.2415, |
|
"step": 8040 |
|
}, |
|
{ |
|
"epoch": 0.06577151793756168, |
|
"grad_norm": 0.46996039152145386, |
|
"learning_rate": 4.994769775997927e-05, |
|
"loss": 3.2545, |
|
"step": 8070 |
|
}, |
|
{ |
|
"epoch": 0.06601602172171618, |
|
"grad_norm": 0.4835875928401947, |
|
"learning_rate": 4.994728253579345e-05, |
|
"loss": 3.2587, |
|
"step": 8100 |
|
}, |
|
{ |
|
"epoch": 0.0662605255058707, |
|
"grad_norm": 0.4678030014038086, |
|
"learning_rate": 4.9946865671638166e-05, |
|
"loss": 3.2368, |
|
"step": 8130 |
|
}, |
|
{ |
|
"epoch": 0.06650502929002519, |
|
"grad_norm": 0.5105451345443726, |
|
"learning_rate": 4.9946447167540835e-05, |
|
"loss": 3.259, |
|
"step": 8160 |
|
}, |
|
{ |
|
"epoch": 0.0667495330741797, |
|
"grad_norm": 0.471935898065567, |
|
"learning_rate": 4.994602702352896e-05, |
|
"loss": 3.2631, |
|
"step": 8190 |
|
}, |
|
{ |
|
"epoch": 0.0669940368583342, |
|
"grad_norm": 0.48006853461265564, |
|
"learning_rate": 4.994560523963018e-05, |
|
"loss": 3.2313, |
|
"step": 8220 |
|
}, |
|
{ |
|
"epoch": 0.0672385406424887, |
|
"grad_norm": 0.46124544739723206, |
|
"learning_rate": 4.9945181815872196e-05, |
|
"loss": 3.215, |
|
"step": 8250 |
|
}, |
|
{ |
|
"epoch": 0.06748304442664321, |
|
"grad_norm": 0.481571763753891, |
|
"learning_rate": 4.9944756752282855e-05, |
|
"loss": 3.2357, |
|
"step": 8280 |
|
}, |
|
{ |
|
"epoch": 0.06772754821079771, |
|
"grad_norm": 0.47422316670417786, |
|
"learning_rate": 4.994433004889011e-05, |
|
"loss": 3.2279, |
|
"step": 8310 |
|
}, |
|
{ |
|
"epoch": 0.06797205199495222, |
|
"grad_norm": 0.48801445960998535, |
|
"learning_rate": 4.994390170572199e-05, |
|
"loss": 3.2369, |
|
"step": 8340 |
|
}, |
|
{ |
|
"epoch": 0.06821655577910672, |
|
"grad_norm": 0.46795913577079773, |
|
"learning_rate": 4.994347172280667e-05, |
|
"loss": 3.2187, |
|
"step": 8370 |
|
}, |
|
{ |
|
"epoch": 0.06846105956326123, |
|
"grad_norm": 0.4645237326622009, |
|
"learning_rate": 4.994304010017241e-05, |
|
"loss": 3.2183, |
|
"step": 8400 |
|
}, |
|
{ |
|
"epoch": 0.06870556334741573, |
|
"grad_norm": 0.45294952392578125, |
|
"learning_rate": 4.994260683784758e-05, |
|
"loss": 3.2449, |
|
"step": 8430 |
|
}, |
|
{ |
|
"epoch": 0.06895006713157023, |
|
"grad_norm": 0.47424617409706116, |
|
"learning_rate": 4.9942171935860674e-05, |
|
"loss": 3.2249, |
|
"step": 8460 |
|
}, |
|
{ |
|
"epoch": 0.06919457091572474, |
|
"grad_norm": 0.4657289683818817, |
|
"learning_rate": 4.994173539424026e-05, |
|
"loss": 3.2313, |
|
"step": 8490 |
|
}, |
|
{ |
|
"epoch": 0.06943907469987924, |
|
"grad_norm": 0.4828115701675415, |
|
"learning_rate": 4.994129721301506e-05, |
|
"loss": 3.236, |
|
"step": 8520 |
|
}, |
|
{ |
|
"epoch": 0.06968357848403375, |
|
"grad_norm": 0.4601866602897644, |
|
"learning_rate": 4.994085739221386e-05, |
|
"loss": 3.2459, |
|
"step": 8550 |
|
}, |
|
{ |
|
"epoch": 0.06992808226818825, |
|
"grad_norm": 0.45241278409957886, |
|
"learning_rate": 4.994041593186558e-05, |
|
"loss": 3.2077, |
|
"step": 8580 |
|
}, |
|
{ |
|
"epoch": 0.07017258605234276, |
|
"grad_norm": 0.47955596446990967, |
|
"learning_rate": 4.993997283199924e-05, |
|
"loss": 3.2378, |
|
"step": 8610 |
|
}, |
|
{ |
|
"epoch": 0.07041708983649726, |
|
"grad_norm": 0.4656619727611542, |
|
"learning_rate": 4.993952809264397e-05, |
|
"loss": 3.2277, |
|
"step": 8640 |
|
}, |
|
{ |
|
"epoch": 0.07066159362065176, |
|
"grad_norm": 0.45982396602630615, |
|
"learning_rate": 4.9939081713829006e-05, |
|
"loss": 3.2192, |
|
"step": 8670 |
|
}, |
|
{ |
|
"epoch": 0.07090609740480627, |
|
"grad_norm": 0.4598824381828308, |
|
"learning_rate": 4.993863369558369e-05, |
|
"loss": 3.2286, |
|
"step": 8700 |
|
}, |
|
{ |
|
"epoch": 0.07115060118896077, |
|
"grad_norm": 0.4744945168495178, |
|
"learning_rate": 4.9938184037937466e-05, |
|
"loss": 3.2201, |
|
"step": 8730 |
|
}, |
|
{ |
|
"epoch": 0.07139510497311528, |
|
"grad_norm": 0.46979376673698425, |
|
"learning_rate": 4.993773274091991e-05, |
|
"loss": 3.1986, |
|
"step": 8760 |
|
}, |
|
{ |
|
"epoch": 0.07163960875726978, |
|
"grad_norm": 0.46936362981796265, |
|
"learning_rate": 4.993727980456067e-05, |
|
"loss": 3.2047, |
|
"step": 8790 |
|
}, |
|
{ |
|
"epoch": 0.0718841125414243, |
|
"grad_norm": 0.4529063105583191, |
|
"learning_rate": 4.993682522888954e-05, |
|
"loss": 3.221, |
|
"step": 8820 |
|
}, |
|
{ |
|
"epoch": 0.07212861632557879, |
|
"grad_norm": 0.46270012855529785, |
|
"learning_rate": 4.993636901393639e-05, |
|
"loss": 3.2054, |
|
"step": 8850 |
|
}, |
|
{ |
|
"epoch": 0.0723731201097333, |
|
"grad_norm": 0.6890011429786682, |
|
"learning_rate": 4.993591115973121e-05, |
|
"loss": 3.2124, |
|
"step": 8880 |
|
}, |
|
{ |
|
"epoch": 0.0726176238938878, |
|
"grad_norm": 0.48755085468292236, |
|
"learning_rate": 4.9935451666304105e-05, |
|
"loss": 3.2177, |
|
"step": 8910 |
|
}, |
|
{ |
|
"epoch": 0.0728621276780423, |
|
"grad_norm": 0.4768828749656677, |
|
"learning_rate": 4.993499053368528e-05, |
|
"loss": 3.2075, |
|
"step": 8940 |
|
}, |
|
{ |
|
"epoch": 0.07310663146219681, |
|
"grad_norm": 0.5368286371231079, |
|
"learning_rate": 4.993452776190504e-05, |
|
"loss": 3.1981, |
|
"step": 8970 |
|
}, |
|
{ |
|
"epoch": 0.07335113524635131, |
|
"grad_norm": 0.45793619751930237, |
|
"learning_rate": 4.993406335099382e-05, |
|
"loss": 3.2143, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.07359563903050582, |
|
"grad_norm": 0.459844172000885, |
|
"learning_rate": 4.993359730098214e-05, |
|
"loss": 3.2107, |
|
"step": 9030 |
|
}, |
|
{ |
|
"epoch": 0.07384014281466032, |
|
"grad_norm": 0.4570547640323639, |
|
"learning_rate": 4.993312961190064e-05, |
|
"loss": 3.1962, |
|
"step": 9060 |
|
}, |
|
{ |
|
"epoch": 0.07408464659881484, |
|
"grad_norm": 0.4817084074020386, |
|
"learning_rate": 4.993266028378006e-05, |
|
"loss": 3.1895, |
|
"step": 9090 |
|
}, |
|
{ |
|
"epoch": 0.07432915038296933, |
|
"grad_norm": 0.46276503801345825, |
|
"learning_rate": 4.993218931665126e-05, |
|
"loss": 3.1916, |
|
"step": 9120 |
|
}, |
|
{ |
|
"epoch": 0.07457365416712383, |
|
"grad_norm": 0.4731460511684418, |
|
"learning_rate": 4.993171671054519e-05, |
|
"loss": 3.2119, |
|
"step": 9150 |
|
}, |
|
{ |
|
"epoch": 0.07481815795127834, |
|
"grad_norm": 0.4732898473739624, |
|
"learning_rate": 4.993124246549293e-05, |
|
"loss": 3.1993, |
|
"step": 9180 |
|
}, |
|
{ |
|
"epoch": 0.07506266173543284, |
|
"grad_norm": 0.4521377682685852, |
|
"learning_rate": 4.9930766581525645e-05, |
|
"loss": 3.1948, |
|
"step": 9210 |
|
}, |
|
{ |
|
"epoch": 0.07530716551958735, |
|
"grad_norm": 0.466330885887146, |
|
"learning_rate": 4.993028905867463e-05, |
|
"loss": 3.1929, |
|
"step": 9240 |
|
}, |
|
{ |
|
"epoch": 0.07555166930374185, |
|
"grad_norm": 0.4678347110748291, |
|
"learning_rate": 4.992980989697126e-05, |
|
"loss": 3.1962, |
|
"step": 9270 |
|
}, |
|
{ |
|
"epoch": 0.07579617308789637, |
|
"grad_norm": 0.4650745391845703, |
|
"learning_rate": 4.992932909644705e-05, |
|
"loss": 3.19, |
|
"step": 9300 |
|
}, |
|
{ |
|
"epoch": 0.07604067687205086, |
|
"grad_norm": 0.45810264348983765, |
|
"learning_rate": 4.9928846657133596e-05, |
|
"loss": 3.1937, |
|
"step": 9330 |
|
}, |
|
{ |
|
"epoch": 0.07628518065620536, |
|
"grad_norm": 0.4602174162864685, |
|
"learning_rate": 4.992836257906262e-05, |
|
"loss": 3.2078, |
|
"step": 9360 |
|
}, |
|
{ |
|
"epoch": 0.07652968444035987, |
|
"grad_norm": 0.44673460721969604, |
|
"learning_rate": 4.9927876862265935e-05, |
|
"loss": 3.1726, |
|
"step": 9390 |
|
}, |
|
{ |
|
"epoch": 0.07677418822451437, |
|
"grad_norm": 0.4698408246040344, |
|
"learning_rate": 4.992738950677548e-05, |
|
"loss": 3.1658, |
|
"step": 9420 |
|
}, |
|
{ |
|
"epoch": 0.07701869200866888, |
|
"grad_norm": 0.47652050852775574, |
|
"learning_rate": 4.992690051262329e-05, |
|
"loss": 3.1642, |
|
"step": 9450 |
|
}, |
|
{ |
|
"epoch": 0.07726319579282338, |
|
"grad_norm": 0.4623103737831116, |
|
"learning_rate": 4.99264098798415e-05, |
|
"loss": 3.1788, |
|
"step": 9480 |
|
}, |
|
{ |
|
"epoch": 0.0775076995769779, |
|
"grad_norm": 0.4699987471103668, |
|
"learning_rate": 4.992591760846238e-05, |
|
"loss": 3.1679, |
|
"step": 9510 |
|
}, |
|
{ |
|
"epoch": 0.0777522033611324, |
|
"grad_norm": 0.466349720954895, |
|
"learning_rate": 4.9925423698518274e-05, |
|
"loss": 3.1674, |
|
"step": 9540 |
|
}, |
|
{ |
|
"epoch": 0.0779967071452869, |
|
"grad_norm": 0.4621599316596985, |
|
"learning_rate": 4.992492815004166e-05, |
|
"loss": 3.193, |
|
"step": 9570 |
|
}, |
|
{ |
|
"epoch": 0.0782412109294414, |
|
"grad_norm": 0.4567157030105591, |
|
"learning_rate": 4.992443096306512e-05, |
|
"loss": 3.1681, |
|
"step": 9600 |
|
}, |
|
{ |
|
"epoch": 0.0784857147135959, |
|
"grad_norm": 0.45757701992988586, |
|
"learning_rate": 4.992393213762132e-05, |
|
"loss": 3.2002, |
|
"step": 9630 |
|
}, |
|
{ |
|
"epoch": 0.07873021849775041, |
|
"grad_norm": 0.45566242933273315, |
|
"learning_rate": 4.992343167374307e-05, |
|
"loss": 3.1651, |
|
"step": 9660 |
|
}, |
|
{ |
|
"epoch": 0.07897472228190491, |
|
"grad_norm": 0.4485650360584259, |
|
"learning_rate": 4.992292957146326e-05, |
|
"loss": 3.1613, |
|
"step": 9690 |
|
}, |
|
{ |
|
"epoch": 0.07921922606605943, |
|
"grad_norm": 0.4732878804206848, |
|
"learning_rate": 4.992242583081489e-05, |
|
"loss": 3.1602, |
|
"step": 9720 |
|
}, |
|
{ |
|
"epoch": 0.07946372985021392, |
|
"grad_norm": 0.4469018280506134, |
|
"learning_rate": 4.992192045183109e-05, |
|
"loss": 3.1487, |
|
"step": 9750 |
|
}, |
|
{ |
|
"epoch": 0.07970823363436844, |
|
"grad_norm": 0.45860594511032104, |
|
"learning_rate": 4.9921413434545075e-05, |
|
"loss": 3.1516, |
|
"step": 9780 |
|
}, |
|
{ |
|
"epoch": 0.07995273741852293, |
|
"grad_norm": 0.45328783988952637, |
|
"learning_rate": 4.992090477899018e-05, |
|
"loss": 3.1481, |
|
"step": 9810 |
|
}, |
|
{ |
|
"epoch": 0.08019724120267743, |
|
"grad_norm": 0.48180997371673584, |
|
"learning_rate": 4.992039448519982e-05, |
|
"loss": 3.1459, |
|
"step": 9840 |
|
}, |
|
{ |
|
"epoch": 0.08044174498683195, |
|
"grad_norm": 0.45059671998023987, |
|
"learning_rate": 4.9919882553207566e-05, |
|
"loss": 3.1432, |
|
"step": 9870 |
|
}, |
|
{ |
|
"epoch": 0.08068624877098644, |
|
"grad_norm": 0.4592690169811249, |
|
"learning_rate": 4.9919368983047066e-05, |
|
"loss": 3.1532, |
|
"step": 9900 |
|
}, |
|
{ |
|
"epoch": 0.08093075255514096, |
|
"grad_norm": 0.4452771842479706, |
|
"learning_rate": 4.9918853774752074e-05, |
|
"loss": 3.171, |
|
"step": 9930 |
|
}, |
|
{ |
|
"epoch": 0.08117525633929545, |
|
"grad_norm": 0.45305460691452026, |
|
"learning_rate": 4.991833692835646e-05, |
|
"loss": 3.1422, |
|
"step": 9960 |
|
}, |
|
{ |
|
"epoch": 0.08141976012344997, |
|
"grad_norm": 0.47526153922080994, |
|
"learning_rate": 4.9917818443894203e-05, |
|
"loss": 3.1565, |
|
"step": 9990 |
|
}, |
|
{ |
|
"epoch": 0.08166426390760446, |
|
"grad_norm": 0.4707624614238739, |
|
"learning_rate": 4.991729832139939e-05, |
|
"loss": 3.1481, |
|
"step": 10020 |
|
}, |
|
{ |
|
"epoch": 0.08190876769175896, |
|
"grad_norm": 0.44442108273506165, |
|
"learning_rate": 4.991677656090621e-05, |
|
"loss": 3.1455, |
|
"step": 10050 |
|
}, |
|
{ |
|
"epoch": 0.08215327147591348, |
|
"grad_norm": 0.4681081473827362, |
|
"learning_rate": 4.991625316244896e-05, |
|
"loss": 3.1703, |
|
"step": 10080 |
|
}, |
|
{ |
|
"epoch": 0.08239777526006797, |
|
"grad_norm": 0.4762340486049652, |
|
"learning_rate": 4.991572812606205e-05, |
|
"loss": 3.1636, |
|
"step": 10110 |
|
}, |
|
{ |
|
"epoch": 0.08264227904422249, |
|
"grad_norm": 0.4579741954803467, |
|
"learning_rate": 4.991520145177998e-05, |
|
"loss": 3.1546, |
|
"step": 10140 |
|
}, |
|
{ |
|
"epoch": 0.08288678282837698, |
|
"grad_norm": 0.45810720324516296, |
|
"learning_rate": 4.991467313963739e-05, |
|
"loss": 3.1456, |
|
"step": 10170 |
|
}, |
|
{ |
|
"epoch": 0.0831312866125315, |
|
"grad_norm": 0.4368288516998291, |
|
"learning_rate": 4.991414318966901e-05, |
|
"loss": 3.1393, |
|
"step": 10200 |
|
}, |
|
{ |
|
"epoch": 0.083375790396686, |
|
"grad_norm": 0.4572373330593109, |
|
"learning_rate": 4.991361160190966e-05, |
|
"loss": 3.1258, |
|
"step": 10230 |
|
}, |
|
{ |
|
"epoch": 0.08362029418084051, |
|
"grad_norm": 0.488158255815506, |
|
"learning_rate": 4.9913078376394304e-05, |
|
"loss": 3.1563, |
|
"step": 10260 |
|
}, |
|
{ |
|
"epoch": 0.083864797964995, |
|
"grad_norm": 0.4564719498157501, |
|
"learning_rate": 4.991254351315799e-05, |
|
"loss": 3.1344, |
|
"step": 10290 |
|
}, |
|
{ |
|
"epoch": 0.0841093017491495, |
|
"grad_norm": 0.4469270408153534, |
|
"learning_rate": 4.991200701223587e-05, |
|
"loss": 3.1493, |
|
"step": 10320 |
|
}, |
|
{ |
|
"epoch": 0.08435380553330402, |
|
"grad_norm": 0.46140623092651367, |
|
"learning_rate": 4.991146887366323e-05, |
|
"loss": 3.1517, |
|
"step": 10350 |
|
}, |
|
{ |
|
"epoch": 0.08459830931745851, |
|
"grad_norm": 0.4541148841381073, |
|
"learning_rate": 4.991092909747542e-05, |
|
"loss": 3.1442, |
|
"step": 10380 |
|
}, |
|
{ |
|
"epoch": 0.08484281310161303, |
|
"grad_norm": 0.4556724727153778, |
|
"learning_rate": 4.9910387683707946e-05, |
|
"loss": 3.1429, |
|
"step": 10410 |
|
}, |
|
{ |
|
"epoch": 0.08508731688576752, |
|
"grad_norm": 0.4350408613681793, |
|
"learning_rate": 4.9909844632396386e-05, |
|
"loss": 3.1356, |
|
"step": 10440 |
|
}, |
|
{ |
|
"epoch": 0.08533182066992204, |
|
"grad_norm": 0.45419394969940186, |
|
"learning_rate": 4.9909299943576445e-05, |
|
"loss": 3.1321, |
|
"step": 10470 |
|
}, |
|
{ |
|
"epoch": 0.08557632445407654, |
|
"grad_norm": 0.4699922204017639, |
|
"learning_rate": 4.990875361728393e-05, |
|
"loss": 3.1406, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.08582082823823103, |
|
"grad_norm": 0.45109736919403076, |
|
"learning_rate": 4.990820565355475e-05, |
|
"loss": 3.1369, |
|
"step": 10530 |
|
}, |
|
{ |
|
"epoch": 0.08606533202238555, |
|
"grad_norm": 0.45368725061416626, |
|
"learning_rate": 4.990765605242493e-05, |
|
"loss": 3.1299, |
|
"step": 10560 |
|
}, |
|
{ |
|
"epoch": 0.08630983580654004, |
|
"grad_norm": 0.4491060674190521, |
|
"learning_rate": 4.990710481393061e-05, |
|
"loss": 3.1139, |
|
"step": 10590 |
|
}, |
|
{ |
|
"epoch": 0.08655433959069456, |
|
"grad_norm": 0.47071024775505066, |
|
"learning_rate": 4.9906551938108003e-05, |
|
"loss": 3.159, |
|
"step": 10620 |
|
}, |
|
{ |
|
"epoch": 0.08679884337484906, |
|
"grad_norm": 0.4578341841697693, |
|
"learning_rate": 4.990599742499347e-05, |
|
"loss": 3.1449, |
|
"step": 10650 |
|
}, |
|
{ |
|
"epoch": 0.08704334715900357, |
|
"grad_norm": 0.4422541856765747, |
|
"learning_rate": 4.990544127462346e-05, |
|
"loss": 3.125, |
|
"step": 10680 |
|
}, |
|
{ |
|
"epoch": 0.08728785094315807, |
|
"grad_norm": 0.44820329546928406, |
|
"learning_rate": 4.9904883487034537e-05, |
|
"loss": 3.1328, |
|
"step": 10710 |
|
}, |
|
{ |
|
"epoch": 0.08753235472731256, |
|
"grad_norm": 0.4683224856853485, |
|
"learning_rate": 4.990432406226336e-05, |
|
"loss": 3.1296, |
|
"step": 10740 |
|
}, |
|
{ |
|
"epoch": 0.08777685851146708, |
|
"grad_norm": 0.4463740885257721, |
|
"learning_rate": 4.990376300034671e-05, |
|
"loss": 3.1177, |
|
"step": 10770 |
|
}, |
|
{ |
|
"epoch": 0.08802136229562157, |
|
"grad_norm": 0.4537316560745239, |
|
"learning_rate": 4.990320030132147e-05, |
|
"loss": 3.1269, |
|
"step": 10800 |
|
}, |
|
{ |
|
"epoch": 0.08826586607977609, |
|
"grad_norm": 0.472412109375, |
|
"learning_rate": 4.990263596522462e-05, |
|
"loss": 3.1378, |
|
"step": 10830 |
|
}, |
|
{ |
|
"epoch": 0.08851036986393059, |
|
"grad_norm": 0.4640551805496216, |
|
"learning_rate": 4.9902069992093275e-05, |
|
"loss": 3.1381, |
|
"step": 10860 |
|
}, |
|
{ |
|
"epoch": 0.0887548736480851, |
|
"grad_norm": 0.4387233853340149, |
|
"learning_rate": 4.990150238196463e-05, |
|
"loss": 3.1299, |
|
"step": 10890 |
|
}, |
|
{ |
|
"epoch": 0.0889993774322396, |
|
"grad_norm": 0.4460201561450958, |
|
"learning_rate": 4.9900933134876e-05, |
|
"loss": 3.127, |
|
"step": 10920 |
|
}, |
|
{ |
|
"epoch": 0.0892438812163941, |
|
"grad_norm": 0.4335034191608429, |
|
"learning_rate": 4.990036225086481e-05, |
|
"loss": 3.1032, |
|
"step": 10950 |
|
}, |
|
{ |
|
"epoch": 0.0894883850005486, |
|
"grad_norm": 0.44630929827690125, |
|
"learning_rate": 4.9899789729968585e-05, |
|
"loss": 3.119, |
|
"step": 10980 |
|
}, |
|
{ |
|
"epoch": 0.0897328887847031, |
|
"grad_norm": 0.4360102117061615, |
|
"learning_rate": 4.989921557222496e-05, |
|
"loss": 3.1123, |
|
"step": 11010 |
|
}, |
|
{ |
|
"epoch": 0.08997739256885762, |
|
"grad_norm": 0.45332589745521545, |
|
"learning_rate": 4.989863977767167e-05, |
|
"loss": 3.123, |
|
"step": 11040 |
|
}, |
|
{ |
|
"epoch": 0.09022189635301212, |
|
"grad_norm": 0.4539777338504791, |
|
"learning_rate": 4.989806234634659e-05, |
|
"loss": 3.1144, |
|
"step": 11070 |
|
}, |
|
{ |
|
"epoch": 0.09046640013716663, |
|
"grad_norm": 0.5205842852592468, |
|
"learning_rate": 4.989748327828765e-05, |
|
"loss": 3.1199, |
|
"step": 11100 |
|
}, |
|
{ |
|
"epoch": 0.09071090392132113, |
|
"grad_norm": 0.44270429015159607, |
|
"learning_rate": 4.989690257353294e-05, |
|
"loss": 3.0783, |
|
"step": 11130 |
|
}, |
|
{ |
|
"epoch": 0.09095540770547564, |
|
"grad_norm": 0.4286257326602936, |
|
"learning_rate": 4.989632023212062e-05, |
|
"loss": 3.1162, |
|
"step": 11160 |
|
}, |
|
{ |
|
"epoch": 0.09119991148963014, |
|
"grad_norm": 0.49412211775779724, |
|
"learning_rate": 4.9895736254088975e-05, |
|
"loss": 3.1142, |
|
"step": 11190 |
|
}, |
|
{ |
|
"epoch": 0.09144441527378463, |
|
"grad_norm": 0.4755348563194275, |
|
"learning_rate": 4.989515063947641e-05, |
|
"loss": 3.1173, |
|
"step": 11220 |
|
}, |
|
{ |
|
"epoch": 0.09168891905793915, |
|
"grad_norm": 0.48216554522514343, |
|
"learning_rate": 4.9894563388321395e-05, |
|
"loss": 3.1027, |
|
"step": 11250 |
|
}, |
|
{ |
|
"epoch": 0.09193342284209365, |
|
"grad_norm": 0.4571470320224762, |
|
"learning_rate": 4.989397450066254e-05, |
|
"loss": 3.1191, |
|
"step": 11280 |
|
}, |
|
{ |
|
"epoch": 0.09217792662624816, |
|
"grad_norm": 0.4681689441204071, |
|
"learning_rate": 4.989338397653858e-05, |
|
"loss": 3.097, |
|
"step": 11310 |
|
}, |
|
{ |
|
"epoch": 0.09242243041040266, |
|
"grad_norm": 0.4475450813770294, |
|
"learning_rate": 4.98927918159883e-05, |
|
"loss": 3.118, |
|
"step": 11340 |
|
}, |
|
{ |
|
"epoch": 0.09266693419455717, |
|
"grad_norm": 0.4555697739124298, |
|
"learning_rate": 4.989219801905066e-05, |
|
"loss": 3.1078, |
|
"step": 11370 |
|
}, |
|
{ |
|
"epoch": 0.09291143797871167, |
|
"grad_norm": 0.45538437366485596, |
|
"learning_rate": 4.989160258576469e-05, |
|
"loss": 3.1106, |
|
"step": 11400 |
|
}, |
|
{ |
|
"epoch": 0.09315594176286617, |
|
"grad_norm": 0.48153620958328247, |
|
"learning_rate": 4.98910055161695e-05, |
|
"loss": 3.0844, |
|
"step": 11430 |
|
}, |
|
{ |
|
"epoch": 0.09340044554702068, |
|
"grad_norm": 0.42441487312316895, |
|
"learning_rate": 4.989040681030437e-05, |
|
"loss": 3.1159, |
|
"step": 11460 |
|
}, |
|
{ |
|
"epoch": 0.09364494933117518, |
|
"grad_norm": 0.4604051411151886, |
|
"learning_rate": 4.988980646820865e-05, |
|
"loss": 3.1035, |
|
"step": 11490 |
|
}, |
|
{ |
|
"epoch": 0.09388945311532969, |
|
"grad_norm": 0.43700629472732544, |
|
"learning_rate": 4.9889204489921804e-05, |
|
"loss": 3.0811, |
|
"step": 11520 |
|
}, |
|
{ |
|
"epoch": 0.09413395689948419, |
|
"grad_norm": 0.4468238949775696, |
|
"learning_rate": 4.9888600875483404e-05, |
|
"loss": 3.1104, |
|
"step": 11550 |
|
}, |
|
{ |
|
"epoch": 0.0943784606836387, |
|
"grad_norm": 0.44943588972091675, |
|
"learning_rate": 4.9887995624933137e-05, |
|
"loss": 3.0917, |
|
"step": 11580 |
|
}, |
|
{ |
|
"epoch": 0.0946229644677932, |
|
"grad_norm": 0.4467644691467285, |
|
"learning_rate": 4.988738873831078e-05, |
|
"loss": 3.087, |
|
"step": 11610 |
|
}, |
|
{ |
|
"epoch": 0.0948674682519477, |
|
"grad_norm": 0.44013822078704834, |
|
"learning_rate": 4.988678021565623e-05, |
|
"loss": 3.0868, |
|
"step": 11640 |
|
}, |
|
{ |
|
"epoch": 0.09511197203610221, |
|
"grad_norm": 0.4420963525772095, |
|
"learning_rate": 4.988617005700949e-05, |
|
"loss": 3.102, |
|
"step": 11670 |
|
}, |
|
{ |
|
"epoch": 0.0953564758202567, |
|
"grad_norm": 0.45339009165763855, |
|
"learning_rate": 4.988555826241068e-05, |
|
"loss": 3.1001, |
|
"step": 11700 |
|
}, |
|
{ |
|
"epoch": 0.09560097960441122, |
|
"grad_norm": 0.4557483196258545, |
|
"learning_rate": 4.988494483190001e-05, |
|
"loss": 3.0894, |
|
"step": 11730 |
|
}, |
|
{ |
|
"epoch": 0.09584548338856572, |
|
"grad_norm": 0.43180930614471436, |
|
"learning_rate": 4.98843297655178e-05, |
|
"loss": 3.1116, |
|
"step": 11760 |
|
}, |
|
{ |
|
"epoch": 0.09608998717272023, |
|
"grad_norm": 0.43689119815826416, |
|
"learning_rate": 4.988371306330449e-05, |
|
"loss": 3.0829, |
|
"step": 11790 |
|
}, |
|
{ |
|
"epoch": 0.09633449095687473, |
|
"grad_norm": 0.45857593417167664, |
|
"learning_rate": 4.9883094725300625e-05, |
|
"loss": 3.0991, |
|
"step": 11820 |
|
}, |
|
{ |
|
"epoch": 0.09657899474102924, |
|
"grad_norm": 0.46954238414764404, |
|
"learning_rate": 4.9882474751546846e-05, |
|
"loss": 3.0792, |
|
"step": 11850 |
|
}, |
|
{ |
|
"epoch": 0.09682349852518374, |
|
"grad_norm": 0.4400712847709656, |
|
"learning_rate": 4.98818531420839e-05, |
|
"loss": 3.0839, |
|
"step": 11880 |
|
}, |
|
{ |
|
"epoch": 0.09706800230933824, |
|
"grad_norm": 0.45475077629089355, |
|
"learning_rate": 4.988122989695267e-05, |
|
"loss": 3.1036, |
|
"step": 11910 |
|
}, |
|
{ |
|
"epoch": 0.09731250609349275, |
|
"grad_norm": 0.451103150844574, |
|
"learning_rate": 4.9880605016194116e-05, |
|
"loss": 3.0762, |
|
"step": 11940 |
|
}, |
|
{ |
|
"epoch": 0.09755700987764725, |
|
"grad_norm": 0.44475677609443665, |
|
"learning_rate": 4.9879978499849304e-05, |
|
"loss": 3.082, |
|
"step": 11970 |
|
}, |
|
{ |
|
"epoch": 0.09780151366180176, |
|
"grad_norm": 0.4441121518611908, |
|
"learning_rate": 4.987935034795944e-05, |
|
"loss": 3.0826, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.09804601744595626, |
|
"grad_norm": 0.4929714798927307, |
|
"learning_rate": 4.9878720560565803e-05, |
|
"loss": 3.0762, |
|
"step": 12030 |
|
}, |
|
{ |
|
"epoch": 0.09829052123011077, |
|
"grad_norm": 0.4454306364059448, |
|
"learning_rate": 4.9878089137709806e-05, |
|
"loss": 3.0864, |
|
"step": 12060 |
|
}, |
|
{ |
|
"epoch": 0.09853502501426527, |
|
"grad_norm": 0.45685863494873047, |
|
"learning_rate": 4.987745607943295e-05, |
|
"loss": 3.1007, |
|
"step": 12090 |
|
}, |
|
{ |
|
"epoch": 0.09877952879841977, |
|
"grad_norm": 0.43002381920814514, |
|
"learning_rate": 4.9876821385776854e-05, |
|
"loss": 3.0768, |
|
"step": 12120 |
|
}, |
|
{ |
|
"epoch": 0.09902403258257428, |
|
"grad_norm": 0.4407506585121155, |
|
"learning_rate": 4.9876185056783226e-05, |
|
"loss": 3.0818, |
|
"step": 12150 |
|
}, |
|
{ |
|
"epoch": 0.09926853636672878, |
|
"grad_norm": 0.4490843117237091, |
|
"learning_rate": 4.987554709249391e-05, |
|
"loss": 3.0837, |
|
"step": 12180 |
|
}, |
|
{ |
|
"epoch": 0.09951304015088329, |
|
"grad_norm": 0.45774146914482117, |
|
"learning_rate": 4.987490749295085e-05, |
|
"loss": 3.0757, |
|
"step": 12210 |
|
}, |
|
{ |
|
"epoch": 0.09975754393503779, |
|
"grad_norm": 0.45156994462013245, |
|
"learning_rate": 4.9874266258196084e-05, |
|
"loss": 3.0738, |
|
"step": 12240 |
|
}, |
|
{ |
|
"epoch": 0.1000020477191923, |
|
"grad_norm": 0.42710986733436584, |
|
"learning_rate": 4.9873623388271764e-05, |
|
"loss": 3.0838, |
|
"step": 12270 |
|
}, |
|
{ |
|
"epoch": 0.1002465515033468, |
|
"grad_norm": 0.4447808563709259, |
|
"learning_rate": 4.987297888322015e-05, |
|
"loss": 3.0669, |
|
"step": 12300 |
|
}, |
|
{ |
|
"epoch": 0.1004910552875013, |
|
"grad_norm": 0.4497504234313965, |
|
"learning_rate": 4.9872332743083615e-05, |
|
"loss": 3.0806, |
|
"step": 12330 |
|
}, |
|
{ |
|
"epoch": 0.10073555907165581, |
|
"grad_norm": 0.44097018241882324, |
|
"learning_rate": 4.987168496790463e-05, |
|
"loss": 3.0687, |
|
"step": 12360 |
|
}, |
|
{ |
|
"epoch": 0.10098006285581031, |
|
"grad_norm": 0.4529583752155304, |
|
"learning_rate": 4.9871035557725774e-05, |
|
"loss": 3.0707, |
|
"step": 12390 |
|
}, |
|
{ |
|
"epoch": 0.10122456663996482, |
|
"grad_norm": 0.4495998024940491, |
|
"learning_rate": 4.987038451258975e-05, |
|
"loss": 3.0772, |
|
"step": 12420 |
|
}, |
|
{ |
|
"epoch": 0.10146907042411932, |
|
"grad_norm": 0.46496671438217163, |
|
"learning_rate": 4.9869731832539346e-05, |
|
"loss": 3.0572, |
|
"step": 12450 |
|
}, |
|
{ |
|
"epoch": 0.10171357420827383, |
|
"grad_norm": 0.43100932240486145, |
|
"learning_rate": 4.9869077517617474e-05, |
|
"loss": 3.0545, |
|
"step": 12480 |
|
}, |
|
{ |
|
"epoch": 0.10195807799242833, |
|
"grad_norm": 0.4375341534614563, |
|
"learning_rate": 4.986842156786714e-05, |
|
"loss": 3.0411, |
|
"step": 12510 |
|
}, |
|
{ |
|
"epoch": 0.10220258177658284, |
|
"grad_norm": 0.463009774684906, |
|
"learning_rate": 4.9867763983331474e-05, |
|
"loss": 3.0525, |
|
"step": 12540 |
|
}, |
|
{ |
|
"epoch": 0.10244708556073734, |
|
"grad_norm": 0.44937971234321594, |
|
"learning_rate": 4.9867104764053697e-05, |
|
"loss": 3.0572, |
|
"step": 12570 |
|
}, |
|
{ |
|
"epoch": 0.10269158934489184, |
|
"grad_norm": 0.43725940585136414, |
|
"learning_rate": 4.986644391007714e-05, |
|
"loss": 3.0464, |
|
"step": 12600 |
|
}, |
|
{ |
|
"epoch": 0.10293609312904635, |
|
"grad_norm": 0.4457106292247772, |
|
"learning_rate": 4.986578142144525e-05, |
|
"loss": 3.0667, |
|
"step": 12630 |
|
}, |
|
{ |
|
"epoch": 0.10318059691320085, |
|
"grad_norm": 0.43846726417541504, |
|
"learning_rate": 4.986511729820158e-05, |
|
"loss": 3.064, |
|
"step": 12660 |
|
}, |
|
{ |
|
"epoch": 0.10342510069735536, |
|
"grad_norm": 0.4581950306892395, |
|
"learning_rate": 4.986445154038979e-05, |
|
"loss": 3.0567, |
|
"step": 12690 |
|
}, |
|
{ |
|
"epoch": 0.10366960448150986, |
|
"grad_norm": 0.45388519763946533, |
|
"learning_rate": 4.986378414805364e-05, |
|
"loss": 3.0679, |
|
"step": 12720 |
|
}, |
|
{ |
|
"epoch": 0.10391410826566437, |
|
"grad_norm": 0.43947508931159973, |
|
"learning_rate": 4.9863115121237006e-05, |
|
"loss": 3.0475, |
|
"step": 12750 |
|
}, |
|
{ |
|
"epoch": 0.10415861204981887, |
|
"grad_norm": 0.4460260570049286, |
|
"learning_rate": 4.986244445998386e-05, |
|
"loss": 3.0657, |
|
"step": 12780 |
|
}, |
|
{ |
|
"epoch": 0.10440311583397337, |
|
"grad_norm": 0.44423437118530273, |
|
"learning_rate": 4.9861772164338304e-05, |
|
"loss": 3.052, |
|
"step": 12810 |
|
}, |
|
{ |
|
"epoch": 0.10464761961812788, |
|
"grad_norm": 0.4410192668437958, |
|
"learning_rate": 4.986109823434452e-05, |
|
"loss": 3.0528, |
|
"step": 12840 |
|
}, |
|
{ |
|
"epoch": 0.10489212340228238, |
|
"grad_norm": 0.46037909388542175, |
|
"learning_rate": 4.986042267004681e-05, |
|
"loss": 3.0629, |
|
"step": 12870 |
|
}, |
|
{ |
|
"epoch": 0.10513662718643689, |
|
"grad_norm": 0.4454433023929596, |
|
"learning_rate": 4.98597454714896e-05, |
|
"loss": 3.0608, |
|
"step": 12900 |
|
}, |
|
{ |
|
"epoch": 0.10538113097059139, |
|
"grad_norm": 0.47135457396507263, |
|
"learning_rate": 4.985906663871739e-05, |
|
"loss": 3.066, |
|
"step": 12930 |
|
}, |
|
{ |
|
"epoch": 0.1056256347547459, |
|
"grad_norm": 0.4219493865966797, |
|
"learning_rate": 4.9858386171774804e-05, |
|
"loss": 3.0631, |
|
"step": 12960 |
|
}, |
|
{ |
|
"epoch": 0.1058701385389004, |
|
"grad_norm": 0.4326363205909729, |
|
"learning_rate": 4.985770407070659e-05, |
|
"loss": 3.0365, |
|
"step": 12990 |
|
}, |
|
{ |
|
"epoch": 0.1061146423230549, |
|
"grad_norm": 0.44705840945243835, |
|
"learning_rate": 4.985702033555757e-05, |
|
"loss": 3.0607, |
|
"step": 13020 |
|
}, |
|
{ |
|
"epoch": 0.10635914610720941, |
|
"grad_norm": 0.44811519980430603, |
|
"learning_rate": 4.9856334966372705e-05, |
|
"loss": 3.05, |
|
"step": 13050 |
|
}, |
|
{ |
|
"epoch": 0.10660364989136391, |
|
"grad_norm": 0.4571828544139862, |
|
"learning_rate": 4.985564796319705e-05, |
|
"loss": 3.0619, |
|
"step": 13080 |
|
}, |
|
{ |
|
"epoch": 0.10684815367551842, |
|
"grad_norm": 0.4468649923801422, |
|
"learning_rate": 4.985495932607576e-05, |
|
"loss": 3.0704, |
|
"step": 13110 |
|
}, |
|
{ |
|
"epoch": 0.10709265745967292, |
|
"grad_norm": 0.4374684989452362, |
|
"learning_rate": 4.9854269055054096e-05, |
|
"loss": 3.0614, |
|
"step": 13140 |
|
}, |
|
{ |
|
"epoch": 0.10733716124382743, |
|
"grad_norm": 0.4390091598033905, |
|
"learning_rate": 4.985357715017744e-05, |
|
"loss": 3.0511, |
|
"step": 13170 |
|
}, |
|
{ |
|
"epoch": 0.10758166502798193, |
|
"grad_norm": 0.4404134154319763, |
|
"learning_rate": 4.985288361149129e-05, |
|
"loss": 3.0577, |
|
"step": 13200 |
|
}, |
|
{ |
|
"epoch": 0.10782616881213643, |
|
"grad_norm": 0.4556957185268402, |
|
"learning_rate": 4.985218843904122e-05, |
|
"loss": 3.0632, |
|
"step": 13230 |
|
}, |
|
{ |
|
"epoch": 0.10807067259629094, |
|
"grad_norm": 0.4744901657104492, |
|
"learning_rate": 4.985149163287294e-05, |
|
"loss": 3.0525, |
|
"step": 13260 |
|
}, |
|
{ |
|
"epoch": 0.10831517638044544, |
|
"grad_norm": 0.42146775126457214, |
|
"learning_rate": 4.985079319303225e-05, |
|
"loss": 3.0456, |
|
"step": 13290 |
|
}, |
|
{ |
|
"epoch": 0.10855968016459995, |
|
"grad_norm": 0.4721812605857849, |
|
"learning_rate": 4.985009311956507e-05, |
|
"loss": 3.0412, |
|
"step": 13320 |
|
}, |
|
{ |
|
"epoch": 0.10880418394875445, |
|
"grad_norm": 0.44272100925445557, |
|
"learning_rate": 4.984939141251741e-05, |
|
"loss": 3.0591, |
|
"step": 13350 |
|
}, |
|
{ |
|
"epoch": 0.10904868773290896, |
|
"grad_norm": 0.4616432785987854, |
|
"learning_rate": 4.9848688071935415e-05, |
|
"loss": 3.0384, |
|
"step": 13380 |
|
}, |
|
{ |
|
"epoch": 0.10929319151706346, |
|
"grad_norm": 0.4424038231372833, |
|
"learning_rate": 4.98479830978653e-05, |
|
"loss": 3.0442, |
|
"step": 13410 |
|
}, |
|
{ |
|
"epoch": 0.10953769530121797, |
|
"grad_norm": 0.4387684464454651, |
|
"learning_rate": 4.9847276490353425e-05, |
|
"loss": 3.0406, |
|
"step": 13440 |
|
}, |
|
{ |
|
"epoch": 0.10978219908537247, |
|
"grad_norm": 0.44404199719429016, |
|
"learning_rate": 4.984656824944623e-05, |
|
"loss": 3.0146, |
|
"step": 13470 |
|
}, |
|
{ |
|
"epoch": 0.11002670286952697, |
|
"grad_norm": 0.4519595503807068, |
|
"learning_rate": 4.984585837519028e-05, |
|
"loss": 3.0314, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.11027120665368148, |
|
"grad_norm": 0.42673036456108093, |
|
"learning_rate": 4.984514686763224e-05, |
|
"loss": 3.0193, |
|
"step": 13530 |
|
}, |
|
{ |
|
"epoch": 0.11051571043783598, |
|
"grad_norm": 0.44936153292655945, |
|
"learning_rate": 4.984443372681887e-05, |
|
"loss": 3.0275, |
|
"step": 13560 |
|
}, |
|
{ |
|
"epoch": 0.11076021422199049, |
|
"grad_norm": 0.4443502128124237, |
|
"learning_rate": 4.9843718952797074e-05, |
|
"loss": 3.0316, |
|
"step": 13590 |
|
}, |
|
{ |
|
"epoch": 0.11100471800614499, |
|
"grad_norm": 0.46171683073043823, |
|
"learning_rate": 4.984300254561382e-05, |
|
"loss": 3.0148, |
|
"step": 13620 |
|
}, |
|
{ |
|
"epoch": 0.1112492217902995, |
|
"grad_norm": 0.4363279342651367, |
|
"learning_rate": 4.9842284505316206e-05, |
|
"loss": 3.0381, |
|
"step": 13650 |
|
}, |
|
{ |
|
"epoch": 0.111493725574454, |
|
"grad_norm": 0.4243530035018921, |
|
"learning_rate": 4.984156483195144e-05, |
|
"loss": 3.0072, |
|
"step": 13680 |
|
}, |
|
{ |
|
"epoch": 0.1117382293586085, |
|
"grad_norm": 0.4298417270183563, |
|
"learning_rate": 4.984084352556683e-05, |
|
"loss": 3.026, |
|
"step": 13710 |
|
}, |
|
{ |
|
"epoch": 0.11198273314276301, |
|
"grad_norm": 0.437334805727005, |
|
"learning_rate": 4.984012058620979e-05, |
|
"loss": 3.0263, |
|
"step": 13740 |
|
}, |
|
{ |
|
"epoch": 0.11222723692691751, |
|
"grad_norm": 0.44620245695114136, |
|
"learning_rate": 4.983939601392784e-05, |
|
"loss": 3.0338, |
|
"step": 13770 |
|
}, |
|
{ |
|
"epoch": 0.11247174071107202, |
|
"grad_norm": 0.4280051290988922, |
|
"learning_rate": 4.9838669808768616e-05, |
|
"loss": 3.037, |
|
"step": 13800 |
|
}, |
|
{ |
|
"epoch": 0.11271624449522652, |
|
"grad_norm": 0.4300813376903534, |
|
"learning_rate": 4.9837941970779865e-05, |
|
"loss": 3.0322, |
|
"step": 13830 |
|
}, |
|
{ |
|
"epoch": 0.11296074827938103, |
|
"grad_norm": 0.4635563790798187, |
|
"learning_rate": 4.983721250000942e-05, |
|
"loss": 3.0247, |
|
"step": 13860 |
|
}, |
|
{ |
|
"epoch": 0.11320525206353553, |
|
"grad_norm": 0.4525061845779419, |
|
"learning_rate": 4.983648139650524e-05, |
|
"loss": 3.0177, |
|
"step": 13890 |
|
}, |
|
{ |
|
"epoch": 0.11344975584769003, |
|
"grad_norm": 0.4455706477165222, |
|
"learning_rate": 4.983574866031538e-05, |
|
"loss": 3.019, |
|
"step": 13920 |
|
}, |
|
{ |
|
"epoch": 0.11369425963184454, |
|
"grad_norm": 0.43036848306655884, |
|
"learning_rate": 4.983501429148802e-05, |
|
"loss": 3.016, |
|
"step": 13950 |
|
}, |
|
{ |
|
"epoch": 0.11393876341599904, |
|
"grad_norm": 0.44014501571655273, |
|
"learning_rate": 4.983427829007144e-05, |
|
"loss": 3.0157, |
|
"step": 13980 |
|
}, |
|
{ |
|
"epoch": 0.11418326720015355, |
|
"grad_norm": 0.43280887603759766, |
|
"learning_rate": 4.9833540656114004e-05, |
|
"loss": 3.0267, |
|
"step": 14010 |
|
}, |
|
{ |
|
"epoch": 0.11442777098430805, |
|
"grad_norm": 0.4454876482486725, |
|
"learning_rate": 4.983280138966421e-05, |
|
"loss": 3.0367, |
|
"step": 14040 |
|
}, |
|
{ |
|
"epoch": 0.11467227476846256, |
|
"grad_norm": 0.42322927713394165, |
|
"learning_rate": 4.983206049077065e-05, |
|
"loss": 3.0045, |
|
"step": 14070 |
|
}, |
|
{ |
|
"epoch": 0.11491677855261706, |
|
"grad_norm": 0.44999608397483826, |
|
"learning_rate": 4.9831317959482044e-05, |
|
"loss": 3.0393, |
|
"step": 14100 |
|
}, |
|
{ |
|
"epoch": 0.11516128233677157, |
|
"grad_norm": 0.44285348057746887, |
|
"learning_rate": 4.983057379584719e-05, |
|
"loss": 3.0189, |
|
"step": 14130 |
|
}, |
|
{ |
|
"epoch": 0.11540578612092607, |
|
"grad_norm": 0.42630505561828613, |
|
"learning_rate": 4.982982799991501e-05, |
|
"loss": 3.0244, |
|
"step": 14160 |
|
}, |
|
{ |
|
"epoch": 0.11565028990508057, |
|
"grad_norm": 0.44115307927131653, |
|
"learning_rate": 4.982908057173454e-05, |
|
"loss": 3.0091, |
|
"step": 14190 |
|
}, |
|
{ |
|
"epoch": 0.11589479368923508, |
|
"grad_norm": 0.43445199728012085, |
|
"learning_rate": 4.98283315113549e-05, |
|
"loss": 3.0119, |
|
"step": 14220 |
|
}, |
|
{ |
|
"epoch": 0.11613929747338958, |
|
"grad_norm": 0.44717735052108765, |
|
"learning_rate": 4.982758081882534e-05, |
|
"loss": 3.0177, |
|
"step": 14250 |
|
}, |
|
{ |
|
"epoch": 0.11638380125754409, |
|
"grad_norm": 0.43636298179626465, |
|
"learning_rate": 4.982682849419521e-05, |
|
"loss": 3.0186, |
|
"step": 14280 |
|
}, |
|
{ |
|
"epoch": 0.11662830504169859, |
|
"grad_norm": 0.4618465304374695, |
|
"learning_rate": 4.982607453751396e-05, |
|
"loss": 3.0074, |
|
"step": 14310 |
|
}, |
|
{ |
|
"epoch": 0.1168728088258531, |
|
"grad_norm": 0.4471168518066406, |
|
"learning_rate": 4.982531894883116e-05, |
|
"loss": 3.0304, |
|
"step": 14340 |
|
}, |
|
{ |
|
"epoch": 0.1171173126100076, |
|
"grad_norm": 0.45274320244789124, |
|
"learning_rate": 4.9824561728196465e-05, |
|
"loss": 3.0252, |
|
"step": 14370 |
|
}, |
|
{ |
|
"epoch": 0.1173618163941621, |
|
"grad_norm": 0.45815110206604004, |
|
"learning_rate": 4.9823802875659674e-05, |
|
"loss": 3.0253, |
|
"step": 14400 |
|
}, |
|
{ |
|
"epoch": 0.11760632017831661, |
|
"grad_norm": 0.43060410022735596, |
|
"learning_rate": 4.9823042391270657e-05, |
|
"loss": 2.9925, |
|
"step": 14430 |
|
}, |
|
{ |
|
"epoch": 0.11785082396247111, |
|
"grad_norm": 0.45071136951446533, |
|
"learning_rate": 4.9822280275079416e-05, |
|
"loss": 3.0117, |
|
"step": 14460 |
|
}, |
|
{ |
|
"epoch": 0.11809532774662562, |
|
"grad_norm": 0.45227503776550293, |
|
"learning_rate": 4.9821516527136034e-05, |
|
"loss": 3.0199, |
|
"step": 14490 |
|
}, |
|
{ |
|
"epoch": 0.11833983153078012, |
|
"grad_norm": 0.4271472990512848, |
|
"learning_rate": 4.982075114749074e-05, |
|
"loss": 3.013, |
|
"step": 14520 |
|
}, |
|
{ |
|
"epoch": 0.11858433531493463, |
|
"grad_norm": 0.4607602655887604, |
|
"learning_rate": 4.9819984136193835e-05, |
|
"loss": 3.0219, |
|
"step": 14550 |
|
}, |
|
{ |
|
"epoch": 0.11882883909908913, |
|
"grad_norm": 0.448803573846817, |
|
"learning_rate": 4.9819215493295746e-05, |
|
"loss": 3.0206, |
|
"step": 14580 |
|
}, |
|
{ |
|
"epoch": 0.11907334288324363, |
|
"grad_norm": 0.4245416820049286, |
|
"learning_rate": 4.9818445218846995e-05, |
|
"loss": 2.9972, |
|
"step": 14610 |
|
}, |
|
{ |
|
"epoch": 0.11931784666739814, |
|
"grad_norm": 0.4541032910346985, |
|
"learning_rate": 4.9817673312898215e-05, |
|
"loss": 3.0341, |
|
"step": 14640 |
|
}, |
|
{ |
|
"epoch": 0.11956235045155264, |
|
"grad_norm": 0.44764143228530884, |
|
"learning_rate": 4.981689977550017e-05, |
|
"loss": 2.9945, |
|
"step": 14670 |
|
}, |
|
{ |
|
"epoch": 0.11980685423570715, |
|
"grad_norm": 0.45025044679641724, |
|
"learning_rate": 4.9816124606703684e-05, |
|
"loss": 3.0092, |
|
"step": 14700 |
|
}, |
|
{ |
|
"epoch": 0.12005135801986165, |
|
"grad_norm": 0.43760672211647034, |
|
"learning_rate": 4.9815347806559734e-05, |
|
"loss": 2.9982, |
|
"step": 14730 |
|
}, |
|
{ |
|
"epoch": 0.12029586180401616, |
|
"grad_norm": 0.47016751766204834, |
|
"learning_rate": 4.981456937511937e-05, |
|
"loss": 2.9968, |
|
"step": 14760 |
|
}, |
|
{ |
|
"epoch": 0.12054036558817066, |
|
"grad_norm": 0.42403316497802734, |
|
"learning_rate": 4.981378931243377e-05, |
|
"loss": 2.9937, |
|
"step": 14790 |
|
}, |
|
{ |
|
"epoch": 0.12078486937232517, |
|
"grad_norm": 0.44598811864852905, |
|
"learning_rate": 4.981300761855422e-05, |
|
"loss": 3.0144, |
|
"step": 14820 |
|
}, |
|
{ |
|
"epoch": 0.12102937315647967, |
|
"grad_norm": 0.42306816577911377, |
|
"learning_rate": 4.981222429353211e-05, |
|
"loss": 3.017, |
|
"step": 14850 |
|
}, |
|
{ |
|
"epoch": 0.12127387694063417, |
|
"grad_norm": 0.45428961515426636, |
|
"learning_rate": 4.981143933741891e-05, |
|
"loss": 3.009, |
|
"step": 14880 |
|
}, |
|
{ |
|
"epoch": 0.12151838072478868, |
|
"grad_norm": 0.4344576299190521, |
|
"learning_rate": 4.981065275026624e-05, |
|
"loss": 3.0311, |
|
"step": 14910 |
|
}, |
|
{ |
|
"epoch": 0.12176288450894318, |
|
"grad_norm": 0.4361552298069, |
|
"learning_rate": 4.9809864532125797e-05, |
|
"loss": 2.9942, |
|
"step": 14940 |
|
}, |
|
{ |
|
"epoch": 0.1220073882930977, |
|
"grad_norm": 0.45136308670043945, |
|
"learning_rate": 4.9809074683049415e-05, |
|
"loss": 2.9886, |
|
"step": 14970 |
|
}, |
|
{ |
|
"epoch": 0.12225189207725219, |
|
"grad_norm": 0.4495120942592621, |
|
"learning_rate": 4.9808283203089e-05, |
|
"loss": 2.9799, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 0.1224963958614067, |
|
"grad_norm": 0.45297014713287354, |
|
"learning_rate": 4.9807490092296586e-05, |
|
"loss": 3.0067, |
|
"step": 15030 |
|
}, |
|
{ |
|
"epoch": 0.1227408996455612, |
|
"grad_norm": 0.4508165419101715, |
|
"learning_rate": 4.980669535072431e-05, |
|
"loss": 3.0063, |
|
"step": 15060 |
|
}, |
|
{ |
|
"epoch": 0.1229854034297157, |
|
"grad_norm": 0.453021764755249, |
|
"learning_rate": 4.980589897842441e-05, |
|
"loss": 2.9853, |
|
"step": 15090 |
|
}, |
|
{ |
|
"epoch": 0.12322990721387021, |
|
"grad_norm": 0.45669054985046387, |
|
"learning_rate": 4.9805100975449256e-05, |
|
"loss": 2.9788, |
|
"step": 15120 |
|
}, |
|
{ |
|
"epoch": 0.12347441099802471, |
|
"grad_norm": 0.45175668597221375, |
|
"learning_rate": 4.98043013418513e-05, |
|
"loss": 2.9949, |
|
"step": 15150 |
|
}, |
|
{ |
|
"epoch": 0.12371891478217922, |
|
"grad_norm": 0.4426526129245758, |
|
"learning_rate": 4.980350007768309e-05, |
|
"loss": 2.9853, |
|
"step": 15180 |
|
}, |
|
{ |
|
"epoch": 0.12396341856633372, |
|
"grad_norm": 0.4432315528392792, |
|
"learning_rate": 4.9802697182997316e-05, |
|
"loss": 3.0084, |
|
"step": 15210 |
|
}, |
|
{ |
|
"epoch": 0.12420792235048823, |
|
"grad_norm": 0.4267187714576721, |
|
"learning_rate": 4.980189265784676e-05, |
|
"loss": 2.9764, |
|
"step": 15240 |
|
}, |
|
{ |
|
"epoch": 0.12445242613464273, |
|
"grad_norm": 0.45367953181266785, |
|
"learning_rate": 4.98010865022843e-05, |
|
"loss": 2.997, |
|
"step": 15270 |
|
}, |
|
{ |
|
"epoch": 0.12469692991879723, |
|
"grad_norm": 0.4352944791316986, |
|
"learning_rate": 4.980027871636293e-05, |
|
"loss": 2.989, |
|
"step": 15300 |
|
}, |
|
{ |
|
"epoch": 0.12494143370295174, |
|
"grad_norm": 0.43728742003440857, |
|
"learning_rate": 4.979946930013576e-05, |
|
"loss": 2.9732, |
|
"step": 15330 |
|
}, |
|
{ |
|
"epoch": 0.12518593748710624, |
|
"grad_norm": 0.43509089946746826, |
|
"learning_rate": 4.9798658253655996e-05, |
|
"loss": 2.9815, |
|
"step": 15360 |
|
}, |
|
{ |
|
"epoch": 0.12543044127126074, |
|
"grad_norm": 0.4618440866470337, |
|
"learning_rate": 4.9797845576976945e-05, |
|
"loss": 2.9945, |
|
"step": 15390 |
|
}, |
|
{ |
|
"epoch": 0.12567494505541527, |
|
"grad_norm": 0.44366157054901123, |
|
"learning_rate": 4.979703127015205e-05, |
|
"loss": 3.0167, |
|
"step": 15420 |
|
}, |
|
{ |
|
"epoch": 0.12591944883956976, |
|
"grad_norm": 0.43511441349983215, |
|
"learning_rate": 4.979621533323482e-05, |
|
"loss": 2.977, |
|
"step": 15450 |
|
}, |
|
{ |
|
"epoch": 0.12616395262372426, |
|
"grad_norm": 0.4174363911151886, |
|
"learning_rate": 4.97953977662789e-05, |
|
"loss": 2.9767, |
|
"step": 15480 |
|
}, |
|
{ |
|
"epoch": 0.12640845640787876, |
|
"grad_norm": 0.4421631395816803, |
|
"learning_rate": 4.9794578569338046e-05, |
|
"loss": 3.0044, |
|
"step": 15510 |
|
}, |
|
{ |
|
"epoch": 0.12665296019203326, |
|
"grad_norm": 0.4244731664657593, |
|
"learning_rate": 4.979375774246609e-05, |
|
"loss": 2.9865, |
|
"step": 15540 |
|
}, |
|
{ |
|
"epoch": 0.12689746397618779, |
|
"grad_norm": 0.4276571273803711, |
|
"learning_rate": 4.979293528571701e-05, |
|
"loss": 2.9608, |
|
"step": 15570 |
|
}, |
|
{ |
|
"epoch": 0.12714196776034228, |
|
"grad_norm": 0.44067880511283875, |
|
"learning_rate": 4.979211119914486e-05, |
|
"loss": 2.9793, |
|
"step": 15600 |
|
}, |
|
{ |
|
"epoch": 0.12738647154449678, |
|
"grad_norm": 0.4460121691226959, |
|
"learning_rate": 4.9791285482803824e-05, |
|
"loss": 2.9795, |
|
"step": 15630 |
|
}, |
|
{ |
|
"epoch": 0.12763097532865128, |
|
"grad_norm": 0.44630923867225647, |
|
"learning_rate": 4.979045813674817e-05, |
|
"loss": 2.9655, |
|
"step": 15660 |
|
}, |
|
{ |
|
"epoch": 0.1278754791128058, |
|
"grad_norm": 0.4406234323978424, |
|
"learning_rate": 4.9789629161032294e-05, |
|
"loss": 2.9752, |
|
"step": 15690 |
|
}, |
|
{ |
|
"epoch": 0.1281199828969603, |
|
"grad_norm": 0.45534536242485046, |
|
"learning_rate": 4.978879855571068e-05, |
|
"loss": 2.9874, |
|
"step": 15720 |
|
}, |
|
{ |
|
"epoch": 0.1283644866811148, |
|
"grad_norm": 0.4341179132461548, |
|
"learning_rate": 4.9787966320837946e-05, |
|
"loss": 2.9983, |
|
"step": 15750 |
|
}, |
|
{ |
|
"epoch": 0.1286089904652693, |
|
"grad_norm": 0.4406190514564514, |
|
"learning_rate": 4.97871324564688e-05, |
|
"loss": 2.9798, |
|
"step": 15780 |
|
}, |
|
{ |
|
"epoch": 0.1288534942494238, |
|
"grad_norm": 0.4403591752052307, |
|
"learning_rate": 4.978629696265804e-05, |
|
"loss": 2.9673, |
|
"step": 15810 |
|
}, |
|
{ |
|
"epoch": 0.12909799803357833, |
|
"grad_norm": 0.43992435932159424, |
|
"learning_rate": 4.9785459839460595e-05, |
|
"loss": 2.9673, |
|
"step": 15840 |
|
}, |
|
{ |
|
"epoch": 0.12934250181773282, |
|
"grad_norm": 0.4353017508983612, |
|
"learning_rate": 4.97846210869315e-05, |
|
"loss": 2.9677, |
|
"step": 15870 |
|
}, |
|
{ |
|
"epoch": 0.12958700560188732, |
|
"grad_norm": 0.4335249066352844, |
|
"learning_rate": 4.978378070512591e-05, |
|
"loss": 2.9625, |
|
"step": 15900 |
|
}, |
|
{ |
|
"epoch": 0.12983150938604182, |
|
"grad_norm": 0.4469107389450073, |
|
"learning_rate": 4.978293869409903e-05, |
|
"loss": 2.9773, |
|
"step": 15930 |
|
}, |
|
{ |
|
"epoch": 0.13007601317019635, |
|
"grad_norm": 0.4289211630821228, |
|
"learning_rate": 4.978209505390625e-05, |
|
"loss": 2.9819, |
|
"step": 15960 |
|
}, |
|
{ |
|
"epoch": 0.13032051695435085, |
|
"grad_norm": 0.45461341738700867, |
|
"learning_rate": 4.978124978460301e-05, |
|
"loss": 2.983, |
|
"step": 15990 |
|
}, |
|
{ |
|
"epoch": 0.13056502073850534, |
|
"grad_norm": 0.4294207990169525, |
|
"learning_rate": 4.978040288624488e-05, |
|
"loss": 2.9757, |
|
"step": 16020 |
|
}, |
|
{ |
|
"epoch": 0.13080952452265984, |
|
"grad_norm": 0.4259834289550781, |
|
"learning_rate": 4.977955435888753e-05, |
|
"loss": 2.97, |
|
"step": 16050 |
|
}, |
|
{ |
|
"epoch": 0.13105402830681434, |
|
"grad_norm": 0.4382823407649994, |
|
"learning_rate": 4.9778704202586736e-05, |
|
"loss": 2.9689, |
|
"step": 16080 |
|
}, |
|
{ |
|
"epoch": 0.13129853209096887, |
|
"grad_norm": 0.4408438503742218, |
|
"learning_rate": 4.9777852417398394e-05, |
|
"loss": 2.9689, |
|
"step": 16110 |
|
}, |
|
{ |
|
"epoch": 0.13154303587512337, |
|
"grad_norm": 0.440287321805954, |
|
"learning_rate": 4.977699900337849e-05, |
|
"loss": 2.9807, |
|
"step": 16140 |
|
}, |
|
{ |
|
"epoch": 0.13178753965927786, |
|
"grad_norm": 0.4474526643753052, |
|
"learning_rate": 4.977614396058313e-05, |
|
"loss": 2.9752, |
|
"step": 16170 |
|
}, |
|
{ |
|
"epoch": 0.13203204344343236, |
|
"grad_norm": 0.4456516206264496, |
|
"learning_rate": 4.977528728906853e-05, |
|
"loss": 2.9669, |
|
"step": 16200 |
|
}, |
|
{ |
|
"epoch": 0.13227654722758686, |
|
"grad_norm": 0.44889894127845764, |
|
"learning_rate": 4.977442898889099e-05, |
|
"loss": 2.9576, |
|
"step": 16230 |
|
}, |
|
{ |
|
"epoch": 0.1325210510117414, |
|
"grad_norm": 0.42843857407569885, |
|
"learning_rate": 4.977356906010694e-05, |
|
"loss": 2.9739, |
|
"step": 16260 |
|
}, |
|
{ |
|
"epoch": 0.13276555479589588, |
|
"grad_norm": 0.43735024333000183, |
|
"learning_rate": 4.977270750277291e-05, |
|
"loss": 2.9683, |
|
"step": 16290 |
|
}, |
|
{ |
|
"epoch": 0.13301005858005038, |
|
"grad_norm": 0.4472540318965912, |
|
"learning_rate": 4.9771844316945536e-05, |
|
"loss": 2.956, |
|
"step": 16320 |
|
}, |
|
{ |
|
"epoch": 0.13325456236420488, |
|
"grad_norm": 0.45051297545433044, |
|
"learning_rate": 4.9770979502681566e-05, |
|
"loss": 2.9828, |
|
"step": 16350 |
|
}, |
|
{ |
|
"epoch": 0.1334990661483594, |
|
"grad_norm": 0.4349469542503357, |
|
"learning_rate": 4.977011306003784e-05, |
|
"loss": 2.9661, |
|
"step": 16380 |
|
}, |
|
{ |
|
"epoch": 0.1337435699325139, |
|
"grad_norm": 0.4395361840724945, |
|
"learning_rate": 4.976924498907133e-05, |
|
"loss": 2.9624, |
|
"step": 16410 |
|
}, |
|
{ |
|
"epoch": 0.1339880737166684, |
|
"grad_norm": 0.4418069124221802, |
|
"learning_rate": 4.9768375289839083e-05, |
|
"loss": 2.9623, |
|
"step": 16440 |
|
}, |
|
{ |
|
"epoch": 0.1342325775008229, |
|
"grad_norm": 0.41756564378738403, |
|
"learning_rate": 4.976750396239828e-05, |
|
"loss": 2.949, |
|
"step": 16470 |
|
}, |
|
{ |
|
"epoch": 0.1344770812849774, |
|
"grad_norm": 0.4466126263141632, |
|
"learning_rate": 4.9766631006806205e-05, |
|
"loss": 2.9694, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 0.13472158506913193, |
|
"grad_norm": 0.4336778223514557, |
|
"learning_rate": 4.9765756423120235e-05, |
|
"loss": 2.9652, |
|
"step": 16530 |
|
}, |
|
{ |
|
"epoch": 0.13496608885328643, |
|
"grad_norm": 0.42424583435058594, |
|
"learning_rate": 4.976488021139787e-05, |
|
"loss": 2.9487, |
|
"step": 16560 |
|
}, |
|
{ |
|
"epoch": 0.13521059263744092, |
|
"grad_norm": 0.4211365878582001, |
|
"learning_rate": 4.9764002371696704e-05, |
|
"loss": 2.9573, |
|
"step": 16590 |
|
}, |
|
{ |
|
"epoch": 0.13545509642159542, |
|
"grad_norm": 0.42899149656295776, |
|
"learning_rate": 4.976312290407445e-05, |
|
"loss": 2.968, |
|
"step": 16620 |
|
}, |
|
{ |
|
"epoch": 0.13569960020574992, |
|
"grad_norm": 0.4524397850036621, |
|
"learning_rate": 4.9762241808588915e-05, |
|
"loss": 2.9687, |
|
"step": 16650 |
|
}, |
|
{ |
|
"epoch": 0.13594410398990445, |
|
"grad_norm": 0.4466136693954468, |
|
"learning_rate": 4.976135908529802e-05, |
|
"loss": 2.9614, |
|
"step": 16680 |
|
}, |
|
{ |
|
"epoch": 0.13618860777405895, |
|
"grad_norm": 0.4260255694389343, |
|
"learning_rate": 4.97604747342598e-05, |
|
"loss": 2.9626, |
|
"step": 16710 |
|
}, |
|
{ |
|
"epoch": 0.13643311155821344, |
|
"grad_norm": 0.432644248008728, |
|
"learning_rate": 4.9759588755532394e-05, |
|
"loss": 2.9642, |
|
"step": 16740 |
|
}, |
|
{ |
|
"epoch": 0.13667761534236794, |
|
"grad_norm": 0.4311199188232422, |
|
"learning_rate": 4.9758701149174026e-05, |
|
"loss": 2.946, |
|
"step": 16770 |
|
}, |
|
{ |
|
"epoch": 0.13692211912652247, |
|
"grad_norm": 0.44559580087661743, |
|
"learning_rate": 4.975781191524306e-05, |
|
"loss": 2.9699, |
|
"step": 16800 |
|
}, |
|
{ |
|
"epoch": 0.13716662291067697, |
|
"grad_norm": 0.4649328291416168, |
|
"learning_rate": 4.975692105379794e-05, |
|
"loss": 2.9657, |
|
"step": 16830 |
|
}, |
|
{ |
|
"epoch": 0.13741112669483146, |
|
"grad_norm": 0.43768975138664246, |
|
"learning_rate": 4.9756028564897236e-05, |
|
"loss": 2.9407, |
|
"step": 16860 |
|
}, |
|
{ |
|
"epoch": 0.13765563047898596, |
|
"grad_norm": 0.42234501242637634, |
|
"learning_rate": 4.975513444859963e-05, |
|
"loss": 2.9506, |
|
"step": 16890 |
|
}, |
|
{ |
|
"epoch": 0.13790013426314046, |
|
"grad_norm": 0.4412541389465332, |
|
"learning_rate": 4.9754238704963875e-05, |
|
"loss": 2.9661, |
|
"step": 16920 |
|
}, |
|
{ |
|
"epoch": 0.138144638047295, |
|
"grad_norm": 0.43382880091667175, |
|
"learning_rate": 4.975334133404887e-05, |
|
"loss": 2.9706, |
|
"step": 16950 |
|
}, |
|
{ |
|
"epoch": 0.13838914183144949, |
|
"grad_norm": 0.4374088943004608, |
|
"learning_rate": 4.97524423359136e-05, |
|
"loss": 2.9393, |
|
"step": 16980 |
|
}, |
|
{ |
|
"epoch": 0.13863364561560398, |
|
"grad_norm": 0.4129140377044678, |
|
"learning_rate": 4.9751541710617165e-05, |
|
"loss": 2.9577, |
|
"step": 17010 |
|
}, |
|
{ |
|
"epoch": 0.13887814939975848, |
|
"grad_norm": 0.43458837270736694, |
|
"learning_rate": 4.9750639458218774e-05, |
|
"loss": 2.9739, |
|
"step": 17040 |
|
}, |
|
{ |
|
"epoch": 0.139122653183913, |
|
"grad_norm": 0.4235387444496155, |
|
"learning_rate": 4.974973557877773e-05, |
|
"loss": 2.9592, |
|
"step": 17070 |
|
}, |
|
{ |
|
"epoch": 0.1393671569680675, |
|
"grad_norm": 0.4440494775772095, |
|
"learning_rate": 4.9748830072353455e-05, |
|
"loss": 2.9502, |
|
"step": 17100 |
|
}, |
|
{ |
|
"epoch": 0.139611660752222, |
|
"grad_norm": 0.4453585147857666, |
|
"learning_rate": 4.9747922939005484e-05, |
|
"loss": 2.9565, |
|
"step": 17130 |
|
}, |
|
{ |
|
"epoch": 0.1398561645363765, |
|
"grad_norm": 0.4328203797340393, |
|
"learning_rate": 4.974701417879344e-05, |
|
"loss": 2.9465, |
|
"step": 17160 |
|
}, |
|
{ |
|
"epoch": 0.140100668320531, |
|
"grad_norm": 0.4327595829963684, |
|
"learning_rate": 4.974610379177706e-05, |
|
"loss": 2.9355, |
|
"step": 17190 |
|
}, |
|
{ |
|
"epoch": 0.14034517210468553, |
|
"grad_norm": 0.4403304159641266, |
|
"learning_rate": 4.9745191778016196e-05, |
|
"loss": 2.9428, |
|
"step": 17220 |
|
}, |
|
{ |
|
"epoch": 0.14058967588884003, |
|
"grad_norm": 0.430889368057251, |
|
"learning_rate": 4.97442781375708e-05, |
|
"loss": 2.9428, |
|
"step": 17250 |
|
}, |
|
{ |
|
"epoch": 0.14083417967299453, |
|
"grad_norm": 0.4505791664123535, |
|
"learning_rate": 4.974336287050093e-05, |
|
"loss": 2.9362, |
|
"step": 17280 |
|
}, |
|
{ |
|
"epoch": 0.14107868345714902, |
|
"grad_norm": 0.43023189902305603, |
|
"learning_rate": 4.974244597686676e-05, |
|
"loss": 2.9485, |
|
"step": 17310 |
|
}, |
|
{ |
|
"epoch": 0.14132318724130352, |
|
"grad_norm": 0.41717565059661865, |
|
"learning_rate": 4.974152745672855e-05, |
|
"loss": 2.9298, |
|
"step": 17340 |
|
}, |
|
{ |
|
"epoch": 0.14156769102545805, |
|
"grad_norm": 0.4348154366016388, |
|
"learning_rate": 4.9740607310146706e-05, |
|
"loss": 2.9531, |
|
"step": 17370 |
|
}, |
|
{ |
|
"epoch": 0.14181219480961255, |
|
"grad_norm": 0.43419626355171204, |
|
"learning_rate": 4.973968553718169e-05, |
|
"loss": 2.9381, |
|
"step": 17400 |
|
}, |
|
{ |
|
"epoch": 0.14205669859376704, |
|
"grad_norm": 0.43489935994148254, |
|
"learning_rate": 4.9738762137894106e-05, |
|
"loss": 2.9605, |
|
"step": 17430 |
|
}, |
|
{ |
|
"epoch": 0.14230120237792154, |
|
"grad_norm": 0.4242507219314575, |
|
"learning_rate": 4.973783711234467e-05, |
|
"loss": 2.9653, |
|
"step": 17460 |
|
}, |
|
{ |
|
"epoch": 0.14254570616207607, |
|
"grad_norm": 0.46746551990509033, |
|
"learning_rate": 4.9736910460594164e-05, |
|
"loss": 2.9525, |
|
"step": 17490 |
|
}, |
|
{ |
|
"epoch": 0.14279020994623057, |
|
"grad_norm": 0.4230245053768158, |
|
"learning_rate": 4.973598218270352e-05, |
|
"loss": 2.9399, |
|
"step": 17520 |
|
}, |
|
{ |
|
"epoch": 0.14303471373038507, |
|
"grad_norm": 0.451238751411438, |
|
"learning_rate": 4.973505227873377e-05, |
|
"loss": 2.9428, |
|
"step": 17550 |
|
}, |
|
{ |
|
"epoch": 0.14327921751453956, |
|
"grad_norm": 0.43941864371299744, |
|
"learning_rate": 4.9734120748746026e-05, |
|
"loss": 2.9424, |
|
"step": 17580 |
|
}, |
|
{ |
|
"epoch": 0.14352372129869406, |
|
"grad_norm": 0.43805423378944397, |
|
"learning_rate": 4.973318759280153e-05, |
|
"loss": 2.9324, |
|
"step": 17610 |
|
}, |
|
{ |
|
"epoch": 0.1437682250828486, |
|
"grad_norm": 0.46820029616355896, |
|
"learning_rate": 4.973225281096162e-05, |
|
"loss": 2.9415, |
|
"step": 17640 |
|
}, |
|
{ |
|
"epoch": 0.1440127288670031, |
|
"grad_norm": 0.44885364174842834, |
|
"learning_rate": 4.973131640328776e-05, |
|
"loss": 2.9438, |
|
"step": 17670 |
|
}, |
|
{ |
|
"epoch": 0.14425723265115759, |
|
"grad_norm": 0.43716761469841003, |
|
"learning_rate": 4.97303783698415e-05, |
|
"loss": 2.929, |
|
"step": 17700 |
|
}, |
|
{ |
|
"epoch": 0.14450173643531208, |
|
"grad_norm": 0.4408782124519348, |
|
"learning_rate": 4.97294387106845e-05, |
|
"loss": 2.9418, |
|
"step": 17730 |
|
}, |
|
{ |
|
"epoch": 0.1447462402194666, |
|
"grad_norm": 0.4411155581474304, |
|
"learning_rate": 4.972849742587853e-05, |
|
"loss": 2.9492, |
|
"step": 17760 |
|
}, |
|
{ |
|
"epoch": 0.1449907440036211, |
|
"grad_norm": 0.44424518942832947, |
|
"learning_rate": 4.972755451548548e-05, |
|
"loss": 2.9455, |
|
"step": 17790 |
|
}, |
|
{ |
|
"epoch": 0.1452352477877756, |
|
"grad_norm": 0.4497133195400238, |
|
"learning_rate": 4.9726609979567316e-05, |
|
"loss": 2.9273, |
|
"step": 17820 |
|
}, |
|
{ |
|
"epoch": 0.1454797515719301, |
|
"grad_norm": 0.4441046416759491, |
|
"learning_rate": 4.972566381818614e-05, |
|
"loss": 2.9529, |
|
"step": 17850 |
|
}, |
|
{ |
|
"epoch": 0.1457242553560846, |
|
"grad_norm": 0.4226887822151184, |
|
"learning_rate": 4.972471603140415e-05, |
|
"loss": 2.9358, |
|
"step": 17880 |
|
}, |
|
{ |
|
"epoch": 0.14596875914023913, |
|
"grad_norm": 0.4297832250595093, |
|
"learning_rate": 4.972376661928365e-05, |
|
"loss": 2.9362, |
|
"step": 17910 |
|
}, |
|
{ |
|
"epoch": 0.14621326292439363, |
|
"grad_norm": 0.4289223551750183, |
|
"learning_rate": 4.972281558188706e-05, |
|
"loss": 2.9203, |
|
"step": 17940 |
|
}, |
|
{ |
|
"epoch": 0.14645776670854813, |
|
"grad_norm": 0.45321324467658997, |
|
"learning_rate": 4.972186291927689e-05, |
|
"loss": 2.941, |
|
"step": 17970 |
|
}, |
|
{ |
|
"epoch": 0.14670227049270262, |
|
"grad_norm": 0.4406587481498718, |
|
"learning_rate": 4.9720908631515757e-05, |
|
"loss": 2.9364, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 0.14694677427685712, |
|
"grad_norm": 0.44196027517318726, |
|
"learning_rate": 4.9719952718666406e-05, |
|
"loss": 2.93, |
|
"step": 18030 |
|
}, |
|
{ |
|
"epoch": 0.14719127806101165, |
|
"grad_norm": 0.41876834630966187, |
|
"learning_rate": 4.971899518079167e-05, |
|
"loss": 2.9286, |
|
"step": 18060 |
|
}, |
|
{ |
|
"epoch": 0.14743578184516615, |
|
"grad_norm": 0.42476391792297363, |
|
"learning_rate": 4.971803601795451e-05, |
|
"loss": 2.9239, |
|
"step": 18090 |
|
}, |
|
{ |
|
"epoch": 0.14768028562932065, |
|
"grad_norm": 0.43276798725128174, |
|
"learning_rate": 4.9717075230217955e-05, |
|
"loss": 2.9469, |
|
"step": 18120 |
|
}, |
|
{ |
|
"epoch": 0.14792478941347514, |
|
"grad_norm": 0.42786705493927, |
|
"learning_rate": 4.9716112817645176e-05, |
|
"loss": 2.9442, |
|
"step": 18150 |
|
}, |
|
{ |
|
"epoch": 0.14816929319762967, |
|
"grad_norm": 0.4399658739566803, |
|
"learning_rate": 4.971514878029945e-05, |
|
"loss": 2.9334, |
|
"step": 18180 |
|
}, |
|
{ |
|
"epoch": 0.14841379698178417, |
|
"grad_norm": 0.4298873841762543, |
|
"learning_rate": 4.971418311824413e-05, |
|
"loss": 2.9176, |
|
"step": 18210 |
|
}, |
|
{ |
|
"epoch": 0.14865830076593867, |
|
"grad_norm": 0.43509942293167114, |
|
"learning_rate": 4.971321583154271e-05, |
|
"loss": 2.9345, |
|
"step": 18240 |
|
}, |
|
{ |
|
"epoch": 0.14890280455009317, |
|
"grad_norm": 0.42680835723876953, |
|
"learning_rate": 4.9712246920258784e-05, |
|
"loss": 2.9274, |
|
"step": 18270 |
|
}, |
|
{ |
|
"epoch": 0.14914730833424766, |
|
"grad_norm": 0.4409879446029663, |
|
"learning_rate": 4.9711276384456024e-05, |
|
"loss": 2.9231, |
|
"step": 18300 |
|
}, |
|
{ |
|
"epoch": 0.1493918121184022, |
|
"grad_norm": 0.4345747232437134, |
|
"learning_rate": 4.9710304224198245e-05, |
|
"loss": 2.9267, |
|
"step": 18330 |
|
}, |
|
{ |
|
"epoch": 0.1496363159025567, |
|
"grad_norm": 0.4443499743938446, |
|
"learning_rate": 4.970933043954935e-05, |
|
"loss": 2.9307, |
|
"step": 18360 |
|
}, |
|
{ |
|
"epoch": 0.1498808196867112, |
|
"grad_norm": 0.4309675991535187, |
|
"learning_rate": 4.970835503057335e-05, |
|
"loss": 2.9174, |
|
"step": 18390 |
|
}, |
|
{ |
|
"epoch": 0.15012532347086568, |
|
"grad_norm": 0.4251437187194824, |
|
"learning_rate": 4.970737799733438e-05, |
|
"loss": 2.9375, |
|
"step": 18420 |
|
}, |
|
{ |
|
"epoch": 0.1503698272550202, |
|
"grad_norm": 0.4523163139820099, |
|
"learning_rate": 4.970639933989665e-05, |
|
"loss": 2.9424, |
|
"step": 18450 |
|
}, |
|
{ |
|
"epoch": 0.1506143310391747, |
|
"grad_norm": 0.43151840567588806, |
|
"learning_rate": 4.97054190583245e-05, |
|
"loss": 2.9284, |
|
"step": 18480 |
|
}, |
|
{ |
|
"epoch": 0.1508588348233292, |
|
"grad_norm": 0.43805721402168274, |
|
"learning_rate": 4.970443715268238e-05, |
|
"loss": 2.9266, |
|
"step": 18510 |
|
}, |
|
{ |
|
"epoch": 0.1511033386074837, |
|
"grad_norm": 0.4656009078025818, |
|
"learning_rate": 4.970345362303482e-05, |
|
"loss": 2.9274, |
|
"step": 18540 |
|
}, |
|
{ |
|
"epoch": 0.1513478423916382, |
|
"grad_norm": 0.4599588215351105, |
|
"learning_rate": 4.9702468469446496e-05, |
|
"loss": 2.9297, |
|
"step": 18570 |
|
}, |
|
{ |
|
"epoch": 0.15159234617579273, |
|
"grad_norm": 0.44205912947654724, |
|
"learning_rate": 4.9701481691982157e-05, |
|
"loss": 2.9084, |
|
"step": 18600 |
|
}, |
|
{ |
|
"epoch": 0.15183684995994723, |
|
"grad_norm": 0.43253156542778015, |
|
"learning_rate": 4.9700493290706665e-05, |
|
"loss": 2.9237, |
|
"step": 18630 |
|
}, |
|
{ |
|
"epoch": 0.15208135374410173, |
|
"grad_norm": 0.42774561047554016, |
|
"learning_rate": 4.969950326568501e-05, |
|
"loss": 2.9248, |
|
"step": 18660 |
|
}, |
|
{ |
|
"epoch": 0.15232585752825623, |
|
"grad_norm": 0.4493556618690491, |
|
"learning_rate": 4.969851161698227e-05, |
|
"loss": 2.9248, |
|
"step": 18690 |
|
}, |
|
{ |
|
"epoch": 0.15257036131241072, |
|
"grad_norm": 0.4323256015777588, |
|
"learning_rate": 4.969751834466363e-05, |
|
"loss": 2.923, |
|
"step": 18720 |
|
}, |
|
{ |
|
"epoch": 0.15281486509656525, |
|
"grad_norm": 0.42869383096694946, |
|
"learning_rate": 4.969652344879438e-05, |
|
"loss": 2.9237, |
|
"step": 18750 |
|
}, |
|
{ |
|
"epoch": 0.15305936888071975, |
|
"grad_norm": 0.44113072752952576, |
|
"learning_rate": 4.9695526929439925e-05, |
|
"loss": 2.9323, |
|
"step": 18780 |
|
}, |
|
{ |
|
"epoch": 0.15330387266487425, |
|
"grad_norm": 0.4490866959095001, |
|
"learning_rate": 4.969452878666578e-05, |
|
"loss": 2.9142, |
|
"step": 18810 |
|
}, |
|
{ |
|
"epoch": 0.15354837644902875, |
|
"grad_norm": 0.41400545835494995, |
|
"learning_rate": 4.969352902053756e-05, |
|
"loss": 2.9306, |
|
"step": 18840 |
|
}, |
|
{ |
|
"epoch": 0.15379288023318327, |
|
"grad_norm": 0.42999714612960815, |
|
"learning_rate": 4.9692527631120976e-05, |
|
"loss": 2.922, |
|
"step": 18870 |
|
}, |
|
{ |
|
"epoch": 0.15403738401733777, |
|
"grad_norm": 0.4393715262413025, |
|
"learning_rate": 4.969152461848187e-05, |
|
"loss": 2.9129, |
|
"step": 18900 |
|
}, |
|
{ |
|
"epoch": 0.15428188780149227, |
|
"grad_norm": 0.4195369780063629, |
|
"learning_rate": 4.969051998268617e-05, |
|
"loss": 2.9257, |
|
"step": 18930 |
|
}, |
|
{ |
|
"epoch": 0.15452639158564677, |
|
"grad_norm": 0.44224682450294495, |
|
"learning_rate": 4.968951372379992e-05, |
|
"loss": 2.9088, |
|
"step": 18960 |
|
}, |
|
{ |
|
"epoch": 0.15477089536980126, |
|
"grad_norm": 0.42597490549087524, |
|
"learning_rate": 4.9688505841889266e-05, |
|
"loss": 2.9001, |
|
"step": 18990 |
|
}, |
|
{ |
|
"epoch": 0.1550153991539558, |
|
"grad_norm": 0.4543912708759308, |
|
"learning_rate": 4.968749633702047e-05, |
|
"loss": 2.932, |
|
"step": 19020 |
|
}, |
|
{ |
|
"epoch": 0.1552599029381103, |
|
"grad_norm": 0.41840681433677673, |
|
"learning_rate": 4.968648520925988e-05, |
|
"loss": 2.927, |
|
"step": 19050 |
|
}, |
|
{ |
|
"epoch": 0.1555044067222648, |
|
"grad_norm": 0.4398231506347656, |
|
"learning_rate": 4.9685472458673986e-05, |
|
"loss": 2.9166, |
|
"step": 19080 |
|
}, |
|
{ |
|
"epoch": 0.15574891050641929, |
|
"grad_norm": 0.4245362877845764, |
|
"learning_rate": 4.968445808532935e-05, |
|
"loss": 2.9119, |
|
"step": 19110 |
|
}, |
|
{ |
|
"epoch": 0.1559934142905738, |
|
"grad_norm": 0.4447348415851593, |
|
"learning_rate": 4.9683442089292656e-05, |
|
"loss": 2.9158, |
|
"step": 19140 |
|
}, |
|
{ |
|
"epoch": 0.1562379180747283, |
|
"grad_norm": 0.45135971903800964, |
|
"learning_rate": 4.9682424470630685e-05, |
|
"loss": 2.9048, |
|
"step": 19170 |
|
}, |
|
{ |
|
"epoch": 0.1564824218588828, |
|
"grad_norm": 0.42726004123687744, |
|
"learning_rate": 4.968140522941035e-05, |
|
"loss": 2.9085, |
|
"step": 19200 |
|
}, |
|
{ |
|
"epoch": 0.1567269256430373, |
|
"grad_norm": 0.42716965079307556, |
|
"learning_rate": 4.9680384365698644e-05, |
|
"loss": 2.9229, |
|
"step": 19230 |
|
}, |
|
{ |
|
"epoch": 0.1569714294271918, |
|
"grad_norm": 0.4451904594898224, |
|
"learning_rate": 4.967936187956268e-05, |
|
"loss": 2.921, |
|
"step": 19260 |
|
}, |
|
{ |
|
"epoch": 0.15721593321134633, |
|
"grad_norm": 0.4485413134098053, |
|
"learning_rate": 4.967833777106966e-05, |
|
"loss": 2.9194, |
|
"step": 19290 |
|
}, |
|
{ |
|
"epoch": 0.15746043699550083, |
|
"grad_norm": 0.44135797023773193, |
|
"learning_rate": 4.9677312040286926e-05, |
|
"loss": 2.885, |
|
"step": 19320 |
|
}, |
|
{ |
|
"epoch": 0.15770494077965533, |
|
"grad_norm": 0.4556313753128052, |
|
"learning_rate": 4.967628468728189e-05, |
|
"loss": 2.923, |
|
"step": 19350 |
|
}, |
|
{ |
|
"epoch": 0.15794944456380983, |
|
"grad_norm": 0.4418553411960602, |
|
"learning_rate": 4.9675255712122105e-05, |
|
"loss": 2.9064, |
|
"step": 19380 |
|
}, |
|
{ |
|
"epoch": 0.15819394834796432, |
|
"grad_norm": 0.443284273147583, |
|
"learning_rate": 4.967422511487519e-05, |
|
"loss": 2.8951, |
|
"step": 19410 |
|
}, |
|
{ |
|
"epoch": 0.15843845213211885, |
|
"grad_norm": 0.4480453431606293, |
|
"learning_rate": 4.967319289560892e-05, |
|
"loss": 2.9151, |
|
"step": 19440 |
|
}, |
|
{ |
|
"epoch": 0.15868295591627335, |
|
"grad_norm": 0.4382655620574951, |
|
"learning_rate": 4.967215905439113e-05, |
|
"loss": 2.9263, |
|
"step": 19470 |
|
}, |
|
{ |
|
"epoch": 0.15892745970042785, |
|
"grad_norm": 0.44708484411239624, |
|
"learning_rate": 4.967112359128979e-05, |
|
"loss": 2.9088, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 0.15917196348458235, |
|
"grad_norm": 0.4404008984565735, |
|
"learning_rate": 4.967008650637297e-05, |
|
"loss": 2.8915, |
|
"step": 19530 |
|
}, |
|
{ |
|
"epoch": 0.15941646726873687, |
|
"grad_norm": 0.4447616636753082, |
|
"learning_rate": 4.9669047799708834e-05, |
|
"loss": 2.9082, |
|
"step": 19560 |
|
}, |
|
{ |
|
"epoch": 0.15966097105289137, |
|
"grad_norm": 0.41954031586647034, |
|
"learning_rate": 4.966800747136568e-05, |
|
"loss": 2.8891, |
|
"step": 19590 |
|
}, |
|
{ |
|
"epoch": 0.15990547483704587, |
|
"grad_norm": 0.4296923279762268, |
|
"learning_rate": 4.966696552141189e-05, |
|
"loss": 2.9053, |
|
"step": 19620 |
|
}, |
|
{ |
|
"epoch": 0.16014997862120037, |
|
"grad_norm": 0.4200703203678131, |
|
"learning_rate": 4.966592194991596e-05, |
|
"loss": 2.9006, |
|
"step": 19650 |
|
}, |
|
{ |
|
"epoch": 0.16039448240535487, |
|
"grad_norm": 0.42826735973358154, |
|
"learning_rate": 4.966487675694649e-05, |
|
"loss": 2.908, |
|
"step": 19680 |
|
}, |
|
{ |
|
"epoch": 0.1606389861895094, |
|
"grad_norm": 0.43031007051467896, |
|
"learning_rate": 4.966382994257218e-05, |
|
"loss": 2.9067, |
|
"step": 19710 |
|
}, |
|
{ |
|
"epoch": 0.1608834899736639, |
|
"grad_norm": 0.41722506284713745, |
|
"learning_rate": 4.966278150686186e-05, |
|
"loss": 2.882, |
|
"step": 19740 |
|
}, |
|
{ |
|
"epoch": 0.1611279937578184, |
|
"grad_norm": 0.4380910098552704, |
|
"learning_rate": 4.9661731449884444e-05, |
|
"loss": 2.8822, |
|
"step": 19770 |
|
}, |
|
{ |
|
"epoch": 0.1613724975419729, |
|
"grad_norm": 0.42654240131378174, |
|
"learning_rate": 4.966067977170896e-05, |
|
"loss": 2.9046, |
|
"step": 19800 |
|
}, |
|
{ |
|
"epoch": 0.1616170013261274, |
|
"grad_norm": 0.4350351393222809, |
|
"learning_rate": 4.9659626472404545e-05, |
|
"loss": 2.8921, |
|
"step": 19830 |
|
}, |
|
{ |
|
"epoch": 0.1618615051102819, |
|
"grad_norm": 0.45913204550743103, |
|
"learning_rate": 4.965857155204044e-05, |
|
"loss": 2.9024, |
|
"step": 19860 |
|
}, |
|
{ |
|
"epoch": 0.1621060088944364, |
|
"grad_norm": 0.448477566242218, |
|
"learning_rate": 4.965751501068598e-05, |
|
"loss": 2.9039, |
|
"step": 19890 |
|
}, |
|
{ |
|
"epoch": 0.1623505126785909, |
|
"grad_norm": 0.42940184473991394, |
|
"learning_rate": 4.965645684841065e-05, |
|
"loss": 2.9179, |
|
"step": 19920 |
|
}, |
|
{ |
|
"epoch": 0.1625950164627454, |
|
"grad_norm": 0.4369218647480011, |
|
"learning_rate": 4.965539706528398e-05, |
|
"loss": 2.9041, |
|
"step": 19950 |
|
}, |
|
{ |
|
"epoch": 0.16283952024689993, |
|
"grad_norm": 0.43815574049949646, |
|
"learning_rate": 4.965433566137565e-05, |
|
"loss": 2.8966, |
|
"step": 19980 |
|
}, |
|
{ |
|
"epoch": 0.16308402403105443, |
|
"grad_norm": 0.42448291182518005, |
|
"learning_rate": 4.965327263675543e-05, |
|
"loss": 2.8959, |
|
"step": 20010 |
|
}, |
|
{ |
|
"epoch": 0.16332852781520893, |
|
"grad_norm": 0.4157077968120575, |
|
"learning_rate": 4.965220799149321e-05, |
|
"loss": 2.8876, |
|
"step": 20040 |
|
}, |
|
{ |
|
"epoch": 0.16357303159936343, |
|
"grad_norm": 0.4500816762447357, |
|
"learning_rate": 4.9651141725658955e-05, |
|
"loss": 2.9075, |
|
"step": 20070 |
|
}, |
|
{ |
|
"epoch": 0.16381753538351793, |
|
"grad_norm": 0.4356308579444885, |
|
"learning_rate": 4.9650073839322786e-05, |
|
"loss": 2.9147, |
|
"step": 20100 |
|
}, |
|
{ |
|
"epoch": 0.16406203916767245, |
|
"grad_norm": 0.4416338801383972, |
|
"learning_rate": 4.964900433255489e-05, |
|
"loss": 2.9017, |
|
"step": 20130 |
|
}, |
|
{ |
|
"epoch": 0.16430654295182695, |
|
"grad_norm": 0.4255659580230713, |
|
"learning_rate": 4.9647933205425574e-05, |
|
"loss": 2.9, |
|
"step": 20160 |
|
}, |
|
{ |
|
"epoch": 0.16455104673598145, |
|
"grad_norm": 0.4527672827243805, |
|
"learning_rate": 4.9646860458005254e-05, |
|
"loss": 2.9076, |
|
"step": 20190 |
|
}, |
|
{ |
|
"epoch": 0.16479555052013595, |
|
"grad_norm": 0.45242664217948914, |
|
"learning_rate": 4.9645786090364436e-05, |
|
"loss": 2.9177, |
|
"step": 20220 |
|
}, |
|
{ |
|
"epoch": 0.16504005430429047, |
|
"grad_norm": 0.43450960516929626, |
|
"learning_rate": 4.964471010257377e-05, |
|
"loss": 2.8809, |
|
"step": 20250 |
|
}, |
|
{ |
|
"epoch": 0.16528455808844497, |
|
"grad_norm": 0.44094353914260864, |
|
"learning_rate": 4.964363249470396e-05, |
|
"loss": 2.8996, |
|
"step": 20280 |
|
}, |
|
{ |
|
"epoch": 0.16552906187259947, |
|
"grad_norm": 0.4236229658126831, |
|
"learning_rate": 4.964255326682588e-05, |
|
"loss": 2.9014, |
|
"step": 20310 |
|
}, |
|
{ |
|
"epoch": 0.16577356565675397, |
|
"grad_norm": 0.43658915162086487, |
|
"learning_rate": 4.964147241901045e-05, |
|
"loss": 2.8835, |
|
"step": 20340 |
|
}, |
|
{ |
|
"epoch": 0.16601806944090847, |
|
"grad_norm": 0.4210447371006012, |
|
"learning_rate": 4.964038995132872e-05, |
|
"loss": 2.9015, |
|
"step": 20370 |
|
}, |
|
{ |
|
"epoch": 0.166262573225063, |
|
"grad_norm": 0.4408273994922638, |
|
"learning_rate": 4.963930586385186e-05, |
|
"loss": 2.8933, |
|
"step": 20400 |
|
}, |
|
{ |
|
"epoch": 0.1665070770092175, |
|
"grad_norm": 0.4378444254398346, |
|
"learning_rate": 4.963822015665114e-05, |
|
"loss": 2.8775, |
|
"step": 20430 |
|
}, |
|
{ |
|
"epoch": 0.166751580793372, |
|
"grad_norm": 0.4379393458366394, |
|
"learning_rate": 4.963713282979792e-05, |
|
"loss": 2.8828, |
|
"step": 20460 |
|
}, |
|
{ |
|
"epoch": 0.1669960845775265, |
|
"grad_norm": 0.4111761748790741, |
|
"learning_rate": 4.963604388336368e-05, |
|
"loss": 2.9, |
|
"step": 20490 |
|
}, |
|
{ |
|
"epoch": 0.16724058836168101, |
|
"grad_norm": 0.4153888523578644, |
|
"learning_rate": 4.9634953317420006e-05, |
|
"loss": 2.8834, |
|
"step": 20520 |
|
}, |
|
{ |
|
"epoch": 0.1674850921458355, |
|
"grad_norm": 0.44652920961380005, |
|
"learning_rate": 4.963386113203859e-05, |
|
"loss": 2.8591, |
|
"step": 20550 |
|
}, |
|
{ |
|
"epoch": 0.16772959592999, |
|
"grad_norm": 0.4109705090522766, |
|
"learning_rate": 4.963276732729124e-05, |
|
"loss": 2.8794, |
|
"step": 20580 |
|
}, |
|
{ |
|
"epoch": 0.1679740997141445, |
|
"grad_norm": 0.417043000459671, |
|
"learning_rate": 4.9631671903249835e-05, |
|
"loss": 2.8822, |
|
"step": 20610 |
|
}, |
|
{ |
|
"epoch": 0.168218603498299, |
|
"grad_norm": 0.4661671221256256, |
|
"learning_rate": 4.963057485998641e-05, |
|
"loss": 2.8884, |
|
"step": 20640 |
|
}, |
|
{ |
|
"epoch": 0.16846310728245353, |
|
"grad_norm": 0.4327886700630188, |
|
"learning_rate": 4.962947619757307e-05, |
|
"loss": 2.8988, |
|
"step": 20670 |
|
}, |
|
{ |
|
"epoch": 0.16870761106660803, |
|
"grad_norm": 0.4549929201602936, |
|
"learning_rate": 4.962837591608203e-05, |
|
"loss": 2.8748, |
|
"step": 20700 |
|
}, |
|
{ |
|
"epoch": 0.16895211485076253, |
|
"grad_norm": 0.4588701128959656, |
|
"learning_rate": 4.962727401558563e-05, |
|
"loss": 2.8811, |
|
"step": 20730 |
|
}, |
|
{ |
|
"epoch": 0.16919661863491703, |
|
"grad_norm": 0.4168790578842163, |
|
"learning_rate": 4.962617049615631e-05, |
|
"loss": 2.8648, |
|
"step": 20760 |
|
}, |
|
{ |
|
"epoch": 0.16944112241907153, |
|
"grad_norm": 0.440508633852005, |
|
"learning_rate": 4.96250653578666e-05, |
|
"loss": 2.8815, |
|
"step": 20790 |
|
}, |
|
{ |
|
"epoch": 0.16968562620322605, |
|
"grad_norm": 0.41507402062416077, |
|
"learning_rate": 4.962395860078917e-05, |
|
"loss": 2.8689, |
|
"step": 20820 |
|
}, |
|
{ |
|
"epoch": 0.16993012998738055, |
|
"grad_norm": 0.4498741924762726, |
|
"learning_rate": 4.9622850224996744e-05, |
|
"loss": 2.8794, |
|
"step": 20850 |
|
}, |
|
{ |
|
"epoch": 0.17017463377153505, |
|
"grad_norm": 0.42679455876350403, |
|
"learning_rate": 4.962174023056221e-05, |
|
"loss": 2.8815, |
|
"step": 20880 |
|
}, |
|
{ |
|
"epoch": 0.17041913755568955, |
|
"grad_norm": 0.4224587380886078, |
|
"learning_rate": 4.962062861755853e-05, |
|
"loss": 2.8964, |
|
"step": 20910 |
|
}, |
|
{ |
|
"epoch": 0.17066364133984407, |
|
"grad_norm": 0.4392225444316864, |
|
"learning_rate": 4.9619515386058775e-05, |
|
"loss": 2.8821, |
|
"step": 20940 |
|
}, |
|
{ |
|
"epoch": 0.17090814512399857, |
|
"grad_norm": 0.43538761138916016, |
|
"learning_rate": 4.9618400536136124e-05, |
|
"loss": 2.8867, |
|
"step": 20970 |
|
}, |
|
{ |
|
"epoch": 0.17115264890815307, |
|
"grad_norm": 0.4309751093387604, |
|
"learning_rate": 4.9617284067863866e-05, |
|
"loss": 2.8721, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 0.17139715269230757, |
|
"grad_norm": 0.43987441062927246, |
|
"learning_rate": 4.9616165981315397e-05, |
|
"loss": 2.8818, |
|
"step": 21030 |
|
}, |
|
{ |
|
"epoch": 0.17164165647646207, |
|
"grad_norm": 0.449082612991333, |
|
"learning_rate": 4.961504627656422e-05, |
|
"loss": 2.8882, |
|
"step": 21060 |
|
}, |
|
{ |
|
"epoch": 0.1718861602606166, |
|
"grad_norm": 0.4370419979095459, |
|
"learning_rate": 4.961392495368393e-05, |
|
"loss": 2.8902, |
|
"step": 21090 |
|
}, |
|
{ |
|
"epoch": 0.1721306640447711, |
|
"grad_norm": 0.44723600149154663, |
|
"learning_rate": 4.961280201274826e-05, |
|
"loss": 2.9021, |
|
"step": 21120 |
|
}, |
|
{ |
|
"epoch": 0.1723751678289256, |
|
"grad_norm": 0.45456555485725403, |
|
"learning_rate": 4.9611677453831005e-05, |
|
"loss": 2.8602, |
|
"step": 21150 |
|
}, |
|
{ |
|
"epoch": 0.1726196716130801, |
|
"grad_norm": 0.4356628954410553, |
|
"learning_rate": 4.9610551277006115e-05, |
|
"loss": 2.876, |
|
"step": 21180 |
|
}, |
|
{ |
|
"epoch": 0.1728641753972346, |
|
"grad_norm": 0.4403671324253082, |
|
"learning_rate": 4.96094234823476e-05, |
|
"loss": 2.8809, |
|
"step": 21210 |
|
}, |
|
{ |
|
"epoch": 0.1731086791813891, |
|
"grad_norm": 0.4314226806163788, |
|
"learning_rate": 4.9608294069929605e-05, |
|
"loss": 2.8596, |
|
"step": 21240 |
|
}, |
|
{ |
|
"epoch": 0.1733531829655436, |
|
"grad_norm": 0.4421268701553345, |
|
"learning_rate": 4.960716303982638e-05, |
|
"loss": 2.8562, |
|
"step": 21270 |
|
}, |
|
{ |
|
"epoch": 0.1735976867496981, |
|
"grad_norm": 0.4297054409980774, |
|
"learning_rate": 4.960603039211227e-05, |
|
"loss": 2.8632, |
|
"step": 21300 |
|
}, |
|
{ |
|
"epoch": 0.1738421905338526, |
|
"grad_norm": 0.42435142397880554, |
|
"learning_rate": 4.960489612686174e-05, |
|
"loss": 2.8827, |
|
"step": 21330 |
|
}, |
|
{ |
|
"epoch": 0.17408669431800713, |
|
"grad_norm": 0.42332854866981506, |
|
"learning_rate": 4.960376024414935e-05, |
|
"loss": 2.8725, |
|
"step": 21360 |
|
}, |
|
{ |
|
"epoch": 0.17433119810216163, |
|
"grad_norm": 0.43352165818214417, |
|
"learning_rate": 4.9602622744049765e-05, |
|
"loss": 2.8714, |
|
"step": 21390 |
|
}, |
|
{ |
|
"epoch": 0.17457570188631613, |
|
"grad_norm": 0.44356927275657654, |
|
"learning_rate": 4.960148362663777e-05, |
|
"loss": 2.8753, |
|
"step": 21420 |
|
}, |
|
{ |
|
"epoch": 0.17482020567047063, |
|
"grad_norm": 0.43293410539627075, |
|
"learning_rate": 4.960034289198824e-05, |
|
"loss": 2.8716, |
|
"step": 21450 |
|
}, |
|
{ |
|
"epoch": 0.17506470945462513, |
|
"grad_norm": 0.4402593970298767, |
|
"learning_rate": 4.959920054017616e-05, |
|
"loss": 2.8837, |
|
"step": 21480 |
|
}, |
|
{ |
|
"epoch": 0.17530921323877965, |
|
"grad_norm": 0.4285391867160797, |
|
"learning_rate": 4.959805657127664e-05, |
|
"loss": 2.8776, |
|
"step": 21510 |
|
}, |
|
{ |
|
"epoch": 0.17555371702293415, |
|
"grad_norm": 0.4400288164615631, |
|
"learning_rate": 4.959691098536487e-05, |
|
"loss": 2.8794, |
|
"step": 21540 |
|
}, |
|
{ |
|
"epoch": 0.17579822080708865, |
|
"grad_norm": 0.4306489825248718, |
|
"learning_rate": 4.959576378251617e-05, |
|
"loss": 2.8744, |
|
"step": 21570 |
|
}, |
|
{ |
|
"epoch": 0.17604272459124315, |
|
"grad_norm": 0.4402148723602295, |
|
"learning_rate": 4.959461496280594e-05, |
|
"loss": 2.8749, |
|
"step": 21600 |
|
}, |
|
{ |
|
"epoch": 0.17628722837539768, |
|
"grad_norm": 0.44051840901374817, |
|
"learning_rate": 4.95934645263097e-05, |
|
"loss": 2.8714, |
|
"step": 21630 |
|
}, |
|
{ |
|
"epoch": 0.17653173215955217, |
|
"grad_norm": 0.4164459705352783, |
|
"learning_rate": 4.9592312473103086e-05, |
|
"loss": 2.8757, |
|
"step": 21660 |
|
}, |
|
{ |
|
"epoch": 0.17677623594370667, |
|
"grad_norm": 0.4289766550064087, |
|
"learning_rate": 4.9591158803261834e-05, |
|
"loss": 2.8675, |
|
"step": 21690 |
|
}, |
|
{ |
|
"epoch": 0.17702073972786117, |
|
"grad_norm": 0.42621496319770813, |
|
"learning_rate": 4.9590003516861774e-05, |
|
"loss": 2.852, |
|
"step": 21720 |
|
}, |
|
{ |
|
"epoch": 0.17726524351201567, |
|
"grad_norm": 0.43924206495285034, |
|
"learning_rate": 4.958884661397885e-05, |
|
"loss": 2.867, |
|
"step": 21750 |
|
}, |
|
{ |
|
"epoch": 0.1775097472961702, |
|
"grad_norm": 0.4479053020477295, |
|
"learning_rate": 4.958768809468912e-05, |
|
"loss": 2.8768, |
|
"step": 21780 |
|
}, |
|
{ |
|
"epoch": 0.1777542510803247, |
|
"grad_norm": 0.4436418414115906, |
|
"learning_rate": 4.958652795906874e-05, |
|
"loss": 2.8795, |
|
"step": 21810 |
|
}, |
|
{ |
|
"epoch": 0.1779987548644792, |
|
"grad_norm": 0.4646727740764618, |
|
"learning_rate": 4.9585366207193984e-05, |
|
"loss": 2.8738, |
|
"step": 21840 |
|
}, |
|
{ |
|
"epoch": 0.1782432586486337, |
|
"grad_norm": 0.44066762924194336, |
|
"learning_rate": 4.95842028391412e-05, |
|
"loss": 2.8975, |
|
"step": 21870 |
|
}, |
|
{ |
|
"epoch": 0.1784877624327882, |
|
"grad_norm": 0.4433143138885498, |
|
"learning_rate": 4.958303785498689e-05, |
|
"loss": 2.8845, |
|
"step": 21900 |
|
}, |
|
{ |
|
"epoch": 0.17873226621694271, |
|
"grad_norm": 0.45124372839927673, |
|
"learning_rate": 4.958187125480761e-05, |
|
"loss": 2.8767, |
|
"step": 21930 |
|
}, |
|
{ |
|
"epoch": 0.1789767700010972, |
|
"grad_norm": 0.44482797384262085, |
|
"learning_rate": 4.958070303868008e-05, |
|
"loss": 2.8577, |
|
"step": 21960 |
|
}, |
|
{ |
|
"epoch": 0.1792212737852517, |
|
"grad_norm": 0.4298214018344879, |
|
"learning_rate": 4.9579533206681066e-05, |
|
"loss": 2.8613, |
|
"step": 21990 |
|
}, |
|
{ |
|
"epoch": 0.1794657775694062, |
|
"grad_norm": 0.4243757426738739, |
|
"learning_rate": 4.957836175888749e-05, |
|
"loss": 2.8533, |
|
"step": 22020 |
|
}, |
|
{ |
|
"epoch": 0.17971028135356074, |
|
"grad_norm": 0.43930861353874207, |
|
"learning_rate": 4.957718869537635e-05, |
|
"loss": 2.8845, |
|
"step": 22050 |
|
}, |
|
{ |
|
"epoch": 0.17995478513771523, |
|
"grad_norm": 0.42320388555526733, |
|
"learning_rate": 4.957601401622477e-05, |
|
"loss": 2.8757, |
|
"step": 22080 |
|
}, |
|
{ |
|
"epoch": 0.18019928892186973, |
|
"grad_norm": 0.4299392104148865, |
|
"learning_rate": 4.957483772150996e-05, |
|
"loss": 2.8794, |
|
"step": 22110 |
|
}, |
|
{ |
|
"epoch": 0.18044379270602423, |
|
"grad_norm": 0.42219462990760803, |
|
"learning_rate": 4.957365981130925e-05, |
|
"loss": 2.8451, |
|
"step": 22140 |
|
}, |
|
{ |
|
"epoch": 0.18068829649017873, |
|
"grad_norm": 0.4573104977607727, |
|
"learning_rate": 4.957248028570007e-05, |
|
"loss": 2.8643, |
|
"step": 22170 |
|
}, |
|
{ |
|
"epoch": 0.18093280027433326, |
|
"grad_norm": 0.41791173815727234, |
|
"learning_rate": 4.9571299144759965e-05, |
|
"loss": 2.872, |
|
"step": 22200 |
|
}, |
|
{ |
|
"epoch": 0.18117730405848775, |
|
"grad_norm": 0.42580413818359375, |
|
"learning_rate": 4.957011638856658e-05, |
|
"loss": 2.8639, |
|
"step": 22230 |
|
}, |
|
{ |
|
"epoch": 0.18142180784264225, |
|
"grad_norm": 0.4491201937198639, |
|
"learning_rate": 4.956893201719766e-05, |
|
"loss": 2.8571, |
|
"step": 22260 |
|
}, |
|
{ |
|
"epoch": 0.18166631162679675, |
|
"grad_norm": 0.4404660761356354, |
|
"learning_rate": 4.956774603073107e-05, |
|
"loss": 2.8626, |
|
"step": 22290 |
|
}, |
|
{ |
|
"epoch": 0.18191081541095128, |
|
"grad_norm": 0.4323899447917938, |
|
"learning_rate": 4.956655842924477e-05, |
|
"loss": 2.877, |
|
"step": 22320 |
|
}, |
|
{ |
|
"epoch": 0.18215531919510577, |
|
"grad_norm": 0.42996013164520264, |
|
"learning_rate": 4.9565369212816825e-05, |
|
"loss": 2.858, |
|
"step": 22350 |
|
}, |
|
{ |
|
"epoch": 0.18239982297926027, |
|
"grad_norm": 0.42361024022102356, |
|
"learning_rate": 4.9564178381525414e-05, |
|
"loss": 2.8497, |
|
"step": 22380 |
|
}, |
|
{ |
|
"epoch": 0.18264432676341477, |
|
"grad_norm": 0.42209547758102417, |
|
"learning_rate": 4.956298593544883e-05, |
|
"loss": 2.8664, |
|
"step": 22410 |
|
}, |
|
{ |
|
"epoch": 0.18288883054756927, |
|
"grad_norm": 0.42259085178375244, |
|
"learning_rate": 4.9561791874665445e-05, |
|
"loss": 2.8416, |
|
"step": 22440 |
|
}, |
|
{ |
|
"epoch": 0.1831333343317238, |
|
"grad_norm": 0.4607374668121338, |
|
"learning_rate": 4.956059619925376e-05, |
|
"loss": 2.8708, |
|
"step": 22470 |
|
}, |
|
{ |
|
"epoch": 0.1833778381158783, |
|
"grad_norm": 0.43727099895477295, |
|
"learning_rate": 4.9559398909292376e-05, |
|
"loss": 2.8641, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 0.1836223419000328, |
|
"grad_norm": 0.42699259519577026, |
|
"learning_rate": 4.955820000486e-05, |
|
"loss": 2.8532, |
|
"step": 22530 |
|
}, |
|
{ |
|
"epoch": 0.1838668456841873, |
|
"grad_norm": 0.4515063464641571, |
|
"learning_rate": 4.9556999486035444e-05, |
|
"loss": 2.8692, |
|
"step": 22560 |
|
}, |
|
{ |
|
"epoch": 0.1841113494683418, |
|
"grad_norm": 0.4204477369785309, |
|
"learning_rate": 4.9555797352897625e-05, |
|
"loss": 2.8586, |
|
"step": 22590 |
|
}, |
|
{ |
|
"epoch": 0.18435585325249632, |
|
"grad_norm": 0.43957340717315674, |
|
"learning_rate": 4.955459360552558e-05, |
|
"loss": 2.8553, |
|
"step": 22620 |
|
}, |
|
{ |
|
"epoch": 0.18460035703665081, |
|
"grad_norm": 0.4496155381202698, |
|
"learning_rate": 4.9553388243998424e-05, |
|
"loss": 2.8722, |
|
"step": 22650 |
|
}, |
|
{ |
|
"epoch": 0.1848448608208053, |
|
"grad_norm": 0.4415959119796753, |
|
"learning_rate": 4.9552181268395404e-05, |
|
"loss": 2.8521, |
|
"step": 22680 |
|
}, |
|
{ |
|
"epoch": 0.1850893646049598, |
|
"grad_norm": 0.43609628081321716, |
|
"learning_rate": 4.955097267879586e-05, |
|
"loss": 2.8404, |
|
"step": 22710 |
|
}, |
|
{ |
|
"epoch": 0.18533386838911434, |
|
"grad_norm": 0.43372586369514465, |
|
"learning_rate": 4.954976247527924e-05, |
|
"loss": 2.8526, |
|
"step": 22740 |
|
}, |
|
{ |
|
"epoch": 0.18557837217326884, |
|
"grad_norm": 0.4543731212615967, |
|
"learning_rate": 4.9548550657925095e-05, |
|
"loss": 2.8468, |
|
"step": 22770 |
|
}, |
|
{ |
|
"epoch": 0.18582287595742333, |
|
"grad_norm": 0.4426426589488983, |
|
"learning_rate": 4.954733722681311e-05, |
|
"loss": 2.8554, |
|
"step": 22800 |
|
}, |
|
{ |
|
"epoch": 0.18606737974157783, |
|
"grad_norm": 0.44420433044433594, |
|
"learning_rate": 4.954612218202303e-05, |
|
"loss": 2.8546, |
|
"step": 22830 |
|
}, |
|
{ |
|
"epoch": 0.18631188352573233, |
|
"grad_norm": 0.41634631156921387, |
|
"learning_rate": 4.954490552363473e-05, |
|
"loss": 2.8629, |
|
"step": 22860 |
|
}, |
|
{ |
|
"epoch": 0.18655638730988686, |
|
"grad_norm": 0.44227173924446106, |
|
"learning_rate": 4.95436872517282e-05, |
|
"loss": 2.8612, |
|
"step": 22890 |
|
}, |
|
{ |
|
"epoch": 0.18680089109404135, |
|
"grad_norm": 0.4149799048900604, |
|
"learning_rate": 4.954246736638352e-05, |
|
"loss": 2.8627, |
|
"step": 22920 |
|
}, |
|
{ |
|
"epoch": 0.18704539487819585, |
|
"grad_norm": 0.43874844908714294, |
|
"learning_rate": 4.954124586768088e-05, |
|
"loss": 2.8516, |
|
"step": 22950 |
|
}, |
|
{ |
|
"epoch": 0.18728989866235035, |
|
"grad_norm": 0.4350124001502991, |
|
"learning_rate": 4.9540022755700587e-05, |
|
"loss": 2.8583, |
|
"step": 22980 |
|
}, |
|
{ |
|
"epoch": 0.18753440244650488, |
|
"grad_norm": 0.43529069423675537, |
|
"learning_rate": 4.953879803052304e-05, |
|
"loss": 2.8631, |
|
"step": 23010 |
|
}, |
|
{ |
|
"epoch": 0.18777890623065938, |
|
"grad_norm": 0.43385937809944153, |
|
"learning_rate": 4.9537571692228744e-05, |
|
"loss": 2.8493, |
|
"step": 23040 |
|
}, |
|
{ |
|
"epoch": 0.18802341001481387, |
|
"grad_norm": 0.4345625936985016, |
|
"learning_rate": 4.953634374089832e-05, |
|
"loss": 2.8463, |
|
"step": 23070 |
|
}, |
|
{ |
|
"epoch": 0.18826791379896837, |
|
"grad_norm": 0.44867751002311707, |
|
"learning_rate": 4.953511417661249e-05, |
|
"loss": 2.8507, |
|
"step": 23100 |
|
}, |
|
{ |
|
"epoch": 0.18851241758312287, |
|
"grad_norm": 0.4386790692806244, |
|
"learning_rate": 4.953388299945209e-05, |
|
"loss": 2.847, |
|
"step": 23130 |
|
}, |
|
{ |
|
"epoch": 0.1887569213672774, |
|
"grad_norm": 0.4632754325866699, |
|
"learning_rate": 4.9532650209498045e-05, |
|
"loss": 2.8569, |
|
"step": 23160 |
|
}, |
|
{ |
|
"epoch": 0.1890014251514319, |
|
"grad_norm": 0.4407399296760559, |
|
"learning_rate": 4.9531415806831394e-05, |
|
"loss": 2.8541, |
|
"step": 23190 |
|
}, |
|
{ |
|
"epoch": 0.1892459289355864, |
|
"grad_norm": 0.45401519536972046, |
|
"learning_rate": 4.953017979153329e-05, |
|
"loss": 2.8702, |
|
"step": 23220 |
|
}, |
|
{ |
|
"epoch": 0.1894904327197409, |
|
"grad_norm": 0.42486798763275146, |
|
"learning_rate": 4.952894216368498e-05, |
|
"loss": 2.8701, |
|
"step": 23250 |
|
}, |
|
{ |
|
"epoch": 0.1897349365038954, |
|
"grad_norm": 0.4279034733772278, |
|
"learning_rate": 4.952770292336782e-05, |
|
"loss": 2.8429, |
|
"step": 23280 |
|
}, |
|
{ |
|
"epoch": 0.18997944028804992, |
|
"grad_norm": 0.4184633791446686, |
|
"learning_rate": 4.9526462070663294e-05, |
|
"loss": 2.8358, |
|
"step": 23310 |
|
}, |
|
{ |
|
"epoch": 0.19022394407220442, |
|
"grad_norm": 0.4264056086540222, |
|
"learning_rate": 4.952521960565295e-05, |
|
"loss": 2.8448, |
|
"step": 23340 |
|
}, |
|
{ |
|
"epoch": 0.1904684478563589, |
|
"grad_norm": 0.43503764271736145, |
|
"learning_rate": 4.952397552841846e-05, |
|
"loss": 2.851, |
|
"step": 23370 |
|
}, |
|
{ |
|
"epoch": 0.1907129516405134, |
|
"grad_norm": 0.4345065653324127, |
|
"learning_rate": 4.952272983904164e-05, |
|
"loss": 2.8468, |
|
"step": 23400 |
|
}, |
|
{ |
|
"epoch": 0.19095745542466794, |
|
"grad_norm": 0.4394157826900482, |
|
"learning_rate": 4.9521482537604345e-05, |
|
"loss": 2.837, |
|
"step": 23430 |
|
}, |
|
{ |
|
"epoch": 0.19120195920882244, |
|
"grad_norm": 0.4336976408958435, |
|
"learning_rate": 4.9520233624188585e-05, |
|
"loss": 2.8516, |
|
"step": 23460 |
|
}, |
|
{ |
|
"epoch": 0.19144646299297693, |
|
"grad_norm": 0.42082545161247253, |
|
"learning_rate": 4.951898309887646e-05, |
|
"loss": 2.8616, |
|
"step": 23490 |
|
}, |
|
{ |
|
"epoch": 0.19169096677713143, |
|
"grad_norm": 0.44079628586769104, |
|
"learning_rate": 4.951773096175016e-05, |
|
"loss": 2.8371, |
|
"step": 23520 |
|
}, |
|
{ |
|
"epoch": 0.19193547056128593, |
|
"grad_norm": 0.4336305558681488, |
|
"learning_rate": 4.9516477212892024e-05, |
|
"loss": 2.8398, |
|
"step": 23550 |
|
}, |
|
{ |
|
"epoch": 0.19217997434544046, |
|
"grad_norm": 0.42380383610725403, |
|
"learning_rate": 4.9515221852384455e-05, |
|
"loss": 2.8444, |
|
"step": 23580 |
|
}, |
|
{ |
|
"epoch": 0.19242447812959496, |
|
"grad_norm": 0.43711090087890625, |
|
"learning_rate": 4.9513964880309974e-05, |
|
"loss": 2.8399, |
|
"step": 23610 |
|
}, |
|
{ |
|
"epoch": 0.19266898191374945, |
|
"grad_norm": 0.44011425971984863, |
|
"learning_rate": 4.951270629675121e-05, |
|
"loss": 2.8585, |
|
"step": 23640 |
|
}, |
|
{ |
|
"epoch": 0.19291348569790395, |
|
"grad_norm": 0.4214531183242798, |
|
"learning_rate": 4.951144610179092e-05, |
|
"loss": 2.8444, |
|
"step": 23670 |
|
}, |
|
{ |
|
"epoch": 0.19315798948205848, |
|
"grad_norm": 0.43613433837890625, |
|
"learning_rate": 4.9510184295511915e-05, |
|
"loss": 2.85, |
|
"step": 23700 |
|
}, |
|
{ |
|
"epoch": 0.19340249326621298, |
|
"grad_norm": 0.43342262506484985, |
|
"learning_rate": 4.950892087799716e-05, |
|
"loss": 2.8515, |
|
"step": 23730 |
|
}, |
|
{ |
|
"epoch": 0.19364699705036748, |
|
"grad_norm": 0.46385619044303894, |
|
"learning_rate": 4.950765584932971e-05, |
|
"loss": 2.8542, |
|
"step": 23760 |
|
}, |
|
{ |
|
"epoch": 0.19389150083452197, |
|
"grad_norm": 0.43448546528816223, |
|
"learning_rate": 4.950638920959273e-05, |
|
"loss": 2.8407, |
|
"step": 23790 |
|
}, |
|
{ |
|
"epoch": 0.19413600461867647, |
|
"grad_norm": 0.43169257044792175, |
|
"learning_rate": 4.950512095886947e-05, |
|
"loss": 2.8203, |
|
"step": 23820 |
|
}, |
|
{ |
|
"epoch": 0.194380508402831, |
|
"grad_norm": 0.4383741617202759, |
|
"learning_rate": 4.950385109724331e-05, |
|
"loss": 2.8434, |
|
"step": 23850 |
|
}, |
|
{ |
|
"epoch": 0.1946250121869855, |
|
"grad_norm": 0.44045546650886536, |
|
"learning_rate": 4.950257962479773e-05, |
|
"loss": 2.8484, |
|
"step": 23880 |
|
}, |
|
{ |
|
"epoch": 0.19486951597114, |
|
"grad_norm": 0.4332634210586548, |
|
"learning_rate": 4.95013065416163e-05, |
|
"loss": 2.8428, |
|
"step": 23910 |
|
}, |
|
{ |
|
"epoch": 0.1951140197552945, |
|
"grad_norm": 0.4307137429714203, |
|
"learning_rate": 4.950003184778272e-05, |
|
"loss": 2.8264, |
|
"step": 23940 |
|
}, |
|
{ |
|
"epoch": 0.195358523539449, |
|
"grad_norm": 0.4232560694217682, |
|
"learning_rate": 4.9498755543380795e-05, |
|
"loss": 2.8307, |
|
"step": 23970 |
|
}, |
|
{ |
|
"epoch": 0.19560302732360352, |
|
"grad_norm": 0.42863690853118896, |
|
"learning_rate": 4.949747762849441e-05, |
|
"loss": 2.8349, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 0.19584753110775802, |
|
"grad_norm": 0.4395289719104767, |
|
"learning_rate": 4.949619810320757e-05, |
|
"loss": 2.838, |
|
"step": 24030 |
|
}, |
|
{ |
|
"epoch": 0.19609203489191251, |
|
"grad_norm": 0.4364834725856781, |
|
"learning_rate": 4.9494916967604404e-05, |
|
"loss": 2.8399, |
|
"step": 24060 |
|
}, |
|
{ |
|
"epoch": 0.196336538676067, |
|
"grad_norm": 0.4557901918888092, |
|
"learning_rate": 4.949363422176911e-05, |
|
"loss": 2.8317, |
|
"step": 24090 |
|
}, |
|
{ |
|
"epoch": 0.19658104246022154, |
|
"grad_norm": 0.4531985819339752, |
|
"learning_rate": 4.9492349865786034e-05, |
|
"loss": 2.8307, |
|
"step": 24120 |
|
}, |
|
{ |
|
"epoch": 0.19682554624437604, |
|
"grad_norm": 0.4378800392150879, |
|
"learning_rate": 4.9491063899739596e-05, |
|
"loss": 2.8351, |
|
"step": 24150 |
|
}, |
|
{ |
|
"epoch": 0.19707005002853054, |
|
"grad_norm": 0.4349973201751709, |
|
"learning_rate": 4.948977632371432e-05, |
|
"loss": 2.832, |
|
"step": 24180 |
|
}, |
|
{ |
|
"epoch": 0.19731455381268503, |
|
"grad_norm": 0.4363759756088257, |
|
"learning_rate": 4.948848713779487e-05, |
|
"loss": 2.8357, |
|
"step": 24210 |
|
}, |
|
{ |
|
"epoch": 0.19755905759683953, |
|
"grad_norm": 0.42884907126426697, |
|
"learning_rate": 4.948719634206598e-05, |
|
"loss": 2.8272, |
|
"step": 24240 |
|
}, |
|
{ |
|
"epoch": 0.19780356138099406, |
|
"grad_norm": 0.4273601174354553, |
|
"learning_rate": 4.9485903936612504e-05, |
|
"loss": 2.8675, |
|
"step": 24270 |
|
}, |
|
{ |
|
"epoch": 0.19804806516514856, |
|
"grad_norm": 0.4261528253555298, |
|
"learning_rate": 4.948460992151941e-05, |
|
"loss": 2.8254, |
|
"step": 24300 |
|
}, |
|
{ |
|
"epoch": 0.19829256894930306, |
|
"grad_norm": 0.41921886801719666, |
|
"learning_rate": 4.948331429687175e-05, |
|
"loss": 2.8469, |
|
"step": 24330 |
|
}, |
|
{ |
|
"epoch": 0.19853707273345755, |
|
"grad_norm": 0.436755508184433, |
|
"learning_rate": 4.9482017062754704e-05, |
|
"loss": 2.8461, |
|
"step": 24360 |
|
}, |
|
{ |
|
"epoch": 0.19878157651761208, |
|
"grad_norm": 0.4177142381668091, |
|
"learning_rate": 4.9480718219253555e-05, |
|
"loss": 2.8252, |
|
"step": 24390 |
|
}, |
|
{ |
|
"epoch": 0.19902608030176658, |
|
"grad_norm": 0.4457404911518097, |
|
"learning_rate": 4.947941776645366e-05, |
|
"loss": 2.8434, |
|
"step": 24420 |
|
}, |
|
{ |
|
"epoch": 0.19927058408592108, |
|
"grad_norm": 0.44040533900260925, |
|
"learning_rate": 4.9478115704440545e-05, |
|
"loss": 2.8336, |
|
"step": 24450 |
|
}, |
|
{ |
|
"epoch": 0.19951508787007557, |
|
"grad_norm": 0.4262928068637848, |
|
"learning_rate": 4.947681203329977e-05, |
|
"loss": 2.8407, |
|
"step": 24480 |
|
}, |
|
{ |
|
"epoch": 0.19975959165423007, |
|
"grad_norm": 0.4242199957370758, |
|
"learning_rate": 4.947550675311706e-05, |
|
"loss": 2.8235, |
|
"step": 24510 |
|
}, |
|
{ |
|
"epoch": 0.2000040954383846, |
|
"grad_norm": 0.4429243803024292, |
|
"learning_rate": 4.94741998639782e-05, |
|
"loss": 2.8327, |
|
"step": 24540 |
|
}, |
|
{ |
|
"epoch": 0.2002485992225391, |
|
"grad_norm": 0.42853012681007385, |
|
"learning_rate": 4.947289136596912e-05, |
|
"loss": 2.844, |
|
"step": 24570 |
|
}, |
|
{ |
|
"epoch": 0.2004931030066936, |
|
"grad_norm": 0.4213114082813263, |
|
"learning_rate": 4.9471581259175816e-05, |
|
"loss": 2.8381, |
|
"step": 24600 |
|
}, |
|
{ |
|
"epoch": 0.2007376067908481, |
|
"grad_norm": 0.42277562618255615, |
|
"learning_rate": 4.9470269543684436e-05, |
|
"loss": 2.8443, |
|
"step": 24630 |
|
}, |
|
{ |
|
"epoch": 0.2009821105750026, |
|
"grad_norm": 0.42599666118621826, |
|
"learning_rate": 4.9468956219581186e-05, |
|
"loss": 2.8377, |
|
"step": 24660 |
|
}, |
|
{ |
|
"epoch": 0.20122661435915712, |
|
"grad_norm": 0.4356340169906616, |
|
"learning_rate": 4.946764128695242e-05, |
|
"loss": 2.8322, |
|
"step": 24690 |
|
}, |
|
{ |
|
"epoch": 0.20147111814331162, |
|
"grad_norm": 0.42555105686187744, |
|
"learning_rate": 4.9466324745884565e-05, |
|
"loss": 2.8418, |
|
"step": 24720 |
|
}, |
|
{ |
|
"epoch": 0.20171562192746612, |
|
"grad_norm": 0.4337792992591858, |
|
"learning_rate": 4.946500659646417e-05, |
|
"loss": 2.8426, |
|
"step": 24750 |
|
}, |
|
{ |
|
"epoch": 0.20196012571162061, |
|
"grad_norm": 0.4453223943710327, |
|
"learning_rate": 4.9463686838777887e-05, |
|
"loss": 2.8329, |
|
"step": 24780 |
|
}, |
|
{ |
|
"epoch": 0.20220462949577514, |
|
"grad_norm": 0.45427435636520386, |
|
"learning_rate": 4.946236547291248e-05, |
|
"loss": 2.825, |
|
"step": 24810 |
|
}, |
|
{ |
|
"epoch": 0.20244913327992964, |
|
"grad_norm": 0.42173105478286743, |
|
"learning_rate": 4.94610424989548e-05, |
|
"loss": 2.8412, |
|
"step": 24840 |
|
}, |
|
{ |
|
"epoch": 0.20269363706408414, |
|
"grad_norm": 0.4376053810119629, |
|
"learning_rate": 4.9459717916991834e-05, |
|
"loss": 2.8071, |
|
"step": 24870 |
|
}, |
|
{ |
|
"epoch": 0.20293814084823864, |
|
"grad_norm": 0.4222453832626343, |
|
"learning_rate": 4.945839172711063e-05, |
|
"loss": 2.8369, |
|
"step": 24900 |
|
}, |
|
{ |
|
"epoch": 0.20318264463239313, |
|
"grad_norm": 0.4197344481945038, |
|
"learning_rate": 4.9457063929398394e-05, |
|
"loss": 2.8411, |
|
"step": 24930 |
|
}, |
|
{ |
|
"epoch": 0.20342714841654766, |
|
"grad_norm": 0.41742411255836487, |
|
"learning_rate": 4.94557345239424e-05, |
|
"loss": 2.8138, |
|
"step": 24960 |
|
}, |
|
{ |
|
"epoch": 0.20367165220070216, |
|
"grad_norm": 0.4322751462459564, |
|
"learning_rate": 4.945440351083003e-05, |
|
"loss": 2.832, |
|
"step": 24990 |
|
}, |
|
{ |
|
"epoch": 0.20391615598485666, |
|
"grad_norm": 0.42838114500045776, |
|
"learning_rate": 4.94530708901488e-05, |
|
"loss": 2.8517, |
|
"step": 25020 |
|
}, |
|
{ |
|
"epoch": 0.20416065976901115, |
|
"grad_norm": 0.44520124793052673, |
|
"learning_rate": 4.9451736661986306e-05, |
|
"loss": 2.8259, |
|
"step": 25050 |
|
}, |
|
{ |
|
"epoch": 0.20440516355316568, |
|
"grad_norm": 0.4274967610836029, |
|
"learning_rate": 4.945040082643026e-05, |
|
"loss": 2.8189, |
|
"step": 25080 |
|
}, |
|
{ |
|
"epoch": 0.20464966733732018, |
|
"grad_norm": 0.5348169207572937, |
|
"learning_rate": 4.944906338356846e-05, |
|
"loss": 2.8208, |
|
"step": 25110 |
|
}, |
|
{ |
|
"epoch": 0.20489417112147468, |
|
"grad_norm": 0.43682798743247986, |
|
"learning_rate": 4.944772433348884e-05, |
|
"loss": 2.8243, |
|
"step": 25140 |
|
}, |
|
{ |
|
"epoch": 0.20513867490562918, |
|
"grad_norm": 0.44586774706840515, |
|
"learning_rate": 4.9446383676279435e-05, |
|
"loss": 2.8349, |
|
"step": 25170 |
|
}, |
|
{ |
|
"epoch": 0.20538317868978367, |
|
"grad_norm": 0.42513328790664673, |
|
"learning_rate": 4.944504141202836e-05, |
|
"loss": 2.8211, |
|
"step": 25200 |
|
}, |
|
{ |
|
"epoch": 0.2056276824739382, |
|
"grad_norm": 0.41674286127090454, |
|
"learning_rate": 4.944369754082385e-05, |
|
"loss": 2.815, |
|
"step": 25230 |
|
}, |
|
{ |
|
"epoch": 0.2058721862580927, |
|
"grad_norm": 0.4303615391254425, |
|
"learning_rate": 4.944235206275426e-05, |
|
"loss": 2.8233, |
|
"step": 25260 |
|
}, |
|
{ |
|
"epoch": 0.2061166900422472, |
|
"grad_norm": 0.44005194306373596, |
|
"learning_rate": 4.9441004977908036e-05, |
|
"loss": 2.8205, |
|
"step": 25290 |
|
} |
|
], |
|
"logging_steps": 30, |
|
"max_steps": 368091, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 3, |
|
"save_steps": 100, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.4201350438977536e+19, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |