{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.05868090819708105,
  "eval_steps": 100000000,
  "global_step": 7200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 8.15012613848348e-06,
      "grad_norm": 3.4710192680358887,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 11.2605,
      "step": 1
    },
    {
      "epoch": 0.0002445037841545044,
      "grad_norm": 5.464046955108643,
      "learning_rate": 3e-06,
      "loss": 10.9977,
      "step": 30
    },
    {
      "epoch": 0.0004890075683090088,
      "grad_norm": 1.1376756429672241,
      "learning_rate": 6e-06,
      "loss": 9.4082,
      "step": 60
    },
    {
      "epoch": 0.0007335113524635132,
      "grad_norm": 1.2878344058990479,
      "learning_rate": 9e-06,
      "loss": 8.549,
      "step": 90
    },
    {
      "epoch": 0.0009780151366180175,
      "grad_norm": 1.2800588607788086,
      "learning_rate": 1.2e-05,
      "loss": 8.0361,
      "step": 120
    },
    {
      "epoch": 0.001222518920772522,
      "grad_norm": 1.4315314292907715,
      "learning_rate": 1.5e-05,
      "loss": 7.653,
      "step": 150
    },
    {
      "epoch": 0.0014670227049270264,
      "grad_norm": 1.381317377090454,
      "learning_rate": 1.8e-05,
      "loss": 7.4179,
      "step": 180
    },
    {
      "epoch": 0.0017115264890815308,
      "grad_norm": 1.7989460229873657,
      "learning_rate": 2.1e-05,
      "loss": 7.2012,
      "step": 210
    },
    {
      "epoch": 0.001956030273236035,
      "grad_norm": 1.3431414365768433,
      "learning_rate": 2.4e-05,
      "loss": 7.0383,
      "step": 240
    },
    {
      "epoch": 0.0022005340573905395,
      "grad_norm": 1.028826117515564,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 6.866,
      "step": 270
    },
    {
      "epoch": 0.002445037841545044,
      "grad_norm": 1.201025128364563,
      "learning_rate": 3e-05,
      "loss": 6.717,
      "step": 300
    },
    {
      "epoch": 0.0026895416256995483,
      "grad_norm": 1.1023098230361938,
      "learning_rate": 3.3e-05,
      "loss": 6.5535,
      "step": 330
    },
    {
      "epoch": 0.0029340454098540527,
      "grad_norm": 1.2839674949645996,
      "learning_rate": 3.6e-05,
      "loss": 6.4022,
      "step": 360
    },
    {
      "epoch": 0.003178549194008557,
      "grad_norm": 2.267265796661377,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 6.2858,
      "step": 390
    },
    {
      "epoch": 0.0034230529781630616,
      "grad_norm": 1.0635628700256348,
      "learning_rate": 4.2e-05,
      "loss": 6.1681,
      "step": 420
    },
    {
      "epoch": 0.0036675567623175656,
      "grad_norm": 1.263838768005371,
      "learning_rate": 4.5e-05,
      "loss": 6.0728,
      "step": 450
    },
    {
      "epoch": 0.00391206054647207,
      "grad_norm": 1.4611454010009766,
      "learning_rate": 4.8e-05,
      "loss": 5.972,
      "step": 480
    },
    {
      "epoch": 0.0041565643306265745,
      "grad_norm": 1.0120561122894287,
      "learning_rate": 4.999999990869806e-05,
      "loss": 5.8619,
      "step": 510
    },
    {
      "epoch": 0.004401068114781079,
      "grad_norm": 1.1349974870681763,
      "learning_rate": 4.999999853916893e-05,
      "loss": 5.785,
      "step": 540
    },
    {
      "epoch": 0.004645571898935583,
      "grad_norm": 1.0840613842010498,
      "learning_rate": 4.9999995526204936e-05,
      "loss": 5.7071,
      "step": 570
    },
    {
      "epoch": 0.004890075683090088,
      "grad_norm": 1.258074402809143,
      "learning_rate": 4.999999086980628e-05,
      "loss": 5.6199,
      "step": 600
    },
    {
      "epoch": 0.005134579467244592,
      "grad_norm": 1.284726858139038,
      "learning_rate": 4.999998456997326e-05,
      "loss": 5.5465,
      "step": 630
    },
    {
      "epoch": 0.005379083251399097,
      "grad_norm": 1.2079874277114868,
      "learning_rate": 4.999997662670628e-05,
      "loss": 5.4816,
      "step": 660
    },
    {
      "epoch": 0.005623587035553601,
      "grad_norm": 1.3364052772521973,
      "learning_rate": 4.999996704000589e-05,
      "loss": 5.4079,
      "step": 690
    },
    {
      "epoch": 0.0058680908197081055,
      "grad_norm": 0.9860705137252808,
      "learning_rate": 4.99999558098727e-05,
      "loss": 5.3598,
      "step": 720
    },
    {
      "epoch": 0.00611259460386261,
      "grad_norm": 1.2071930170059204,
      "learning_rate": 4.9999942936307445e-05,
      "loss": 5.2884,
      "step": 750
    },
    {
      "epoch": 0.006357098388017114,
      "grad_norm": 0.8959563970565796,
      "learning_rate": 4.9999928419310994e-05,
      "loss": 5.2391,
      "step": 780
    },
    {
      "epoch": 0.006601602172171619,
      "grad_norm": 1.2356096506118774,
      "learning_rate": 4.999991225888427e-05,
      "loss": 5.1879,
      "step": 810
    },
    {
      "epoch": 0.006846105956326123,
      "grad_norm": 0.9705113172531128,
      "learning_rate": 4.999989445502837e-05,
      "loss": 5.1424,
      "step": 840
    },
    {
      "epoch": 0.007090609740480627,
      "grad_norm": 0.9504437446594238,
      "learning_rate": 4.9999875007744436e-05,
      "loss": 5.0966,
      "step": 870
    },
    {
      "epoch": 0.007335113524635131,
      "grad_norm": 0.9488673806190491,
      "learning_rate": 4.9999853917033756e-05,
      "loss": 5.0424,
      "step": 900
    },
    {
      "epoch": 0.007579617308789636,
      "grad_norm": 0.959373950958252,
      "learning_rate": 4.999983118289773e-05,
      "loss": 5.0387,
      "step": 930
    },
    {
      "epoch": 0.00782412109294414,
      "grad_norm": 0.8465414643287659,
      "learning_rate": 4.999980680533782e-05,
      "loss": 4.9769,
      "step": 960
    },
    {
      "epoch": 0.008068624877098645,
      "grad_norm": 0.8328993916511536,
      "learning_rate": 4.999978078435567e-05,
      "loss": 4.9335,
      "step": 990
    },
    {
      "epoch": 0.008313128661253149,
      "grad_norm": 0.8107655644416809,
      "learning_rate": 4.999975311995295e-05,
      "loss": 4.9214,
      "step": 1020
    },
    {
      "epoch": 0.008557632445407654,
      "grad_norm": 0.8149654865264893,
      "learning_rate": 4.99997238121315e-05,
      "loss": 4.8651,
      "step": 1050
    },
    {
      "epoch": 0.008802136229562158,
      "grad_norm": 0.8837414979934692,
      "learning_rate": 4.999969286089325e-05,
      "loss": 4.8327,
      "step": 1080
    },
    {
      "epoch": 0.009046640013716663,
      "grad_norm": 1.1360137462615967,
      "learning_rate": 4.9999660266240235e-05,
      "loss": 4.7906,
      "step": 1110
    },
    {
      "epoch": 0.009291143797871167,
      "grad_norm": 0.7992026209831238,
      "learning_rate": 4.9999626028174585e-05,
      "loss": 4.7612,
      "step": 1140
    },
    {
      "epoch": 0.00953564758202567,
      "grad_norm": 0.8481825590133667,
      "learning_rate": 4.999959014669856e-05,
      "loss": 4.7106,
      "step": 1170
    },
    {
      "epoch": 0.009780151366180176,
      "grad_norm": 0.8183879256248474,
      "learning_rate": 4.9999552621814513e-05,
      "loss": 4.6993,
      "step": 1200
    },
    {
      "epoch": 0.010024655150334679,
      "grad_norm": 0.8460689187049866,
      "learning_rate": 4.9999513453524917e-05,
      "loss": 4.6664,
      "step": 1230
    },
    {
      "epoch": 0.010269158934489184,
      "grad_norm": 0.8723706007003784,
      "learning_rate": 4.9999472641832336e-05,
      "loss": 4.6371,
      "step": 1260
    },
    {
      "epoch": 0.010513662718643688,
      "grad_norm": 0.7682787179946899,
      "learning_rate": 4.999943018673946e-05,
      "loss": 4.6184,
      "step": 1290
    },
    {
      "epoch": 0.010758166502798193,
      "grad_norm": 0.9049955606460571,
      "learning_rate": 4.999938608824909e-05,
      "loss": 4.5968,
      "step": 1320
    },
    {
      "epoch": 0.011002670286952697,
      "grad_norm": 0.7860899567604065,
      "learning_rate": 4.999934034636411e-05,
      "loss": 4.5266,
      "step": 1350
    },
    {
      "epoch": 0.011247174071107202,
      "grad_norm": 0.7918768525123596,
      "learning_rate": 4.999929296108753e-05,
      "loss": 4.5069,
      "step": 1380
    },
    {
      "epoch": 0.011491677855261706,
      "grad_norm": 0.6713089942932129,
      "learning_rate": 4.9999243932422466e-05,
      "loss": 4.4663,
      "step": 1410
    },
    {
      "epoch": 0.011736181639416211,
      "grad_norm": 0.756101131439209,
      "learning_rate": 4.999919326037215e-05,
      "loss": 4.4782,
      "step": 1440
    },
    {
      "epoch": 0.011980685423570715,
      "grad_norm": 0.7067996263504028,
      "learning_rate": 4.99991409449399e-05,
      "loss": 4.4388,
      "step": 1470
    },
    {
      "epoch": 0.01222518920772522,
      "grad_norm": 0.7132194638252258,
      "learning_rate": 4.999908698612916e-05,
      "loss": 4.4135,
      "step": 1500
    },
    {
      "epoch": 0.012469692991879723,
      "grad_norm": 0.7282501459121704,
      "learning_rate": 4.9999031383943486e-05,
      "loss": 4.4057,
      "step": 1530
    },
    {
      "epoch": 0.012714196776034229,
      "grad_norm": 0.6851722598075867,
      "learning_rate": 4.999897413838651e-05,
      "loss": 4.3612,
      "step": 1560
    },
    {
      "epoch": 0.012958700560188732,
      "grad_norm": 0.7953110337257385,
      "learning_rate": 4.999891524946202e-05,
      "loss": 4.3268,
      "step": 1590
    },
    {
      "epoch": 0.013203204344343238,
      "grad_norm": 0.726719319820404,
      "learning_rate": 4.999885471717387e-05,
      "loss": 4.314,
      "step": 1620
    },
    {
      "epoch": 0.013447708128497741,
      "grad_norm": 0.7354792356491089,
      "learning_rate": 4.999879254152605e-05,
      "loss": 4.2807,
      "step": 1650
    },
    {
      "epoch": 0.013692211912652246,
      "grad_norm": 0.7072017788887024,
      "learning_rate": 4.999872872252265e-05,
      "loss": 4.282,
      "step": 1680
    },
    {
      "epoch": 0.01393671569680675,
      "grad_norm": 0.690112292766571,
      "learning_rate": 4.999866326016785e-05,
      "loss": 4.2686,
      "step": 1710
    },
    {
      "epoch": 0.014181219480961254,
      "grad_norm": 0.697634756565094,
      "learning_rate": 4.999859615446596e-05,
      "loss": 4.2662,
      "step": 1740
    },
    {
      "epoch": 0.014425723265115759,
      "grad_norm": 0.6999862790107727,
      "learning_rate": 4.99985274054214e-05,
      "loss": 4.2207,
      "step": 1770
    },
    {
      "epoch": 0.014670227049270262,
      "grad_norm": 0.6920334100723267,
      "learning_rate": 4.999845701303868e-05,
      "loss": 4.2163,
      "step": 1800
    },
    {
      "epoch": 0.014914730833424768,
      "grad_norm": 0.6884493827819824,
      "learning_rate": 4.999838497732243e-05,
      "loss": 4.209,
      "step": 1830
    },
    {
      "epoch": 0.015159234617579271,
      "grad_norm": 0.776447057723999,
      "learning_rate": 4.999831129827739e-05,
      "loss": 4.1856,
      "step": 1860
    },
    {
      "epoch": 0.015403738401733777,
      "grad_norm": 0.7332949042320251,
      "learning_rate": 4.9998235975908394e-05,
      "loss": 4.156,
      "step": 1890
    },
    {
      "epoch": 0.01564824218588828,
      "grad_norm": 0.6691558361053467,
      "learning_rate": 4.99981590102204e-05,
      "loss": 4.1339,
      "step": 1920
    },
    {
      "epoch": 0.015892745970042785,
      "grad_norm": 0.6789201498031616,
      "learning_rate": 4.9998080401218464e-05,
      "loss": 4.1344,
      "step": 1950
    },
    {
      "epoch": 0.01613724975419729,
      "grad_norm": 0.6654175519943237,
      "learning_rate": 4.999800014890777e-05,
      "loss": 4.1295,
      "step": 1980
    },
    {
      "epoch": 0.016381753538351793,
      "grad_norm": 0.6632519364356995,
      "learning_rate": 4.9997918253293555e-05,
      "loss": 4.1036,
      "step": 2010
    },
    {
      "epoch": 0.016626257322506298,
      "grad_norm": 0.6618740558624268,
      "learning_rate": 4.999783471438124e-05,
      "loss": 4.0857,
      "step": 2040
    },
    {
      "epoch": 0.016870761106660803,
      "grad_norm": 0.6384952068328857,
      "learning_rate": 4.999774953217631e-05,
      "loss": 4.0755,
      "step": 2070
    },
    {
      "epoch": 0.01711526489081531,
      "grad_norm": 0.6808215975761414,
      "learning_rate": 4.9997662706684345e-05,
      "loss": 4.0568,
      "step": 2100
    },
    {
      "epoch": 0.01735976867496981,
      "grad_norm": 0.6514068841934204,
      "learning_rate": 4.999757423791107e-05,
      "loss": 4.0474,
      "step": 2130
    },
    {
      "epoch": 0.017604272459124316,
      "grad_norm": 0.6691142320632935,
      "learning_rate": 4.9997484125862306e-05,
      "loss": 4.0467,
      "step": 2160
    },
    {
      "epoch": 0.01784877624327882,
      "grad_norm": 0.6240984201431274,
      "learning_rate": 4.999739237054395e-05,
      "loss": 4.0259,
      "step": 2190
    },
    {
      "epoch": 0.018093280027433326,
      "grad_norm": 0.6286123991012573,
      "learning_rate": 4.9997298971962065e-05,
      "loss": 4.0098,
      "step": 2220
    },
    {
      "epoch": 0.018337783811587828,
      "grad_norm": 0.6232910752296448,
      "learning_rate": 4.999720393012277e-05,
      "loss": 4.0203,
      "step": 2250
    },
    {
      "epoch": 0.018582287595742333,
      "grad_norm": 0.6792376041412354,
      "learning_rate": 4.999710724503233e-05,
      "loss": 3.9909,
      "step": 2280
    },
    {
      "epoch": 0.01882679137989684,
      "grad_norm": 0.6824718117713928,
      "learning_rate": 4.9997008916697075e-05,
      "loss": 3.9922,
      "step": 2310
    },
    {
      "epoch": 0.01907129516405134,
      "grad_norm": 0.6520410776138306,
      "learning_rate": 4.999690894512349e-05,
      "loss": 3.9533,
      "step": 2340
    },
    {
      "epoch": 0.019315798948205846,
      "grad_norm": 0.6467030048370361,
      "learning_rate": 4.999680733031814e-05,
      "loss": 3.9464,
      "step": 2370
    },
    {
      "epoch": 0.01956030273236035,
      "grad_norm": 0.6356080770492554,
      "learning_rate": 4.9996704072287716e-05,
      "loss": 3.9288,
      "step": 2400
    },
    {
      "epoch": 0.019804806516514856,
      "grad_norm": 0.6498362421989441,
      "learning_rate": 4.9996599171038984e-05,
      "loss": 3.9345,
      "step": 2430
    },
    {
      "epoch": 0.020049310300669358,
      "grad_norm": 0.6107562184333801,
      "learning_rate": 4.999649262657886e-05,
      "loss": 3.9368,
      "step": 2460
    },
    {
      "epoch": 0.020293814084823864,
      "grad_norm": 0.6478250622749329,
      "learning_rate": 4.999638443891434e-05,
      "loss": 3.9273,
      "step": 2490
    },
    {
      "epoch": 0.02053831786897837,
      "grad_norm": 0.631263256072998,
      "learning_rate": 4.999627460805253e-05,
      "loss": 3.9136,
      "step": 2520
    },
    {
      "epoch": 0.020782821653132874,
      "grad_norm": 0.6120920777320862,
      "learning_rate": 4.999616313400066e-05,
      "loss": 3.8834,
      "step": 2550
    },
    {
      "epoch": 0.021027325437287376,
      "grad_norm": 0.6151197552680969,
      "learning_rate": 4.999605001676605e-05,
      "loss": 3.8994,
      "step": 2580
    },
    {
      "epoch": 0.02127182922144188,
      "grad_norm": 0.623715877532959,
      "learning_rate": 4.9995935256356144e-05,
      "loss": 3.8929,
      "step": 2610
    },
    {
      "epoch": 0.021516333005596387,
      "grad_norm": 0.6688769459724426,
      "learning_rate": 4.9995818852778476e-05,
      "loss": 3.8499,
      "step": 2640
    },
    {
      "epoch": 0.021760836789750892,
      "grad_norm": 0.6272155046463013,
      "learning_rate": 4.999570080604071e-05,
      "loss": 3.8861,
      "step": 2670
    },
    {
      "epoch": 0.022005340573905394,
      "grad_norm": 0.597653329372406,
      "learning_rate": 4.99955811161506e-05,
      "loss": 3.8674,
      "step": 2700
    },
    {
      "epoch": 0.0222498443580599,
      "grad_norm": 0.5608483552932739,
      "learning_rate": 4.9995459783116004e-05,
      "loss": 3.8493,
      "step": 2730
    },
    {
      "epoch": 0.022494348142214404,
      "grad_norm": 0.5991063117980957,
      "learning_rate": 4.999533680694493e-05,
      "loss": 3.8454,
      "step": 2760
    },
    {
      "epoch": 0.02273885192636891,
      "grad_norm": 0.5738102793693542,
      "learning_rate": 4.9995212187645416e-05,
      "loss": 3.8395,
      "step": 2790
    },
    {
      "epoch": 0.02298335571052341,
      "grad_norm": 0.6145568490028381,
      "learning_rate": 4.9995085925225693e-05,
      "loss": 3.8313,
      "step": 2820
    },
    {
      "epoch": 0.023227859494677917,
      "grad_norm": 0.6019515991210938,
      "learning_rate": 4.999495801969404e-05,
      "loss": 3.8277,
      "step": 2850
    },
    {
      "epoch": 0.023472363278832422,
      "grad_norm": 0.6177758574485779,
      "learning_rate": 4.9994828471058876e-05,
      "loss": 3.8083,
      "step": 2880
    },
    {
      "epoch": 0.023716867062986924,
      "grad_norm": 0.5616528987884521,
      "learning_rate": 4.9994697279328714e-05,
      "loss": 3.8114,
      "step": 2910
    },
    {
      "epoch": 0.02396137084714143,
      "grad_norm": 0.6194447875022888,
      "learning_rate": 4.9994564444512176e-05,
      "loss": 3.8183,
      "step": 2940
    },
    {
      "epoch": 0.024205874631295934,
      "grad_norm": 0.578895092010498,
      "learning_rate": 4.9994429966618e-05,
      "loss": 3.7871,
      "step": 2970
    },
    {
      "epoch": 0.02445037841545044,
      "grad_norm": 0.6014060378074646,
      "learning_rate": 4.999429384565502e-05,
      "loss": 3.7711,
      "step": 3000
    },
    {
      "epoch": 0.02469488219960494,
      "grad_norm": 0.5589067339897156,
      "learning_rate": 4.999415608163217e-05,
      "loss": 3.7533,
      "step": 3030
    },
    {
      "epoch": 0.024939385983759447,
      "grad_norm": 0.5722873210906982,
      "learning_rate": 4.999401667455854e-05,
      "loss": 3.7585,
      "step": 3060
    },
    {
      "epoch": 0.025183889767913952,
      "grad_norm": 0.5734965801239014,
      "learning_rate": 4.9993875624443274e-05,
      "loss": 3.77,
      "step": 3090
    },
    {
      "epoch": 0.025428393552068457,
      "grad_norm": 0.5524207353591919,
      "learning_rate": 4.9993732931295646e-05,
      "loss": 3.718,
      "step": 3120
    },
    {
      "epoch": 0.02567289733622296,
      "grad_norm": 0.5776082277297974,
      "learning_rate": 4.999358859512503e-05,
      "loss": 3.7573,
      "step": 3150
    },
    {
      "epoch": 0.025917401120377465,
      "grad_norm": 0.5799595713615417,
      "learning_rate": 4.9993442615940936e-05,
      "loss": 3.7552,
      "step": 3180
    },
    {
      "epoch": 0.02616190490453197,
      "grad_norm": 0.5820346474647522,
      "learning_rate": 4.999329499375292e-05,
      "loss": 3.7394,
      "step": 3210
    },
    {
      "epoch": 0.026406408688686475,
      "grad_norm": 0.5450282096862793,
      "learning_rate": 4.999314572857074e-05,
      "loss": 3.7393,
      "step": 3240
    },
    {
      "epoch": 0.026650912472840977,
      "grad_norm": 0.6213249564170837,
      "learning_rate": 4.9992994820404174e-05,
      "loss": 3.7191,
      "step": 3270
    },
    {
      "epoch": 0.026895416256995482,
      "grad_norm": 0.5940688848495483,
      "learning_rate": 4.999284226926314e-05,
      "loss": 3.719,
      "step": 3300
    },
    {
      "epoch": 0.027139920041149988,
      "grad_norm": 0.5779993534088135,
      "learning_rate": 4.999268807515768e-05,
      "loss": 3.7092,
      "step": 3330
    },
    {
      "epoch": 0.027384423825304493,
      "grad_norm": 0.5899255871772766,
      "learning_rate": 4.999253223809792e-05,
      "loss": 3.6939,
      "step": 3360
    },
    {
      "epoch": 0.027628927609458995,
      "grad_norm": 0.6330375671386719,
      "learning_rate": 4.999237475809411e-05,
      "loss": 3.7102,
      "step": 3390
    },
    {
      "epoch": 0.0278734313936135,
      "grad_norm": 0.5771914720535278,
      "learning_rate": 4.99922156351566e-05,
      "loss": 3.7196,
      "step": 3420
    },
    {
      "epoch": 0.028117935177768005,
      "grad_norm": 0.5769143104553223,
      "learning_rate": 4.999205486929586e-05,
      "loss": 3.7127,
      "step": 3450
    },
    {
      "epoch": 0.028362438961922507,
      "grad_norm": 0.5581954121589661,
      "learning_rate": 4.999189246052245e-05,
      "loss": 3.689,
      "step": 3480
    },
    {
      "epoch": 0.028606942746077012,
      "grad_norm": 0.6041043400764465,
      "learning_rate": 4.999172840884704e-05,
      "loss": 3.6831,
      "step": 3510
    },
    {
      "epoch": 0.028851446530231518,
      "grad_norm": 0.5458335280418396,
      "learning_rate": 4.999156271428043e-05,
      "loss": 3.6694,
      "step": 3540
    },
    {
      "epoch": 0.029095950314386023,
      "grad_norm": 0.5470607280731201,
      "learning_rate": 4.9991395376833496e-05,
      "loss": 3.6702,
      "step": 3570
    },
    {
      "epoch": 0.029340454098540525,
      "grad_norm": 0.5878787040710449,
      "learning_rate": 4.999122639651725e-05,
      "loss": 3.6492,
      "step": 3600
    },
    {
      "epoch": 0.02958495788269503,
      "grad_norm": 0.5691691637039185,
      "learning_rate": 4.9991055773342795e-05,
      "loss": 3.6812,
      "step": 3630
    },
    {
      "epoch": 0.029829461666849535,
      "grad_norm": 0.5548356771469116,
      "learning_rate": 4.9990883507321354e-05,
      "loss": 3.645,
      "step": 3660
    },
    {
      "epoch": 0.03007396545100404,
      "grad_norm": 0.5686156153678894,
      "learning_rate": 4.999070959846424e-05,
      "loss": 3.6505,
      "step": 3690
    },
    {
      "epoch": 0.030318469235158543,
      "grad_norm": 0.5596534013748169,
      "learning_rate": 4.999053404678289e-05,
      "loss": 3.6532,
      "step": 3720
    },
    {
      "epoch": 0.030562973019313048,
      "grad_norm": 0.54421067237854,
      "learning_rate": 4.999035685228884e-05,
      "loss": 3.6238,
      "step": 3750
    },
    {
      "epoch": 0.030807476803467553,
      "grad_norm": 0.56732177734375,
      "learning_rate": 4.999017801499375e-05,
      "loss": 3.6449,
      "step": 3780
    },
    {
      "epoch": 0.03105198058762206,
      "grad_norm": 0.5608410239219666,
      "learning_rate": 4.998999753490937e-05,
      "loss": 3.6344,
      "step": 3810
    },
    {
      "epoch": 0.03129648437177656,
      "grad_norm": 0.5919491648674011,
      "learning_rate": 4.998981541204757e-05,
      "loss": 3.6213,
      "step": 3840
    },
    {
      "epoch": 0.031540988155931066,
      "grad_norm": 0.5795233249664307,
      "learning_rate": 4.998963164642031e-05,
      "loss": 3.6239,
      "step": 3870
    },
    {
      "epoch": 0.03178549194008557,
      "grad_norm": 0.5669205784797668,
      "learning_rate": 4.9989446238039676e-05,
      "loss": 3.5831,
      "step": 3900
    },
    {
      "epoch": 0.032029995724240076,
      "grad_norm": 0.5817368626594543,
      "learning_rate": 4.998925918691786e-05,
      "loss": 3.6074,
      "step": 3930
    },
    {
      "epoch": 0.03227449950839458,
      "grad_norm": 0.5662333369255066,
      "learning_rate": 4.998907049306715e-05,
      "loss": 3.5874,
      "step": 3960
    },
    {
      "epoch": 0.03251900329254909,
      "grad_norm": 0.5641735792160034,
      "learning_rate": 4.998888015649996e-05,
      "loss": 3.6056,
      "step": 3990
    },
    {
      "epoch": 0.032763507076703585,
      "grad_norm": 0.524918794631958,
      "learning_rate": 4.99886881772288e-05,
      "loss": 3.5963,
      "step": 4020
    },
    {
      "epoch": 0.03300801086085809,
      "grad_norm": 0.5625722408294678,
      "learning_rate": 4.998849455526628e-05,
      "loss": 3.5917,
      "step": 4050
    },
    {
      "epoch": 0.033252514645012596,
      "grad_norm": 0.5612478852272034,
      "learning_rate": 4.998829929062515e-05,
      "loss": 3.5792,
      "step": 4080
    },
    {
      "epoch": 0.0334970184291671,
      "grad_norm": 0.5586293935775757,
      "learning_rate": 4.998810238331822e-05,
      "loss": 3.5708,
      "step": 4110
    },
    {
      "epoch": 0.033741522213321606,
      "grad_norm": 0.53324955701828,
      "learning_rate": 4.998790383335845e-05,
      "loss": 3.5686,
      "step": 4140
    },
    {
      "epoch": 0.03398602599747611,
      "grad_norm": 0.5210742950439453,
      "learning_rate": 4.9987703640758894e-05,
      "loss": 3.575,
      "step": 4170
    },
    {
      "epoch": 0.03423052978163062,
      "grad_norm": 0.5591189861297607,
      "learning_rate": 4.99875018055327e-05,
      "loss": 3.5717,
      "step": 4200
    },
    {
      "epoch": 0.034475033565785115,
      "grad_norm": 0.5435970425605774,
      "learning_rate": 4.998729832769315e-05,
      "loss": 3.5638,
      "step": 4230
    },
    {
      "epoch": 0.03471953734993962,
      "grad_norm": 0.5489551424980164,
      "learning_rate": 4.998709320725361e-05,
      "loss": 3.574,
      "step": 4260
    },
    {
      "epoch": 0.034964041134094126,
      "grad_norm": 0.5275290012359619,
      "learning_rate": 4.998688644422756e-05,
      "loss": 3.5695,
      "step": 4290
    },
    {
      "epoch": 0.03520854491824863,
      "grad_norm": 0.583881139755249,
      "learning_rate": 4.998667803862861e-05,
      "loss": 3.5703,
      "step": 4320
    },
    {
      "epoch": 0.03545304870240314,
      "grad_norm": 0.5317121744155884,
      "learning_rate": 4.9986467990470445e-05,
      "loss": 3.5668,
      "step": 4350
    },
    {
      "epoch": 0.03569755248655764,
      "grad_norm": 0.5981696248054504,
      "learning_rate": 4.998625629976688e-05,
      "loss": 3.5411,
      "step": 4380
    },
    {
      "epoch": 0.03594205627071215,
      "grad_norm": 0.55133455991745,
      "learning_rate": 4.998604296653182e-05,
      "loss": 3.521,
      "step": 4410
    },
    {
      "epoch": 0.03618656005486665,
      "grad_norm": 0.5481094717979431,
      "learning_rate": 4.99858279907793e-05,
      "loss": 3.5421,
      "step": 4440
    },
    {
      "epoch": 0.03643106383902115,
      "grad_norm": 0.5218031406402588,
      "learning_rate": 4.998561137252346e-05,
      "loss": 3.5305,
      "step": 4470
    },
    {
      "epoch": 0.036675567623175656,
      "grad_norm": 0.5458360910415649,
      "learning_rate": 4.9985393111778525e-05,
      "loss": 3.5332,
      "step": 4500
    },
    {
      "epoch": 0.03692007140733016,
      "grad_norm": 0.5501233339309692,
      "learning_rate": 4.998517320855884e-05,
      "loss": 3.5485,
      "step": 4530
    },
    {
      "epoch": 0.03716457519148467,
      "grad_norm": 0.5359978675842285,
      "learning_rate": 4.998495166287887e-05,
      "loss": 3.534,
      "step": 4560
    },
    {
      "epoch": 0.03740907897563917,
      "grad_norm": 0.5447133183479309,
      "learning_rate": 4.998472847475318e-05,
      "loss": 3.5176,
      "step": 4590
    },
    {
      "epoch": 0.03765358275979368,
      "grad_norm": 0.5201069712638855,
      "learning_rate": 4.998450364419643e-05,
      "loss": 3.518,
      "step": 4620
    },
    {
      "epoch": 0.03789808654394818,
      "grad_norm": 0.5251840353012085,
      "learning_rate": 4.998427717122342e-05,
      "loss": 3.5021,
      "step": 4650
    },
    {
      "epoch": 0.03814259032810268,
      "grad_norm": 0.5204648375511169,
      "learning_rate": 4.9984049055849024e-05,
      "loss": 3.505,
      "step": 4680
    },
    {
      "epoch": 0.038387094112257186,
      "grad_norm": 0.5137141942977905,
      "learning_rate": 4.9983819298088234e-05,
      "loss": 3.4997,
      "step": 4710
    },
    {
      "epoch": 0.03863159789641169,
      "grad_norm": 0.5232805609703064,
      "learning_rate": 4.9983587897956166e-05,
      "loss": 3.5049,
      "step": 4740
    },
    {
      "epoch": 0.0388761016805662,
      "grad_norm": 0.5415229201316833,
      "learning_rate": 4.998335485546802e-05,
      "loss": 3.5123,
      "step": 4770
    },
    {
      "epoch": 0.0391206054647207,
      "grad_norm": 0.5097187161445618,
      "learning_rate": 4.998312017063912e-05,
      "loss": 3.4839,
      "step": 4800
    },
    {
      "epoch": 0.03936510924887521,
      "grad_norm": 0.5637670755386353,
      "learning_rate": 4.9982883843484895e-05,
      "loss": 3.5084,
      "step": 4830
    },
    {
      "epoch": 0.03960961303302971,
      "grad_norm": 0.5436129570007324,
      "learning_rate": 4.998264587402088e-05,
      "loss": 3.5184,
      "step": 4860
    },
    {
      "epoch": 0.03985411681718422,
      "grad_norm": 0.5133230090141296,
      "learning_rate": 4.9982406262262726e-05,
      "loss": 3.48,
      "step": 4890
    },
    {
      "epoch": 0.040098620601338716,
      "grad_norm": 0.5678510069847107,
      "learning_rate": 4.9982165008226175e-05,
      "loss": 3.4899,
      "step": 4920
    },
    {
      "epoch": 0.04034312438549322,
      "grad_norm": 0.5121597647666931,
      "learning_rate": 4.998192211192708e-05,
      "loss": 3.4942,
      "step": 4950
    },
    {
      "epoch": 0.04058762816964773,
      "grad_norm": 0.537929117679596,
      "learning_rate": 4.9981677573381423e-05,
      "loss": 3.4721,
      "step": 4980
    },
    {
      "epoch": 0.04083213195380223,
      "grad_norm": 0.537289023399353,
      "learning_rate": 4.9981431392605274e-05,
      "loss": 3.4919,
      "step": 5010
    },
    {
      "epoch": 0.04107663573795674,
      "grad_norm": 0.5594364404678345,
      "learning_rate": 4.998118356961481e-05,
      "loss": 3.4665,
      "step": 5040
    },
    {
      "epoch": 0.04132113952211124,
      "grad_norm": 0.5009840726852417,
      "learning_rate": 4.998093410442632e-05,
      "loss": 3.4524,
      "step": 5070
    },
    {
      "epoch": 0.04156564330626575,
      "grad_norm": 0.53801429271698,
      "learning_rate": 4.998068299705623e-05,
      "loss": 3.4629,
      "step": 5100
    },
    {
      "epoch": 0.041810147090420254,
      "grad_norm": 0.5134426355361938,
      "learning_rate": 4.9980430247521014e-05,
      "loss": 3.4504,
      "step": 5130
    },
    {
      "epoch": 0.04205465087457475,
      "grad_norm": 0.5095092058181763,
      "learning_rate": 4.998017585583731e-05,
      "loss": 3.4684,
      "step": 5160
    },
    {
      "epoch": 0.04229915465872926,
      "grad_norm": 0.5233584642410278,
      "learning_rate": 4.9979919822021824e-05,
      "loss": 3.4726,
      "step": 5190
    },
    {
      "epoch": 0.04254365844288376,
      "grad_norm": 0.5375773906707764,
      "learning_rate": 4.99796621460914e-05,
      "loss": 3.4502,
      "step": 5220
    },
    {
      "epoch": 0.04278816222703827,
      "grad_norm": 0.5292348265647888,
      "learning_rate": 4.9979402828062963e-05,
      "loss": 3.4494,
      "step": 5250
    },
    {
      "epoch": 0.04303266601119277,
      "grad_norm": 0.5170340538024902,
      "learning_rate": 4.997914186795358e-05,
      "loss": 3.4671,
      "step": 5280
    },
    {
      "epoch": 0.04327716979534728,
      "grad_norm": 0.5027357339859009,
      "learning_rate": 4.9978879265780385e-05,
      "loss": 3.4424,
      "step": 5310
    },
    {
      "epoch": 0.043521673579501784,
      "grad_norm": 0.4898991584777832,
      "learning_rate": 4.997861502156066e-05,
      "loss": 3.4515,
      "step": 5340
    },
    {
      "epoch": 0.04376617736365628,
      "grad_norm": 0.5362656712532043,
      "learning_rate": 4.997834913531176e-05,
      "loss": 3.4209,
      "step": 5370
    },
    {
      "epoch": 0.04401068114781079,
      "grad_norm": 0.5150277614593506,
      "learning_rate": 4.9978081607051176e-05,
      "loss": 3.4575,
      "step": 5400
    },
    {
      "epoch": 0.04425518493196529,
      "grad_norm": 0.5086013674736023,
      "learning_rate": 4.997781243679648e-05,
      "loss": 3.4377,
      "step": 5430
    },
    {
      "epoch": 0.0444996887161198,
      "grad_norm": 0.538194477558136,
      "learning_rate": 4.9977541624565374e-05,
      "loss": 3.4282,
      "step": 5460
    },
    {
      "epoch": 0.0447441925002743,
      "grad_norm": 0.5045614838600159,
      "learning_rate": 4.9977269170375665e-05,
      "loss": 3.4109,
      "step": 5490
    },
    {
      "epoch": 0.04498869628442881,
      "grad_norm": 0.5368480086326599,
      "learning_rate": 4.997699507424526e-05,
      "loss": 3.4341,
      "step": 5520
    },
    {
      "epoch": 0.045233200068583314,
      "grad_norm": 0.6185830235481262,
      "learning_rate": 4.997671933619218e-05,
      "loss": 3.4203,
      "step": 5550
    },
    {
      "epoch": 0.04547770385273782,
      "grad_norm": 0.4984918534755707,
      "learning_rate": 4.9976441956234546e-05,
      "loss": 3.4309,
      "step": 5580
    },
    {
      "epoch": 0.04572220763689232,
      "grad_norm": 0.5066754221916199,
      "learning_rate": 4.99761629343906e-05,
      "loss": 3.3986,
      "step": 5610
    },
    {
      "epoch": 0.04596671142104682,
      "grad_norm": 0.5132448673248291,
      "learning_rate": 4.9975882270678676e-05,
      "loss": 3.4126,
      "step": 5640
    },
    {
      "epoch": 0.04621121520520133,
      "grad_norm": 0.5501627922058105,
      "learning_rate": 4.997559996511723e-05,
      "loss": 3.4057,
      "step": 5670
    },
    {
      "epoch": 0.04645571898935583,
      "grad_norm": 0.4679185152053833,
      "learning_rate": 4.997531601772481e-05,
      "loss": 3.428,
      "step": 5700
    },
    {
      "epoch": 0.04670022277351034,
      "grad_norm": 0.5020308494567871,
      "learning_rate": 4.9975030428520084e-05,
      "loss": 3.3977,
      "step": 5730
    },
    {
      "epoch": 0.046944726557664844,
      "grad_norm": 0.5141638517379761,
      "learning_rate": 4.997474319752184e-05,
      "loss": 3.4044,
      "step": 5760
    },
    {
      "epoch": 0.04718923034181935,
      "grad_norm": 0.5127893090248108,
      "learning_rate": 4.997445432474895e-05,
      "loss": 3.4071,
      "step": 5790
    },
    {
      "epoch": 0.04743373412597385,
      "grad_norm": 0.5528402924537659,
      "learning_rate": 4.9974163810220406e-05,
      "loss": 3.3891,
      "step": 5820
    },
    {
      "epoch": 0.04767823791012835,
      "grad_norm": 0.5092859864234924,
      "learning_rate": 4.99738716539553e-05,
      "loss": 3.4118,
      "step": 5850
    },
    {
      "epoch": 0.04792274169428286,
      "grad_norm": 0.49626457691192627,
      "learning_rate": 4.997357785597284e-05,
      "loss": 3.3834,
      "step": 5880
    },
    {
      "epoch": 0.048167245478437364,
      "grad_norm": 0.5132192969322205,
      "learning_rate": 4.997328241629234e-05,
      "loss": 3.3944,
      "step": 5910
    },
    {
      "epoch": 0.04841174926259187,
      "grad_norm": 0.5104256868362427,
      "learning_rate": 4.997298533493323e-05,
      "loss": 3.4001,
      "step": 5940
    },
    {
      "epoch": 0.048656253046746374,
      "grad_norm": 0.507205069065094,
      "learning_rate": 4.997268661191503e-05,
      "loss": 3.3867,
      "step": 5970
    },
    {
      "epoch": 0.04890075683090088,
      "grad_norm": 0.6091153025627136,
      "learning_rate": 4.9972386247257385e-05,
      "loss": 3.3984,
      "step": 6000
    },
    {
      "epoch": 0.049145260615055385,
      "grad_norm": 0.4803309142589569,
      "learning_rate": 4.9972084240980025e-05,
      "loss": 3.3703,
      "step": 6030
    },
    {
      "epoch": 0.04938976439920988,
      "grad_norm": 0.515164852142334,
      "learning_rate": 4.997178059310281e-05,
      "loss": 3.3733,
      "step": 6060
    },
    {
      "epoch": 0.04963426818336439,
      "grad_norm": 0.515275776386261,
      "learning_rate": 4.997147530364571e-05,
      "loss": 3.3802,
      "step": 6090
    },
    {
      "epoch": 0.049878771967518894,
      "grad_norm": 0.5258405208587646,
      "learning_rate": 4.9971168372628793e-05,
      "loss": 3.3705,
      "step": 6120
    },
    {
      "epoch": 0.0501232757516734,
      "grad_norm": 0.49037066102027893,
      "learning_rate": 4.997085980007222e-05,
      "loss": 3.37,
      "step": 6150
    },
    {
      "epoch": 0.050367779535827904,
      "grad_norm": 0.47182220220565796,
      "learning_rate": 4.99705495859963e-05,
      "loss": 3.3705,
      "step": 6180
    },
    {
      "epoch": 0.05061228331998241,
      "grad_norm": 0.4967211186885834,
      "learning_rate": 4.99702377304214e-05,
      "loss": 3.3743,
      "step": 6210
    },
    {
      "epoch": 0.050856787104136915,
      "grad_norm": 0.7469291090965271,
      "learning_rate": 4.9969924233368036e-05,
      "loss": 3.3732,
      "step": 6240
    },
    {
      "epoch": 0.05110129088829142,
      "grad_norm": 0.5237293839454651,
      "learning_rate": 4.996960909485681e-05,
      "loss": 3.3646,
      "step": 6270
    },
    {
      "epoch": 0.05134579467244592,
      "grad_norm": 0.48839274048805237,
      "learning_rate": 4.9969292314908446e-05,
      "loss": 3.3665,
      "step": 6300
    },
    {
      "epoch": 0.051590298456600424,
      "grad_norm": 0.48733407258987427,
      "learning_rate": 4.996897389354376e-05,
      "loss": 3.3646,
      "step": 6330
    },
    {
      "epoch": 0.05183480224075493,
      "grad_norm": 0.4938340187072754,
      "learning_rate": 4.9968653830783684e-05,
      "loss": 3.3542,
      "step": 6360
    },
    {
      "epoch": 0.052079306024909434,
      "grad_norm": 0.5001193284988403,
      "learning_rate": 4.996833212664927e-05,
      "loss": 3.3583,
      "step": 6390
    },
    {
      "epoch": 0.05232380980906394,
      "grad_norm": 0.4772029221057892,
      "learning_rate": 4.996800878116166e-05,
      "loss": 3.336,
      "step": 6420
    },
    {
      "epoch": 0.052568313593218445,
      "grad_norm": 0.49077659845352173,
      "learning_rate": 4.99676837943421e-05,
      "loss": 3.3681,
      "step": 6450
    },
    {
      "epoch": 0.05281281737737295,
      "grad_norm": 0.5102148056030273,
      "learning_rate": 4.996735716621196e-05,
      "loss": 3.3437,
      "step": 6480
    },
    {
      "epoch": 0.05305732116152745,
      "grad_norm": 0.5012289881706238,
      "learning_rate": 4.996702889679272e-05,
      "loss": 3.3536,
      "step": 6510
    },
    {
      "epoch": 0.053301824945681954,
      "grad_norm": 0.4870162308216095,
      "learning_rate": 4.996669898610595e-05,
      "loss": 3.3513,
      "step": 6540
    },
    {
      "epoch": 0.05354632872983646,
      "grad_norm": 0.48452699184417725,
      "learning_rate": 4.996636743417334e-05,
      "loss": 3.3415,
      "step": 6570
    },
    {
      "epoch": 0.053790832513990965,
      "grad_norm": 0.4973999559879303,
      "learning_rate": 4.996603424101669e-05,
      "loss": 3.3304,
      "step": 6600
    },
    {
      "epoch": 0.05403533629814547,
      "grad_norm": 0.4833717942237854,
      "learning_rate": 4.996569940665789e-05,
      "loss": 3.3424,
      "step": 6630
    },
    {
      "epoch": 0.054279840082299975,
      "grad_norm": 0.5074206590652466,
      "learning_rate": 4.996536293111896e-05,
      "loss": 3.3302,
      "step": 6660
    },
    {
      "epoch": 0.05452434386645448,
      "grad_norm": 0.4932290315628052,
      "learning_rate": 4.996502481442202e-05,
      "loss": 3.3388,
      "step": 6690
    },
    {
      "epoch": 0.054768847650608986,
      "grad_norm": 0.48740679025650024,
      "learning_rate": 4.9964685056589314e-05,
      "loss": 3.3182,
      "step": 6720
    },
    {
      "epoch": 0.055013351434763484,
      "grad_norm": 0.48752760887145996,
      "learning_rate": 4.996434365764314e-05,
      "loss": 3.3065,
      "step": 6750
    },
    {
      "epoch": 0.05525785521891799,
      "grad_norm": 0.50692218542099,
      "learning_rate": 4.996400061760597e-05,
      "loss": 3.3379,
      "step": 6780
    },
    {
      "epoch": 0.055502359003072495,
      "grad_norm": 0.479159414768219,
      "learning_rate": 4.996365593650033e-05,
      "loss": 3.3317,
      "step": 6810
    },
    {
      "epoch": 0.055746862787227,
      "grad_norm": 0.498662531375885,
      "learning_rate": 4.99633096143489e-05,
      "loss": 3.3306,
      "step": 6840
    },
    {
      "epoch": 0.055991366571381505,
      "grad_norm": 1.4371449947357178,
      "learning_rate": 4.9962961651174436e-05,
      "loss": 3.3334,
      "step": 6870
    },
    {
      "epoch": 0.05623587035553601,
      "grad_norm": 0.49862873554229736,
      "learning_rate": 4.9962612046999827e-05,
      "loss": 3.3142,
      "step": 6900
    },
    {
      "epoch": 0.056480374139690516,
      "grad_norm": 0.4759610593318939,
      "learning_rate": 4.996226080184803e-05,
      "loss": 3.3238,
      "step": 6930
    },
    {
      "epoch": 0.056724877923845014,
      "grad_norm": 0.4844242334365845,
      "learning_rate": 4.996190791574215e-05,
      "loss": 3.3197,
      "step": 6960
    },
    {
      "epoch": 0.05696938170799952,
      "grad_norm": 0.46844130754470825,
      "learning_rate": 4.996155338870538e-05,
      "loss": 3.2949,
      "step": 6990
    },
    {
      "epoch": 0.057213885492154025,
      "grad_norm": 0.4850478768348694,
      "learning_rate": 4.9961197220761035e-05,
      "loss": 3.3143,
      "step": 7020
    },
    {
      "epoch": 0.05745838927630853,
      "grad_norm": 0.4838846027851105,
      "learning_rate": 4.996083941193252e-05,
      "loss": 3.3015,
      "step": 7050
    },
    {
      "epoch": 0.057702893060463036,
      "grad_norm": 0.49992483854293823,
      "learning_rate": 4.9960479962243367e-05,
      "loss": 3.3099,
      "step": 7080
    },
    {
      "epoch": 0.05794739684461754,
      "grad_norm": 0.49964553117752075,
      "learning_rate": 4.996011887171719e-05,
      "loss": 3.3046,
      "step": 7110
    },
    {
      "epoch": 0.058191900628772046,
      "grad_norm": 0.4723115563392639,
      "learning_rate": 4.995975614037773e-05,
      "loss": 3.3009,
      "step": 7140
    },
    {
      "epoch": 0.05843640441292655,
      "grad_norm": 0.48575958609580994,
      "learning_rate": 4.995939176824883e-05,
      "loss": 3.3018,
      "step": 7170
    },
    {
      "epoch": 0.05868090819708105,
      "grad_norm": 0.5264491438865662,
      "learning_rate": 4.995902575535446e-05,
      "loss": 3.2877,
      "step": 7200
    }
  ],
  "logging_steps": 30,
  "max_steps": 368091,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 100,
  "total_flos": 6.887340836388864e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}