{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.9977298524404086,
  "eval_steps": 500,
  "global_step": 660,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.003026863412788498,
      "grad_norm": 3.246234368297727,
      "learning_rate": 3.0303030303030305e-07,
      "loss": 0.6692,
      "step": 1
    },
    {
      "epoch": 0.006053726825576996,
      "grad_norm": 3.519523982551671,
      "learning_rate": 6.060606060606061e-07,
      "loss": 0.7362,
      "step": 2
    },
    {
      "epoch": 0.009080590238365494,
      "grad_norm": 3.119285429594766,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.6832,
      "step": 3
    },
    {
      "epoch": 0.012107453651153992,
      "grad_norm": 2.958794605345133,
      "learning_rate": 1.2121212121212122e-06,
      "loss": 0.6941,
      "step": 4
    },
    {
      "epoch": 0.01513431706394249,
      "grad_norm": 3.4081256121813044,
      "learning_rate": 1.5151515151515152e-06,
      "loss": 0.7021,
      "step": 5
    },
    {
      "epoch": 0.018161180476730987,
      "grad_norm": 2.934334498283987,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.6857,
      "step": 6
    },
    {
      "epoch": 0.021188043889519486,
      "grad_norm": 2.866608314088944,
      "learning_rate": 2.1212121212121216e-06,
      "loss": 0.6767,
      "step": 7
    },
    {
      "epoch": 0.024214907302307985,
      "grad_norm": 2.4403529943238293,
      "learning_rate": 2.4242424242424244e-06,
      "loss": 0.6347,
      "step": 8
    },
    {
      "epoch": 0.02724177071509648,
      "grad_norm": 1.8332788326742038,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.6467,
      "step": 9
    },
    {
      "epoch": 0.03026863412788498,
      "grad_norm": 1.5617546121877062,
      "learning_rate": 3.0303030303030305e-06,
      "loss": 0.6099,
      "step": 10
    },
    {
      "epoch": 0.03329549754067348,
      "grad_norm": 1.5167320092903038,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 0.599,
      "step": 11
    },
    {
      "epoch": 0.036322360953461974,
      "grad_norm": 1.7598970825240179,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.5806,
      "step": 12
    },
    {
      "epoch": 0.03934922436625047,
      "grad_norm": 2.6229679325523265,
      "learning_rate": 3.93939393939394e-06,
      "loss": 0.5883,
      "step": 13
    },
    {
      "epoch": 0.04237608777903897,
      "grad_norm": 2.408407684623741,
      "learning_rate": 4.242424242424243e-06,
      "loss": 0.581,
      "step": 14
    },
    {
      "epoch": 0.04540295119182747,
      "grad_norm": 1.7237597128728994,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.5623,
      "step": 15
    },
    {
      "epoch": 0.04842981460461597,
      "grad_norm": 1.5962159137756355,
      "learning_rate": 4.848484848484849e-06,
      "loss": 0.5778,
      "step": 16
    },
    {
      "epoch": 0.051456678017404466,
      "grad_norm": 1.4352898602460271,
      "learning_rate": 5.151515151515152e-06,
      "loss": 0.5623,
      "step": 17
    },
    {
      "epoch": 0.05448354143019296,
      "grad_norm": 1.5679002346292847,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.5669,
      "step": 18
    },
    {
      "epoch": 0.057510404842981463,
      "grad_norm": 1.4158994227499913,
      "learning_rate": 5.7575757575757586e-06,
      "loss": 0.5489,
      "step": 19
    },
    {
      "epoch": 0.06053726825576996,
      "grad_norm": 1.3128550674563217,
      "learning_rate": 6.060606060606061e-06,
      "loss": 0.5653,
      "step": 20
    },
    {
      "epoch": 0.06356413166855845,
      "grad_norm": 1.0813779193678437,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.5422,
      "step": 21
    },
    {
      "epoch": 0.06659099508134696,
      "grad_norm": 1.2807372186003796,
      "learning_rate": 6.666666666666667e-06,
      "loss": 0.5471,
      "step": 22
    },
    {
      "epoch": 0.06961785849413545,
      "grad_norm": 1.0783194592627532,
      "learning_rate": 6.969696969696971e-06,
      "loss": 0.5265,
      "step": 23
    },
    {
      "epoch": 0.07264472190692395,
      "grad_norm": 1.0032227177910558,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.5219,
      "step": 24
    },
    {
      "epoch": 0.07567158531971245,
      "grad_norm": 1.0332709727731562,
      "learning_rate": 7.5757575757575764e-06,
      "loss": 0.5041,
      "step": 25
    },
    {
      "epoch": 0.07869844873250094,
      "grad_norm": 0.9921372239184776,
      "learning_rate": 7.87878787878788e-06,
      "loss": 0.5312,
      "step": 26
    },
    {
      "epoch": 0.08172531214528944,
      "grad_norm": 0.9202391443585783,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.49,
      "step": 27
    },
    {
      "epoch": 0.08475217555807794,
      "grad_norm": 1.0333497686578816,
      "learning_rate": 8.484848484848486e-06,
      "loss": 0.5199,
      "step": 28
    },
    {
      "epoch": 0.08777903897086645,
      "grad_norm": 1.0532454416938568,
      "learning_rate": 8.787878787878788e-06,
      "loss": 0.4935,
      "step": 29
    },
    {
      "epoch": 0.09080590238365494,
      "grad_norm": 0.9086034032201422,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.5088,
      "step": 30
    },
    {
      "epoch": 0.09383276579644344,
      "grad_norm": 0.9785565294040836,
      "learning_rate": 9.393939393939396e-06,
      "loss": 0.5027,
      "step": 31
    },
    {
      "epoch": 0.09685962920923194,
      "grad_norm": 0.9966577665909482,
      "learning_rate": 9.696969696969698e-06,
      "loss": 0.4996,
      "step": 32
    },
    {
      "epoch": 0.09988649262202043,
      "grad_norm": 0.8707379713464868,
      "learning_rate": 1e-05,
      "loss": 0.4797,
      "step": 33
    },
    {
      "epoch": 0.10291335603480893,
      "grad_norm": 1.0628137571935732,
      "learning_rate": 1.0303030303030304e-05,
      "loss": 0.5101,
      "step": 34
    },
    {
      "epoch": 0.10594021944759743,
      "grad_norm": 0.9465771709574616,
      "learning_rate": 1.0606060606060606e-05,
      "loss": 0.4964,
      "step": 35
    },
    {
      "epoch": 0.10896708286038592,
      "grad_norm": 0.9176273337339736,
      "learning_rate": 1.0909090909090909e-05,
      "loss": 0.5291,
      "step": 36
    },
    {
      "epoch": 0.11199394627317442,
      "grad_norm": 0.9945596905937605,
      "learning_rate": 1.1212121212121212e-05,
      "loss": 0.5095,
      "step": 37
    },
    {
      "epoch": 0.11502080968596293,
      "grad_norm": 1.032380349741403,
      "learning_rate": 1.1515151515151517e-05,
      "loss": 0.4815,
      "step": 38
    },
    {
      "epoch": 0.11804767309875142,
      "grad_norm": 1.0278944269450714,
      "learning_rate": 1.181818181818182e-05,
      "loss": 0.5202,
      "step": 39
    },
    {
      "epoch": 0.12107453651153992,
      "grad_norm": 1.0241610796947704,
      "learning_rate": 1.2121212121212122e-05,
      "loss": 0.4742,
      "step": 40
    },
    {
      "epoch": 0.12410139992432842,
      "grad_norm": 0.8854584226272779,
      "learning_rate": 1.2424242424242425e-05,
      "loss": 0.4706,
      "step": 41
    },
    {
      "epoch": 0.1271282633371169,
      "grad_norm": 1.0951961852783316,
      "learning_rate": 1.2727272727272728e-05,
      "loss": 0.4699,
      "step": 42
    },
    {
      "epoch": 0.1301551267499054,
      "grad_norm": 0.9627861588113897,
      "learning_rate": 1.3030303030303032e-05,
      "loss": 0.4814,
      "step": 43
    },
    {
      "epoch": 0.13318199016269391,
      "grad_norm": 0.9385490139801577,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.4905,
      "step": 44
    },
    {
      "epoch": 0.1362088535754824,
      "grad_norm": 1.1062822652462938,
      "learning_rate": 1.3636363636363637e-05,
      "loss": 0.4737,
      "step": 45
    },
    {
      "epoch": 0.1392357169882709,
      "grad_norm": 0.9813474210659214,
      "learning_rate": 1.3939393939393942e-05,
      "loss": 0.4896,
      "step": 46
    },
    {
      "epoch": 0.1422625804010594,
      "grad_norm": 0.9365391755397732,
      "learning_rate": 1.4242424242424245e-05,
      "loss": 0.4724,
      "step": 47
    },
    {
      "epoch": 0.1452894438138479,
      "grad_norm": 1.0763635952081345,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.4802,
      "step": 48
    },
    {
      "epoch": 0.14831630722663638,
      "grad_norm": 0.9256982774463752,
      "learning_rate": 1.484848484848485e-05,
      "loss": 0.4885,
      "step": 49
    },
    {
      "epoch": 0.1513431706394249,
      "grad_norm": 1.0703549876071117,
      "learning_rate": 1.5151515151515153e-05,
      "loss": 0.4794,
      "step": 50
    },
    {
      "epoch": 0.1543700340522134,
      "grad_norm": 0.983257527471304,
      "learning_rate": 1.5454545454545454e-05,
      "loss": 0.5145,
      "step": 51
    },
    {
      "epoch": 0.15739689746500188,
      "grad_norm": 0.8475889379784336,
      "learning_rate": 1.575757575757576e-05,
      "loss": 0.4703,
      "step": 52
    },
    {
      "epoch": 0.1604237608777904,
      "grad_norm": 0.9669895573064388,
      "learning_rate": 1.606060606060606e-05,
      "loss": 0.4772,
      "step": 53
    },
    {
      "epoch": 0.16345062429057888,
      "grad_norm": 1.0001054436546986,
      "learning_rate": 1.6363636363636366e-05,
      "loss": 0.4742,
      "step": 54
    },
    {
      "epoch": 0.16647748770336737,
      "grad_norm": 0.7999113559003902,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.4688,
      "step": 55
    },
    {
      "epoch": 0.1695043511161559,
      "grad_norm": 1.149282988973501,
      "learning_rate": 1.6969696969696972e-05,
      "loss": 0.498,
      "step": 56
    },
    {
      "epoch": 0.17253121452894438,
      "grad_norm": 1.0625452606931989,
      "learning_rate": 1.7272727272727274e-05,
      "loss": 0.5031,
      "step": 57
    },
    {
      "epoch": 0.1755580779417329,
      "grad_norm": 0.9229543813240766,
      "learning_rate": 1.7575757575757576e-05,
      "loss": 0.5142,
      "step": 58
    },
    {
      "epoch": 0.17858494135452138,
      "grad_norm": 1.156622595340011,
      "learning_rate": 1.787878787878788e-05,
      "loss": 0.504,
      "step": 59
    },
    {
      "epoch": 0.18161180476730987,
      "grad_norm": 1.0884187527350624,
      "learning_rate": 1.8181818181818182e-05,
      "loss": 0.4735,
      "step": 60
    },
    {
      "epoch": 0.1846386681800984,
      "grad_norm": 1.0199149392813884,
      "learning_rate": 1.8484848484848487e-05,
      "loss": 0.4901,
      "step": 61
    },
    {
      "epoch": 0.18766553159288688,
      "grad_norm": 1.0745417894556961,
      "learning_rate": 1.8787878787878792e-05,
      "loss": 0.4955,
      "step": 62
    },
    {
      "epoch": 0.19069239500567536,
      "grad_norm": 0.9332393388388748,
      "learning_rate": 1.9090909090909094e-05,
      "loss": 0.4836,
      "step": 63
    },
    {
      "epoch": 0.19371925841846388,
      "grad_norm": 1.050752406108598,
      "learning_rate": 1.9393939393939395e-05,
      "loss": 0.4759,
      "step": 64
    },
    {
      "epoch": 0.19674612183125237,
      "grad_norm": 1.023771396703429,
      "learning_rate": 1.96969696969697e-05,
      "loss": 0.462,
      "step": 65
    },
    {
      "epoch": 0.19977298524404086,
      "grad_norm": 0.9065089380115087,
      "learning_rate": 2e-05,
      "loss": 0.4593,
      "step": 66
    },
    {
      "epoch": 0.20279984865682937,
      "grad_norm": 0.923973350966011,
      "learning_rate": 1.9999860139251737e-05,
      "loss": 0.468,
      "step": 67
    },
    {
      "epoch": 0.20582671206961786,
      "grad_norm": 0.9924198679975271,
      "learning_rate": 1.9999440560919153e-05,
      "loss": 0.4957,
      "step": 68
    },
    {
      "epoch": 0.20885357548240635,
      "grad_norm": 0.9389605372327295,
      "learning_rate": 1.9998741276738753e-05,
      "loss": 0.4817,
      "step": 69
    },
    {
      "epoch": 0.21188043889519487,
      "grad_norm": 1.1117771082790315,
      "learning_rate": 1.999776230627102e-05,
      "loss": 0.4808,
      "step": 70
    },
    {
      "epoch": 0.21490730230798336,
      "grad_norm": 0.9563452575157496,
      "learning_rate": 1.9996503676899863e-05,
      "loss": 0.4938,
      "step": 71
    },
    {
      "epoch": 0.21793416572077184,
      "grad_norm": 0.9451663840168154,
      "learning_rate": 1.9994965423831853e-05,
      "loss": 0.4772,
      "step": 72
    },
    {
      "epoch": 0.22096102913356036,
      "grad_norm": 0.9047547020802529,
      "learning_rate": 1.9993147590095232e-05,
      "loss": 0.4851,
      "step": 73
    },
    {
      "epoch": 0.22398789254634885,
      "grad_norm": 0.8284760798915037,
      "learning_rate": 1.999105022653872e-05,
      "loss": 0.4494,
      "step": 74
    },
    {
      "epoch": 0.22701475595913734,
      "grad_norm": 0.9300554587133534,
      "learning_rate": 1.9988673391830082e-05,
      "loss": 0.4813,
      "step": 75
    },
    {
      "epoch": 0.23004161937192585,
      "grad_norm": 0.851077051033196,
      "learning_rate": 1.9986017152454497e-05,
      "loss": 0.4765,
      "step": 76
    },
    {
      "epoch": 0.23306848278471434,
      "grad_norm": 0.9203623212068904,
      "learning_rate": 1.9983081582712684e-05,
      "loss": 0.4757,
      "step": 77
    },
    {
      "epoch": 0.23609534619750283,
      "grad_norm": 0.8243584901013207,
      "learning_rate": 1.9979866764718846e-05,
      "loss": 0.4507,
      "step": 78
    },
    {
      "epoch": 0.23912220961029135,
      "grad_norm": 0.8650945763575749,
      "learning_rate": 1.997637278839835e-05,
      "loss": 0.4857,
      "step": 79
    },
    {
      "epoch": 0.24214907302307984,
      "grad_norm": 0.887881975677164,
      "learning_rate": 1.9972599751485225e-05,
      "loss": 0.4725,
      "step": 80
    },
    {
      "epoch": 0.24517593643586832,
      "grad_norm": 0.8471467906278604,
      "learning_rate": 1.9968547759519426e-05,
      "loss": 0.4753,
      "step": 81
    },
    {
      "epoch": 0.24820279984865684,
      "grad_norm": 0.9805260776906332,
      "learning_rate": 1.9964216925843876e-05,
      "loss": 0.5065,
      "step": 82
    },
    {
      "epoch": 0.2512296632614453,
      "grad_norm": 0.8122093257342329,
      "learning_rate": 1.9959607371601303e-05,
      "loss": 0.4492,
      "step": 83
    },
    {
      "epoch": 0.2542565266742338,
      "grad_norm": 0.8683504982182351,
      "learning_rate": 1.9954719225730847e-05,
      "loss": 0.473,
      "step": 84
    },
    {
      "epoch": 0.25728339008702233,
      "grad_norm": 0.8284590042984584,
      "learning_rate": 1.994955262496446e-05,
      "loss": 0.4921,
      "step": 85
    },
    {
      "epoch": 0.2603102534998108,
      "grad_norm": 0.9224365803353558,
      "learning_rate": 1.9944107713823068e-05,
      "loss": 0.4759,
      "step": 86
    },
    {
      "epoch": 0.2633371169125993,
      "grad_norm": 0.8531131456474766,
      "learning_rate": 1.9938384644612542e-05,
      "loss": 0.4769,
      "step": 87
    },
    {
      "epoch": 0.26636398032538783,
      "grad_norm": 0.9635314304942465,
      "learning_rate": 1.9932383577419432e-05,
      "loss": 0.4823,
      "step": 88
    },
    {
      "epoch": 0.2693908437381763,
      "grad_norm": 0.8990933560390352,
      "learning_rate": 1.9926104680106484e-05,
      "loss": 0.4782,
      "step": 89
    },
    {
      "epoch": 0.2724177071509648,
      "grad_norm": 0.8778190589914089,
      "learning_rate": 1.9919548128307954e-05,
      "loss": 0.4792,
      "step": 90
    },
    {
      "epoch": 0.2754445705637533,
      "grad_norm": 0.8834210153521557,
      "learning_rate": 1.9912714105424694e-05,
      "loss": 0.458,
      "step": 91
    },
    {
      "epoch": 0.2784714339765418,
      "grad_norm": 0.8875575911429685,
      "learning_rate": 1.990560280261901e-05,
      "loss": 0.4931,
      "step": 92
    },
    {
      "epoch": 0.2814982973893303,
      "grad_norm": 0.817480238794449,
      "learning_rate": 1.989821441880933e-05,
      "loss": 0.441,
      "step": 93
    },
    {
      "epoch": 0.2845251608021188,
      "grad_norm": 0.8502460856012417,
      "learning_rate": 1.9890549160664633e-05,
      "loss": 0.4649,
      "step": 94
    },
    {
      "epoch": 0.2875520242149073,
      "grad_norm": 0.8896227133136069,
      "learning_rate": 1.9882607242598663e-05,
      "loss": 0.4673,
      "step": 95
    },
    {
      "epoch": 0.2905788876276958,
      "grad_norm": 0.8480998407083018,
      "learning_rate": 1.9874388886763944e-05,
      "loss": 0.4561,
      "step": 96
    },
    {
      "epoch": 0.2936057510404843,
      "grad_norm": 0.8542143691157155,
      "learning_rate": 1.9865894323045558e-05,
      "loss": 0.4849,
      "step": 97
    },
    {
      "epoch": 0.29663261445327277,
      "grad_norm": 1.0291798239076009,
      "learning_rate": 1.9857123789054707e-05,
      "loss": 0.5023,
      "step": 98
    },
    {
      "epoch": 0.2996594778660613,
      "grad_norm": 0.7966280932477555,
      "learning_rate": 1.9848077530122083e-05,
      "loss": 0.4725,
      "step": 99
    },
    {
      "epoch": 0.3026863412788498,
      "grad_norm": 0.9201677500224749,
      "learning_rate": 1.9838755799290993e-05,
      "loss": 0.4624,
      "step": 100
    },
    {
      "epoch": 0.30571320469163826,
      "grad_norm": 1.0217681666195215,
      "learning_rate": 1.9829158857310288e-05,
      "loss": 0.4817,
      "step": 101
    },
    {
      "epoch": 0.3087400681044268,
      "grad_norm": 0.911872517431247,
      "learning_rate": 1.9819286972627066e-05,
      "loss": 0.4805,
      "step": 102
    },
    {
      "epoch": 0.3117669315172153,
      "grad_norm": 1.0576146095810577,
      "learning_rate": 1.9809140421379168e-05,
      "loss": 0.4978,
      "step": 103
    },
    {
      "epoch": 0.31479379493000376,
      "grad_norm": 0.855375508857259,
      "learning_rate": 1.979871948738743e-05,
      "loss": 0.4597,
      "step": 104
    },
    {
      "epoch": 0.3178206583427923,
      "grad_norm": 1.026705740455224,
      "learning_rate": 1.978802446214779e-05,
      "loss": 0.4739,
      "step": 105
    },
    {
      "epoch": 0.3208475217555808,
      "grad_norm": 0.8954225722975215,
      "learning_rate": 1.9777055644823087e-05,
      "loss": 0.4534,
      "step": 106
    },
    {
      "epoch": 0.32387438516836925,
      "grad_norm": 1.3370208300122295,
      "learning_rate": 1.9765813342234726e-05,
      "loss": 0.4837,
      "step": 107
    },
    {
      "epoch": 0.32690124858115777,
      "grad_norm": 0.9737632463915928,
      "learning_rate": 1.9754297868854075e-05,
      "loss": 0.4858,
      "step": 108
    },
    {
      "epoch": 0.3299281119939463,
      "grad_norm": 0.978912223200775,
      "learning_rate": 1.9742509546793673e-05,
      "loss": 0.4435,
      "step": 109
    },
    {
      "epoch": 0.33295497540673474,
      "grad_norm": 0.7240104142164331,
      "learning_rate": 1.973044870579824e-05,
      "loss": 0.4677,
      "step": 110
    },
    {
      "epoch": 0.33598183881952326,
      "grad_norm": 1.041914878497896,
      "learning_rate": 1.9718115683235418e-05,
      "loss": 0.4662,
      "step": 111
    },
    {
      "epoch": 0.3390087022323118,
      "grad_norm": 0.8060191700540364,
      "learning_rate": 1.970551082408636e-05,
      "loss": 0.4545,
      "step": 112
    },
    {
      "epoch": 0.3420355656451003,
      "grad_norm": 0.8296978732089841,
      "learning_rate": 1.969263448093608e-05,
      "loss": 0.4543,
      "step": 113
    },
    {
      "epoch": 0.34506242905788875,
      "grad_norm": 0.9847399992196527,
      "learning_rate": 1.9679487013963566e-05,
      "loss": 0.4768,
      "step": 114
    },
    {
      "epoch": 0.34808929247067727,
      "grad_norm": 0.8173722191430757,
      "learning_rate": 1.9666068790931733e-05,
      "loss": 0.4771,
      "step": 115
    },
    {
      "epoch": 0.3511161558834658,
      "grad_norm": 0.8460671859353361,
      "learning_rate": 1.9652380187177128e-05,
      "loss": 0.468,
      "step": 116
    },
    {
      "epoch": 0.35414301929625425,
      "grad_norm": 0.8707438691166606,
      "learning_rate": 1.9638421585599422e-05,
      "loss": 0.475,
      "step": 117
    },
    {
      "epoch": 0.35716988270904276,
      "grad_norm": 0.8409536993835216,
      "learning_rate": 1.9624193376650708e-05,
      "loss": 0.4687,
      "step": 118
    },
    {
      "epoch": 0.3601967461218313,
      "grad_norm": 0.8495325387261942,
      "learning_rate": 1.960969595832457e-05,
      "loss": 0.4543,
      "step": 119
    },
    {
      "epoch": 0.36322360953461974,
      "grad_norm": 0.7963565437790904,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.4784,
      "step": 120
    },
    {
      "epoch": 0.36625047294740826,
      "grad_norm": 0.7589896085584487,
      "learning_rate": 1.957989512315489e-05,
      "loss": 0.4382,
      "step": 121
    },
    {
      "epoch": 0.3692773363601968,
      "grad_norm": 0.9050315045169787,
      "learning_rate": 1.956459253990476e-05,
      "loss": 0.4595,
      "step": 122
    },
    {
      "epoch": 0.37230419977298523,
      "grad_norm": 0.8196049322105241,
      "learning_rate": 1.9549022414440738e-05,
      "loss": 0.465,
      "step": 123
    },
    {
      "epoch": 0.37533106318577375,
      "grad_norm": 0.9617614962964403,
      "learning_rate": 1.9533185182292705e-05,
      "loss": 0.4703,
      "step": 124
    },
    {
      "epoch": 0.37835792659856227,
      "grad_norm": 0.830844411308809,
      "learning_rate": 1.9517081286462082e-05,
      "loss": 0.4697,
      "step": 125
    },
    {
      "epoch": 0.3813847900113507,
      "grad_norm": 0.9039111454739887,
      "learning_rate": 1.9500711177409456e-05,
      "loss": 0.472,
      "step": 126
    },
    {
      "epoch": 0.38441165342413924,
      "grad_norm": 0.9069164562624633,
      "learning_rate": 1.9484075313041968e-05,
      "loss": 0.477,
      "step": 127
    },
    {
      "epoch": 0.38743851683692776,
      "grad_norm": 0.7579005169820392,
      "learning_rate": 1.9467174158700507e-05,
      "loss": 0.4426,
      "step": 128
    },
    {
      "epoch": 0.3904653802497162,
      "grad_norm": 0.944570097212392,
      "learning_rate": 1.9450008187146685e-05,
      "loss": 0.4328,
      "step": 129
    },
    {
      "epoch": 0.39349224366250474,
      "grad_norm": 0.795107578241477,
      "learning_rate": 1.9432577878549635e-05,
      "loss": 0.473,
      "step": 130
    },
    {
      "epoch": 0.39651910707529325,
      "grad_norm": 0.8489326390048557,
      "learning_rate": 1.9414883720472557e-05,
      "loss": 0.4656,
      "step": 131
    },
    {
      "epoch": 0.3995459704880817,
      "grad_norm": 0.91472450332288,
      "learning_rate": 1.9396926207859085e-05,
      "loss": 0.4767,
      "step": 132
    },
    {
      "epoch": 0.40257283390087023,
      "grad_norm": 0.7607588999053772,
      "learning_rate": 1.937870584301945e-05,
      "loss": 0.4592,
      "step": 133
    },
    {
      "epoch": 0.40559969731365875,
      "grad_norm": 0.9303390902785805,
      "learning_rate": 1.9360223135616423e-05,
      "loss": 0.4686,
      "step": 134
    },
    {
      "epoch": 0.4086265607264472,
      "grad_norm": 0.8723947780547209,
      "learning_rate": 1.9341478602651068e-05,
      "loss": 0.4738,
      "step": 135
    },
    {
      "epoch": 0.4116534241392357,
      "grad_norm": 0.8119372278812987,
      "learning_rate": 1.932247276844826e-05,
      "loss": 0.4366,
      "step": 136
    },
    {
      "epoch": 0.41468028755202424,
      "grad_norm": 0.9632084095918596,
      "learning_rate": 1.9303206164642037e-05,
      "loss": 0.4773,
      "step": 137
    },
    {
      "epoch": 0.4177071509648127,
      "grad_norm": 0.8146873436571448,
      "learning_rate": 1.9283679330160726e-05,
      "loss": 0.4786,
      "step": 138
    },
    {
      "epoch": 0.4207340143776012,
      "grad_norm": 0.8948680327977362,
      "learning_rate": 1.9263892811211865e-05,
      "loss": 0.4659,
      "step": 139
    },
    {
      "epoch": 0.42376087779038973,
      "grad_norm": 0.7895746277278684,
      "learning_rate": 1.9243847161266924e-05,
      "loss": 0.4755,
      "step": 140
    },
    {
      "epoch": 0.4267877412031782,
      "grad_norm": 0.8162872799786652,
      "learning_rate": 1.9223542941045817e-05,
      "loss": 0.4822,
      "step": 141
    },
    {
      "epoch": 0.4298146046159667,
      "grad_norm": 0.8400643440549476,
      "learning_rate": 1.920298071850123e-05,
      "loss": 0.4514,
      "step": 142
    },
    {
      "epoch": 0.4328414680287552,
      "grad_norm": 0.7270907531393834,
      "learning_rate": 1.9182161068802742e-05,
      "loss": 0.429,
      "step": 143
    },
    {
      "epoch": 0.4358683314415437,
      "grad_norm": 0.9107839786821103,
      "learning_rate": 1.9161084574320696e-05,
      "loss": 0.4581,
      "step": 144
    },
    {
      "epoch": 0.4388951948543322,
      "grad_norm": 0.847601776657153,
      "learning_rate": 1.913975182460996e-05,
      "loss": 0.4529,
      "step": 145
    },
    {
      "epoch": 0.4419220582671207,
      "grad_norm": 0.769812391053037,
      "learning_rate": 1.9118163416393392e-05,
      "loss": 0.4431,
      "step": 146
    },
    {
      "epoch": 0.4449489216799092,
      "grad_norm": 0.8626951405883634,
      "learning_rate": 1.9096319953545186e-05,
      "loss": 0.465,
      "step": 147
    },
    {
      "epoch": 0.4479757850926977,
      "grad_norm": 0.783043695016772,
      "learning_rate": 1.9074222047073945e-05,
      "loss": 0.4769,
      "step": 148
    },
    {
      "epoch": 0.4510026485054862,
      "grad_norm": 0.8489788988531767,
      "learning_rate": 1.9051870315105626e-05,
      "loss": 0.478,
      "step": 149
    },
    {
      "epoch": 0.4540295119182747,
      "grad_norm": 0.8469877885706599,
      "learning_rate": 1.9029265382866216e-05,
      "loss": 0.4751,
      "step": 150
    },
    {
      "epoch": 0.4570563753310632,
      "grad_norm": 0.8042210613301104,
      "learning_rate": 1.9006407882664256e-05,
      "loss": 0.4455,
      "step": 151
    },
    {
      "epoch": 0.4600832387438517,
      "grad_norm": 0.791663045651865,
      "learning_rate": 1.8983298453873172e-05,
      "loss": 0.465,
      "step": 152
    },
    {
      "epoch": 0.46311010215664017,
      "grad_norm": 0.7954685891487775,
      "learning_rate": 1.895993774291336e-05,
      "loss": 0.4617,
      "step": 153
    },
    {
      "epoch": 0.4661369655694287,
      "grad_norm": 0.7254541733969726,
      "learning_rate": 1.8936326403234125e-05,
      "loss": 0.441,
      "step": 154
    },
    {
      "epoch": 0.4691638289822172,
      "grad_norm": 0.8431316692777195,
      "learning_rate": 1.891246509529539e-05,
      "loss": 0.4803,
      "step": 155
    },
    {
      "epoch": 0.47219069239500566,
      "grad_norm": 0.8357936098804112,
      "learning_rate": 1.8888354486549238e-05,
      "loss": 0.4727,
      "step": 156
    },
    {
      "epoch": 0.4752175558077942,
      "grad_norm": 0.7903116458017203,
      "learning_rate": 1.886399525142122e-05,
      "loss": 0.462,
      "step": 157
    },
    {
      "epoch": 0.4782444192205827,
      "grad_norm": 0.7441824699825185,
      "learning_rate": 1.8839388071291506e-05,
      "loss": 0.4462,
      "step": 158
    },
    {
      "epoch": 0.48127128263337116,
      "grad_norm": 0.9372383118691926,
      "learning_rate": 1.881453363447582e-05,
      "loss": 0.4673,
      "step": 159
    },
    {
      "epoch": 0.48429814604615967,
      "grad_norm": 0.7812171757508565,
      "learning_rate": 1.8789432636206197e-05,
      "loss": 0.4325,
      "step": 160
    },
    {
      "epoch": 0.4873250094589482,
      "grad_norm": 0.9937389472612614,
      "learning_rate": 1.8764085778611507e-05,
      "loss": 0.4696,
      "step": 161
    },
    {
      "epoch": 0.49035187287173665,
      "grad_norm": 0.8251836174614435,
      "learning_rate": 1.873849377069785e-05,
      "loss": 0.4388,
      "step": 162
    },
    {
      "epoch": 0.49337873628452517,
      "grad_norm": 0.9233474929813643,
      "learning_rate": 1.87126573283287e-05,
      "loss": 0.4655,
      "step": 163
    },
    {
      "epoch": 0.4964055996973137,
      "grad_norm": 0.8050813918997429,
      "learning_rate": 1.8686577174204887e-05,
      "loss": 0.4761,
      "step": 164
    },
    {
      "epoch": 0.49943246311010214,
      "grad_norm": 0.8236133705615286,
      "learning_rate": 1.866025403784439e-05,
      "loss": 0.4457,
      "step": 165
    },
    {
      "epoch": 0.5024593265228906,
      "grad_norm": 0.9391237541481741,
      "learning_rate": 1.863368865556191e-05,
      "loss": 0.4542,
      "step": 166
    },
    {
      "epoch": 0.5054861899356792,
      "grad_norm": 0.7825454424396067,
      "learning_rate": 1.8606881770448305e-05,
      "loss": 0.437,
      "step": 167
    },
    {
      "epoch": 0.5085130533484676,
      "grad_norm": 0.8954156583766139,
      "learning_rate": 1.8579834132349773e-05,
      "loss": 0.4412,
      "step": 168
    },
    {
      "epoch": 0.5115399167612561,
      "grad_norm": 0.7175213351214396,
      "learning_rate": 1.8552546497846893e-05,
      "loss": 0.432,
      "step": 169
    },
    {
      "epoch": 0.5145667801740447,
      "grad_norm": 0.84206216164572,
      "learning_rate": 1.8525019630233463e-05,
      "loss": 0.4769,
      "step": 170
    },
    {
      "epoch": 0.5175936435868331,
      "grad_norm": 0.7897188520422634,
      "learning_rate": 1.8497254299495147e-05,
      "loss": 0.4511,
      "step": 171
    },
    {
      "epoch": 0.5206205069996216,
      "grad_norm": 0.763821358658057,
      "learning_rate": 1.8469251282287925e-05,
      "loss": 0.4682,
      "step": 172
    },
    {
      "epoch": 0.5236473704124102,
      "grad_norm": 0.7995360092301088,
      "learning_rate": 1.8441011361916387e-05,
      "loss": 0.4312,
      "step": 173
    },
    {
      "epoch": 0.5266742338251986,
      "grad_norm": 0.7465526031827853,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.4468,
      "step": 174
    },
    {
      "epoch": 0.5297010972379871,
      "grad_norm": 0.7987158337055593,
      "learning_rate": 1.8383823978010077e-05,
      "loss": 0.4608,
      "step": 175
    },
    {
      "epoch": 0.5327279606507757,
      "grad_norm": 0.764860921458296,
      "learning_rate": 1.8354878114129368e-05,
      "loss": 0.4221,
      "step": 176
    },
    {
      "epoch": 0.5357548240635641,
      "grad_norm": 0.7667664561763302,
      "learning_rate": 1.8325698546347714e-05,
      "loss": 0.4653,
      "step": 177
    },
    {
      "epoch": 0.5387816874763526,
      "grad_norm": 0.7325767793030226,
      "learning_rate": 1.8296286090880362e-05,
      "loss": 0.449,
      "step": 178
    },
    {
      "epoch": 0.5418085508891411,
      "grad_norm": 0.7419675848302357,
      "learning_rate": 1.8266641570456915e-05,
      "loss": 0.4434,
      "step": 179
    },
    {
      "epoch": 0.5448354143019296,
      "grad_norm": 0.7401077249570182,
      "learning_rate": 1.8236765814298328e-05,
      "loss": 0.4471,
      "step": 180
    },
    {
      "epoch": 0.5478622777147181,
      "grad_norm": 0.7333461952207433,
      "learning_rate": 1.820665965809373e-05,
      "loss": 0.4444,
      "step": 181
    },
    {
      "epoch": 0.5508891411275066,
      "grad_norm": 0.7849887257018009,
      "learning_rate": 1.8176323943977034e-05,
      "loss": 0.4522,
      "step": 182
    },
    {
      "epoch": 0.5539160045402951,
      "grad_norm": 0.7834860623045447,
      "learning_rate": 1.814575952050336e-05,
      "loss": 0.4506,
      "step": 183
    },
    {
      "epoch": 0.5569428679530836,
      "grad_norm": 0.8104704668704734,
      "learning_rate": 1.8114967242625342e-05,
      "loss": 0.4564,
      "step": 184
    },
    {
      "epoch": 0.5599697313658721,
      "grad_norm": 0.760923153880631,
      "learning_rate": 1.808394797166919e-05,
      "loss": 0.4499,
      "step": 185
    },
    {
      "epoch": 0.5629965947786606,
      "grad_norm": 0.7236329330956655,
      "learning_rate": 1.8052702575310588e-05,
      "loss": 0.4201,
      "step": 186
    },
    {
      "epoch": 0.5660234581914491,
      "grad_norm": 0.7774653932201646,
      "learning_rate": 1.802123192755044e-05,
      "loss": 0.4558,
      "step": 187
    },
    {
      "epoch": 0.5690503216042376,
      "grad_norm": 0.7272247130384513,
      "learning_rate": 1.7989536908690413e-05,
      "loss": 0.4278,
      "step": 188
    },
    {
      "epoch": 0.5720771850170261,
      "grad_norm": 0.7738829207784225,
      "learning_rate": 1.7957618405308323e-05,
      "loss": 0.4547,
      "step": 189
    },
    {
      "epoch": 0.5751040484298146,
      "grad_norm": 0.7091385214064434,
      "learning_rate": 1.792547731023332e-05,
      "loss": 0.4215,
      "step": 190
    },
    {
      "epoch": 0.5781309118426031,
      "grad_norm": 0.7431619094564793,
      "learning_rate": 1.789311452252092e-05,
      "loss": 0.4189,
      "step": 191
    },
    {
      "epoch": 0.5811577752553916,
      "grad_norm": 0.73369199568987,
      "learning_rate": 1.7860530947427878e-05,
      "loss": 0.436,
      "step": 192
    },
    {
      "epoch": 0.58418463866818,
      "grad_norm": 0.8201929566237153,
      "learning_rate": 1.782772749638682e-05,
      "loss": 0.4479,
      "step": 193
    },
    {
      "epoch": 0.5872115020809686,
      "grad_norm": 0.7685426255998636,
      "learning_rate": 1.779470508698079e-05,
      "loss": 0.4419,
      "step": 194
    },
    {
      "epoch": 0.5902383654937571,
      "grad_norm": 0.7226115165102761,
      "learning_rate": 1.776146464291757e-05,
      "loss": 0.4463,
      "step": 195
    },
    {
      "epoch": 0.5932652289065455,
      "grad_norm": 0.7855677815812193,
      "learning_rate": 1.772800709400383e-05,
      "loss": 0.4584,
      "step": 196
    },
    {
      "epoch": 0.5962920923193341,
      "grad_norm": 0.6924557125908215,
      "learning_rate": 1.7694333376119144e-05,
      "loss": 0.4225,
      "step": 197
    },
    {
      "epoch": 0.5993189557321226,
      "grad_norm": 0.8461832504306739,
      "learning_rate": 1.766044443118978e-05,
      "loss": 0.4447,
      "step": 198
    },
    {
      "epoch": 0.602345819144911,
      "grad_norm": 0.7339685550341394,
      "learning_rate": 1.762634120716238e-05,
      "loss": 0.4284,
      "step": 199
    },
    {
      "epoch": 0.6053726825576996,
      "grad_norm": 0.7470257901089572,
      "learning_rate": 1.7592024657977432e-05,
      "loss": 0.4532,
      "step": 200
    },
    {
      "epoch": 0.6083995459704881,
      "grad_norm": 0.7749940770566907,
      "learning_rate": 1.7557495743542586e-05,
      "loss": 0.4408,
      "step": 201
    },
    {
      "epoch": 0.6114264093832765,
      "grad_norm": 0.7549019594913166,
      "learning_rate": 1.75227554297058e-05,
      "loss": 0.4334,
      "step": 202
    },
    {
      "epoch": 0.6144532727960651,
      "grad_norm": 0.6966347900391833,
      "learning_rate": 1.7487804688228327e-05,
      "loss": 0.4493,
      "step": 203
    },
    {
      "epoch": 0.6174801362088536,
      "grad_norm": 0.9386422497499951,
      "learning_rate": 1.745264449675755e-05,
      "loss": 0.4511,
      "step": 204
    },
    {
      "epoch": 0.620506999621642,
      "grad_norm": 0.7380768067585627,
      "learning_rate": 1.7417275838799596e-05,
      "loss": 0.444,
      "step": 205
    },
    {
      "epoch": 0.6235338630344306,
      "grad_norm": 0.9470079720341071,
      "learning_rate": 1.7381699703691866e-05,
      "loss": 0.4548,
      "step": 206
    },
    {
      "epoch": 0.626560726447219,
      "grad_norm": 0.7811030430085432,
      "learning_rate": 1.734591708657533e-05,
      "loss": 0.4246,
      "step": 207
    },
    {
      "epoch": 0.6295875898600075,
      "grad_norm": 1.0093808233334745,
      "learning_rate": 1.730992898836672e-05,
      "loss": 0.4453,
      "step": 208
    },
    {
      "epoch": 0.6326144532727961,
      "grad_norm": 0.9163250111999918,
      "learning_rate": 1.7273736415730488e-05,
      "loss": 0.4257,
      "step": 209
    },
    {
      "epoch": 0.6356413166855845,
      "grad_norm": 0.8893326579766935,
      "learning_rate": 1.72373403810507e-05,
      "loss": 0.4587,
      "step": 210
    },
    {
      "epoch": 0.638668180098373,
      "grad_norm": 1.001399915806959,
      "learning_rate": 1.720074190240269e-05,
      "loss": 0.4331,
      "step": 211
    },
    {
      "epoch": 0.6416950435111616,
      "grad_norm": 0.8104952099470419,
      "learning_rate": 1.7163942003524574e-05,
      "loss": 0.4504,
      "step": 212
    },
    {
      "epoch": 0.64472190692395,
      "grad_norm": 0.9331740632550443,
      "learning_rate": 1.7126941713788633e-05,
      "loss": 0.476,
      "step": 213
    },
    {
      "epoch": 0.6477487703367385,
      "grad_norm": 0.7593251250993972,
      "learning_rate": 1.70897420681725e-05,
      "loss": 0.4474,
      "step": 214
    },
    {
      "epoch": 0.6507756337495271,
      "grad_norm": 0.8579831900494261,
      "learning_rate": 1.7052344107230244e-05,
      "loss": 0.4494,
      "step": 215
    },
    {
      "epoch": 0.6538024971623155,
      "grad_norm": 0.796881274592069,
      "learning_rate": 1.7014748877063212e-05,
      "loss": 0.4513,
      "step": 216
    },
    {
      "epoch": 0.656829360575104,
      "grad_norm": 0.7672911031417039,
      "learning_rate": 1.697695742929082e-05,
      "loss": 0.4424,
      "step": 217
    },
    {
      "epoch": 0.6598562239878926,
      "grad_norm": 0.8043885195941772,
      "learning_rate": 1.693897082102109e-05,
      "loss": 0.4391,
      "step": 218
    },
    {
      "epoch": 0.662883087400681,
      "grad_norm": 0.6749475045971177,
      "learning_rate": 1.6900790114821122e-05,
      "loss": 0.4322,
      "step": 219
    },
    {
      "epoch": 0.6659099508134695,
      "grad_norm": 0.7469561804033368,
      "learning_rate": 1.686241637868734e-05,
      "loss": 0.4347,
      "step": 220
    },
    {
      "epoch": 0.6689368142262581,
      "grad_norm": 0.8095125248822435,
      "learning_rate": 1.682385068601563e-05,
      "loss": 0.4411,
      "step": 221
    },
    {
      "epoch": 0.6719636776390465,
      "grad_norm": 0.8015242102927481,
      "learning_rate": 1.6785094115571323e-05,
      "loss": 0.4392,
      "step": 222
    },
    {
      "epoch": 0.6749905410518351,
      "grad_norm": 0.7425720141360357,
      "learning_rate": 1.674614775145901e-05,
      "loss": 0.4405,
      "step": 223
    },
    {
      "epoch": 0.6780174044646236,
      "grad_norm": 0.7645372380328769,
      "learning_rate": 1.670701268309221e-05,
      "loss": 0.4421,
      "step": 224
    },
    {
      "epoch": 0.681044267877412,
      "grad_norm": 0.7532151414813228,
      "learning_rate": 1.666769000516292e-05,
      "loss": 0.438,
      "step": 225
    },
    {
      "epoch": 0.6840711312902006,
      "grad_norm": 0.784186608237733,
      "learning_rate": 1.6628180817610963e-05,
      "loss": 0.4283,
      "step": 226
    },
    {
      "epoch": 0.687097994702989,
      "grad_norm": 0.7118227720403716,
      "learning_rate": 1.658848622559325e-05,
      "loss": 0.429,
      "step": 227
    },
    {
      "epoch": 0.6901248581157775,
      "grad_norm": 0.67381378377153,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.4397,
      "step": 228
    },
    {
      "epoch": 0.6931517215285661,
      "grad_norm": 0.7853842479092966,
      "learning_rate": 1.6508545274687936e-05,
      "loss": 0.4634,
      "step": 229
    },
    {
      "epoch": 0.6961785849413545,
      "grad_norm": 0.7272349960705609,
      "learning_rate": 1.6468301151920576e-05,
      "loss": 0.4426,
      "step": 230
    },
    {
      "epoch": 0.699205448354143,
      "grad_norm": 0.7941436682938671,
      "learning_rate": 1.6427876096865394e-05,
      "loss": 0.4465,
      "step": 231
    },
    {
      "epoch": 0.7022323117669316,
      "grad_norm": 0.6988363957813541,
      "learning_rate": 1.6387271240298082e-05,
      "loss": 0.4419,
      "step": 232
    },
    {
      "epoch": 0.70525917517972,
      "grad_norm": 0.8034462401359156,
      "learning_rate": 1.6346487718023762e-05,
      "loss": 0.4583,
      "step": 233
    },
    {
      "epoch": 0.7082860385925085,
      "grad_norm": 0.817521700827119,
      "learning_rate": 1.6305526670845225e-05,
      "loss": 0.4443,
      "step": 234
    },
    {
      "epoch": 0.7113129020052971,
      "grad_norm": 0.8294143548271078,
      "learning_rate": 1.6264389244531015e-05,
      "loss": 0.454,
      "step": 235
    },
    {
      "epoch": 0.7143397654180855,
      "grad_norm": 0.7669233723268118,
      "learning_rate": 1.6223076589783368e-05,
      "loss": 0.4402,
      "step": 236
    },
    {
      "epoch": 0.717366628830874,
      "grad_norm": 0.8130202800441148,
      "learning_rate": 1.6181589862206053e-05,
      "loss": 0.443,
      "step": 237
    },
    {
      "epoch": 0.7203934922436626,
      "grad_norm": 0.8158320706396698,
      "learning_rate": 1.613993022227202e-05,
      "loss": 0.4849,
      "step": 238
    },
    {
      "epoch": 0.723420355656451,
      "grad_norm": 0.7644446627590479,
      "learning_rate": 1.6098098835290955e-05,
      "loss": 0.4204,
      "step": 239
    },
    {
      "epoch": 0.7264472190692395,
      "grad_norm": 0.8042813069950406,
      "learning_rate": 1.6056096871376667e-05,
      "loss": 0.423,
      "step": 240
    },
    {
      "epoch": 0.729474082482028,
      "grad_norm": 0.7130307888058273,
      "learning_rate": 1.6013925505414386e-05,
      "loss": 0.4375,
      "step": 241
    },
    {
      "epoch": 0.7325009458948165,
      "grad_norm": 0.7290852344125401,
      "learning_rate": 1.5971585917027864e-05,
      "loss": 0.4336,
      "step": 242
    },
    {
      "epoch": 0.735527809307605,
      "grad_norm": 0.717438110221978,
      "learning_rate": 1.5929079290546408e-05,
      "loss": 0.4219,
      "step": 243
    },
    {
      "epoch": 0.7385546727203935,
      "grad_norm": 0.6984033560066402,
      "learning_rate": 1.5886406814971728e-05,
      "loss": 0.4281,
      "step": 244
    },
    {
      "epoch": 0.741581536133182,
      "grad_norm": 0.7078083108194928,
      "learning_rate": 1.584356968394471e-05,
      "loss": 0.427,
      "step": 245
    },
    {
      "epoch": 0.7446083995459705,
      "grad_norm": 0.8127899724160252,
      "learning_rate": 1.5800569095711983e-05,
      "loss": 0.4724,
      "step": 246
    },
    {
      "epoch": 0.747635262958759,
      "grad_norm": 0.7518761048625047,
      "learning_rate": 1.575740625309244e-05,
      "loss": 0.4719,
      "step": 247
    },
    {
      "epoch": 0.7506621263715475,
      "grad_norm": 0.7680322297991256,
      "learning_rate": 1.5714082363443576e-05,
      "loss": 0.4352,
      "step": 248
    },
    {
      "epoch": 0.753688989784336,
      "grad_norm": 0.7420045092746247,
      "learning_rate": 1.5670598638627707e-05,
      "loss": 0.4408,
      "step": 249
    },
    {
      "epoch": 0.7567158531971245,
      "grad_norm": 0.7693133387370426,
      "learning_rate": 1.5626956294978103e-05,
      "loss": 0.4319,
      "step": 250
    },
    {
      "epoch": 0.759742716609913,
      "grad_norm": 0.6769376384307038,
      "learning_rate": 1.5583156553264923e-05,
      "loss": 0.4216,
      "step": 251
    },
    {
      "epoch": 0.7627695800227015,
      "grad_norm": 1.1156462927132476,
      "learning_rate": 1.5539200638661106e-05,
      "loss": 0.433,
      "step": 252
    },
    {
      "epoch": 0.76579644343549,
      "grad_norm": 0.7212125381995664,
      "learning_rate": 1.5495089780708062e-05,
      "loss": 0.449,
      "step": 253
    },
    {
      "epoch": 0.7688233068482785,
      "grad_norm": 0.699670277587977,
      "learning_rate": 1.5450825213281317e-05,
      "loss": 0.4474,
      "step": 254
    },
    {
      "epoch": 0.771850170261067,
      "grad_norm": 0.7060895917288469,
      "learning_rate": 1.5406408174555978e-05,
      "loss": 0.4392,
      "step": 255
    },
    {
      "epoch": 0.7748770336738555,
      "grad_norm": 0.6988865708478922,
      "learning_rate": 1.5361839906972095e-05,
      "loss": 0.4233,
      "step": 256
    },
    {
      "epoch": 0.777903897086644,
      "grad_norm": 0.76513732300348,
      "learning_rate": 1.531712165719992e-05,
      "loss": 0.4289,
      "step": 257
    },
    {
      "epoch": 0.7809307604994324,
      "grad_norm": 0.7843195298938961,
      "learning_rate": 1.5272254676105026e-05,
      "loss": 0.4265,
      "step": 258
    },
    {
      "epoch": 0.783957623912221,
      "grad_norm": 0.8630947434883658,
      "learning_rate": 1.5227240218713326e-05,
      "loss": 0.4389,
      "step": 259
    },
    {
      "epoch": 0.7869844873250095,
      "grad_norm": 0.766683485759987,
      "learning_rate": 1.5182079544175957e-05,
      "loss": 0.437,
      "step": 260
    },
    {
      "epoch": 0.7900113507377979,
      "grad_norm": 0.8248086969086892,
      "learning_rate": 1.5136773915734067e-05,
      "loss": 0.4538,
      "step": 261
    },
    {
      "epoch": 0.7930382141505865,
      "grad_norm": 0.756958809019444,
      "learning_rate": 1.5091324600683472e-05,
      "loss": 0.407,
      "step": 262
    },
    {
      "epoch": 0.796065077563375,
      "grad_norm": 0.7009008238346668,
      "learning_rate": 1.5045732870339213e-05,
      "loss": 0.3991,
      "step": 263
    },
    {
      "epoch": 0.7990919409761634,
      "grad_norm": 0.7712741341076254,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.4311,
      "step": 264
    },
    {
      "epoch": 0.802118804388952,
      "grad_norm": 0.7203632888434074,
      "learning_rate": 1.4954127268912525e-05,
      "loss": 0.434,
      "step": 265
    },
    {
      "epoch": 0.8051456678017405,
      "grad_norm": 0.7571719661189444,
      "learning_rate": 1.4908115960235683e-05,
      "loss": 0.4514,
      "step": 266
    },
    {
      "epoch": 0.8081725312145289,
      "grad_norm": 0.7557582398928391,
      "learning_rate": 1.4861967361004687e-05,
      "loss": 0.4462,
      "step": 267
    },
    {
      "epoch": 0.8111993946273175,
      "grad_norm": 0.7617915518345832,
      "learning_rate": 1.4815682762095065e-05,
      "loss": 0.4188,
      "step": 268
    },
    {
      "epoch": 0.814226258040106,
      "grad_norm": 0.7094186411041384,
      "learning_rate": 1.476926345818654e-05,
      "loss": 0.437,
      "step": 269
    },
    {
      "epoch": 0.8172531214528944,
      "grad_norm": 0.7056592154873851,
      "learning_rate": 1.472271074772683e-05,
      "loss": 0.4259,
      "step": 270
    },
    {
      "epoch": 0.820279984865683,
      "grad_norm": 0.7130911922867578,
      "learning_rate": 1.4676025932895315e-05,
      "loss": 0.4003,
      "step": 271
    },
    {
      "epoch": 0.8233068482784714,
      "grad_norm": 0.7007673286419005,
      "learning_rate": 1.4629210319566626e-05,
      "loss": 0.4229,
      "step": 272
    },
    {
      "epoch": 0.8263337116912599,
      "grad_norm": 0.716673274710857,
      "learning_rate": 1.4582265217274105e-05,
      "loss": 0.4268,
      "step": 273
    },
    {
      "epoch": 0.8293605751040485,
      "grad_norm": 0.7277453055014509,
      "learning_rate": 1.4535191939173179e-05,
      "loss": 0.4228,
      "step": 274
    },
    {
      "epoch": 0.8323874385168369,
      "grad_norm": 0.688032003571378,
      "learning_rate": 1.4487991802004625e-05,
      "loss": 0.4197,
      "step": 275
    },
    {
      "epoch": 0.8354143019296254,
      "grad_norm": 0.6875700288253241,
      "learning_rate": 1.4440666126057743e-05,
      "loss": 0.4364,
      "step": 276
    },
    {
      "epoch": 0.838441165342414,
      "grad_norm": 0.6630974536473169,
      "learning_rate": 1.4393216235133427e-05,
      "loss": 0.4242,
      "step": 277
    },
    {
      "epoch": 0.8414680287552024,
      "grad_norm": 0.698763301476341,
      "learning_rate": 1.4345643456507126e-05,
      "loss": 0.4179,
      "step": 278
    },
    {
      "epoch": 0.8444948921679909,
      "grad_norm": 0.7610228540995554,
      "learning_rate": 1.4297949120891718e-05,
      "loss": 0.4034,
      "step": 279
    },
    {
      "epoch": 0.8475217555807795,
      "grad_norm": 0.8012295566004533,
      "learning_rate": 1.4250134562400301e-05,
      "loss": 0.436,
      "step": 280
    },
    {
      "epoch": 0.8505486189935679,
      "grad_norm": 0.7643838942197712,
      "learning_rate": 1.4202201118508863e-05,
      "loss": 0.4254,
      "step": 281
    },
    {
      "epoch": 0.8535754824063564,
      "grad_norm": 0.6833171323777872,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.429,
      "step": 282
    },
    {
      "epoch": 0.856602345819145,
      "grad_norm": 0.9017845379122181,
      "learning_rate": 1.4105982941019751e-05,
      "loss": 0.4664,
      "step": 283
    },
    {
      "epoch": 0.8596292092319334,
      "grad_norm": 0.7468591344957848,
      "learning_rate": 1.405770089885134e-05,
      "loss": 0.4392,
      "step": 284
    },
    {
      "epoch": 0.8626560726447219,
      "grad_norm": 0.7612569283012177,
      "learning_rate": 1.4009305354066138e-05,
      "loss": 0.4281,
      "step": 285
    },
    {
      "epoch": 0.8656829360575105,
      "grad_norm": 0.7439607167517932,
      "learning_rate": 1.396079766039157e-05,
      "loss": 0.4117,
      "step": 286
    },
    {
      "epoch": 0.8687097994702989,
      "grad_norm": 0.6739506763467701,
      "learning_rate": 1.39121791746921e-05,
      "loss": 0.4368,
      "step": 287
    },
    {
      "epoch": 0.8717366628830874,
      "grad_norm": 0.7603407386659166,
      "learning_rate": 1.3863451256931286e-05,
      "loss": 0.4053,
      "step": 288
    },
    {
      "epoch": 0.874763526295876,
      "grad_norm": 0.7154917024348987,
      "learning_rate": 1.381461527013374e-05,
      "loss": 0.4237,
      "step": 289
    },
    {
      "epoch": 0.8777903897086644,
      "grad_norm": 0.7256261893864302,
      "learning_rate": 1.3765672580346986e-05,
      "loss": 0.4284,
      "step": 290
    },
    {
      "epoch": 0.8808172531214529,
      "grad_norm": 0.7662086448283414,
      "learning_rate": 1.3716624556603275e-05,
      "loss": 0.4483,
      "step": 291
    },
    {
      "epoch": 0.8838441165342414,
      "grad_norm": 0.7254975858326916,
      "learning_rate": 1.3667472570881264e-05,
      "loss": 0.4232,
      "step": 292
    },
    {
      "epoch": 0.8868709799470299,
      "grad_norm": 0.7369163057364929,
      "learning_rate": 1.361821799806765e-05,
      "loss": 0.4251,
      "step": 293
    },
    {
      "epoch": 0.8898978433598184,
      "grad_norm": 0.7989498463159742,
      "learning_rate": 1.356886221591872e-05,
      "loss": 0.4204,
      "step": 294
    },
    {
      "epoch": 0.8929247067726069,
      "grad_norm": 0.659748864267664,
      "learning_rate": 1.3519406605021797e-05,
      "loss": 0.409,
      "step": 295
    },
    {
      "epoch": 0.8959515701853954,
      "grad_norm": 0.7201563876966998,
      "learning_rate": 1.3469852548756626e-05,
      "loss": 0.4121,
      "step": 296
    },
    {
      "epoch": 0.8989784335981839,
      "grad_norm": 0.7705226143354177,
      "learning_rate": 1.342020143325669e-05,
      "loss": 0.4486,
      "step": 297
    },
    {
      "epoch": 0.9020052970109724,
      "grad_norm": 0.7489836631418199,
      "learning_rate": 1.3370454647370418e-05,
      "loss": 0.431,
      "step": 298
    },
    {
      "epoch": 0.9050321604237609,
      "grad_norm": 0.7328820517793598,
      "learning_rate": 1.3320613582622354e-05,
      "loss": 0.4271,
      "step": 299
    },
    {
      "epoch": 0.9080590238365494,
      "grad_norm": 0.7203074420755232,
      "learning_rate": 1.3270679633174219e-05,
      "loss": 0.4291,
      "step": 300
    },
    {
      "epoch": 0.9110858872493379,
      "grad_norm": 0.8349248566616958,
      "learning_rate": 1.3220654195785917e-05,
      "loss": 0.458,
      "step": 301
    },
    {
      "epoch": 0.9141127506621264,
      "grad_norm": 0.702034470585398,
      "learning_rate": 1.3170538669776469e-05,
      "loss": 0.4415,
      "step": 302
    },
    {
      "epoch": 0.9171396140749148,
      "grad_norm": 0.7771764513868211,
      "learning_rate": 1.3120334456984871e-05,
      "loss": 0.4257,
      "step": 303
    },
    {
      "epoch": 0.9201664774877034,
      "grad_norm": 0.8498222992151726,
      "learning_rate": 1.3070042961730878e-05,
      "loss": 0.4168,
      "step": 304
    },
    {
      "epoch": 0.9231933409004919,
      "grad_norm": 0.661741039345333,
      "learning_rate": 1.3019665590775717e-05,
      "loss": 0.4064,
      "step": 305
    },
    {
      "epoch": 0.9262202043132803,
      "grad_norm": 0.8260039376941007,
      "learning_rate": 1.296920375328275e-05,
      "loss": 0.4274,
      "step": 306
    },
    {
      "epoch": 0.9292470677260689,
      "grad_norm": 0.7926607444545334,
      "learning_rate": 1.2918658860778046e-05,
      "loss": 0.4344,
      "step": 307
    },
    {
      "epoch": 0.9322739311388574,
      "grad_norm": 0.6946906041592793,
      "learning_rate": 1.2868032327110904e-05,
      "loss": 0.4043,
      "step": 308
    },
    {
      "epoch": 0.9353007945516458,
      "grad_norm": 0.8036351389530777,
      "learning_rate": 1.2817325568414299e-05,
      "loss": 0.4336,
      "step": 309
    },
    {
      "epoch": 0.9383276579644344,
      "grad_norm": 0.7478065935592791,
      "learning_rate": 1.2766540003065272e-05,
      "loss": 0.4106,
      "step": 310
    },
    {
      "epoch": 0.9413545213772229,
      "grad_norm": 0.7835536942367415,
      "learning_rate": 1.2715677051645259e-05,
      "loss": 0.4352,
      "step": 311
    },
    {
      "epoch": 0.9443813847900113,
      "grad_norm": 0.7313993187002241,
      "learning_rate": 1.266473813690035e-05,
      "loss": 0.4196,
      "step": 312
    },
    {
      "epoch": 0.9474082482027999,
      "grad_norm": 0.7036261623809529,
      "learning_rate": 1.2613724683701491e-05,
      "loss": 0.4332,
      "step": 313
    },
    {
      "epoch": 0.9504351116155884,
      "grad_norm": 0.7420565645495166,
      "learning_rate": 1.2562638119004627e-05,
      "loss": 0.4438,
      "step": 314
    },
    {
      "epoch": 0.9534619750283768,
      "grad_norm": 0.7114923487390403,
      "learning_rate": 1.2511479871810792e-05,
      "loss": 0.4381,
      "step": 315
    },
    {
      "epoch": 0.9564888384411654,
      "grad_norm": 0.6647754074842811,
      "learning_rate": 1.2460251373126136e-05,
      "loss": 0.4097,
      "step": 316
    },
    {
      "epoch": 0.9595157018539539,
      "grad_norm": 0.6712613275967304,
      "learning_rate": 1.2408954055921884e-05,
      "loss": 0.4198,
      "step": 317
    },
    {
      "epoch": 0.9625425652667423,
      "grad_norm": 0.6953847891205012,
      "learning_rate": 1.2357589355094275e-05,
      "loss": 0.4131,
      "step": 318
    },
    {
      "epoch": 0.9655694286795309,
      "grad_norm": 0.7136121332091482,
      "learning_rate": 1.2306158707424402e-05,
      "loss": 0.4298,
      "step": 319
    },
    {
      "epoch": 0.9685962920923193,
      "grad_norm": 0.6692840758794919,
      "learning_rate": 1.2254663551538047e-05,
      "loss": 0.4103,
      "step": 320
    },
    {
      "epoch": 0.9716231555051078,
      "grad_norm": 0.7033017903632124,
      "learning_rate": 1.2203105327865407e-05,
      "loss": 0.4396,
      "step": 321
    },
    {
      "epoch": 0.9746500189178964,
      "grad_norm": 0.8052567092226562,
      "learning_rate": 1.215148547860084e-05,
      "loss": 0.4291,
      "step": 322
    },
    {
      "epoch": 0.9776768823306848,
      "grad_norm": 0.6927088950636443,
      "learning_rate": 1.2099805447662485e-05,
      "loss": 0.4007,
      "step": 323
    },
    {
      "epoch": 0.9807037457434733,
      "grad_norm": 0.713133502680242,
      "learning_rate": 1.2048066680651908e-05,
      "loss": 0.4275,
      "step": 324
    },
    {
      "epoch": 0.9837306091562619,
      "grad_norm": 0.7155463927144106,
      "learning_rate": 1.1996270624813642e-05,
      "loss": 0.4139,
      "step": 325
    },
    {
      "epoch": 0.9867574725690503,
      "grad_norm": 0.6478798486290784,
      "learning_rate": 1.194441872899471e-05,
      "loss": 0.4071,
      "step": 326
    },
    {
      "epoch": 0.9897843359818388,
      "grad_norm": 0.7173581836120222,
      "learning_rate": 1.1892512443604103e-05,
      "loss": 0.409,
      "step": 327
    },
    {
      "epoch": 0.9928111993946274,
      "grad_norm": 0.730348703298021,
      "learning_rate": 1.1840553220572204e-05,
      "loss": 0.4229,
      "step": 328
    },
    {
      "epoch": 0.9958380628074158,
      "grad_norm": 0.711547276417918,
      "learning_rate": 1.1788542513310178e-05,
      "loss": 0.4532,
      "step": 329
    },
    {
      "epoch": 0.9988649262202043,
      "grad_norm": 0.7863251108389895,
      "learning_rate": 1.1736481776669307e-05,
      "loss": 0.4476,
      "step": 330
    },
    {
      "epoch": 1.0018917896329929,
      "grad_norm": 0.7655456637348442,
      "learning_rate": 1.1684372466900306e-05,
      "loss": 0.3556,
      "step": 331
    },
    {
      "epoch": 1.0049186530457812,
      "grad_norm": 0.8155286727422348,
      "learning_rate": 1.1632216041612595e-05,
      "loss": 0.3147,
      "step": 332
    },
    {
      "epoch": 1.0079455164585698,
      "grad_norm": 0.7854218730281605,
      "learning_rate": 1.15800139597335e-05,
      "loss": 0.3316,
      "step": 333
    },
    {
      "epoch": 1.0109723798713584,
      "grad_norm": 0.7245266577568512,
      "learning_rate": 1.1527767681467472e-05,
      "loss": 0.3318,
      "step": 334
    },
    {
      "epoch": 1.0139992432841467,
      "grad_norm": 0.7016229980480927,
      "learning_rate": 1.1475478668255223e-05,
      "loss": 0.3102,
      "step": 335
    },
    {
      "epoch": 1.0170261066969353,
      "grad_norm": 0.7833157072637251,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.3098,
      "step": 336
    },
    {
      "epoch": 1.0200529701097238,
      "grad_norm": 0.8165706685969533,
      "learning_rate": 1.1370778288690947e-05,
      "loss": 0.3187,
      "step": 337
    },
    {
      "epoch": 1.0230798335225122,
      "grad_norm": 0.8512358511828098,
      "learning_rate": 1.1318369851033604e-05,
      "loss": 0.3234,
      "step": 338
    },
    {
      "epoch": 1.0261066969353008,
      "grad_norm": 0.7495737774775626,
      "learning_rate": 1.1265924535737494e-05,
      "loss": 0.2998,
      "step": 339
    },
    {
      "epoch": 1.0291335603480893,
      "grad_norm": 0.8218835702516464,
      "learning_rate": 1.121344380981082e-05,
      "loss": 0.3268,
      "step": 340
    },
    {
      "epoch": 1.0321604237608777,
      "grad_norm": 0.8048554632361441,
      "learning_rate": 1.1160929141252303e-05,
      "loss": 0.3034,
      "step": 341
    },
    {
      "epoch": 1.0351872871736663,
      "grad_norm": 0.8022024006590893,
      "learning_rate": 1.1108381999010111e-05,
      "loss": 0.2914,
      "step": 342
    },
    {
      "epoch": 1.0382141505864548,
      "grad_norm": 0.765519484837785,
      "learning_rate": 1.1055803852940772e-05,
      "loss": 0.3147,
      "step": 343
    },
    {
      "epoch": 1.0412410139992432,
      "grad_norm": 0.7963547701633277,
      "learning_rate": 1.1003196173768051e-05,
      "loss": 0.3009,
      "step": 344
    },
    {
      "epoch": 1.0442678774120318,
      "grad_norm": 0.7943705650703402,
      "learning_rate": 1.0950560433041825e-05,
      "loss": 0.3084,
      "step": 345
    },
    {
      "epoch": 1.0472947408248203,
      "grad_norm": 0.7716170930494871,
      "learning_rate": 1.0897898103096917e-05,
      "loss": 0.315,
      "step": 346
    },
    {
      "epoch": 1.0503216042376087,
      "grad_norm": 0.8032362714689631,
      "learning_rate": 1.0845210657011893e-05,
      "loss": 0.2958,
      "step": 347
    },
    {
      "epoch": 1.0533484676503972,
      "grad_norm": 0.8718677087985199,
      "learning_rate": 1.0792499568567885e-05,
      "loss": 0.3125,
      "step": 348
    },
    {
      "epoch": 1.0563753310631858,
      "grad_norm": 0.851840311161671,
      "learning_rate": 1.0739766312207344e-05,
      "loss": 0.3017,
      "step": 349
    },
    {
      "epoch": 1.0594021944759742,
      "grad_norm": 0.8827173634942459,
      "learning_rate": 1.068701236299281e-05,
      "loss": 0.3106,
      "step": 350
    },
    {
      "epoch": 1.0624290578887627,
      "grad_norm": 0.8240551910848539,
      "learning_rate": 1.0634239196565646e-05,
      "loss": 0.3019,
      "step": 351
    },
    {
      "epoch": 1.0654559213015513,
      "grad_norm": 0.7778022378631735,
      "learning_rate": 1.0581448289104759e-05,
      "loss": 0.3141,
      "step": 352
    },
    {
      "epoch": 1.0684827847143397,
      "grad_norm": 0.7748758031405112,
      "learning_rate": 1.0528641117285315e-05,
      "loss": 0.32,
      "step": 353
    },
    {
      "epoch": 1.0715096481271282,
      "grad_norm": 0.7598758352746627,
      "learning_rate": 1.0475819158237426e-05,
      "loss": 0.2933,
      "step": 354
    },
    {
      "epoch": 1.0745365115399168,
      "grad_norm": 0.8070887785063543,
      "learning_rate": 1.0422983889504831e-05,
      "loss": 0.3302,
      "step": 355
    },
    {
      "epoch": 1.0775633749527052,
      "grad_norm": 0.738418133825358,
      "learning_rate": 1.0370136789003582e-05,
      "loss": 0.3143,
      "step": 356
    },
    {
      "epoch": 1.0805902383654937,
      "grad_norm": 0.7222303260690495,
      "learning_rate": 1.031727933498068e-05,
      "loss": 0.2962,
      "step": 357
    },
    {
      "epoch": 1.0836171017782823,
      "grad_norm": 0.7186811106893302,
      "learning_rate": 1.0264413005972736e-05,
      "loss": 0.3066,
      "step": 358
    },
    {
      "epoch": 1.0866439651910706,
      "grad_norm": 0.9150833237562085,
      "learning_rate": 1.0211539280764617e-05,
      "loss": 0.3025,
      "step": 359
    },
    {
      "epoch": 1.0896708286038592,
      "grad_norm": 0.7584088182859109,
      "learning_rate": 1.015865963834808e-05,
      "loss": 0.2924,
      "step": 360
    },
    {
      "epoch": 1.0926976920166478,
      "grad_norm": 0.8177468806924608,
      "learning_rate": 1.0105775557880398e-05,
      "loss": 0.3169,
      "step": 361
    },
    {
      "epoch": 1.0957245554294364,
      "grad_norm": 0.7444571272416078,
      "learning_rate": 1.0052888518642978e-05,
      "loss": 0.3087,
      "step": 362
    },
    {
      "epoch": 1.0987514188422247,
      "grad_norm": 0.7118445169537763,
      "learning_rate": 1e-05,
      "loss": 0.3013,
      "step": 363
    },
    {
      "epoch": 1.1017782822550133,
      "grad_norm": 0.7413297904249947,
      "learning_rate": 9.947111481357023e-06,
      "loss": 0.3098,
      "step": 364
    },
    {
      "epoch": 1.1048051456678016,
      "grad_norm": 0.675083893707382,
      "learning_rate": 9.894224442119606e-06,
      "loss": 0.2911,
      "step": 365
    },
    {
      "epoch": 1.1078320090805902,
      "grad_norm": 0.7092323480008726,
      "learning_rate": 9.841340361651921e-06,
      "loss": 0.3108,
      "step": 366
    },
    {
      "epoch": 1.1108588724933788,
      "grad_norm": 0.8216369965917074,
      "learning_rate": 9.788460719235386e-06,
      "loss": 0.3074,
      "step": 367
    },
    {
      "epoch": 1.1138857359061674,
      "grad_norm": 0.7182318100402008,
      "learning_rate": 9.735586994027267e-06,
      "loss": 0.3086,
      "step": 368
    },
    {
      "epoch": 1.1169125993189557,
      "grad_norm": 0.7822678459225799,
      "learning_rate": 9.682720665019325e-06,
      "loss": 0.3168,
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.1199394627317443, |
|
"grad_norm": 0.7885254086425039, |
|
"learning_rate": 9.62986321099642e-06, |
|
"loss": 0.302, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.1229663261445326, |
|
"grad_norm": 0.7544807812106038, |
|
"learning_rate": 9.57701611049517e-06, |
|
"loss": 0.2993, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.1259931895573212, |
|
"grad_norm": 0.7415945718266198, |
|
"learning_rate": 9.524180841762577e-06, |
|
"loss": 0.3097, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.1290200529701098, |
|
"grad_norm": 0.847628394718913, |
|
"learning_rate": 9.471358882714687e-06, |
|
"loss": 0.3055, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.1320469163828983, |
|
"grad_norm": 0.8022331160489418, |
|
"learning_rate": 9.418551710895243e-06, |
|
"loss": 0.3068, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.1350737797956867, |
|
"grad_norm": 0.7088116349252302, |
|
"learning_rate": 9.365760803434356e-06, |
|
"loss": 0.3085, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.1381006432084753, |
|
"grad_norm": 0.7606454883182064, |
|
"learning_rate": 9.312987637007191e-06, |
|
"loss": 0.3092, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.1411275066212636, |
|
"grad_norm": 0.7907641940527268, |
|
"learning_rate": 9.260233687792657e-06, |
|
"loss": 0.3115, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.1441543700340522, |
|
"grad_norm": 0.748031819260652, |
|
"learning_rate": 9.207500431432115e-06, |
|
"loss": 0.3043, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.1471812334468408, |
|
"grad_norm": 0.7479169353050704, |
|
"learning_rate": 9.154789342988108e-06, |
|
"loss": 0.3016, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.1502080968596293, |
|
"grad_norm": 0.8160987528935079, |
|
"learning_rate": 9.102101896903084e-06, |
|
"loss": 0.3523, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.1532349602724177, |
|
"grad_norm": 0.7496889075324688, |
|
"learning_rate": 9.049439566958176e-06, |
|
"loss": 0.3097, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.1562618236852062, |
|
"grad_norm": 0.7237202828430805, |
|
"learning_rate": 8.99680382623195e-06, |
|
"loss": 0.288, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.1592886870979946, |
|
"grad_norm": 0.6892190512122733, |
|
"learning_rate": 8.944196147059233e-06, |
|
"loss": 0.2938, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.1623155505107832, |
|
"grad_norm": 0.7314347336923697, |
|
"learning_rate": 8.89161800098989e-06, |
|
"loss": 0.315, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.1653424139235717, |
|
"grad_norm": 0.7188213304841331, |
|
"learning_rate": 8.839070858747697e-06, |
|
"loss": 0.3195, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.1683692773363603, |
|
"grad_norm": 0.706122507513273, |
|
"learning_rate": 8.786556190189183e-06, |
|
"loss": 0.2918, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.1713961407491487, |
|
"grad_norm": 0.7284323593714442, |
|
"learning_rate": 8.734075464262507e-06, |
|
"loss": 0.2978, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.1744230041619372, |
|
"grad_norm": 0.7606713985709538, |
|
"learning_rate": 8.681630148966397e-06, |
|
"loss": 0.2953, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.1774498675747256, |
|
"grad_norm": 0.688466436571864, |
|
"learning_rate": 8.629221711309056e-06, |
|
"loss": 0.2937, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.1804767309875142, |
|
"grad_norm": 0.7123387339634238, |
|
"learning_rate": 8.576851617267151e-06, |
|
"loss": 0.3076, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.1835035944003027, |
|
"grad_norm": 0.7041220957351468, |
|
"learning_rate": 8.52452133174478e-06, |
|
"loss": 0.2997, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.1865304578130913, |
|
"grad_norm": 0.7544182593526317, |
|
"learning_rate": 8.472232318532531e-06, |
|
"loss": 0.291, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.1895573212258796, |
|
"grad_norm": 0.6993535321943141, |
|
"learning_rate": 8.419986040266502e-06, |
|
"loss": 0.3023, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.1925841846386682, |
|
"grad_norm": 0.7671877610392603, |
|
"learning_rate": 8.367783958387407e-06, |
|
"loss": 0.2997, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.1956110480514566, |
|
"grad_norm": 0.7007891183306825, |
|
"learning_rate": 8.315627533099697e-06, |
|
"loss": 0.3046, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.1986379114642451, |
|
"grad_norm": 0.8058855256443498, |
|
"learning_rate": 8.263518223330698e-06, |
|
"loss": 0.3003, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.2016647748770337, |
|
"grad_norm": 0.7238731624948095, |
|
"learning_rate": 8.211457486689829e-06, |
|
"loss": 0.2864, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.2046916382898223, |
|
"grad_norm": 0.7280035370674615, |
|
"learning_rate": 8.159446779427798e-06, |
|
"loss": 0.313, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.2077185017026106, |
|
"grad_norm": 0.8203183170992852, |
|
"learning_rate": 8.107487556395902e-06, |
|
"loss": 0.2952, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.2107453651153992, |
|
"grad_norm": 0.7703023526908187, |
|
"learning_rate": 8.055581271005292e-06, |
|
"loss": 0.305, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.2137722285281876, |
|
"grad_norm": 0.7372326181592128, |
|
"learning_rate": 8.00372937518636e-06, |
|
"loss": 0.3035, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.2167990919409761, |
|
"grad_norm": 0.8905868737947894, |
|
"learning_rate": 7.951933319348095e-06, |
|
"loss": 0.295, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.2198259553537647, |
|
"grad_norm": 0.6990558352328131, |
|
"learning_rate": 7.900194552337516e-06, |
|
"loss": 0.2976, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.2228528187665533, |
|
"grad_norm": 0.7288993278319332, |
|
"learning_rate": 7.848514521399167e-06, |
|
"loss": 0.3099, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.2258796821793416, |
|
"grad_norm": 0.7486815017605478, |
|
"learning_rate": 7.796894672134594e-06, |
|
"loss": 0.3023, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.2289065455921302, |
|
"grad_norm": 0.7673448685879369, |
|
"learning_rate": 7.745336448461958e-06, |
|
"loss": 0.2898, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.2319334090049185, |
|
"grad_norm": 0.7118553191614561, |
|
"learning_rate": 7.6938412925756e-06, |
|
"loss": 0.2962, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.2349602724177071, |
|
"grad_norm": 0.7710015604825844, |
|
"learning_rate": 7.642410644905726e-06, |
|
"loss": 0.3006, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.2379871358304957, |
|
"grad_norm": 0.7189231198734848, |
|
"learning_rate": 7.591045944078119e-06, |
|
"loss": 0.2985, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.2410139992432843, |
|
"grad_norm": 0.7251703839141794, |
|
"learning_rate": 7.539748626873866e-06, |
|
"loss": 0.3032, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.2440408626560726, |
|
"grad_norm": 0.7793062661136919, |
|
"learning_rate": 7.488520128189209e-06, |
|
"loss": 0.3192, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 1.2470677260688612, |
|
"grad_norm": 0.7110951057424699, |
|
"learning_rate": 7.4373618809953755e-06, |
|
"loss": 0.2928, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 1.2500945894816495, |
|
"grad_norm": 0.7236116954948195, |
|
"learning_rate": 7.386275316298513e-06, |
|
"loss": 0.3004, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 1.253121452894438, |
|
"grad_norm": 0.6807054902350754, |
|
"learning_rate": 7.335261863099652e-06, |
|
"loss": 0.2947, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 1.2561483163072267, |
|
"grad_norm": 0.7535572769131268, |
|
"learning_rate": 7.2843229483547405e-06, |
|
"loss": 0.3063, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 1.2591751797200152, |
|
"grad_norm": 0.7289605965795644, |
|
"learning_rate": 7.233459996934731e-06, |
|
"loss": 0.3061, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 1.2622020431328036, |
|
"grad_norm": 0.735887414238395, |
|
"learning_rate": 7.182674431585703e-06, |
|
"loss": 0.2919, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 1.2652289065455922, |
|
"grad_norm": 0.7816839447208147, |
|
"learning_rate": 7.131967672889101e-06, |
|
"loss": 0.299, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 1.2682557699583805, |
|
"grad_norm": 0.7857094281691691, |
|
"learning_rate": 7.081341139221955e-06, |
|
"loss": 0.3233, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 1.271282633371169, |
|
"grad_norm": 0.6847211225600917, |
|
"learning_rate": 7.0307962467172555e-06, |
|
"loss": 0.2928, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 1.2743094967839577, |
|
"grad_norm": 0.8089942649547821, |
|
"learning_rate": 6.9803344092242855e-06, |
|
"loss": 0.3135, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 1.2773363601967462, |
|
"grad_norm": 0.7467970436028688, |
|
"learning_rate": 6.929957038269123e-06, |
|
"loss": 0.3112, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 1.2803632236095346, |
|
"grad_norm": 0.739500028088135, |
|
"learning_rate": 6.87966554301513e-06, |
|
"loss": 0.2957, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 1.2833900870223232, |
|
"grad_norm": 0.683431066214186, |
|
"learning_rate": 6.8294613302235325e-06, |
|
"loss": 0.2821, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 1.2864169504351115, |
|
"grad_norm": 0.7237067977309674, |
|
"learning_rate": 6.779345804214088e-06, |
|
"loss": 0.2984, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 1.2894438138479, |
|
"grad_norm": 0.7015273552824332, |
|
"learning_rate": 6.729320366825785e-06, |
|
"loss": 0.2945, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 1.2924706772606886, |
|
"grad_norm": 0.7153485007804515, |
|
"learning_rate": 6.679386417377649e-06, |
|
"loss": 0.3096, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 1.2954975406734772, |
|
"grad_norm": 0.7244256900949125, |
|
"learning_rate": 6.629545352629583e-06, |
|
"loss": 0.3011, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 1.2985244040862656, |
|
"grad_norm": 0.6616366868021462, |
|
"learning_rate": 6.579798566743314e-06, |
|
"loss": 0.2878, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 1.3015512674990541, |
|
"grad_norm": 0.7301195956302827, |
|
"learning_rate": 6.530147451243377e-06, |
|
"loss": 0.3165, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 1.3045781309118425, |
|
"grad_norm": 0.7485873126650703, |
|
"learning_rate": 6.480593394978208e-06, |
|
"loss": 0.3172, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 1.307604994324631, |
|
"grad_norm": 0.722992588164647, |
|
"learning_rate": 6.431137784081283e-06, |
|
"loss": 0.3021, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 1.3106318577374196, |
|
"grad_norm": 0.7120795658993238, |
|
"learning_rate": 6.381782001932352e-06, |
|
"loss": 0.3032, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 1.3136587211502082, |
|
"grad_norm": 0.6878016059399221, |
|
"learning_rate": 6.33252742911874e-06, |
|
"loss": 0.2973, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 1.3166855845629966, |
|
"grad_norm": 0.7470100235486244, |
|
"learning_rate": 6.283375443396726e-06, |
|
"loss": 0.2995, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 1.3197124479757851, |
|
"grad_norm": 0.745106264724361, |
|
"learning_rate": 6.234327419653013e-06, |
|
"loss": 0.3176, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 1.3227393113885735, |
|
"grad_norm": 0.7148680670930199, |
|
"learning_rate": 6.185384729866264e-06, |
|
"loss": 0.2964, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 1.325766174801362, |
|
"grad_norm": 0.735340989725308, |
|
"learning_rate": 6.136548743068713e-06, |
|
"loss": 0.3306, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 1.3287930382141506, |
|
"grad_norm": 0.6968338856946305, |
|
"learning_rate": 6.087820825307904e-06, |
|
"loss": 0.2814, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 1.3318199016269392, |
|
"grad_norm": 0.7506074677205136, |
|
"learning_rate": 6.039202339608432e-06, |
|
"loss": 0.3098, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.3348467650397275, |
|
"grad_norm": 0.6888450909879681, |
|
"learning_rate": 5.990694645933866e-06, |
|
"loss": 0.2848, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 1.3378736284525161, |
|
"grad_norm": 0.7028737264249152, |
|
"learning_rate": 5.9422991011486635e-06, |
|
"loss": 0.2941, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 1.3409004918653045, |
|
"grad_norm": 0.6737135456849989, |
|
"learning_rate": 5.894017058980249e-06, |
|
"loss": 0.2849, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 1.343927355278093, |
|
"grad_norm": 0.76219771195752, |
|
"learning_rate": 5.845849869981137e-06, |
|
"loss": 0.291, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 1.3469542186908816, |
|
"grad_norm": 0.7046144961913553, |
|
"learning_rate": 5.797798881491138e-06, |
|
"loss": 0.3002, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 1.3499810821036702, |
|
"grad_norm": 0.6943621300553611, |
|
"learning_rate": 5.749865437599703e-06, |
|
"loss": 0.3047, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 1.3530079455164585, |
|
"grad_norm": 0.6914910879917091, |
|
"learning_rate": 5.702050879108284e-06, |
|
"loss": 0.2893, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 1.356034808929247, |
|
"grad_norm": 0.7331133229050498, |
|
"learning_rate": 5.654356543492883e-06, |
|
"loss": 0.2963, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 1.3590616723420355, |
|
"grad_norm": 0.735728979656197, |
|
"learning_rate": 5.606783764866576e-06, |
|
"loss": 0.286, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 1.362088535754824, |
|
"grad_norm": 0.7166827443004791, |
|
"learning_rate": 5.559333873942259e-06, |
|
"loss": 0.2913, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.3651153991676126, |
|
"grad_norm": 0.7220521417783836, |
|
"learning_rate": 5.512008197995379e-06, |
|
"loss": 0.2976, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 1.3681422625804012, |
|
"grad_norm": 0.7459174836915505, |
|
"learning_rate": 5.464808060826825e-06, |
|
"loss": 0.2912, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 1.3711691259931895, |
|
"grad_norm": 0.7089918027020214, |
|
"learning_rate": 5.417734782725896e-06, |
|
"loss": 0.2905, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 1.374195989405978, |
|
"grad_norm": 0.7845199823151281, |
|
"learning_rate": 5.370789680433376e-06, |
|
"loss": 0.2957, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 1.3772228528187664, |
|
"grad_norm": 0.6720858368889135, |
|
"learning_rate": 5.323974067104687e-06, |
|
"loss": 0.2833, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 1.380249716231555, |
|
"grad_norm": 0.6982056481063078, |
|
"learning_rate": 5.277289252273175e-06, |
|
"loss": 0.3071, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 1.3832765796443436, |
|
"grad_norm": 0.7882593628095009, |
|
"learning_rate": 5.230736541813463e-06, |
|
"loss": 0.2988, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 1.3863034430571322, |
|
"grad_norm": 0.8079334522424221, |
|
"learning_rate": 5.184317237904939e-06, |
|
"loss": 0.2946, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 1.3893303064699205, |
|
"grad_norm": 0.7456845871321579, |
|
"learning_rate": 5.138032638995315e-06, |
|
"loss": 0.3076, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 1.392357169882709, |
|
"grad_norm": 0.7217500629994253, |
|
"learning_rate": 5.091884039764321e-06, |
|
"loss": 0.2838, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.3953840332954974, |
|
"grad_norm": 0.7894939886947894, |
|
"learning_rate": 5.045872731087479e-06, |
|
"loss": 0.3113, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 1.398410896708286, |
|
"grad_norm": 0.7086092715996748, |
|
"learning_rate": 5.000000000000003e-06, |
|
"loss": 0.3006, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 1.4014377601210746, |
|
"grad_norm": 0.6816648744384045, |
|
"learning_rate": 4.954267129660789e-06, |
|
"loss": 0.2884, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 1.4044646235338631, |
|
"grad_norm": 0.728600013990663, |
|
"learning_rate": 4.908675399316534e-06, |
|
"loss": 0.3007, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 1.4074914869466515, |
|
"grad_norm": 0.903727588239888, |
|
"learning_rate": 4.863226084265939e-06, |
|
"loss": 0.3123, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 1.41051835035944, |
|
"grad_norm": 0.7345738267643765, |
|
"learning_rate": 4.817920455824045e-06, |
|
"loss": 0.3069, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 1.4135452137722284, |
|
"grad_norm": 0.7440313441691954, |
|
"learning_rate": 4.772759781286679e-06, |
|
"loss": 0.308, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 1.416572077185017, |
|
"grad_norm": 0.6792354680476462, |
|
"learning_rate": 4.727745323894976e-06, |
|
"loss": 0.2922, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 1.4195989405978056, |
|
"grad_norm": 0.6786857194888762, |
|
"learning_rate": 4.682878342800087e-06, |
|
"loss": 0.2715, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 1.4226258040105941, |
|
"grad_norm": 0.7078428662438635, |
|
"learning_rate": 4.638160093027908e-06, |
|
"loss": 0.2892, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.4256526674233825, |
|
"grad_norm": 0.7317330788613218, |
|
"learning_rate": 4.593591825444028e-06, |
|
"loss": 0.3062, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 1.428679530836171, |
|
"grad_norm": 0.7084717565538176, |
|
"learning_rate": 4.549174786718684e-06, |
|
"loss": 0.2847, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 1.4317063942489594, |
|
"grad_norm": 0.7156951922230512, |
|
"learning_rate": 4.504910219291941e-06, |
|
"loss": 0.3018, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 1.434733257661748, |
|
"grad_norm": 0.710828046926721, |
|
"learning_rate": 4.460799361338898e-06, |
|
"loss": 0.2997, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 1.4377601210745365, |
|
"grad_norm": 0.6926049148527008, |
|
"learning_rate": 4.416843446735077e-06, |
|
"loss": 0.2948, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 1.4407869844873251, |
|
"grad_norm": 0.7439666429216344, |
|
"learning_rate": 4.373043705021899e-06, |
|
"loss": 0.2817, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 1.4438138479001135, |
|
"grad_norm": 0.7106245449902547, |
|
"learning_rate": 4.3294013613722944e-06, |
|
"loss": 0.2915, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 1.446840711312902, |
|
"grad_norm": 0.733540733445827, |
|
"learning_rate": 4.2859176365564294e-06, |
|
"loss": 0.3006, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 1.4498675747256904, |
|
"grad_norm": 0.7190635551625673, |
|
"learning_rate": 4.2425937469075626e-06, |
|
"loss": 0.2946, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 1.452894438138479, |
|
"grad_norm": 0.7331672001856638, |
|
"learning_rate": 4.19943090428802e-06, |
|
"loss": 0.2982, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.4559213015512675, |
|
"grad_norm": 0.734420797241406, |
|
"learning_rate": 4.1564303160552935e-06, |
|
"loss": 0.3067, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 1.458948164964056, |
|
"grad_norm": 0.7740977562423303, |
|
"learning_rate": 4.113593185028273e-06, |
|
"loss": 0.3033, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 1.4619750283768445, |
|
"grad_norm": 0.7449744490700083, |
|
"learning_rate": 4.070920709453597e-06, |
|
"loss": 0.3031, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 1.465001891789633, |
|
"grad_norm": 0.6920685911153219, |
|
"learning_rate": 4.028414082972141e-06, |
|
"loss": 0.2748, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 1.4680287552024214, |
|
"grad_norm": 0.7340696787550777, |
|
"learning_rate": 3.986074494585619e-06, |
|
"loss": 0.3165, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 1.47105561861521, |
|
"grad_norm": 0.7442468975750016, |
|
"learning_rate": 3.943903128623336e-06, |
|
"loss": 0.2924, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 1.4740824820279985, |
|
"grad_norm": 0.7421648433774107, |
|
"learning_rate": 3.9019011647090465e-06, |
|
"loss": 0.3047, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 1.477109345440787, |
|
"grad_norm": 0.7420301308588098, |
|
"learning_rate": 3.860069777727983e-06, |
|
"loss": 0.2952, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 1.4801362088535754, |
|
"grad_norm": 0.6691259866515429, |
|
"learning_rate": 3.818410137793947e-06, |
|
"loss": 0.2828, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 1.483163072266364, |
|
"grad_norm": 0.7071423660193852, |
|
"learning_rate": 3.7769234102166365e-06, |
|
"loss": 0.2902, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.4861899356791524, |
|
"grad_norm": 0.7676793579276753, |
|
"learning_rate": 3.735610755468988e-06, |
|
"loss": 0.2934, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 1.489216799091941, |
|
"grad_norm": 0.7211431192456063, |
|
"learning_rate": 3.6944733291547784e-06, |
|
"loss": 0.3043, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 1.4922436625047295, |
|
"grad_norm": 0.7102504685742357, |
|
"learning_rate": 3.653512281976238e-06, |
|
"loss": 0.2845, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 1.495270525917518, |
|
"grad_norm": 0.765811254055886, |
|
"learning_rate": 3.612728759701919e-06, |
|
"loss": 0.3209, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 1.4982973893303064, |
|
"grad_norm": 0.7185697316514301, |
|
"learning_rate": 3.5721239031346067e-06, |
|
"loss": 0.282, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 1.501324252743095, |
|
"grad_norm": 0.7308196283280098, |
|
"learning_rate": 3.5316988480794255e-06, |
|
"loss": 0.2898, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 1.5043511161558833, |
|
"grad_norm": 0.7470210687023551, |
|
"learning_rate": 3.4914547253120655e-06, |
|
"loss": 0.3067, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 1.507377979568672, |
|
"grad_norm": 0.7218053580884763, |
|
"learning_rate": 3.4513926605471504e-06, |
|
"loss": 0.2917, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 1.5104048429814605, |
|
"grad_norm": 0.741476487219955, |
|
"learning_rate": 3.4115137744067516e-06, |
|
"loss": 0.3028, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 1.513431706394249, |
|
"grad_norm": 0.7375882108012576, |
|
"learning_rate": 3.37181918238904e-06, |
|
"loss": 0.298, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.5164585698070374, |
|
"grad_norm": 0.7103926965947772, |
|
"learning_rate": 3.3323099948370853e-06, |
|
"loss": 0.3079, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 1.519485433219826, |
|
"grad_norm": 0.7981736017237296, |
|
"learning_rate": 3.292987316907792e-06, |
|
"loss": 0.2997, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 1.5225122966326143, |
|
"grad_norm": 0.7140445453717933, |
|
"learning_rate": 3.253852248540994e-06, |
|
"loss": 0.2985, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 1.525539160045403, |
|
"grad_norm": 0.7338122647841709, |
|
"learning_rate": 3.2149058844286796e-06, |
|
"loss": 0.3044, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 1.5285660234581915, |
|
"grad_norm": 0.6932711316910662, |
|
"learning_rate": 3.1761493139843734e-06, |
|
"loss": 0.2805, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 1.53159288687098, |
|
"grad_norm": 0.7259651983892397, |
|
"learning_rate": 3.1375836213126653e-06, |
|
"loss": 0.281, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 1.5346197502837684, |
|
"grad_norm": 0.7246870645142374, |
|
"learning_rate": 3.099209885178882e-06, |
|
"loss": 0.2974, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 1.537646613696557, |
|
"grad_norm": 0.7162928934380735, |
|
"learning_rate": 3.0610291789789094e-06, |
|
"loss": 0.2981, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 1.5406734771093453, |
|
"grad_norm": 0.6678255968629371, |
|
"learning_rate": 3.023042570709185e-06, |
|
"loss": 0.2833, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 1.543700340522134, |
|
"grad_norm": 0.7646379953077853, |
|
"learning_rate": 2.9852511229367862e-06, |
|
"loss": 0.302, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.5467272039349225, |
|
"grad_norm": 0.7291367713834035, |
|
"learning_rate": 2.9476558927697605e-06, |
|
"loss": 0.2927, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 1.549754067347711, |
|
"grad_norm": 0.7213826190297216, |
|
"learning_rate": 2.9102579318274994e-06, |
|
"loss": 0.2819, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 1.5527809307604994, |
|
"grad_norm": 0.8689969612558015, |
|
"learning_rate": 2.8730582862113743e-06, |
|
"loss": 0.3236, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 1.555807794173288, |
|
"grad_norm": 0.7491954893536394, |
|
"learning_rate": 2.8360579964754277e-06, |
|
"loss": 0.2875, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 1.5588346575860763, |
|
"grad_norm": 0.8088199669308391, |
|
"learning_rate": 2.7992580975973136e-06, |
|
"loss": 0.3097, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 1.5618615209988649, |
|
"grad_norm": 0.7493951791584609, |
|
"learning_rate": 2.7626596189492983e-06, |
|
"loss": 0.2939, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 1.5648883844116535, |
|
"grad_norm": 0.8155178620024038, |
|
"learning_rate": 2.726263584269513e-06, |
|
"loss": 0.2969, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 1.567915247824442, |
|
"grad_norm": 0.7058552595036228, |
|
"learning_rate": 2.690071011633284e-06, |
|
"loss": 0.2796, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 1.5709421112372304, |
|
"grad_norm": 0.7378006103732747, |
|
"learning_rate": 2.6540829134246683e-06, |
|
"loss": 0.2856, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 1.573968974650019, |
|
"grad_norm": 0.7380712821686274, |
|
"learning_rate": 2.618300296308135e-06, |
|
"loss": 0.2941, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.5769958380628073, |
|
"grad_norm": 0.735846408326813, |
|
"learning_rate": 2.582724161200405e-06, |
|
"loss": 0.2957, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 1.5800227014755959, |
|
"grad_norm": 0.7159308274464296, |
|
"learning_rate": 2.5473555032424534e-06, |
|
"loss": 0.2962, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 1.5830495648883844, |
|
"grad_norm": 0.7810550278981911, |
|
"learning_rate": 2.5121953117716744e-06, |
|
"loss": 0.311, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 1.586076428301173, |
|
"grad_norm": 0.8148608666898813, |
|
"learning_rate": 2.477244570294206e-06, |
|
"loss": 0.2984, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 1.5891032917139614, |
|
"grad_norm": 0.7324596873197188, |
|
"learning_rate": 2.4425042564574186e-06, |
|
"loss": 0.3086, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 1.59213015512675, |
|
"grad_norm": 0.721094517008489, |
|
"learning_rate": 2.4079753420225694e-06, |
|
"loss": 0.2873, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 1.5951570185395383, |
|
"grad_norm": 0.6953935031512868, |
|
"learning_rate": 2.3736587928376197e-06, |
|
"loss": 0.288, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 1.5981838819523269, |
|
"grad_norm": 0.7287092455591053, |
|
"learning_rate": 2.339555568810221e-06, |
|
"loss": 0.2963, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 1.6012107453651154, |
|
"grad_norm": 0.7982001923035963, |
|
"learning_rate": 2.305666623880858e-06, |
|
"loss": 0.2906, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 1.604237608777904, |
|
"grad_norm": 0.7155115666818924, |
|
"learning_rate": 2.27199290599617e-06, |
|
"loss": 0.2989, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.6072644721906924, |
|
"grad_norm": 0.741352847859126, |
|
"learning_rate": 2.2385353570824308e-06, |
|
"loss": 0.2955, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 1.610291335603481, |
|
"grad_norm": 0.7438735228432585, |
|
"learning_rate": 2.2052949130192136e-06, |
|
"loss": 0.2972, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 1.6133181990162693, |
|
"grad_norm": 0.7080963834869359, |
|
"learning_rate": 2.172272503613183e-06, |
|
"loss": 0.2905, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 1.6163450624290578, |
|
"grad_norm": 0.6735598903056178, |
|
"learning_rate": 2.1394690525721275e-06, |
|
"loss": 0.2879, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 1.6193719258418464, |
|
"grad_norm": 0.7210296347572871, |
|
"learning_rate": 2.1068854774790783e-06, |
|
"loss": 0.3002, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 1.622398789254635, |
|
"grad_norm": 0.7052841341290685, |
|
"learning_rate": 2.0745226897666858e-06, |
|
"loss": 0.3071, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 1.6254256526674233, |
|
"grad_norm": 0.6898511280372791, |
|
"learning_rate": 2.0423815946916783e-06, |
|
"loss": 0.291, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 1.628452516080212, |
|
"grad_norm": 0.6848750186795004, |
|
"learning_rate": 2.010463091309587e-06, |
|
"loss": 0.2801, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 1.6314793794930003, |
|
"grad_norm": 0.6991822749647867, |
|
"learning_rate": 1.9787680724495617e-06, |
|
"loss": 0.2951, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 1.6345062429057888, |
|
"grad_norm": 0.7248939023028361, |
|
"learning_rate": 1.947297424689414e-06, |
|
"loss": 0.2837, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.6375331063185774, |
|
"grad_norm": 0.736808349101833, |
|
"learning_rate": 1.9160520283308115e-06, |
|
"loss": 0.3033, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.640559969731366, |
|
"grad_norm": 0.7229723881871016, |
|
"learning_rate": 1.8850327573746584e-06, |
|
"loss": 0.3041, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.6435868331441543, |
|
"grad_norm": 0.6862849651806169, |
|
"learning_rate": 1.854240479496643e-06, |
|
"loss": 0.2939, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.646613696556943, |
|
"grad_norm": 0.7143727176912422, |
|
"learning_rate": 1.8236760560229715e-06, |
|
"loss": 0.2991, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.6496405599697312, |
|
"grad_norm": 0.72498816636298, |
|
"learning_rate": 1.7933403419062689e-06, |
|
"loss": 0.299, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.6526674233825198, |
|
"grad_norm": 0.7290164894238529, |
|
"learning_rate": 1.7632341857016733e-06, |
|
"loss": 0.3006, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.6556942867953084, |
|
"grad_norm": 0.7468861089602867, |
|
"learning_rate": 1.7333584295430894e-06, |
|
"loss": 0.2953, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.658721150208097, |
|
"grad_norm": 0.6663562107366208, |
|
"learning_rate": 1.7037139091196396e-06, |
|
"loss": 0.2967, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.6617480136208853, |
|
"grad_norm": 0.7329320159252691, |
|
"learning_rate": 1.6743014536522872e-06, |
|
"loss": 0.2991, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.6647748770336739, |
|
"grad_norm": 0.7149965793264752, |
|
"learning_rate": 1.6451218858706374e-06, |
|
"loss": 0.2925, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.6678017404464622, |
|
"grad_norm": 0.7265353858215529, |
|
"learning_rate": 1.616176021989926e-06, |
|
"loss": 0.2709, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.6708286038592508, |
|
"grad_norm": 0.7007476764710114, |
|
"learning_rate": 1.587464671688187e-06, |
|
"loss": 0.2848, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.6738554672720394, |
|
"grad_norm": 0.7137521650633167, |
|
"learning_rate": 1.558988638083616e-06, |
|
"loss": 0.3019, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.676882330684828, |
|
"grad_norm": 0.7326076806509098, |
|
"learning_rate": 1.5307487177120773e-06, |
|
"loss": 0.2999, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.6799091940976163, |
|
"grad_norm": 0.7999915529349845, |
|
"learning_rate": 1.5027457005048573e-06, |
|
"loss": 0.3126, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.6829360575104049, |
|
"grad_norm": 0.7143699558643047, |
|
"learning_rate": 1.4749803697665366e-06, |
|
"loss": 0.2925, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.6859629209231932, |
|
"grad_norm": 0.6557819524154685, |
|
"learning_rate": 1.4474535021531099e-06, |
|
"loss": 0.2832, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.6889897843359818, |
|
"grad_norm": 0.6775709017079553, |
|
"learning_rate": 1.4201658676502294e-06, |
|
"loss": 0.2703, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.6920166477487704, |
|
"grad_norm": 0.6946860881212915, |
|
"learning_rate": 1.3931182295516965e-06, |
|
"loss": 0.2974, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.695043511161559, |
|
"grad_norm": 0.7569310394145937, |
|
"learning_rate": 1.3663113444380905e-06, |
|
"loss": 0.2936, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.6980703745743473, |
|
"grad_norm": 0.7326028603460049, |
|
"learning_rate": 1.339745962155613e-06, |
|
"loss": 0.2967, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.7010972379871359, |
|
"grad_norm": 0.7183903501780382, |
|
"learning_rate": 1.3134228257951142e-06, |
|
"loss": 0.2905, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.7041241013999242, |
|
"grad_norm": 0.6935811458897999, |
|
"learning_rate": 1.2873426716713012e-06, |
|
"loss": 0.2886, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.7071509648127128, |
|
"grad_norm": 0.689031002876606, |
|
"learning_rate": 1.2615062293021508e-06, |
|
"loss": 0.2906, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.7101778282255014, |
|
"grad_norm": 0.6898055501386529, |
|
"learning_rate": 1.2359142213884933e-06, |
|
"loss": 0.285, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.71320469163829, |
|
"grad_norm": 0.7179396714644282, |
|
"learning_rate": 1.2105673637938054e-06, |
|
"loss": 0.2877, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.7162315550510783, |
|
"grad_norm": 0.738766616411527, |
|
"learning_rate": 1.1854663655241804e-06, |
|
"loss": 0.3067, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.7192584184638668, |
|
"grad_norm": 0.7456936569323522, |
|
"learning_rate": 1.1606119287084982e-06, |
|
"loss": 0.2992, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.7222852818766552, |
|
"grad_norm": 0.7142112103087274, |
|
"learning_rate": 1.136004748578785e-06, |
|
"loss": 0.3019, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.7253121452894438, |
|
"grad_norm": 0.7210756682620847, |
|
"learning_rate": 1.1116455134507665e-06, |
|
"loss": 0.2901, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.7283390087022323, |
|
"grad_norm": 0.737335819794291, |
|
"learning_rate": 1.0875349047046113e-06, |
|
"loss": 0.2911, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.731365872115021, |
|
"grad_norm": 0.7488985088157455, |
|
"learning_rate": 1.0636735967658785e-06, |
|
"loss": 0.287, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.7343927355278093, |
|
"grad_norm": 0.7185064359906015, |
|
"learning_rate": 1.0400622570866426e-06, |
|
"loss": 0.2823, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.7374195989405978, |
|
"grad_norm": 0.7392173174876953, |
|
"learning_rate": 1.0167015461268303e-06, |
|
"loss": 0.2938, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.7404464623533862, |
|
"grad_norm": 0.6874504926869486, |
|
"learning_rate": 9.935921173357444e-07, |
|
"loss": 0.2835, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.7434733257661748, |
|
"grad_norm": 0.7181452475227407, |
|
"learning_rate": 9.707346171337895e-07, |
|
"loss": 0.2888, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.7465001891789633, |
|
"grad_norm": 0.7082020589095465, |
|
"learning_rate": 9.481296848943744e-07, |
|
"loss": 0.2774, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.749527052591752, |
|
"grad_norm": 0.7299344673010811, |
|
"learning_rate": 9.257779529260558e-07, |
|
"loss": 0.2988, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.7525539160045402, |
|
"grad_norm": 0.6991221763899843, |
|
"learning_rate": 9.036800464548157e-07, |
|
"loss": 0.2818, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.7555807794173288, |
|
"grad_norm": 0.6917836659790055, |
|
"learning_rate": 8.818365836066101e-07, |
|
"loss": 0.28, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.7586076428301172, |
|
"grad_norm": 0.7287350158726066, |
|
"learning_rate": 8.602481753900427e-07, |
|
"loss": 0.2979, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.7616345062429057, |
|
"grad_norm": 0.6899591584645147, |
|
"learning_rate": 8.389154256793042e-07, |
|
"loss": 0.2788, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.7646613696556943, |
|
"grad_norm": 0.7231428526834154, |
|
"learning_rate": 8.178389311972612e-07, |
|
"loss": 0.3048, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.7676882330684829, |
|
"grad_norm": 0.7339781327702657, |
|
"learning_rate": 7.970192814987676e-07, |
|
"loss": 0.2879, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.7707150964812712, |
|
"grad_norm": 0.6930891601239753, |
|
"learning_rate": 7.764570589541876e-07, |
|
"loss": 0.2782, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.7737419598940598, |
|
"grad_norm": 0.7253275682891991, |
|
"learning_rate": 7.561528387330797e-07, |
|
"loss": 0.2781, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.7767688233068482, |
|
"grad_norm": 0.6949049642692875, |
|
"learning_rate": 7.361071887881376e-07, |
|
"loss": 0.2981, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.7797956867196367, |
|
"grad_norm": 0.702545466960492, |
|
"learning_rate": 7.163206698392744e-07, |
|
"loss": 0.2825, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.7828225501324253, |
|
"grad_norm": 0.7408028431622163, |
|
"learning_rate": 6.96793835357964e-07, |
|
"loss": 0.2891, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.7858494135452139, |
|
"grad_norm": 0.7467909069416191, |
|
"learning_rate": 6.775272315517423e-07, |
|
"loss": 0.2811, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.7888762769580022, |
|
"grad_norm": 0.7664756468911015, |
|
"learning_rate": 6.585213973489335e-07, |
|
"loss": 0.3084, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.7919031403707908, |
|
"grad_norm": 0.743161616946802, |
|
"learning_rate": 6.397768643835755e-07, |
|
"loss": 0.2944, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.7949300037835791, |
|
"grad_norm": 0.7730191669076685, |
|
"learning_rate": 6.212941569805508e-07, |
|
"loss": 0.3009, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.7979568671963677, |
|
"grad_norm": 0.7761560509833367, |
|
"learning_rate": 6.030737921409169e-07, |
|
"loss": 0.3023, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.8009837306091563, |
|
"grad_norm": 0.7221223227755149, |
|
"learning_rate": 5.851162795274445e-07, |
|
"loss": 0.291, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.8040105940219449, |
|
"grad_norm": 0.7274770760365189, |
|
"learning_rate": 5.674221214503639e-07, |
|
"loss": 0.2812, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.8070374574347332, |
|
"grad_norm": 0.7378248793228842, |
|
"learning_rate": 5.499918128533155e-07, |
|
"loss": 0.2818, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.8100643208475218, |
|
"grad_norm": 0.7580981120954119, |
|
"learning_rate": 5.328258412994958e-07, |
|
"loss": 0.2922, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.8130911842603101, |
|
"grad_norm": 0.7442384271361044, |
|
"learning_rate": 5.159246869580348e-07, |
|
"loss": 0.3041, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.8161180476730987, |
|
"grad_norm": 0.7742587228505714, |
|
"learning_rate": 4.992888225905467e-07, |
|
"loss": 0.2916, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.8191449110858873, |
|
"grad_norm": 0.7294681269217987, |
|
"learning_rate": 4.829187135379221e-07, |
|
"loss": 0.2893, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.8221717744986758, |
|
"grad_norm": 0.7305955929689116, |
|
"learning_rate": 4.6681481770729844e-07, |
|
"loss": 0.2878, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.8251986379114642, |
|
"grad_norm": 0.7432526010109237, |
|
"learning_rate": 4.509775855592613e-07, |
|
"loss": 0.2922, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.8282255013242528, |
|
"grad_norm": 0.7013584478592265, |
|
"learning_rate": 4.354074600952407e-07, |
|
"loss": 0.3009, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.8312523647370411, |
|
"grad_norm": 0.745027520423352, |
|
"learning_rate": 4.2010487684511105e-07, |
|
"loss": 0.294, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.8342792281498297, |
|
"grad_norm": 0.6894148114552449, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 0.2704, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.8373060915626183, |
|
"grad_norm": 0.7134841002740187, |
|
"learning_rate": 3.9030404167542777e-07, |
|
"loss": 0.2892, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.8403329549754068, |
|
"grad_norm": 0.7236974491736105, |
|
"learning_rate": 3.7580662334929517e-07, |
|
"loss": 0.2958, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.8433598183881952, |
|
"grad_norm": 0.7193448989804936, |
|
"learning_rate": 3.615784144005796e-07, |
|
"loss": 0.2937, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.8463866818009838, |
|
"grad_norm": 0.7331648830759959, |
|
"learning_rate": 3.476198128228736e-07, |
|
"loss": 0.2847, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.849413545213772, |
|
"grad_norm": 0.722819568320747, |
|
"learning_rate": 3.339312090682689e-07, |
|
"loss": 0.284, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.8524404086265607, |
|
"grad_norm": 0.7216775663433448, |
|
"learning_rate": 3.2051298603643754e-07, |
|
"loss": 0.2918, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.8554672720393492, |
|
"grad_norm": 0.7306449607642955, |
|
"learning_rate": 3.0736551906392354e-07, |
|
"loss": 0.2953, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.8584941354521378, |
|
"grad_norm": 0.7087494584556927, |
|
"learning_rate": 2.9448917591363923e-07, |
|
"loss": 0.2769, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.8615209988649262, |
|
"grad_norm": 0.7240785756551431, |
|
"learning_rate": 2.818843167645835e-07, |
|
"loss": 0.2939, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.8645478622777147, |
|
"grad_norm": 0.74203055076884, |
|
"learning_rate": 2.6955129420176193e-07, |
|
"loss": 0.2898, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.867574725690503, |
|
"grad_norm": 0.678390021433273, |
|
"learning_rate": 2.5749045320632824e-07, |
|
"loss": 0.2841, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.8706015891032917, |
|
"grad_norm": 0.6787594902780316, |
|
"learning_rate": 2.4570213114592957e-07, |
|
"loss": 0.277, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.8736284525160802, |
|
"grad_norm": 0.7219609040084061, |
|
"learning_rate": 2.3418665776527738e-07, |
|
"loss": 0.2879, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.8766553159288688, |
|
"grad_norm": 0.7106681446272579, |
|
"learning_rate": 2.2294435517691504e-07, |
|
"loss": 0.2857, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.8796821793416572, |
|
"grad_norm": 0.7188388697911315, |
|
"learning_rate": 2.119755378522137e-07, |
|
"loss": 0.2853, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.8827090427544457, |
|
"grad_norm": 0.6622159018472675, |
|
"learning_rate": 2.0128051261257165e-07, |
|
"loss": 0.2779, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.885735906167234, |
|
"grad_norm": 0.6732961995385394, |
|
"learning_rate": 1.908595786208367e-07, |
|
"loss": 0.2794, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.8887627695800226, |
|
"grad_norm": 0.679031934034513, |
|
"learning_rate": 1.8071302737293294e-07, |
|
"loss": 0.2829, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.8917896329928112, |
|
"grad_norm": 0.7578341156815577, |
|
"learning_rate": 1.7084114268971275e-07, |
|
"loss": 0.2982, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.8948164964055998, |
|
"grad_norm": 0.6740770008653466, |
|
"learning_rate": 1.612442007090076e-07, |
|
"loss": 0.2661, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.8978433598183881, |
|
"grad_norm": 0.7349404222335727, |
|
"learning_rate": 1.519224698779198e-07, |
|
"loss": 0.29, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.9008702232311767, |
|
"grad_norm": 0.6952099114735816, |
|
"learning_rate": 1.4287621094529524e-07, |
|
"loss": 0.2873, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.903897086643965, |
|
"grad_norm": 0.7584641713321686, |
|
"learning_rate": 1.3410567695444576e-07, |
|
"loss": 0.3145, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.9069239500567536, |
|
"grad_norm": 0.7387079231650976, |
|
"learning_rate": 1.2561111323605714e-07, |
|
"loss": 0.2873, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.9099508134695422, |
|
"grad_norm": 0.7012378959130922, |
|
"learning_rate": 1.1739275740134004e-07, |
|
"loss": 0.2965, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.9129776768823308, |
|
"grad_norm": 0.6947694635862396, |
|
"learning_rate": 1.0945083933537104e-07, |
|
"loss": 0.2948, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.9160045402951191, |
|
"grad_norm": 0.6478719422405649, |
|
"learning_rate": 1.0178558119067316e-07, |
|
"loss": 0.2663, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.9190314037079077, |
|
"grad_norm": 0.7292975841241834, |
|
"learning_rate": 9.439719738099318e-08, |
|
"loss": 0.2809, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.922058267120696, |
|
"grad_norm": 0.807782247911084, |
|
"learning_rate": 8.728589457530857e-08, |
|
"loss": 0.2793, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.9250851305334846, |
|
"grad_norm": 0.7073896571692274, |
|
"learning_rate": 8.04518716920466e-08, |
|
"loss": 0.2938, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.9281119939462732, |
|
"grad_norm": 0.6824192050198555, |
|
"learning_rate": 7.389531989351773e-08, |
|
"loss": 0.2787, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.9311388573590618, |
|
"grad_norm": 0.7012292709178377, |
|
"learning_rate": 6.761642258056977e-08, |
|
"loss": 0.2771, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.9341657207718501, |
|
"grad_norm": 0.6894984431618938, |
|
"learning_rate": 6.161535538745877e-08, |
|
"loss": 0.2961, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.9371925841846387, |
|
"grad_norm": 0.7224592793153708, |
|
"learning_rate": 5.5892286176932875e-08, |
|
"loss": 0.2846, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.940219447597427, |
|
"grad_norm": 0.725541894026111, |
|
"learning_rate": 5.044737503554165e-08, |
|
"loss": 0.2968, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.9432463110102156, |
|
"grad_norm": 0.7301701298199866, |
|
"learning_rate": 4.528077426915412e-08, |
|
"loss": 0.2997, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.9462731744230042, |
|
"grad_norm": 0.7326633867231134, |
|
"learning_rate": 4.0392628398699954e-08, |
|
"loss": 0.2992, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.9493000378357928, |
|
"grad_norm": 0.6489686783548059, |
|
"learning_rate": 3.578307415612714e-08, |
|
"loss": 0.2641, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.952326901248581, |
|
"grad_norm": 0.716834783971763, |
|
"learning_rate": 3.1452240480577265e-08, |
|
"loss": 0.2847, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.9553537646613697, |
|
"grad_norm": 0.7330400518244846, |
|
"learning_rate": 2.7400248514776184e-08, |
|
"loss": 0.2992, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.958380628074158, |
|
"grad_norm": 0.7202874099506502, |
|
"learning_rate": 2.3627211601651157e-08, |
|
"loss": 0.2948, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.9614074914869466, |
|
"grad_norm": 0.7244818667531018, |
|
"learning_rate": 2.013323528115674e-08, |
|
"loss": 0.2995, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.9644343548997352, |
|
"grad_norm": 0.7170510897830359, |
|
"learning_rate": 1.6918417287318245e-08, |
|
"loss": 0.2766, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.9674612183125237, |
|
"grad_norm": 0.745124465740136, |
|
"learning_rate": 1.3982847545507271e-08, |
|
"loss": 0.2878, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.970488081725312, |
|
"grad_norm": 0.7134608984657206, |
|
"learning_rate": 1.1326608169920373e-08, |
|
"loss": 0.2832, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.9735149451381007, |
|
"grad_norm": 0.7244152345394852, |
|
"learning_rate": 8.949773461282008e-09, |
|
"loss": 0.2857, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.976541808550889, |
|
"grad_norm": 0.7091098185748254, |
|
"learning_rate": 6.8524099047695415e-09, |
|
"loss": 0.2896, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.9795686719636776, |
|
"grad_norm": 0.686131818846609, |
|
"learning_rate": 5.034576168149175e-09, |
|
"loss": 0.2816, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.9825955353764662, |
|
"grad_norm": 0.6952335372921985, |
|
"learning_rate": 3.4963231001383657e-09, |
|
"loss": 0.2826, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.9856223987892547, |
|
"grad_norm": 0.6975596218153316, |
|
"learning_rate": 2.237693728981416e-09, |
|
"loss": 0.2854, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.988649262202043, |
|
"grad_norm": 0.6863528342877395, |
|
"learning_rate": 1.2587232612493172e-09, |
|
"loss": 0.2833, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.9916761256148316, |
|
"grad_norm": 0.7135111717691293, |
|
"learning_rate": 5.594390808494332e-10, |
|
"loss": 0.3096, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.99470298902762, |
|
"grad_norm": 0.6841733668185142, |
|
"learning_rate": 1.3986074826388697e-10, |
|
"loss": 0.2786, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.9977298524404086, |
|
"grad_norm": 0.7265981551855979, |
|
"learning_rate": 0.0, |
|
"loss": 0.3022, |
|
"step": 660 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 660, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 69061717770240.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|