{
  "best_metric": 1.942334532737732,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.03423192126658109,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00017115960633290543,
      "grad_norm": 0.3384851813316345,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 2.1057,
      "step": 1
    },
    {
      "epoch": 0.00017115960633290543,
      "eval_loss": 2.140085220336914,
      "eval_runtime": 252.4143,
      "eval_samples_per_second": 38.984,
      "eval_steps_per_second": 19.492,
      "step": 1
    },
    {
      "epoch": 0.00034231921266581085,
      "grad_norm": 0.3000636100769043,
      "learning_rate": 6.666666666666667e-06,
      "loss": 2.0679,
      "step": 2
    },
    {
      "epoch": 0.0005134788189987163,
      "grad_norm": 0.31949612498283386,
      "learning_rate": 1e-05,
      "loss": 2.2765,
      "step": 3
    },
    {
      "epoch": 0.0006846384253316217,
      "grad_norm": 0.2581222355365753,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.8811,
      "step": 4
    },
    {
      "epoch": 0.0008557980316645272,
      "grad_norm": 0.2666412889957428,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.021,
      "step": 5
    },
    {
      "epoch": 0.0010269576379974327,
      "grad_norm": 0.26130983233451843,
      "learning_rate": 2e-05,
      "loss": 1.8714,
      "step": 6
    },
    {
      "epoch": 0.0011981172443303381,
      "grad_norm": 0.22150737047195435,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.7815,
      "step": 7
    },
    {
      "epoch": 0.0013692768506632434,
      "grad_norm": 0.25391674041748047,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 2.0571,
      "step": 8
    },
    {
      "epoch": 0.0015404364569961489,
      "grad_norm": 0.2351103127002716,
      "learning_rate": 3e-05,
      "loss": 1.7819,
      "step": 9
    },
    {
      "epoch": 0.0017115960633290544,
      "grad_norm": 0.24356381595134735,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.9668,
      "step": 10
    },
    {
      "epoch": 0.0018827556696619598,
      "grad_norm": 0.25410857796669006,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 2.0877,
      "step": 11
    },
    {
      "epoch": 0.0020539152759948653,
      "grad_norm": 0.2615646719932556,
      "learning_rate": 4e-05,
      "loss": 2.0598,
      "step": 12
    },
    {
      "epoch": 0.002225074882327771,
      "grad_norm": 0.24895967543125153,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 2.0459,
      "step": 13
    },
    {
      "epoch": 0.0023962344886606763,
      "grad_norm": 0.24129140377044678,
      "learning_rate": 4.666666666666667e-05,
      "loss": 2.1703,
      "step": 14
    },
    {
      "epoch": 0.0025673940949935813,
      "grad_norm": 0.21726980805397034,
      "learning_rate": 5e-05,
      "loss": 2.0914,
      "step": 15
    },
    {
      "epoch": 0.002738553701326487,
      "grad_norm": 0.20076154172420502,
      "learning_rate": 5.333333333333333e-05,
      "loss": 1.9858,
      "step": 16
    },
    {
      "epoch": 0.0029097133076593923,
      "grad_norm": 0.15559637546539307,
      "learning_rate": 5.666666666666667e-05,
      "loss": 1.8849,
      "step": 17
    },
    {
      "epoch": 0.0030808729139922978,
      "grad_norm": 0.16304416954517365,
      "learning_rate": 6e-05,
      "loss": 2.1007,
      "step": 18
    },
    {
      "epoch": 0.0032520325203252032,
      "grad_norm": 0.15507805347442627,
      "learning_rate": 6.333333333333333e-05,
      "loss": 2.0215,
      "step": 19
    },
    {
      "epoch": 0.0034231921266581087,
      "grad_norm": 0.1489063948392868,
      "learning_rate": 6.666666666666667e-05,
      "loss": 2.0716,
      "step": 20
    },
    {
      "epoch": 0.003594351732991014,
      "grad_norm": 0.15571531653404236,
      "learning_rate": 7e-05,
      "loss": 1.9169,
      "step": 21
    },
    {
      "epoch": 0.0037655113393239197,
      "grad_norm": 0.15557806193828583,
      "learning_rate": 7.333333333333333e-05,
      "loss": 1.996,
      "step": 22
    },
    {
      "epoch": 0.003936670945656825,
      "grad_norm": 0.17348982393741608,
      "learning_rate": 7.666666666666667e-05,
      "loss": 2.0536,
      "step": 23
    },
    {
      "epoch": 0.004107830551989731,
      "grad_norm": 0.16075366735458374,
      "learning_rate": 8e-05,
      "loss": 2.0326,
      "step": 24
    },
    {
      "epoch": 0.004278990158322636,
      "grad_norm": 0.1705283671617508,
      "learning_rate": 8.333333333333334e-05,
      "loss": 2.0816,
      "step": 25
    },
    {
      "epoch": 0.004450149764655542,
      "grad_norm": 0.1489923745393753,
      "learning_rate": 8.666666666666667e-05,
      "loss": 1.9582,
      "step": 26
    },
    {
      "epoch": 0.004621309370988447,
      "grad_norm": 0.15770766139030457,
      "learning_rate": 9e-05,
      "loss": 2.1467,
      "step": 27
    },
    {
      "epoch": 0.004792468977321353,
      "grad_norm": 0.13497239351272583,
      "learning_rate": 9.333333333333334e-05,
      "loss": 1.9728,
      "step": 28
    },
    {
      "epoch": 0.004963628583654257,
      "grad_norm": 0.14810675382614136,
      "learning_rate": 9.666666666666667e-05,
      "loss": 2.0339,
      "step": 29
    },
    {
      "epoch": 0.005134788189987163,
      "grad_norm": 0.12770140171051025,
      "learning_rate": 0.0001,
      "loss": 1.9108,
      "step": 30
    },
    {
      "epoch": 0.005305947796320068,
      "grad_norm": 0.14340223371982574,
      "learning_rate": 9.999146252290264e-05,
      "loss": 1.9649,
      "step": 31
    },
    {
      "epoch": 0.005477107402652974,
      "grad_norm": 0.13468272984027863,
      "learning_rate": 9.996585300715116e-05,
      "loss": 1.9385,
      "step": 32
    },
    {
      "epoch": 0.005648267008985879,
      "grad_norm": 0.1366954743862152,
      "learning_rate": 9.99231801983717e-05,
      "loss": 1.9931,
      "step": 33
    },
    {
      "epoch": 0.005819426615318785,
      "grad_norm": 0.12960107624530792,
      "learning_rate": 9.986345866928941e-05,
      "loss": 1.7814,
      "step": 34
    },
    {
      "epoch": 0.00599058622165169,
      "grad_norm": 0.13072721660137177,
      "learning_rate": 9.978670881475172e-05,
      "loss": 1.9877,
      "step": 35
    },
    {
      "epoch": 0.0061617458279845955,
      "grad_norm": 0.14742650091648102,
      "learning_rate": 9.96929568447637e-05,
      "loss": 2.0895,
      "step": 36
    },
    {
      "epoch": 0.006332905434317501,
      "grad_norm": 0.1375843733549118,
      "learning_rate": 9.958223477553714e-05,
      "loss": 1.9882,
      "step": 37
    },
    {
      "epoch": 0.0065040650406504065,
      "grad_norm": 0.1338900923728943,
      "learning_rate": 9.94545804185573e-05,
      "loss": 2.1235,
      "step": 38
    },
    {
      "epoch": 0.006675224646983312,
      "grad_norm": 0.139565572142601,
      "learning_rate": 9.931003736767013e-05,
      "loss": 1.9161,
      "step": 39
    },
    {
      "epoch": 0.0068463842533162175,
      "grad_norm": 0.1471727341413498,
      "learning_rate": 9.91486549841951e-05,
      "loss": 1.8279,
      "step": 40
    },
    {
      "epoch": 0.007017543859649123,
      "grad_norm": 0.14104895293712616,
      "learning_rate": 9.89704883800683e-05,
      "loss": 1.9517,
      "step": 41
    },
    {
      "epoch": 0.007188703465982028,
      "grad_norm": 0.14945568144321442,
      "learning_rate": 9.877559839902184e-05,
      "loss": 1.8598,
      "step": 42
    },
    {
      "epoch": 0.007359863072314934,
      "grad_norm": 0.16899332404136658,
      "learning_rate": 9.85640515958057e-05,
      "loss": 2.0511,
      "step": 43
    },
    {
      "epoch": 0.007531022678647839,
      "grad_norm": 0.16674430668354034,
      "learning_rate": 9.833592021345937e-05,
      "loss": 2.0885,
      "step": 44
    },
    {
      "epoch": 0.007702182284980745,
      "grad_norm": 0.1767071783542633,
      "learning_rate": 9.809128215864097e-05,
      "loss": 2.0406,
      "step": 45
    },
    {
      "epoch": 0.00787334189131365,
      "grad_norm": 0.183282271027565,
      "learning_rate": 9.783022097502204e-05,
      "loss": 1.8779,
      "step": 46
    },
    {
      "epoch": 0.008044501497646555,
      "grad_norm": 0.22200380265712738,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.8805,
      "step": 47
    },
    {
      "epoch": 0.008215661103979461,
      "grad_norm": 0.2835330367088318,
      "learning_rate": 9.725919140804099e-05,
      "loss": 2.1045,
      "step": 48
    },
    {
      "epoch": 0.008386820710312366,
      "grad_norm": 0.6013146638870239,
      "learning_rate": 9.694941803075283e-05,
      "loss": 2.1857,
      "step": 49
    },
    {
      "epoch": 0.008557980316645272,
      "grad_norm": 0.7531317472457886,
      "learning_rate": 9.662361147021779e-05,
      "loss": 2.1909,
      "step": 50
    },
    {
      "epoch": 0.008557980316645272,
      "eval_loss": 2.0112826824188232,
      "eval_runtime": 253.1487,
      "eval_samples_per_second": 38.87,
      "eval_steps_per_second": 19.435,
      "step": 50
    },
    {
      "epoch": 0.008729139922978177,
      "grad_norm": 0.24028153717517853,
      "learning_rate": 9.628188298907782e-05,
      "loss": 1.8849,
      "step": 51
    },
    {
      "epoch": 0.008900299529311083,
      "grad_norm": 0.2163541167974472,
      "learning_rate": 9.592434928729616e-05,
      "loss": 1.9064,
      "step": 52
    },
    {
      "epoch": 0.009071459135643988,
      "grad_norm": 0.1897958517074585,
      "learning_rate": 9.555113246230442e-05,
      "loss": 1.962,
      "step": 53
    },
    {
      "epoch": 0.009242618741976894,
      "grad_norm": 0.1701580286026001,
      "learning_rate": 9.516235996730645e-05,
      "loss": 1.8102,
      "step": 54
    },
    {
      "epoch": 0.009413778348309799,
      "grad_norm": 0.17404377460479736,
      "learning_rate": 9.475816456775313e-05,
      "loss": 1.7162,
      "step": 55
    },
    {
      "epoch": 0.009584937954642705,
      "grad_norm": 0.16066402196884155,
      "learning_rate": 9.43386842960031e-05,
      "loss": 1.7332,
      "step": 56
    },
    {
      "epoch": 0.00975609756097561,
      "grad_norm": 0.15609973669052124,
      "learning_rate": 9.39040624041849e-05,
      "loss": 1.7929,
      "step": 57
    },
    {
      "epoch": 0.009927257167308514,
      "grad_norm": 0.15920859575271606,
      "learning_rate": 9.345444731527642e-05,
      "loss": 1.8153,
      "step": 58
    },
    {
      "epoch": 0.01009841677364142,
      "grad_norm": 0.1596253216266632,
      "learning_rate": 9.298999257241863e-05,
      "loss": 1.8854,
      "step": 59
    },
    {
      "epoch": 0.010269576379974325,
      "grad_norm": 0.136519655585289,
      "learning_rate": 9.251085678648072e-05,
      "loss": 1.9049,
      "step": 60
    },
    {
      "epoch": 0.010440735986307232,
      "grad_norm": 0.1335257738828659,
      "learning_rate": 9.201720358189464e-05,
      "loss": 1.8594,
      "step": 61
    },
    {
      "epoch": 0.010611895592640136,
      "grad_norm": 0.1406267285346985,
      "learning_rate": 9.150920154077754e-05,
      "loss": 2.0214,
      "step": 62
    },
    {
      "epoch": 0.010783055198973043,
      "grad_norm": 0.11417517811059952,
      "learning_rate": 9.098702414536107e-05,
      "loss": 1.9628,
      "step": 63
    },
    {
      "epoch": 0.010954214805305947,
      "grad_norm": 0.12227802723646164,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.9979,
      "step": 64
    },
    {
      "epoch": 0.011125374411638854,
      "grad_norm": 0.11370483040809631,
      "learning_rate": 8.9900861364012e-05,
      "loss": 1.9422,
      "step": 65
    },
    {
      "epoch": 0.011296534017971758,
      "grad_norm": 0.09835387021303177,
      "learning_rate": 8.933724690167417e-05,
      "loss": 1.8134,
      "step": 66
    },
    {
      "epoch": 0.011467693624304665,
      "grad_norm": 0.11144676804542542,
      "learning_rate": 8.876019880555649e-05,
      "loss": 2.1119,
      "step": 67
    },
    {
      "epoch": 0.01163885323063757,
      "grad_norm": 0.09735066443681717,
      "learning_rate": 8.816991413705516e-05,
      "loss": 1.827,
      "step": 68
    },
    {
      "epoch": 0.011810012836970476,
      "grad_norm": 0.10068103671073914,
      "learning_rate": 8.756659447784368e-05,
      "loss": 1.8899,
      "step": 69
    },
    {
      "epoch": 0.01198117244330338,
      "grad_norm": 0.10140778869390488,
      "learning_rate": 8.695044586103296e-05,
      "loss": 2.0218,
      "step": 70
    },
    {
      "epoch": 0.012152332049636286,
      "grad_norm": 0.0990547463297844,
      "learning_rate": 8.632167870081121e-05,
      "loss": 1.981,
      "step": 71
    },
    {
      "epoch": 0.012323491655969191,
      "grad_norm": 0.1076335459947586,
      "learning_rate": 8.568050772058762e-05,
      "loss": 1.9312,
      "step": 72
    },
    {
      "epoch": 0.012494651262302097,
      "grad_norm": 0.11169704794883728,
      "learning_rate": 8.502715187966455e-05,
      "loss": 1.9669,
      "step": 73
    },
    {
      "epoch": 0.012665810868635002,
      "grad_norm": 0.10578777641057968,
      "learning_rate": 8.436183429846313e-05,
      "loss": 1.8923,
      "step": 74
    },
    {
      "epoch": 0.012836970474967908,
      "grad_norm": 0.1006617322564125,
      "learning_rate": 8.368478218232787e-05,
      "loss": 1.9088,
      "step": 75
    },
    {
      "epoch": 0.013008130081300813,
      "grad_norm": 0.11208891868591309,
      "learning_rate": 8.299622674393614e-05,
      "loss": 1.9875,
      "step": 76
    },
    {
      "epoch": 0.013179289687633718,
      "grad_norm": 0.1086820513010025,
      "learning_rate": 8.229640312433937e-05,
      "loss": 1.979,
      "step": 77
    },
    {
      "epoch": 0.013350449293966624,
      "grad_norm": 0.11209630221128464,
      "learning_rate": 8.158555031266254e-05,
      "loss": 2.0093,
      "step": 78
    },
    {
      "epoch": 0.013521608900299529,
      "grad_norm": 0.1047407016158104,
      "learning_rate": 8.086391106448965e-05,
      "loss": 2.0194,
      "step": 79
    },
    {
      "epoch": 0.013692768506632435,
      "grad_norm": 0.10114631056785583,
      "learning_rate": 8.013173181896283e-05,
      "loss": 1.9117,
      "step": 80
    },
    {
      "epoch": 0.01386392811296534,
      "grad_norm": 0.10660432279109955,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.1128,
      "step": 81
    },
    {
      "epoch": 0.014035087719298246,
      "grad_norm": 0.10562874376773834,
      "learning_rate": 7.863675700402526e-05,
      "loss": 1.819,
      "step": 82
    },
    {
      "epoch": 0.01420624732563115,
      "grad_norm": 0.1116420328617096,
      "learning_rate": 7.787447196714427e-05,
      "loss": 2.0161,
      "step": 83
    },
    {
      "epoch": 0.014377406931964057,
      "grad_norm": 0.1135152131319046,
      "learning_rate": 7.710266782362247e-05,
      "loss": 1.9062,
      "step": 84
    },
    {
      "epoch": 0.014548566538296961,
      "grad_norm": 0.11426131427288055,
      "learning_rate": 7.63216081438678e-05,
      "loss": 1.8512,
      "step": 85
    },
    {
      "epoch": 0.014719726144629868,
      "grad_norm": 0.11805808544158936,
      "learning_rate": 7.553155965904535e-05,
      "loss": 1.862,
      "step": 86
    },
    {
      "epoch": 0.014890885750962772,
      "grad_norm": 0.11407601833343506,
      "learning_rate": 7.473279216998895e-05,
      "loss": 1.9902,
      "step": 87
    },
    {
      "epoch": 0.015062045357295679,
      "grad_norm": 0.12108225375413895,
      "learning_rate": 7.392557845506432e-05,
      "loss": 2.0695,
      "step": 88
    },
    {
      "epoch": 0.015233204963628583,
      "grad_norm": 0.1370498389005661,
      "learning_rate": 7.311019417701566e-05,
      "loss": 1.9937,
      "step": 89
    },
    {
      "epoch": 0.01540436456996149,
      "grad_norm": 0.13261045515537262,
      "learning_rate": 7.228691778882693e-05,
      "loss": 1.9162,
      "step": 90
    },
    {
      "epoch": 0.015575524176294394,
      "grad_norm": 0.13481606543064117,
      "learning_rate": 7.145603043863045e-05,
      "loss": 1.7693,
      "step": 91
    },
    {
      "epoch": 0.0157466837826273,
      "grad_norm": 0.13738499581813812,
      "learning_rate": 7.061781587369519e-05,
      "loss": 1.9906,
      "step": 92
    },
    {
      "epoch": 0.015917843388960205,
      "grad_norm": 0.14677368104457855,
      "learning_rate": 6.977256034352712e-05,
      "loss": 1.6928,
      "step": 93
    },
    {
      "epoch": 0.01608900299529311,
      "grad_norm": 0.16111257672309875,
      "learning_rate": 6.892055250211552e-05,
      "loss": 1.9891,
      "step": 94
    },
    {
      "epoch": 0.016260162601626018,
      "grad_norm": 0.1692948341369629,
      "learning_rate": 6.806208330935766e-05,
      "loss": 1.8375,
      "step": 95
    },
    {
      "epoch": 0.016431322207958923,
      "grad_norm": 0.21693989634513855,
      "learning_rate": 6.719744593169641e-05,
      "loss": 1.9869,
      "step": 96
    },
    {
      "epoch": 0.016602481814291827,
      "grad_norm": 0.23700051009655,
      "learning_rate": 6.632693564200416e-05,
      "loss": 2.1276,
      "step": 97
    },
    {
      "epoch": 0.016773641420624732,
      "grad_norm": 0.29915517568588257,
      "learning_rate": 6.545084971874738e-05,
      "loss": 2.3728,
      "step": 98
    },
    {
      "epoch": 0.016944801026957636,
      "grad_norm": 0.4070492386817932,
      "learning_rate": 6.456948734446624e-05,
      "loss": 2.1064,
      "step": 99
    },
    {
      "epoch": 0.017115960633290545,
      "grad_norm": 0.7625799775123596,
      "learning_rate": 6.368314950360415e-05,
      "loss": 2.4645,
      "step": 100
    },
    {
      "epoch": 0.017115960633290545,
      "eval_loss": 1.9608962535858154,
      "eval_runtime": 254.4528,
      "eval_samples_per_second": 38.671,
      "eval_steps_per_second": 19.336,
      "step": 100
    },
    {
      "epoch": 0.01728712023962345,
      "grad_norm": 0.12267659604549408,
      "learning_rate": 6.279213887972179e-05,
      "loss": 1.8221,
      "step": 101
    },
    {
      "epoch": 0.017458279845956354,
      "grad_norm": 0.11667729169130325,
      "learning_rate": 6.189675975213094e-05,
      "loss": 1.8163,
      "step": 102
    },
    {
      "epoch": 0.01762943945228926,
      "grad_norm": 0.1297779083251953,
      "learning_rate": 6.099731789198344e-05,
      "loss": 1.8252,
      "step": 103
    },
    {
      "epoch": 0.017800599058622166,
      "grad_norm": 0.12183117121458054,
      "learning_rate": 6.009412045785051e-05,
      "loss": 1.79,
      "step": 104
    },
    {
      "epoch": 0.01797175866495507,
      "grad_norm": 0.11510883271694183,
      "learning_rate": 5.918747589082853e-05,
      "loss": 1.7359,
      "step": 105
    },
    {
      "epoch": 0.018142918271287976,
      "grad_norm": 0.11487369984388351,
      "learning_rate": 5.82776938092065e-05,
      "loss": 1.8322,
      "step": 106
    },
    {
      "epoch": 0.01831407787762088,
      "grad_norm": 0.12407288700342178,
      "learning_rate": 5.736508490273188e-05,
      "loss": 1.9415,
      "step": 107
    },
    {
      "epoch": 0.01848523748395379,
      "grad_norm": 0.11880673468112946,
      "learning_rate": 5.644996082651017e-05,
      "loss": 1.7991,
      "step": 108
    },
    {
      "epoch": 0.018656397090286693,
      "grad_norm": 0.12787018716335297,
      "learning_rate": 5.553263409457504e-05,
      "loss": 1.9157,
      "step": 109
    },
    {
      "epoch": 0.018827556696619598,
      "grad_norm": 0.12218679487705231,
      "learning_rate": 5.4613417973165106e-05,
      "loss": 1.843,
      "step": 110
    },
    {
      "epoch": 0.018998716302952502,
      "grad_norm": 0.1273295134305954,
      "learning_rate": 5.3692626373743706e-05,
      "loss": 1.9732,
      "step": 111
    },
    {
      "epoch": 0.01916987590928541,
      "grad_norm": 0.1331591010093689,
      "learning_rate": 5.27705737457985e-05,
      "loss": 1.9346,
      "step": 112
    },
    {
      "epoch": 0.019341035515618315,
      "grad_norm": 0.1283877044916153,
      "learning_rate": 5.184757496945726e-05,
      "loss": 1.9575,
      "step": 113
    },
    {
      "epoch": 0.01951219512195122,
      "grad_norm": 0.11732184886932373,
      "learning_rate": 5.092394524795649e-05,
      "loss": 1.8368,
      "step": 114
    },
    {
      "epoch": 0.019683354728284124,
      "grad_norm": 0.1036926805973053,
      "learning_rate": 5e-05,
      "loss": 1.7971,
      "step": 115
    },
    {
      "epoch": 0.01985451433461703,
      "grad_norm": 0.12696321308612823,
      "learning_rate": 4.907605475204352e-05,
      "loss": 1.987,
      "step": 116
    },
    {
      "epoch": 0.020025673940949937,
      "grad_norm": 0.12035303562879562,
      "learning_rate": 4.8152425030542766e-05,
      "loss": 2.05,
      "step": 117
    },
    {
      "epoch": 0.02019683354728284,
      "grad_norm": 0.09441588819026947,
      "learning_rate": 4.72294262542015e-05,
      "loss": 1.7966,
      "step": 118
    },
    {
      "epoch": 0.020367993153615746,
      "grad_norm": 0.1084362342953682,
      "learning_rate": 4.6307373626256306e-05,
      "loss": 1.9896,
      "step": 119
    },
    {
      "epoch": 0.02053915275994865,
      "grad_norm": 0.10377444326877594,
      "learning_rate": 4.5386582026834906e-05,
      "loss": 1.9442,
      "step": 120
    },
    {
      "epoch": 0.02071031236628156,
      "grad_norm": 0.09699568152427673,
      "learning_rate": 4.446736590542497e-05,
      "loss": 1.8642,
      "step": 121
    },
    {
      "epoch": 0.020881471972614463,
      "grad_norm": 0.10071204602718353,
      "learning_rate": 4.3550039173489845e-05,
      "loss": 1.9961,
      "step": 122
    },
    {
      "epoch": 0.021052631578947368,
      "grad_norm": 0.0921294167637825,
      "learning_rate": 4.2634915097268115e-05,
      "loss": 1.8438,
      "step": 123
    },
    {
      "epoch": 0.021223791185280273,
      "grad_norm": 0.10018813610076904,
      "learning_rate": 4.1722306190793495e-05,
      "loss": 1.9538,
      "step": 124
    },
    {
      "epoch": 0.02139495079161318,
      "grad_norm": 0.09550834447145462,
      "learning_rate": 4.0812524109171476e-05,
      "loss": 1.8572,
      "step": 125
    },
    {
      "epoch": 0.021566110397946085,
      "grad_norm": 0.1069275438785553,
      "learning_rate": 3.99058795421495e-05,
      "loss": 2.0335,
      "step": 126
    },
    {
      "epoch": 0.02173727000427899,
      "grad_norm": 0.1051395982503891,
      "learning_rate": 3.9002682108016585e-05,
      "loss": 2.0374,
      "step": 127
    },
    {
      "epoch": 0.021908429610611894,
      "grad_norm": 0.10209425538778305,
      "learning_rate": 3.8103240247869075e-05,
      "loss": 1.9306,
      "step": 128
    },
    {
      "epoch": 0.022079589216944803,
      "grad_norm": 0.09854423254728317,
      "learning_rate": 3.720786112027822e-05,
      "loss": 1.9756,
      "step": 129
    },
    {
      "epoch": 0.022250748823277707,
      "grad_norm": 0.10003539174795151,
      "learning_rate": 3.631685049639586e-05,
      "loss": 1.9646,
      "step": 130
    },
    {
      "epoch": 0.022421908429610612,
      "grad_norm": 0.10941645503044128,
      "learning_rate": 3.543051265553377e-05,
      "loss": 2.062,
      "step": 131
    },
    {
      "epoch": 0.022593068035943516,
      "grad_norm": 0.1019466444849968,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 1.9037,
      "step": 132
    },
    {
      "epoch": 0.022764227642276424,
      "grad_norm": 0.10443319380283356,
      "learning_rate": 3.367306435799584e-05,
      "loss": 2.0101,
      "step": 133
    },
    {
      "epoch": 0.02293538724860933,
      "grad_norm": 0.10645749419927597,
      "learning_rate": 3.2802554068303596e-05,
      "loss": 1.9192,
      "step": 134
    },
    {
      "epoch": 0.023106546854942234,
      "grad_norm": 0.10710829496383667,
      "learning_rate": 3.1937916690642356e-05,
      "loss": 1.8548,
      "step": 135
    },
    {
      "epoch": 0.02327770646127514,
      "grad_norm": 0.11624782532453537,
      "learning_rate": 3.107944749788449e-05,
      "loss": 1.9397,
      "step": 136
    },
    {
      "epoch": 0.023448866067608043,
      "grad_norm": 0.12152379751205444,
      "learning_rate": 3.0227439656472877e-05,
      "loss": 1.971,
      "step": 137
    },
    {
      "epoch": 0.02362002567394095,
      "grad_norm": 0.12296583503484726,
      "learning_rate": 2.9382184126304834e-05,
      "loss": 1.8179,
      "step": 138
    },
    {
      "epoch": 0.023791185280273856,
      "grad_norm": 0.1308358758687973,
      "learning_rate": 2.8543969561369556e-05,
      "loss": 2.0566,
      "step": 139
    },
    {
      "epoch": 0.02396234488660676,
      "grad_norm": 0.12238892912864685,
      "learning_rate": 2.771308221117309e-05,
      "loss": 2.0106,
      "step": 140
    },
    {
      "epoch": 0.024133504492939665,
      "grad_norm": 0.14263762533664703,
      "learning_rate": 2.688980582298435e-05,
      "loss": 1.8094,
      "step": 141
    },
    {
      "epoch": 0.024304664099272573,
      "grad_norm": 0.16151531040668488,
      "learning_rate": 2.607442154493568e-05,
      "loss": 1.7815,
      "step": 142
    },
    {
      "epoch": 0.024475823705605478,
      "grad_norm": 0.17212718725204468,
      "learning_rate": 2.5267207830011068e-05,
      "loss": 1.977,
      "step": 143
    },
    {
      "epoch": 0.024646983311938382,
      "grad_norm": 0.16314253211021423,
      "learning_rate": 2.446844034095466e-05,
      "loss": 1.8695,
      "step": 144
    },
    {
      "epoch": 0.024818142918271287,
      "grad_norm": 0.1866285353899002,
      "learning_rate": 2.3678391856132204e-05,
      "loss": 1.8092,
      "step": 145
    },
    {
      "epoch": 0.024989302524604195,
      "grad_norm": 0.19816122949123383,
      "learning_rate": 2.2897332176377528e-05,
      "loss": 1.9085,
      "step": 146
    },
    {
      "epoch": 0.0251604621309371,
      "grad_norm": 0.2647472023963928,
      "learning_rate": 2.2125528032855724e-05,
      "loss": 2.0202,
      "step": 147
    },
    {
      "epoch": 0.025331621737270004,
      "grad_norm": 0.37373101711273193,
      "learning_rate": 2.136324299597474e-05,
      "loss": 2.1299,
      "step": 148
    },
    {
      "epoch": 0.02550278134360291,
      "grad_norm": 0.43742233514785767,
      "learning_rate": 2.061073738537635e-05,
      "loss": 2.0061,
      "step": 149
    },
    {
      "epoch": 0.025673940949935817,
      "grad_norm": 0.7022097110748291,
      "learning_rate": 1.9868268181037185e-05,
      "loss": 2.3952,
      "step": 150
    },
    {
      "epoch": 0.025673940949935817,
      "eval_loss": 1.9427156448364258,
      "eval_runtime": 254.3821,
      "eval_samples_per_second": 38.682,
      "eval_steps_per_second": 19.341,
      "step": 150
    },
    {
      "epoch": 0.02584510055626872,
      "grad_norm": 0.11141515523195267,
      "learning_rate": 1.9136088935510362e-05,
      "loss": 2.0646,
      "step": 151
    },
    {
      "epoch": 0.026016260162601626,
      "grad_norm": 0.10801205039024353,
      "learning_rate": 1.8414449687337464e-05,
      "loss": 2.085,
      "step": 152
    },
    {
      "epoch": 0.02618741976893453,
      "grad_norm": 0.09455177932977676,
      "learning_rate": 1.7703596875660645e-05,
      "loss": 1.9206,
      "step": 153
    },
    {
      "epoch": 0.026358579375267435,
      "grad_norm": 0.08838347345590591,
      "learning_rate": 1.700377325606388e-05,
      "loss": 1.8645,
      "step": 154
    },
    {
      "epoch": 0.026529738981600343,
      "grad_norm": 0.08785195648670197,
      "learning_rate": 1.631521781767214e-05,
      "loss": 1.7189,
      "step": 155
    },
    {
      "epoch": 0.026700898587933248,
      "grad_norm": 0.09213719516992569,
      "learning_rate": 1.5638165701536868e-05,
      "loss": 1.6594,
      "step": 156
    },
    {
      "epoch": 0.026872058194266153,
      "grad_norm": 0.09761328995227814,
      "learning_rate": 1.4972848120335453e-05,
      "loss": 1.8997,
      "step": 157
    },
    {
      "epoch": 0.027043217800599057,
      "grad_norm": 0.09117046743631363,
      "learning_rate": 1.4319492279412388e-05,
      "loss": 1.8121,
      "step": 158
    },
    {
      "epoch": 0.027214377406931965,
      "grad_norm": 0.08685458451509476,
      "learning_rate": 1.3678321299188801e-05,
      "loss": 1.8085,
      "step": 159
    },
    {
      "epoch": 0.02738553701326487,
      "grad_norm": 0.09400554746389389,
      "learning_rate": 1.3049554138967051e-05,
      "loss": 1.859,
      "step": 160
    },
    {
      "epoch": 0.027556696619597774,
      "grad_norm": 0.0903010442852974,
      "learning_rate": 1.2433405522156332e-05,
      "loss": 1.8646,
      "step": 161
    },
    {
      "epoch": 0.02772785622593068,
      "grad_norm": 0.09318774938583374,
      "learning_rate": 1.183008586294485e-05,
      "loss": 1.9487,
      "step": 162
    },
    {
      "epoch": 0.027899015832263587,
      "grad_norm": 0.08630430698394775,
      "learning_rate": 1.1239801194443506e-05,
      "loss": 1.7142,
      "step": 163
    },
    {
      "epoch": 0.028070175438596492,
      "grad_norm": 0.09236445277929306,
      "learning_rate": 1.066275309832584e-05,
      "loss": 1.8608,
      "step": 164
    },
    {
      "epoch": 0.028241335044929396,
      "grad_norm": 0.10190917551517487,
      "learning_rate": 1.0099138635988026e-05,
      "loss": 2.0014,
      "step": 165
    },
    {
      "epoch": 0.0284124946512623,
      "grad_norm": 0.09969731420278549,
      "learning_rate": 9.549150281252633e-06,
      "loss": 1.8265,
      "step": 166
    },
    {
      "epoch": 0.02858365425759521,
      "grad_norm": 0.09843458980321884,
      "learning_rate": 9.012975854638949e-06,
      "loss": 1.8552,
      "step": 167
    },
    {
      "epoch": 0.028754813863928114,
      "grad_norm": 0.0932171493768692,
      "learning_rate": 8.490798459222476e-06,
      "loss": 1.8805,
      "step": 168
    },
    {
      "epoch": 0.02892597347026102,
      "grad_norm": 0.09193925559520721,
      "learning_rate": 7.982796418105371e-06,
      "loss": 1.8708,
      "step": 169
    },
    {
      "epoch": 0.029097133076593923,
      "grad_norm": 0.101052425801754,
      "learning_rate": 7.489143213519301e-06,
      "loss": 1.9583,
      "step": 170
    },
    {
      "epoch": 0.02926829268292683,
      "grad_norm": 0.09988513588905334,
      "learning_rate": 7.010007427581378e-06,
      "loss": 1.9813,
      "step": 171
    },
    {
      "epoch": 0.029439452289259736,
      "grad_norm": 0.09843932092189789,
      "learning_rate": 6.5455526847235825e-06,
      "loss": 1.9067,
      "step": 172
    },
    {
      "epoch": 0.02961061189559264,
      "grad_norm": 0.0977838858962059,
      "learning_rate": 6.0959375958151045e-06,
      "loss": 1.8711,
      "step": 173
    },
    {
      "epoch": 0.029781771501925545,
      "grad_norm": 0.10626049339771271,
      "learning_rate": 5.6613157039969055e-06,
      "loss": 2.0638,
      "step": 174
    },
    {
      "epoch": 0.02995293110825845,
      "grad_norm": 0.09931275993585587,
      "learning_rate": 5.241835432246889e-06,
      "loss": 1.8416,
      "step": 175
    },
    {
      "epoch": 0.030124090714591358,
      "grad_norm": 0.10600997507572174,
      "learning_rate": 4.837640032693558e-06,
      "loss": 1.9066,
      "step": 176
    },
    {
      "epoch": 0.030295250320924262,
      "grad_norm": 0.10138809680938721,
      "learning_rate": 4.448867537695578e-06,
      "loss": 1.7798,
      "step": 177
    },
    {
      "epoch": 0.030466409927257167,
      "grad_norm": 0.10285711288452148,
      "learning_rate": 4.075650712703849e-06,
      "loss": 1.9305,
      "step": 178
    },
    {
      "epoch": 0.03063756953359007,
      "grad_norm": 0.10435103625059128,
      "learning_rate": 3.71811701092219e-06,
      "loss": 1.7811,
      "step": 179
    },
    {
      "epoch": 0.03080872913992298,
      "grad_norm": 0.10847938060760498,
      "learning_rate": 3.376388529782215e-06,
      "loss": 1.9549,
      "step": 180
    },
    {
      "epoch": 0.030979888746255884,
      "grad_norm": 0.10423749685287476,
      "learning_rate": 3.0505819692471792e-06,
      "loss": 1.7917,
      "step": 181
    },
    {
      "epoch": 0.03115104835258879,
      "grad_norm": 0.11017204076051712,
      "learning_rate": 2.7408085919590264e-06,
      "loss": 1.9272,
      "step": 182
    },
    {
      "epoch": 0.0313222079589217,
      "grad_norm": 0.11599629372358322,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 1.8393,
      "step": 183
    },
    {
      "epoch": 0.0314933675652546,
      "grad_norm": 0.10840439796447754,
      "learning_rate": 2.1697790249779636e-06,
      "loss": 1.9099,
      "step": 184
    },
    {
      "epoch": 0.031664527171587506,
      "grad_norm": 0.12284722179174423,
      "learning_rate": 1.908717841359048e-06,
      "loss": 2.0558,
      "step": 185
    },
    {
      "epoch": 0.03183568677792041,
      "grad_norm": 0.11635725945234299,
      "learning_rate": 1.6640797865406288e-06,
      "loss": 1.9249,
      "step": 186
    },
    {
      "epoch": 0.032006846384253315,
      "grad_norm": 0.1333126574754715,
      "learning_rate": 1.4359484041943038e-06,
      "loss": 1.9864,
      "step": 187
    },
    {
      "epoch": 0.03217800599058622,
      "grad_norm": 0.13437993824481964,
      "learning_rate": 1.2244016009781701e-06,
      "loss": 1.9203,
      "step": 188
    },
    {
      "epoch": 0.032349165596919124,
      "grad_norm": 0.12741129100322723,
      "learning_rate": 1.0295116199317057e-06,
      "loss": 1.8627,
      "step": 189
    },
    {
      "epoch": 0.032520325203252036,
      "grad_norm": 0.13912999629974365,
      "learning_rate": 8.513450158049108e-07,
      "loss": 1.8976,
      "step": 190
    },
    {
      "epoch": 0.03269148480958494,
      "grad_norm": 0.1495036631822586,
      "learning_rate": 6.899626323298713e-07,
      "loss": 1.9268,
      "step": 191
    },
    {
      "epoch": 0.032862644415917845,
      "grad_norm": 0.14511996507644653,
      "learning_rate": 5.454195814427021e-07,
      "loss": 1.9895,
      "step": 192
    },
    {
      "epoch": 0.03303380402225075,
      "grad_norm": 0.1583525389432907,
      "learning_rate": 4.177652244628627e-07,
      "loss": 2.0931,
      "step": 193
    },
    {
      "epoch": 0.033204963628583654,
      "grad_norm": 0.16893316805362701,
      "learning_rate": 3.0704315523631953e-07,
      "loss": 1.9645,
      "step": 194
    },
    {
      "epoch": 0.03337612323491656,
      "grad_norm": 0.19505704939365387,
      "learning_rate": 2.1329118524827662e-07,
      "loss": 1.6557,
      "step": 195
    },
    {
      "epoch": 0.033547282841249464,
      "grad_norm": 0.20835180580615997,
      "learning_rate": 1.3654133071059893e-07,
      "loss": 1.7806,
      "step": 196
    },
    {
      "epoch": 0.03371844244758237,
      "grad_norm": 0.23454757034778595,
      "learning_rate": 7.681980162830282e-08,
      "loss": 1.9318,
      "step": 197
    },
    {
      "epoch": 0.03388960205391527,
      "grad_norm": 0.2649778425693512,
      "learning_rate": 3.4146992848854695e-08,
      "loss": 1.9983,
      "step": 198
    },
    {
      "epoch": 0.034060761660248184,
      "grad_norm": 0.3988756537437439,
      "learning_rate": 8.537477097364522e-09,
      "loss": 2.2859,
      "step": 199
    },
    {
      "epoch": 0.03423192126658109,
      "grad_norm": 0.5916358828544617,
      "learning_rate": 0.0,
      "loss": 2.5445,
      "step": 200
    },
    {
      "epoch": 0.03423192126658109,
      "eval_loss": 1.942334532737732,
      "eval_runtime": 253.457,
      "eval_samples_per_second": 38.823,
      "eval_steps_per_second": 19.412,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7688467500761088.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}