{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9990771103736721,
  "eval_steps": 50,
  "global_step": 795,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0012567007677656253,
      "grad_norm": 18.75091794627999,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 0.7624,
      "step": 1
    },
    {
      "epoch": 0.0025134015355312506,
      "grad_norm": 17.52654411272103,
      "learning_rate": 5.000000000000001e-07,
      "loss": 0.7516,
      "step": 2
    },
    {
      "epoch": 0.003770102303296876,
      "grad_norm": 16.514255723157607,
      "learning_rate": 7.5e-07,
      "loss": 0.7638,
      "step": 3
    },
    {
      "epoch": 0.005026803071062501,
      "grad_norm": 15.863246140843936,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 0.7628,
      "step": 4
    },
    {
      "epoch": 0.006283503838828127,
      "grad_norm": 16.009485294186245,
      "learning_rate": 1.25e-06,
      "loss": 0.6841,
      "step": 5
    },
    {
      "epoch": 0.007540204606593752,
      "grad_norm": 12.359798457900594,
      "learning_rate": 1.5e-06,
      "loss": 0.6439,
      "step": 6
    },
    {
      "epoch": 0.008796905374359378,
      "grad_norm": 7.382396803359724,
      "learning_rate": 1.75e-06,
      "loss": 0.6556,
      "step": 7
    },
    {
      "epoch": 0.010053606142125002,
      "grad_norm": 9.902433893820467,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.5683,
      "step": 8
    },
    {
      "epoch": 0.011310306909890627,
      "grad_norm": 5.336530839540533,
      "learning_rate": 2.25e-06,
      "loss": 0.5813,
      "step": 9
    },
    {
      "epoch": 0.012567007677656254,
      "grad_norm": 4.96442690704384,
      "learning_rate": 2.5e-06,
      "loss": 0.5555,
      "step": 10
    },
    {
      "epoch": 0.013823708445421879,
      "grad_norm": 4.049027389723904,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.5449,
      "step": 11
    },
    {
      "epoch": 0.015080409213187504,
      "grad_norm": 2.9876344466981175,
      "learning_rate": 3e-06,
      "loss": 0.5739,
      "step": 12
    },
    {
      "epoch": 0.01633710998095313,
      "grad_norm": 3.5573781991080535,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.5489,
      "step": 13
    },
    {
      "epoch": 0.017593810748718755,
      "grad_norm": 2.8413983357245605,
      "learning_rate": 3.5e-06,
      "loss": 0.5336,
      "step": 14
    },
    {
      "epoch": 0.01885051151648438,
      "grad_norm": 3.025149469387222,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.6036,
      "step": 15
    },
    {
      "epoch": 0.020107212284250005,
      "grad_norm": 3.220040516331875,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.5046,
      "step": 16
    },
    {
      "epoch": 0.02136391305201563,
      "grad_norm": 2.783851178663962,
      "learning_rate": 4.25e-06,
      "loss": 0.5365,
      "step": 17
    },
    {
      "epoch": 0.022620613819781255,
      "grad_norm": 2.897929267846762,
      "learning_rate": 4.5e-06,
      "loss": 0.5198,
      "step": 18
    },
    {
      "epoch": 0.02387731458754688,
      "grad_norm": 2.506735216769208,
      "learning_rate": 4.75e-06,
      "loss": 0.5094,
      "step": 19
    },
    {
      "epoch": 0.025134015355312508,
      "grad_norm": 2.565147056156479,
      "learning_rate": 5e-06,
      "loss": 0.5352,
      "step": 20
    },
    {
      "epoch": 0.026390716123078133,
      "grad_norm": 2.516902243933518,
      "learning_rate": 5.2500000000000006e-06,
      "loss": 0.5026,
      "step": 21
    },
    {
      "epoch": 0.027647416890843757,
      "grad_norm": 2.7534715241704832,
      "learning_rate": 5.500000000000001e-06,
      "loss": 0.5186,
      "step": 22
    },
    {
      "epoch": 0.028904117658609382,
      "grad_norm": 2.9905067284933606,
      "learning_rate": 5.75e-06,
      "loss": 0.5913,
      "step": 23
    },
    {
      "epoch": 0.030160818426375007,
      "grad_norm": 2.3261883764698608,
      "learning_rate": 6e-06,
      "loss": 0.5275,
      "step": 24
    },
    {
      "epoch": 0.03141751919414063,
      "grad_norm": 2.2175719666441704,
      "learning_rate": 6.25e-06,
      "loss": 0.4986,
      "step": 25
    },
    {
      "epoch": 0.03267421996190626,
      "grad_norm": 2.499813549718401,
      "learning_rate": 6.5000000000000004e-06,
      "loss": 0.5014,
      "step": 26
    },
    {
      "epoch": 0.03393092072967188,
      "grad_norm": 2.217010030707422,
      "learning_rate": 6.750000000000001e-06,
      "loss": 0.5205,
      "step": 27
    },
    {
      "epoch": 0.03518762149743751,
      "grad_norm": 2.30544152290609,
      "learning_rate": 7e-06,
      "loss": 0.5144,
      "step": 28
    },
    {
      "epoch": 0.03644432226520313,
      "grad_norm": 2.322128869370069,
      "learning_rate": 7.25e-06,
      "loss": 0.583,
      "step": 29
    },
    {
      "epoch": 0.03770102303296876,
      "grad_norm": 2.326781634174622,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.4918,
      "step": 30
    },
    {
      "epoch": 0.03895772380073438,
      "grad_norm": 2.4674314043738943,
      "learning_rate": 7.75e-06,
      "loss": 0.5194,
      "step": 31
    },
    {
      "epoch": 0.04021442456850001,
      "grad_norm": 2.1812796298312707,
      "learning_rate": 8.000000000000001e-06,
      "loss": 0.5206,
      "step": 32
    },
    {
      "epoch": 0.04147112533626564,
      "grad_norm": 2.1549349105457836,
      "learning_rate": 8.25e-06,
      "loss": 0.5916,
      "step": 33
    },
    {
      "epoch": 0.04272782610403126,
      "grad_norm": 2.199454313573332,
      "learning_rate": 8.5e-06,
      "loss": 0.5276,
      "step": 34
    },
    {
      "epoch": 0.04398452687179689,
      "grad_norm": 2.1700798684465537,
      "learning_rate": 8.750000000000001e-06,
      "loss": 0.5261,
      "step": 35
    },
    {
      "epoch": 0.04524122763956251,
      "grad_norm": 2.0007451576632858,
      "learning_rate": 9e-06,
      "loss": 0.528,
      "step": 36
    },
    {
      "epoch": 0.04649792840732814,
      "grad_norm": 2.1400416338070367,
      "learning_rate": 9.250000000000001e-06,
      "loss": 0.5444,
      "step": 37
    },
    {
      "epoch": 0.04775462917509376,
      "grad_norm": 2.403488357912535,
      "learning_rate": 9.5e-06,
      "loss": 0.5155,
      "step": 38
    },
    {
      "epoch": 0.04901132994285939,
      "grad_norm": 3.009219784165261,
      "learning_rate": 9.75e-06,
      "loss": 0.5153,
      "step": 39
    },
    {
      "epoch": 0.050268030710625015,
      "grad_norm": 2.142435686354672,
      "learning_rate": 1e-05,
      "loss": 0.493,
      "step": 40
    },
    {
      "epoch": 0.05152473147839064,
      "grad_norm": 2.239870156543961,
      "learning_rate": 1.025e-05,
      "loss": 0.5078,
      "step": 41
    },
    {
      "epoch": 0.052781432246156265,
      "grad_norm": 2.6345987571093086,
      "learning_rate": 1.0500000000000001e-05,
      "loss": 0.4873,
      "step": 42
    },
    {
      "epoch": 0.05403813301392189,
      "grad_norm": 1.9418390969701194,
      "learning_rate": 1.075e-05,
      "loss": 0.4965,
      "step": 43
    },
    {
      "epoch": 0.055294833781687515,
      "grad_norm": 2.6081247589601193,
      "learning_rate": 1.1000000000000001e-05,
      "loss": 0.5281,
      "step": 44
    },
    {
      "epoch": 0.056551534549453136,
      "grad_norm": 2.0666878694041433,
      "learning_rate": 1.125e-05,
      "loss": 0.5448,
      "step": 45
    },
    {
      "epoch": 0.057808235317218765,
      "grad_norm": 2.404619928095954,
      "learning_rate": 1.15e-05,
      "loss": 0.4765,
      "step": 46
    },
    {
      "epoch": 0.059064936084984386,
      "grad_norm": 2.0576801046013724,
      "learning_rate": 1.1750000000000001e-05,
      "loss": 0.5118,
      "step": 47
    },
    {
      "epoch": 0.060321636852750014,
      "grad_norm": 2.3318752319695717,
      "learning_rate": 1.2e-05,
      "loss": 0.5178,
      "step": 48
    },
    {
      "epoch": 0.06157833762051564,
      "grad_norm": 2.1469466151657293,
      "learning_rate": 1.2250000000000001e-05,
      "loss": 0.5127,
      "step": 49
    },
    {
      "epoch": 0.06283503838828126,
      "grad_norm": 2.122786996441985,
      "learning_rate": 1.25e-05,
      "loss": 0.508,
      "step": 50
    },
    {
      "epoch": 0.06283503838828126,
      "eval_loss": NaN,
      "eval_runtime": 399.176,
      "eval_samples_per_second": 20.83,
      "eval_steps_per_second": 2.605,
      "step": 50
    },
    {
      "epoch": 0.06409173915604689,
      "grad_norm": 2.1375097174172617,
      "learning_rate": 1.275e-05,
      "loss": 0.5397,
      "step": 51
    },
    {
      "epoch": 0.06534843992381252,
      "grad_norm": 2.106065428482765,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.4904,
      "step": 52
    },
    {
      "epoch": 0.06660514069157814,
      "grad_norm": 2.0188566811543973,
      "learning_rate": 1.325e-05,
      "loss": 0.5276,
      "step": 53
    },
    {
      "epoch": 0.06786184145934376,
      "grad_norm": 2.243686294713883,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 0.604,
      "step": 54
    },
    {
      "epoch": 0.06911854222710939,
      "grad_norm": 2.195249183257568,
      "learning_rate": 1.375e-05,
      "loss": 0.4981,
      "step": 55
    },
    {
      "epoch": 0.07037524299487502,
      "grad_norm": 2.192582683330822,
      "learning_rate": 1.4e-05,
      "loss": 0.5028,
      "step": 56
    },
    {
      "epoch": 0.07163194376264065,
      "grad_norm": 2.0627327989918185,
      "learning_rate": 1.425e-05,
      "loss": 0.5125,
      "step": 57
    },
    {
      "epoch": 0.07288864453040626,
      "grad_norm": 1.872712329188738,
      "learning_rate": 1.45e-05,
      "loss": 0.508,
      "step": 58
    },
    {
      "epoch": 0.07414534529817189,
      "grad_norm": 2.2251336046889008,
      "learning_rate": 1.4750000000000003e-05,
      "loss": 0.5025,
      "step": 59
    },
    {
      "epoch": 0.07540204606593752,
      "grad_norm": 1.974200197833877,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.5279,
      "step": 60
    },
    {
      "epoch": 0.07665874683370315,
      "grad_norm": 1.938388866679753,
      "learning_rate": 1.525e-05,
      "loss": 0.5284,
      "step": 61
    },
    {
      "epoch": 0.07791544760146876,
      "grad_norm": 1.9081681403522774,
      "learning_rate": 1.55e-05,
      "loss": 0.587,
      "step": 62
    },
    {
      "epoch": 0.07917214836923439,
      "grad_norm": 1.86283059738371,
      "learning_rate": 1.575e-05,
      "loss": 0.4899,
      "step": 63
    },
    {
      "epoch": 0.08042884913700002,
      "grad_norm": 1.947746623023955,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 0.5527,
      "step": 64
    },
    {
      "epoch": 0.08168554990476565,
      "grad_norm": 1.9186877361719825,
      "learning_rate": 1.6250000000000002e-05,
      "loss": 0.4835,
      "step": 65
    },
    {
      "epoch": 0.08294225067253128,
      "grad_norm": 2.044575571724766,
      "learning_rate": 1.65e-05,
      "loss": 0.5214,
      "step": 66
    },
    {
      "epoch": 0.08419895144029689,
      "grad_norm": 2.1551282978146578,
      "learning_rate": 1.675e-05,
      "loss": 0.5338,
      "step": 67
    },
    {
      "epoch": 0.08545565220806252,
      "grad_norm": 1.9391411629335615,
      "learning_rate": 1.7e-05,
      "loss": 0.5632,
      "step": 68
    },
    {
      "epoch": 0.08671235297582815,
      "grad_norm": 1.8837181224290052,
      "learning_rate": 1.7250000000000003e-05,
      "loss": 0.6073,
      "step": 69
    },
    {
      "epoch": 0.08796905374359378,
      "grad_norm": 2.234034052091539,
      "learning_rate": 1.7500000000000002e-05,
      "loss": 0.5692,
      "step": 70
    },
    {
      "epoch": 0.0892257545113594,
      "grad_norm": 1.8949651791435709,
      "learning_rate": 1.775e-05,
      "loss": 0.5247,
      "step": 71
    },
    {
      "epoch": 0.09048245527912502,
      "grad_norm": 1.7148270660492497,
      "learning_rate": 1.8e-05,
      "loss": 0.5223,
      "step": 72
    },
    {
      "epoch": 0.09173915604689065,
      "grad_norm": 1.9445592195454198,
      "learning_rate": 1.825e-05,
      "loss": 0.5089,
      "step": 73
    },
    {
      "epoch": 0.09299585681465627,
      "grad_norm": 1.8265253877418266,
      "learning_rate": 1.8500000000000002e-05,
      "loss": 0.5236,
      "step": 74
    },
    {
      "epoch": 0.0942525575824219,
      "grad_norm": 1.8233199917702092,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 0.5649,
      "step": 75
    },
    {
      "epoch": 0.09550925835018752,
      "grad_norm": 2.2269645461783405,
      "learning_rate": 1.9e-05,
      "loss": 0.5303,
      "step": 76
    },
    {
      "epoch": 0.09676595911795315,
      "grad_norm": 1.8067805055645487,
      "learning_rate": 1.925e-05,
      "loss": 0.5763,
      "step": 77
    },
    {
      "epoch": 0.09802265988571877,
      "grad_norm": 2.1573534367398364,
      "learning_rate": 1.95e-05,
      "loss": 0.5314,
      "step": 78
    },
    {
      "epoch": 0.0992793606534844,
      "grad_norm": 2.0382764916384546,
      "learning_rate": 1.9750000000000002e-05,
      "loss": 0.566,
      "step": 79
    },
    {
      "epoch": 0.10053606142125003,
      "grad_norm": 1.8481169886281565,
      "learning_rate": 2e-05,
      "loss": 0.519,
      "step": 80
    },
    {
      "epoch": 0.10179276218901565,
      "grad_norm": 2.0250286194462577,
      "learning_rate": 1.9999903471186634e-05,
      "loss": 0.5109,
      "step": 81
    },
    {
      "epoch": 0.10304946295678127,
      "grad_norm": 1.9865218436602592,
      "learning_rate": 1.9999613886610097e-05,
      "loss": 0.5131,
      "step": 82
    },
    {
      "epoch": 0.1043061637245469,
      "grad_norm": 2.146087026568179,
      "learning_rate": 1.9999131251861037e-05,
      "loss": 0.5479,
      "step": 83
    },
    {
      "epoch": 0.10556286449231253,
      "grad_norm": 1.883855000830492,
      "learning_rate": 1.999845557625709e-05,
      "loss": 0.5515,
      "step": 84
    },
    {
      "epoch": 0.10681956526007814,
      "grad_norm": 1.9686030378439734,
      "learning_rate": 1.9997586872842683e-05,
      "loss": 0.527,
      "step": 85
    },
    {
      "epoch": 0.10807626602784377,
      "grad_norm": 2.36575606473121,
      "learning_rate": 1.9996525158388804e-05,
      "loss": 0.5402,
      "step": 86
    },
    {
      "epoch": 0.1093329667956094,
      "grad_norm": 1.7575993387464999,
      "learning_rate": 1.999527045339266e-05,
      "loss": 0.5285,
      "step": 87
    },
    {
      "epoch": 0.11058966756337503,
      "grad_norm": 2.385054737158702,
      "learning_rate": 1.9993822782077282e-05,
      "loss": 0.5904,
      "step": 88
    },
    {
      "epoch": 0.11184636833114066,
      "grad_norm": 1.9263541895607634,
      "learning_rate": 1.999218217239108e-05,
      "loss": 0.5315,
      "step": 89
    },
    {
      "epoch": 0.11310306909890627,
      "grad_norm": 1.985590164700294,
      "learning_rate": 1.999034865600726e-05,
      "loss": 0.5356,
      "step": 90
    },
    {
      "epoch": 0.1143597698666719,
      "grad_norm": 1.8572569893256032,
      "learning_rate": 1.998832226832327e-05,
      "loss": 0.5492,
      "step": 91
    },
    {
      "epoch": 0.11561647063443753,
      "grad_norm": 2.0556913179235305,
      "learning_rate": 1.9986103048460056e-05,
      "loss": 0.5274,
      "step": 92
    },
    {
      "epoch": 0.11687317140220316,
      "grad_norm": 1.7389370827636832,
      "learning_rate": 1.9983691039261358e-05,
      "loss": 0.5498,
      "step": 93
    },
    {
      "epoch": 0.11812987216996877,
      "grad_norm": 1.8846320386596898,
      "learning_rate": 1.9981086287292853e-05,
      "loss": 0.6494,
      "step": 94
    },
    {
      "epoch": 0.1193865729377344,
      "grad_norm": 1.6471533089571895,
      "learning_rate": 1.997828884284126e-05,
      "loss": 0.5245,
      "step": 95
    },
    {
      "epoch": 0.12064327370550003,
      "grad_norm": 1.993702988982152,
      "learning_rate": 1.9975298759913382e-05,
      "loss": 0.5261,
      "step": 96
    },
    {
      "epoch": 0.12189997447326566,
      "grad_norm": 1.74703558113797,
      "learning_rate": 1.9972116096235047e-05,
      "loss": 0.5069,
      "step": 97
    },
    {
      "epoch": 0.12315667524103129,
      "grad_norm": 1.8912653823700982,
      "learning_rate": 1.996874091325001e-05,
      "loss": 0.5378,
      "step": 98
    },
    {
      "epoch": 0.1244133760087969,
      "grad_norm": 1.9270886670287957,
      "learning_rate": 1.9965173276118747e-05,
      "loss": 0.513,
      "step": 99
    },
    {
      "epoch": 0.12567007677656253,
      "grad_norm": 1.9246733227266641,
      "learning_rate": 1.9961413253717214e-05,
      "loss": 0.5371,
      "step": 100
    },
    {
      "epoch": 0.12567007677656253,
      "eval_loss": NaN,
      "eval_runtime": 385.6745,
      "eval_samples_per_second": 21.56,
      "eval_steps_per_second": 2.697,
      "step": 100
    },
    {
      "epoch": 0.12692677754432816,
      "grad_norm": 1.7630213216746193,
      "learning_rate": 1.9957460918635513e-05,
      "loss": 0.495,
      "step": 101
    },
    {
      "epoch": 0.12818347831209378,
      "grad_norm": 1.676584473649907,
      "learning_rate": 1.995331634717649e-05,
      "loss": 0.5571,
      "step": 102
    },
    {
      "epoch": 0.1294401790798594,
      "grad_norm": 1.7393851557554054,
      "learning_rate": 1.9948979619354253e-05,
      "loss": 0.5263,
      "step": 103
    },
    {
      "epoch": 0.13069687984762504,
      "grad_norm": 1.5715249917558214,
      "learning_rate": 1.994445081889264e-05,
      "loss": 0.586,
      "step": 104
    },
    {
      "epoch": 0.13195358061539067,
      "grad_norm": 1.7638743676149398,
      "learning_rate": 1.99397300332236e-05,
      "loss": 0.5371,
      "step": 105
    },
    {
      "epoch": 0.13321028138315627,
      "grad_norm": 1.7333353036478043,
      "learning_rate": 1.99348173534855e-05,
      "loss": 0.5144,
      "step": 106
    },
    {
      "epoch": 0.1344669821509219,
      "grad_norm": 1.6032418727100688,
      "learning_rate": 1.9929712874521375e-05,
      "loss": 0.5037,
      "step": 107
    },
    {
      "epoch": 0.13572368291868753,
      "grad_norm": 1.6332164064062114,
      "learning_rate": 1.9924416694877077e-05,
      "loss": 0.5427,
      "step": 108
    },
    {
      "epoch": 0.13698038368645316,
      "grad_norm": 1.509888795711472,
      "learning_rate": 1.9918928916799395e-05,
      "loss": 0.5748,
      "step": 109
    },
    {
      "epoch": 0.13823708445421878,
      "grad_norm": 1.7081555607417591,
      "learning_rate": 1.9913249646234072e-05,
      "loss": 0.5201,
      "step": 110
    },
    {
      "epoch": 0.1394937852219844,
      "grad_norm": 1.5103689076557207,
      "learning_rate": 1.9907378992823755e-05,
      "loss": 0.5313,
      "step": 111
    },
    {
      "epoch": 0.14075048598975004,
      "grad_norm": 1.6174363391736,
      "learning_rate": 1.990131706990589e-05,
      "loss": 0.5414,
      "step": 112
    },
    {
      "epoch": 0.14200718675751567,
      "grad_norm": 1.5406431804458636,
      "learning_rate": 1.9895063994510512e-05,
      "loss": 0.5183,
      "step": 113
    },
    {
      "epoch": 0.1432638875252813,
      "grad_norm": 1.5953704972891425,
      "learning_rate": 1.9888619887358024e-05,
      "loss": 0.5036,
      "step": 114
    },
    {
      "epoch": 0.1445205882930469,
      "grad_norm": 1.6126627995125455,
      "learning_rate": 1.988198487285682e-05,
      "loss": 0.5268,
      "step": 115
    },
    {
      "epoch": 0.14577728906081253,
      "grad_norm": 1.5872606292238929,
      "learning_rate": 1.9875159079100917e-05,
      "loss": 0.5404,
      "step": 116
    },
    {
      "epoch": 0.14703398982857815,
      "grad_norm": 1.4997817516498149,
      "learning_rate": 1.9868142637867474e-05,
      "loss": 0.4942,
      "step": 117
    },
    {
      "epoch": 0.14829069059634378,
      "grad_norm": 1.5577686391641898,
      "learning_rate": 1.9860935684614235e-05,
      "loss": 0.6059,
      "step": 118
    },
    {
      "epoch": 0.1495473913641094,
      "grad_norm": 1.562098718613041,
      "learning_rate": 1.9853538358476933e-05,
      "loss": 0.5772,
      "step": 119
    },
    {
      "epoch": 0.15080409213187504,
      "grad_norm": 1.4588688366756248,
      "learning_rate": 1.9845950802266584e-05,
      "loss": 0.5652,
      "step": 120
    },
    {
      "epoch": 0.15206079289964067,
      "grad_norm": 1.4789008945019497,
      "learning_rate": 1.983817316246676e-05,
      "loss": 0.5258,
      "step": 121
    },
    {
      "epoch": 0.1533174936674063,
      "grad_norm": 1.6321791739327218,
      "learning_rate": 1.9830205589230713e-05,
      "loss": 0.5833,
      "step": 122
    },
    {
      "epoch": 0.15457419443517192,
      "grad_norm": 1.4311215238919617,
      "learning_rate": 1.9822048236378536e-05,
      "loss": 0.5357,
      "step": 123
    },
    {
      "epoch": 0.15583089520293752,
      "grad_norm": 1.4711622354911698,
      "learning_rate": 1.9813701261394136e-05,
      "loss": 0.5412,
      "step": 124
    },
    {
      "epoch": 0.15708759597070315,
      "grad_norm": 1.561014461052019,
      "learning_rate": 1.980516482542224e-05,
      "loss": 0.5579,
      "step": 125
    },
    {
      "epoch": 0.15834429673846878,
      "grad_norm": 1.5602653545497562,
      "learning_rate": 1.9796439093265245e-05,
      "loss": 0.5243,
      "step": 126
    },
    {
      "epoch": 0.1596009975062344,
      "grad_norm": 1.533314694761247,
      "learning_rate": 1.9787524233380076e-05,
      "loss": 0.525,
      "step": 127
    },
    {
      "epoch": 0.16085769827400004,
      "grad_norm": 1.557676346471582,
      "learning_rate": 1.9778420417874894e-05,
      "loss": 0.5242,
      "step": 128
    },
    {
      "epoch": 0.16211439904176567,
      "grad_norm": 1.5308931674522483,
      "learning_rate": 1.9769127822505805e-05,
      "loss": 0.6101,
      "step": 129
    },
    {
      "epoch": 0.1633710998095313,
      "grad_norm": 1.588409977024727,
      "learning_rate": 1.9759646626673445e-05,
      "loss": 0.5748,
      "step": 130
    },
    {
      "epoch": 0.16462780057729692,
      "grad_norm": 1.5825263108270393,
      "learning_rate": 1.9749977013419536e-05,
      "loss": 0.5317,
      "step": 131
    },
    {
      "epoch": 0.16588450134506255,
      "grad_norm": 1.3805626708260326,
      "learning_rate": 1.9740119169423337e-05,
      "loss": 0.5818,
      "step": 132
    },
    {
      "epoch": 0.16714120211282815,
      "grad_norm": 18.459816671434435,
      "learning_rate": 1.973007328499804e-05,
      "loss": 0.5388,
      "step": 133
    },
    {
      "epoch": 0.16839790288059378,
      "grad_norm": 1.8855793357632873,
      "learning_rate": 1.9719839554087108e-05,
      "loss": 0.623,
      "step": 134
    },
    {
      "epoch": 0.1696546036483594,
      "grad_norm": 1.542977480320436,
      "learning_rate": 1.9709418174260523e-05,
      "loss": 0.5248,
      "step": 135
    },
    {
      "epoch": 0.17091130441612504,
      "grad_norm": 1.6917497316960342,
      "learning_rate": 1.9698809346710965e-05,
      "loss": 0.5624,
      "step": 136
    },
    {
      "epoch": 0.17216800518389067,
      "grad_norm": 1.6100252189793525,
      "learning_rate": 1.9688013276249947e-05,
      "loss": 0.553,
      "step": 137
    },
    {
      "epoch": 0.1734247059516563,
      "grad_norm": 1.5825712812861108,
      "learning_rate": 1.9677030171303842e-05,
      "loss": 0.5645,
      "step": 138
    },
    {
      "epoch": 0.17468140671942192,
      "grad_norm": 1.6638750857270868,
      "learning_rate": 1.966586024390986e-05,
      "loss": 0.5349,
      "step": 139
    },
    {
      "epoch": 0.17593810748718755,
      "grad_norm": 1.5448265769685363,
      "learning_rate": 1.9654503709711984e-05,
      "loss": 0.534,
      "step": 140
    },
    {
      "epoch": 0.17719480825495318,
      "grad_norm": 1.605020502262543,
      "learning_rate": 1.964296078795675e-05,
      "loss": 0.5553,
      "step": 141
    },
    {
      "epoch": 0.1784515090227188,
      "grad_norm": 1.527407593624494,
      "learning_rate": 1.9631231701489083e-05,
      "loss": 0.5298,
      "step": 142
    },
    {
      "epoch": 0.1797082097904844,
      "grad_norm": 1.5204305071770103,
      "learning_rate": 1.9619316676747928e-05,
      "loss": 0.5212,
      "step": 143
    },
    {
      "epoch": 0.18096491055825004,
      "grad_norm": 1.5980803059820479,
      "learning_rate": 1.9607215943761933e-05,
      "loss": 0.5611,
      "step": 144
    },
    {
      "epoch": 0.18222161132601566,
      "grad_norm": 2.3142581133230986,
      "learning_rate": 1.9594929736144978e-05,
      "loss": 0.536,
      "step": 145
    },
    {
      "epoch": 0.1834783120937813,
      "grad_norm": 1.5920429437394876,
      "learning_rate": 1.9582458291091664e-05,
      "loss": 0.5363,
      "step": 146
    },
    {
      "epoch": 0.18473501286154692,
      "grad_norm": 1.4799465577039153,
      "learning_rate": 1.9569801849372757e-05,
      "loss": 0.5093,
      "step": 147
    },
    {
      "epoch": 0.18599171362931255,
      "grad_norm": 1.5040620797412507,
      "learning_rate": 1.9556960655330512e-05,
      "loss": 0.5755,
      "step": 148
    },
    {
      "epoch": 0.18724841439707818,
      "grad_norm": 1.5753521873652412,
      "learning_rate": 1.954393495687398e-05,
      "loss": 0.5408,
      "step": 149
    },
    {
      "epoch": 0.1885051151648438,
      "grad_norm": 1.436217689781534,
      "learning_rate": 1.9530725005474195e-05,
      "loss": 0.5908,
      "step": 150
    },
    {
      "epoch": 0.1885051151648438,
      "eval_loss": NaN,
      "eval_runtime": 389.6633,
      "eval_samples_per_second": 21.339,
      "eval_steps_per_second": 2.669,
      "step": 150
    },
    {
      "epoch": 0.18976181593260943,
      "grad_norm": 1.4915607993777378,
      "learning_rate": 1.9517331056159353e-05,
      "loss": 0.5643,
      "step": 151
    },
    {
      "epoch": 0.19101851670037504,
      "grad_norm": 1.3658845196609777,
      "learning_rate": 1.9503753367509855e-05,
      "loss": 0.5409,
      "step": 152
    },
    {
      "epoch": 0.19227521746814066,
      "grad_norm": 1.6396983118066124,
      "learning_rate": 1.9489992201653337e-05,
      "loss": 0.5318,
      "step": 153
    },
    {
      "epoch": 0.1935319182359063,
      "grad_norm": 1.4530656863997269,
      "learning_rate": 1.9476047824259602e-05,
      "loss": 0.5288,
      "step": 154
    },
    {
      "epoch": 0.19478861900367192,
      "grad_norm": 1.8323995061487757,
      "learning_rate": 1.946192050453549e-05,
      "loss": 0.5207,
      "step": 155
    },
    {
      "epoch": 0.19604531977143755,
      "grad_norm": 1.4963171570508917,
      "learning_rate": 1.944761051521968e-05,
      "loss": 0.5341,
      "step": 156
    },
    {
      "epoch": 0.19730202053920318,
      "grad_norm": 1.4571814574677353,
      "learning_rate": 1.9433118132577432e-05,
      "loss": 0.5147,
      "step": 157
    },
    {
      "epoch": 0.1985587213069688,
      "grad_norm": 1.46930107978953,
      "learning_rate": 1.941844363639525e-05,
      "loss": 0.5533,
      "step": 158
    },
    {
      "epoch": 0.19981542207473443,
      "grad_norm": 1.4876593945156182,
      "learning_rate": 1.9403587309975467e-05,
      "loss": 0.5524,
      "step": 159
    },
    {
      "epoch": 0.20107212284250006,
      "grad_norm": 1.5458825520839694,
      "learning_rate": 1.93885494401308e-05,
      "loss": 0.5147,
      "step": 160
    },
    {
      "epoch": 0.20232882361026566,
      "grad_norm": 1.4541059075079896,
      "learning_rate": 1.9373330317178797e-05,
      "loss": 0.6548,
      "step": 161
    },
    {
      "epoch": 0.2035855243780313,
      "grad_norm": 1.4441733330016775,
      "learning_rate": 1.9357930234936228e-05,
      "loss": 0.5307,
      "step": 162
    },
    {
      "epoch": 0.20484222514579692,
      "grad_norm": 1.3924614428073763,
      "learning_rate": 1.9342349490713427e-05,
      "loss": 0.4934,
      "step": 163
    },
    {
      "epoch": 0.20609892591356255,
      "grad_norm": 1.4163101626594428,
      "learning_rate": 1.932658838530855e-05,
      "loss": 0.5248,
      "step": 164
    },
    {
      "epoch": 0.20735562668132818,
      "grad_norm": 1.4434173531044028,
      "learning_rate": 1.9310647223001752e-05,
      "loss": 0.5606,
      "step": 165
    },
    {
      "epoch": 0.2086123274490938,
      "grad_norm": 1.4428551480982263,
      "learning_rate": 1.929452631154933e-05,
      "loss": 0.531,
      "step": 166
    },
    {
      "epoch": 0.20986902821685943,
      "grad_norm": 1.423851316496012,
      "learning_rate": 1.9278225962177776e-05,
      "loss": 0.5223,
      "step": 167
    },
    {
      "epoch": 0.21112572898462506,
      "grad_norm": 1.492507341069709,
      "learning_rate": 1.9261746489577767e-05,
      "loss": 0.6371,
      "step": 168
    },
    {
      "epoch": 0.2123824297523907,
      "grad_norm": 1.448145384070868,
      "learning_rate": 1.9245088211898086e-05,
      "loss": 0.5101,
      "step": 169
    },
    {
      "epoch": 0.2136391305201563,
      "grad_norm": 1.384046704220895,
      "learning_rate": 1.9228251450739495e-05,
      "loss": 0.5975,
      "step": 170
    },
    {
      "epoch": 0.21489583128792192,
      "grad_norm": 1.3991515824529313,
      "learning_rate": 1.92112365311485e-05,
      "loss": 0.5913,
      "step": 171
    },
    {
      "epoch": 0.21615253205568755,
      "grad_norm": 1.3857833501335572,
      "learning_rate": 1.919404378161111e-05,
      "loss": 0.5224,
      "step": 172
    },
    {
      "epoch": 0.21740923282345317,
      "grad_norm": 1.3507684127877164,
      "learning_rate": 1.9176673534046465e-05,
      "loss": 0.5393,
      "step": 173
    },
    {
      "epoch": 0.2186659335912188,
      "grad_norm": 1.4477184992508454,
      "learning_rate": 1.9159126123800437e-05,
      "loss": 0.5426,
      "step": 174
    },
    {
      "epoch": 0.21992263435898443,
      "grad_norm": 1.2834792398929344,
      "learning_rate": 1.9141401889639167e-05,
      "loss": 0.5497,
      "step": 175
    },
    {
      "epoch": 0.22117933512675006,
      "grad_norm": 1.3623159756756789,
      "learning_rate": 1.9123501173742514e-05,
      "loss": 0.5103,
      "step": 176
    },
    {
      "epoch": 0.2224360358945157,
      "grad_norm": 1.4467854185777025,
      "learning_rate": 1.910542432169745e-05,
      "loss": 0.5348,
      "step": 177
    },
    {
      "epoch": 0.22369273666228132,
      "grad_norm": 1.3284354351981553,
      "learning_rate": 1.908717168249139e-05,
      "loss": 0.528,
      "step": 178
    },
    {
      "epoch": 0.22494943743004692,
      "grad_norm": 1.397962009852414,
      "learning_rate": 1.9068743608505454e-05,
      "loss": 0.5727,
      "step": 179
    },
    {
      "epoch": 0.22620613819781255,
      "grad_norm": 1.427342226625829,
      "learning_rate": 1.905014045550767e-05,
      "loss": 0.5328,
      "step": 180
    },
    {
      "epoch": 0.22746283896557817,
      "grad_norm": 1.287239450182925,
      "learning_rate": 1.903136258264609e-05,
      "loss": 0.5404,
      "step": 181
    },
    {
      "epoch": 0.2287195397333438,
      "grad_norm": 1.3231711055343047,
      "learning_rate": 1.9012410352441866e-05,
      "loss": 0.5387,
      "step": 182
    },
    {
      "epoch": 0.22997624050110943,
      "grad_norm": 1.380871461116662,
      "learning_rate": 1.899328413078227e-05,
      "loss": 0.5245,
      "step": 183
    },
    {
      "epoch": 0.23123294126887506,
      "grad_norm": 1.3476601420913,
      "learning_rate": 1.8973984286913584e-05,
      "loss": 0.5639,
      "step": 184
    },
    {
      "epoch": 0.2324896420366407,
      "grad_norm": 1.3713222215493124,
      "learning_rate": 1.8954511193434024e-05,
      "loss": 0.533,
      "step": 185
    },
    {
      "epoch": 0.23374634280440632,
      "grad_norm": 1.301173886904826,
      "learning_rate": 1.8934865226286507e-05,
      "loss": 0.5302,
      "step": 186
    },
    {
      "epoch": 0.23500304357217194,
      "grad_norm": 1.250844193542083,
      "learning_rate": 1.891504676475141e-05,
      "loss": 0.5678,
      "step": 187
    },
    {
      "epoch": 0.23625974433993754,
      "grad_norm": 1.3624917616288885,
      "learning_rate": 1.8895056191439252e-05,
      "loss": 0.5478,
      "step": 188
    },
    {
      "epoch": 0.23751644510770317,
      "grad_norm": 1.316550725145833,
      "learning_rate": 1.8874893892283296e-05,
      "loss": 0.5371,
      "step": 189
    },
    {
      "epoch": 0.2387731458754688,
      "grad_norm": 1.4024386573064895,
      "learning_rate": 1.8854560256532098e-05,
      "loss": 0.5223,
      "step": 190
    },
    {
      "epoch": 0.24002984664323443,
      "grad_norm": 1.2545561250193908,
      "learning_rate": 1.8834055676742018e-05,
      "loss": 0.5312,
      "step": 191
    },
    {
      "epoch": 0.24128654741100006,
      "grad_norm": 1.3191673233181607,
      "learning_rate": 1.8813380548769594e-05,
      "loss": 0.5449,
      "step": 192
    },
    {
      "epoch": 0.24254324817876569,
      "grad_norm": 1.351413784775738,
      "learning_rate": 1.8792535271763944e-05,
      "loss": 0.5338,
      "step": 193
    },
    {
      "epoch": 0.24379994894653131,
      "grad_norm": 1.241988064092758,
      "learning_rate": 1.8771520248159044e-05,
      "loss": 0.537,
      "step": 194
    },
    {
      "epoch": 0.24505664971429694,
      "grad_norm": 1.2555364071859036,
      "learning_rate": 1.8750335883665948e-05,
      "loss": 0.5831,
      "step": 195
    },
    {
      "epoch": 0.24631335048206257,
      "grad_norm": 1.2648163083865465,
      "learning_rate": 1.8728982587264965e-05,
      "loss": 0.5553,
      "step": 196
    },
    {
      "epoch": 0.2475700512498282,
      "grad_norm": 1.3009972836834804,
      "learning_rate": 1.8707460771197773e-05,
      "loss": 0.5213,
      "step": 197
    },
    {
      "epoch": 0.2488267520175938,
      "grad_norm": 1.308271909393604,
      "learning_rate": 1.8685770850959444e-05,
      "loss": 0.5781,
      "step": 198
    },
    {
      "epoch": 0.25008345278535943,
      "grad_norm": 1.2985339297627132,
      "learning_rate": 1.8663913245290433e-05,
      "loss": 0.5319,
      "step": 199
    },
    {
      "epoch": 0.25134015355312506,
      "grad_norm": 1.2451217830797545,
      "learning_rate": 1.8641888376168483e-05,
      "loss": 0.5608,
      "step": 200
    },
    {
      "epoch": 0.25134015355312506,
      "eval_loss": NaN,
      "eval_runtime": 381.9917,
      "eval_samples_per_second": 21.767,
      "eval_steps_per_second": 2.723,
      "step": 200
    },
    {
      "epoch": 0.2525968543208907,
      "grad_norm": 1.4207395274648773,
      "learning_rate": 1.8619696668800494e-05,
      "loss": 0.5194,
      "step": 201
    },
    {
      "epoch": 0.2538535550886563,
      "grad_norm": 1.2969288642452035,
      "learning_rate": 1.85973385516143e-05,
      "loss": 0.5303,
      "step": 202
    },
    {
      "epoch": 0.25511025585642194,
      "grad_norm": 1.244817385015466,
      "learning_rate": 1.8574814456250406e-05,
      "loss": 0.5519,
      "step": 203
    },
    {
      "epoch": 0.25636695662418757,
      "grad_norm": 1.2786899670392922,
      "learning_rate": 1.855212481755365e-05,
      "loss": 0.5314,
      "step": 204
    },
    {
      "epoch": 0.2576236573919532,
      "grad_norm": 1.6119075958517899,
      "learning_rate": 1.852927007356481e-05,
      "loss": 0.5153,
      "step": 205
    },
    {
      "epoch": 0.2588803581597188,
      "grad_norm": 1.3027579835871468,
      "learning_rate": 1.8506250665512156e-05,
      "loss": 0.5317,
      "step": 206
    },
    {
      "epoch": 0.26013705892748445,
      "grad_norm": 1.4097127031945536,
      "learning_rate": 1.848306703780291e-05,
      "loss": 0.5432,
      "step": 207
    },
    {
      "epoch": 0.2613937596952501,
      "grad_norm": 1.2253893934005955,
      "learning_rate": 1.8459719638014693e-05,
      "loss": 0.5144,
      "step": 208
    },
    {
      "epoch": 0.2626504604630157,
      "grad_norm": 1.3490881789862665,
      "learning_rate": 1.843620891688686e-05,
      "loss": 0.5227,
      "step": 209
    },
    {
      "epoch": 0.26390716123078134,
      "grad_norm": 1.2692047638329742,
      "learning_rate": 1.8412535328311813e-05,
      "loss": 0.5172,
      "step": 210
    },
    {
      "epoch": 0.2651638619985469,
      "grad_norm": 1.3001969775108575,
      "learning_rate": 1.8388699329326237e-05,
      "loss": 0.5013,
      "step": 211
    },
    {
      "epoch": 0.26642056276631254,
      "grad_norm": 1.2460584486023816,
      "learning_rate": 1.8364701380102267e-05,
      "loss": 0.5097,
      "step": 212
    },
    {
      "epoch": 0.26767726353407817,
      "grad_norm": 1.2575445422859304,
      "learning_rate": 1.8340541943938623e-05,
      "loss": 0.5253,
      "step": 213
    },
    {
      "epoch": 0.2689339643018438,
      "grad_norm": 1.247709368946671,
      "learning_rate": 1.831622148725164e-05,
      "loss": 0.5195,
      "step": 214
    },
    {
      "epoch": 0.2701906650696094,
      "grad_norm": 1.331986681061102,
      "learning_rate": 1.8291740479566286e-05,
      "loss": 0.517,
      "step": 215
    },
    {
      "epoch": 0.27144736583737505,
      "grad_norm": 1.2798935292338272,
      "learning_rate": 1.8267099393507083e-05,
      "loss": 0.5774,
      "step": 216
    },
    {
      "epoch": 0.2727040666051407,
      "grad_norm": 1.2305947859868307,
      "learning_rate": 1.8242298704788988e-05,
      "loss": 0.5285,
      "step": 217
    },
    {
      "epoch": 0.2739607673729063,
      "grad_norm": 1.2890582922664517,
      "learning_rate": 1.821733889220822e-05,
      "loss": 0.5111,
      "step": 218
    },
    {
      "epoch": 0.27521746814067194,
      "grad_norm": 1.2655404616013888,
      "learning_rate": 1.819222043763299e-05,
      "loss": 0.549,
      "step": 219
    },
    {
      "epoch": 0.27647416890843757,
      "grad_norm": 1.3037267105348513,
      "learning_rate": 1.816694382599422e-05,
      "loss": 0.535,
      "step": 220
    },
    {
      "epoch": 0.2777308696762032,
      "grad_norm": 1.2775883338383642,
      "learning_rate": 1.814150954527618e-05,
      "loss": 0.518,
      "step": 221
    },
    {
      "epoch": 0.2789875704439688,
      "grad_norm": 1.3182350382629517,
      "learning_rate": 1.8115918086507054e-05,
      "loss": 0.5879,
      "step": 222
    },
    {
      "epoch": 0.28024427121173445,
      "grad_norm": 1.2837663456694282,
      "learning_rate": 1.8090169943749477e-05,
      "loss": 0.5575,
      "step": 223
    },
    {
      "epoch": 0.2815009719795001,
      "grad_norm": 1.3410051389307014,
      "learning_rate": 1.8064265614090976e-05,
      "loss": 0.5776,
      "step": 224
    },
    {
      "epoch": 0.2827576727472657,
      "grad_norm": 1.3538997044655456,
      "learning_rate": 1.8038205597634392e-05,
      "loss": 0.5315,
      "step": 225
    },
    {
      "epoch": 0.28401437351503134,
      "grad_norm": 1.2837659278037532,
      "learning_rate": 1.801199039748822e-05,
      "loss": 0.5341,
      "step": 226
    },
    {
      "epoch": 0.28527107428279697,
      "grad_norm": 1.3959368719539762,
      "learning_rate": 1.7985620519756897e-05,
      "loss": 0.53,
      "step": 227
    },
    {
      "epoch": 0.2865277750505626,
      "grad_norm": 1.277924618484306,
      "learning_rate": 1.7959096473531016e-05,
      "loss": 0.5131,
      "step": 228
    },
    {
      "epoch": 0.2877844758183282,
      "grad_norm": 1.360578070388895,
      "learning_rate": 1.7932418770877523e-05,
      "loss": 0.5146,
      "step": 229
    },
    {
      "epoch": 0.2890411765860938,
      "grad_norm": 1.2558461807056212,
      "learning_rate": 1.7905587926829815e-05,
      "loss": 0.567,
      "step": 230
    },
    {
      "epoch": 0.2902978773538594,
      "grad_norm": 1.2185312710624743,
      "learning_rate": 1.7878604459377795e-05,
      "loss": 0.5259,
      "step": 231
    },
    {
      "epoch": 0.29155457812162505,
      "grad_norm": 1.1707487670839811,
      "learning_rate": 1.7851468889457883e-05,
      "loss": 0.5384,
      "step": 232
    },
    {
      "epoch": 0.2928112788893907,
      "grad_norm": 1.2419851366188785,
      "learning_rate": 1.7824181740942958e-05,
      "loss": 0.526,
      "step": 233
    },
    {
      "epoch": 0.2940679796571563,
      "grad_norm": 1.2190078071425596,
      "learning_rate": 1.7796743540632226e-05,
      "loss": 0.5228,
      "step": 234
    },
    {
      "epoch": 0.29532468042492194,
      "grad_norm": 1.2620408913566545,
      "learning_rate": 1.776915481824107e-05,
      "loss": 0.5362,
      "step": 235
    },
    {
      "epoch": 0.29658138119268757,
      "grad_norm": 1.2019501932459877,
      "learning_rate": 1.7741416106390828e-05,
      "loss": 0.5403,
      "step": 236
    },
    {
      "epoch": 0.2978380819604532,
      "grad_norm": 1.2349476994702815,
      "learning_rate": 1.7713527940598473e-05,
      "loss": 0.5346,
      "step": 237
    },
    {
      "epoch": 0.2990947827282188,
      "grad_norm": 1.228835848024245,
      "learning_rate": 1.7685490859266324e-05,
      "loss": 0.5357,
      "step": 238
    },
    {
      "epoch": 0.30035148349598445,
      "grad_norm": 1.1433816285308742,
      "learning_rate": 1.7657305403671618e-05,
      "loss": 0.5237,
      "step": 239
    },
    {
      "epoch": 0.3016081842637501,
      "grad_norm": 1.1830531733373626,
      "learning_rate": 1.762897211795607e-05,
      "loss": 0.5048,
      "step": 240
    },
    {
      "epoch": 0.3028648850315157,
      "grad_norm": 1.3012741117414692,
      "learning_rate": 1.760049154911537e-05,
      "loss": 0.5325,
      "step": 241
    },
    {
      "epoch": 0.30412158579928134,
      "grad_norm": 1.3019810089595567,
      "learning_rate": 1.7571864246988623e-05,
      "loss": 0.5311,
      "step": 242
    },
    {
      "epoch": 0.30537828656704696,
      "grad_norm": 1.2381762150161675,
      "learning_rate": 1.7543090764247726e-05,
      "loss": 0.5256,
      "step": 243
    },
    {
      "epoch": 0.3066349873348126,
      "grad_norm": 1.189237177245814,
      "learning_rate": 1.751417165638671e-05,
      "loss": 0.533,
      "step": 244
    },
    {
      "epoch": 0.3078916881025782,
      "grad_norm": 1.1392996416893437,
      "learning_rate": 1.7485107481711014e-05,
      "loss": 0.5116,
      "step": 245
    },
    {
      "epoch": 0.30914838887034385,
      "grad_norm": 1.2392857084749362,
      "learning_rate": 1.7455898801326685e-05,
      "loss": 0.5324,
      "step": 246
    },
    {
      "epoch": 0.3104050896381095,
      "grad_norm": 1.2004003016053595,
      "learning_rate": 1.742654617912958e-05,
      "loss": 0.5445,
      "step": 247
    },
    {
      "epoch": 0.31166179040587505,
      "grad_norm": 1.202284432596933,
      "learning_rate": 1.7397050181794463e-05,
      "loss": 0.5455,
      "step": 248
    },
    {
      "epoch": 0.3129184911736407,
      "grad_norm": 1.2367358266165964,
      "learning_rate": 1.736741137876405e-05,
      "loss": 0.5263,
      "step": 249
    },
    {
      "epoch": 0.3141751919414063,
      "grad_norm": 1.2263686883476443,
      "learning_rate": 1.733763034223804e-05,
      "loss": 0.5318,
      "step": 250
    },
    {
      "epoch": 0.3141751919414063,
      "eval_loss": NaN,
      "eval_runtime": 393.3082,
      "eval_samples_per_second": 21.141,
      "eval_steps_per_second": 2.644,
      "step": 250
    },
    {
      "epoch": 0.31543189270917193,
      "grad_norm": 1.839889882151207,
      "learning_rate": 1.730770764716206e-05,
      "loss": 0.4856,
      "step": 251
    },
    {
      "epoch": 0.31668859347693756,
      "grad_norm": 1.2987969186805615,
      "learning_rate": 1.7277643871216558e-05,
      "loss": 0.5393,
      "step": 252
    },
    {
      "epoch": 0.3179452942447032,
      "grad_norm": 1.4429089329852214,
      "learning_rate": 1.724743959480565e-05,
      "loss": 0.5473,
      "step": 253
    },
    {
      "epoch": 0.3192019950124688,
      "grad_norm": 2.3182483058377357,
      "learning_rate": 1.721709540104594e-05,
      "loss": 0.5719,
      "step": 254
    },
    {
      "epoch": 0.32045869578023445,
      "grad_norm": 1.2943046560287874,
      "learning_rate": 1.7186611875755227e-05,
      "loss": 0.5421,
      "step": 255
    },
    {
      "epoch": 0.3217153965480001,
      "grad_norm": 1.3312637937305758,
      "learning_rate": 1.715598960744121e-05,
      "loss": 0.5756,
      "step": 256
    },
    {
      "epoch": 0.3229720973157657,
      "grad_norm": 1.203432712589985,
      "learning_rate": 1.712522918729014e-05,
      "loss": 0.5876,
      "step": 257
    },
    {
      "epoch": 0.32422879808353133,
      "grad_norm": 1.248392549739193,
      "learning_rate": 1.7094331209155394e-05,
      "loss": 0.5375,
      "step": 258
    },
    {
      "epoch": 0.32548549885129696,
      "grad_norm": 1.3256545033784541,
      "learning_rate": 1.7063296269545988e-05,
      "loss": 0.5067,
      "step": 259
    },
    {
      "epoch": 0.3267421996190626,
      "grad_norm": 1.1874224988844255,
      "learning_rate": 1.7032124967615112e-05,
      "loss": 0.5281,
      "step": 260
    },
    {
      "epoch": 0.3279989003868282,
      "grad_norm": 1.36231925710455,
      "learning_rate": 1.7000817905148523e-05,
      "loss": 0.546,
      "step": 261
    },
    {
      "epoch": 0.32925560115459385,
      "grad_norm": 1.2674188969413769,
      "learning_rate": 1.696937568655294e-05,
      "loss": 0.5139,
      "step": 262
    },
    {
      "epoch": 0.3305123019223595,
      "grad_norm": 1.2676550984339194,
      "learning_rate": 1.6937798918844363e-05,
      "loss": 0.5033,
      "step": 263
    },
    {
      "epoch": 0.3317690026901251,
      "grad_norm": 1.2627941544296886,
      "learning_rate": 1.6906088211636387e-05,
      "loss": 0.5323,
      "step": 264
    },
    {
      "epoch": 0.33302570345789073,
      "grad_norm": 1.1828449763849782,
      "learning_rate": 1.6874244177128395e-05,
      "loss": 0.4978,
      "step": 265
    },
    {
      "epoch": 0.3342824042256563,
      "grad_norm": 1.2113446935401968,
      "learning_rate": 1.6842267430093762e-05,
      "loss": 0.5336,
      "step": 266
    },
    {
      "epoch": 0.33553910499342193,
      "grad_norm": 1.1871927265354432,
      "learning_rate": 1.6810158587867973e-05,
      "loss": 0.4963,
      "step": 267
    },
    {
      "epoch": 0.33679580576118756,
      "grad_norm": 1.21086248350049,
      "learning_rate": 1.6777918270336718e-05,
      "loss": 0.5204,
      "step": 268
    },
    {
      "epoch": 0.3380525065289532,
      "grad_norm": 1.2862931993679727,
      "learning_rate": 1.6745547099923917e-05,
      "loss": 0.5306,
      "step": 269
    },
    {
      "epoch": 0.3393092072967188,
      "grad_norm": 1.2086868536312418,
      "learning_rate": 1.6713045701579705e-05,
      "loss": 0.4775,
      "step": 270
    },
    {
      "epoch": 0.34056590806448445,
      "grad_norm": 1.246734645659461,
      "learning_rate": 1.6680414702768358e-05,
      "loss": 0.5003,
      "step": 271
    },
    {
      "epoch": 0.3418226088322501,
      "grad_norm": 1.1307518995270345,
      "learning_rate": 1.66476547334562e-05,
      "loss": 0.5618,
      "step": 272
    },
    {
      "epoch": 0.3430793096000157,
      "grad_norm": 1.2461483743109123,
      "learning_rate": 1.661476642609943e-05,
      "loss": 0.5154,
      "step": 273
    },
    {
      "epoch": 0.34433601036778133,
      "grad_norm": 1.2318638718720292,
      "learning_rate": 1.658175041563189e-05,
      "loss": 0.5255,
      "step": 274
    },
    {
      "epoch": 0.34559271113554696,
      "grad_norm": 1.2614769482466344,
      "learning_rate": 1.6548607339452853e-05,
      "loss": 0.4903,
      "step": 275
    },
    {
      "epoch": 0.3468494119033126,
      "grad_norm": 1.218403889343406,
      "learning_rate": 1.6515337837414677e-05,
      "loss": 0.5374,
      "step": 276
    },
    {
      "epoch": 0.3481061126710782,
      "grad_norm": 1.309250396407954,
      "learning_rate": 1.6481942551810476e-05,
      "loss": 0.5207,
      "step": 277
    },
    {
      "epoch": 0.34936281343884384,
      "grad_norm": 1.1711784327085948,
      "learning_rate": 1.6448422127361707e-05,
      "loss": 0.531,
      "step": 278
    },
    {
      "epoch": 0.3506195142066095,
      "grad_norm": 1.1878422695605408,
      "learning_rate": 1.641477721120573e-05,
      "loss": 0.54,
      "step": 279
    },
    {
      "epoch": 0.3518762149743751,
      "grad_norm": 1.161149033672964,
      "learning_rate": 1.638100845288331e-05,
      "loss": 0.5078,
      "step": 280
    },
    {
      "epoch": 0.35313291574214073,
      "grad_norm": 1.1916167026440183,
      "learning_rate": 1.6347116504326082e-05,
      "loss": 0.5246,
      "step": 281
    },
    {
      "epoch": 0.35438961650990636,
      "grad_norm": 1.1648314835473719,
      "learning_rate": 1.631310201984396e-05,
      "loss": 0.5111,
      "step": 282
    },
    {
      "epoch": 0.355646317277672,
      "grad_norm": 1.2652965637545066,
      "learning_rate": 1.627896565611251e-05,
      "loss": 0.5376,
      "step": 283
    },
    {
      "epoch": 0.3569030180454376,
      "grad_norm": 1.0925768418862014,
      "learning_rate": 1.6244708072160267e-05,
      "loss": 0.5158,
      "step": 284
    },
    {
      "epoch": 0.3581597188132032,
      "grad_norm": 1.237142044854975,
      "learning_rate": 1.6210329929356017e-05,
      "loss": 0.4975,
      "step": 285
    },
    {
      "epoch": 0.3594164195809688,
      "grad_norm": 1.314890455752597,
      "learning_rate": 1.6175831891396034e-05,
      "loss": 0.5504,
      "step": 286
    },
    {
      "epoch": 0.36067312034873444,
      "grad_norm": 1.2189337274031111,
      "learning_rate": 1.614121462429124e-05,
      "loss": 0.55,
      "step": 287
    },
    {
      "epoch": 0.3619298211165001,
      "grad_norm": 1.1195814719052748,
      "learning_rate": 1.6106478796354382e-05,
      "loss": 0.5159,
      "step": 288
    },
    {
      "epoch": 0.3631865218842657,
      "grad_norm": 1.2263611549293467,
      "learning_rate": 1.6071625078187113e-05,
      "loss": 0.5478,
      "step": 289
    },
    {
      "epoch": 0.36444322265203133,
      "grad_norm": 1.150066595637109,
      "learning_rate": 1.6036654142667043e-05,
      "loss": 0.5503,
      "step": 290
    },
    {
      "epoch": 0.36569992341979696,
      "grad_norm": 1.1156487171205896,
      "learning_rate": 1.600156666493475e-05,
      "loss": 0.5218,
      "step": 291
    },
    {
      "epoch": 0.3669566241875626,
      "grad_norm": 1.132115800963164,
      "learning_rate": 1.596636332238076e-05,
      "loss": 0.4936,
      "step": 292
    },
    {
      "epoch": 0.3682133249553282,
      "grad_norm": 1.1553749691225366,
      "learning_rate": 1.593104479463244e-05,
      "loss": 0.5209,
      "step": 293
    },
    {
      "epoch": 0.36947002572309384,
      "grad_norm": 1.1883358272581606,
      "learning_rate": 1.5895611763540914e-05,
      "loss": 0.4964,
      "step": 294
    },
    {
      "epoch": 0.37072672649085947,
      "grad_norm": 1.1558054507595528,
      "learning_rate": 1.5860064913167863e-05,
      "loss": 0.5063,
      "step": 295
    },
    {
      "epoch": 0.3719834272586251,
      "grad_norm": 1.2438113556876693,
      "learning_rate": 1.5824404929772347e-05,
      "loss": 0.507,
      "step": 296
    },
    {
      "epoch": 0.3732401280263907,
      "grad_norm": 1.1179995486577579,
      "learning_rate": 1.5788632501797545e-05,
      "loss": 0.4902,
      "step": 297
    },
    {
      "epoch": 0.37449682879415636,
      "grad_norm": 1.1092026267657866,
      "learning_rate": 1.575274831985746e-05,
      "loss": 0.512,
      "step": 298
    },
    {
      "epoch": 0.375753529561922,
      "grad_norm": 1.154632467938108,
      "learning_rate": 1.5716753076723594e-05,
      "loss": 0.5245,
      "step": 299
    },
    {
      "epoch": 0.3770102303296876,
      "grad_norm": 1.189993649393655,
      "learning_rate": 1.568064746731156e-05,
      "loss": 0.5191,
      "step": 300
    },
    {
      "epoch": 0.3770102303296876,
      "eval_loss": NaN,
      "eval_runtime": 390.9329,
      "eval_samples_per_second": 21.27,
      "eval_steps_per_second": 2.66,
      "step": 300
    },
    {
      "epoch": 0.37826693109745324,
      "grad_norm": 1.1500530686780315,
      "learning_rate": 1.5644432188667695e-05,
      "loss": 0.5288,
      "step": 301
    },
    {
      "epoch": 0.37952363186521887,
      "grad_norm": 1.1878804551065247,
      "learning_rate": 1.5608107939955565e-05,
      "loss": 0.5231,
      "step": 302
    },
    {
      "epoch": 0.38078033263298444,
      "grad_norm": 1.1168332905551357,
      "learning_rate": 1.5571675422442504e-05,
      "loss": 0.5387,
      "step": 303
    },
    {
      "epoch": 0.38203703340075007,
      "grad_norm": 1.1448743390588387,
      "learning_rate": 1.5535135339486044e-05,
      "loss": 0.5214,
      "step": 304
    },
    {
      "epoch": 0.3832937341685157,
      "grad_norm": 1.1699623266496315,
      "learning_rate": 1.549848839652035e-05,
      "loss": 0.5466,
      "step": 305
    },
    {
      "epoch": 0.3845504349362813,
      "grad_norm": 1.1274856023440132,
      "learning_rate": 1.5461735301042615e-05,
      "loss": 0.5534,
      "step": 306
    },
    {
      "epoch": 0.38580713570404696,
      "grad_norm": 1.1200748824733626,
      "learning_rate": 1.542487676259937e-05,
      "loss": 0.5213,
      "step": 307
    },
    {
      "epoch": 0.3870638364718126,
      "grad_norm": 1.1177676101897944,
      "learning_rate": 1.5387913492772816e-05,
      "loss": 0.5261,
      "step": 308
    },
    {
      "epoch": 0.3883205372395782,
      "grad_norm": 1.0956645557598703,
      "learning_rate": 1.5350846205167065e-05,
      "loss": 0.5007,
      "step": 309
    },
    {
      "epoch": 0.38957723800734384,
      "grad_norm": 1.170185855384506,
      "learning_rate": 1.5313675615394373e-05,
      "loss": 0.5221,
      "step": 310
    },
    {
      "epoch": 0.39083393877510947,
      "grad_norm": 1.1889108508498927,
      "learning_rate": 1.527640244106133e-05,
      "loss": 0.5385,
      "step": 311
    },
    {
      "epoch": 0.3920906395428751,
      "grad_norm": 1.1415465303312236,
      "learning_rate": 1.5239027401754987e-05,
      "loss": 0.5112,
      "step": 312
    },
    {
      "epoch": 0.3933473403106407,
      "grad_norm": 1.2013200373878787,
      "learning_rate": 1.5201551219028988e-05,
      "loss": 0.4914,
      "step": 313
    },
    {
      "epoch": 0.39460404107840635,
      "grad_norm": 1.1988351313938457,
      "learning_rate": 1.5163974616389621e-05,
      "loss": 0.4947,
      "step": 314
    },
    {
      "epoch": 0.395860741846172,
      "grad_norm": 1.089460298631534,
      "learning_rate": 1.5126298319281859e-05,
      "loss": 0.5458,
      "step": 315
    },
    {
      "epoch": 0.3971174426139376,
      "grad_norm": 1.1216780572022857,
      "learning_rate": 1.508852305507535e-05,
      "loss": 0.529,
      "step": 316
    },
    {
      "epoch": 0.39837414338170324,
      "grad_norm": 1.1750196353813835,
      "learning_rate": 1.5050649553050383e-05,
      "loss": 0.5343,
      "step": 317
    },
    {
      "epoch": 0.39963084414946887,
      "grad_norm": 1.1613435161044152,
      "learning_rate": 1.5012678544383797e-05,
      "loss": 0.4809,
      "step": 318
    },
    {
      "epoch": 0.4008875449172345,
      "grad_norm": 1.0838357359556312,
      "learning_rate": 1.4974610762134875e-05,
      "loss": 0.5397,
      "step": 319
    },
    {
      "epoch": 0.4021442456850001,
      "grad_norm": 1.1334411455279323,
      "learning_rate": 1.4936446941231186e-05,
      "loss": 0.5023,
      "step": 320
    },
    {
      "epoch": 0.4034009464527657,
      "grad_norm": 1.084067016750503,
      "learning_rate": 1.4898187818454401e-05,
      "loss": 0.507,
      "step": 321
    },
    {
      "epoch": 0.4046576472205313,
      "grad_norm": 1.1436033686465177,
      "learning_rate": 1.485983413242606e-05,
      "loss": 0.4803,
      "step": 322
    },
    {
      "epoch": 0.40591434798829695,
      "grad_norm": 1.1381556287788788,
      "learning_rate": 1.4821386623593332e-05,
      "loss": 0.5212,
      "step": 323
    },
    {
      "epoch": 0.4071710487560626,
      "grad_norm": 1.2083085101416968,
      "learning_rate": 1.4782846034214689e-05,
      "loss": 0.6227,
      "step": 324
    },
    {
      "epoch": 0.4084277495238282,
      "grad_norm": 1.1218491511435251,
      "learning_rate": 1.4744213108345605e-05,
      "loss": 0.5064,
      "step": 325
    },
    {
      "epoch": 0.40968445029159384,
      "grad_norm": 1.222423494749568,
      "learning_rate": 1.4705488591824182e-05,
      "loss": 0.5404,
      "step": 326
    },
    {
      "epoch": 0.41094115105935947,
      "grad_norm": 1.3200013736515412,
      "learning_rate": 1.4666673232256738e-05,
      "loss": 0.5236,
      "step": 327
    },
    {
      "epoch": 0.4121978518271251,
      "grad_norm": 1.174130976702849,
      "learning_rate": 1.4627767779003402e-05,
      "loss": 0.5109,
      "step": 328
    },
    {
      "epoch": 0.4134545525948907,
      "grad_norm": 1.1121745715846247,
      "learning_rate": 1.4588772983163612e-05,
      "loss": 0.5187,
      "step": 329
    },
    {
      "epoch": 0.41471125336265635,
      "grad_norm": 1.1211595150469165,
      "learning_rate": 1.4549689597561652e-05,
      "loss": 0.5195,
      "step": 330
    },
    {
      "epoch": 0.415967954130422,
      "grad_norm": 1.0882830091956517,
      "learning_rate": 1.4510518376732081e-05,
      "loss": 0.5197,
      "step": 331
    },
    {
      "epoch": 0.4172246548981876,
      "grad_norm": 1.103233638957626,
      "learning_rate": 1.4471260076905198e-05,
      "loss": 0.5025,
      "step": 332
    },
    {
      "epoch": 0.41848135566595324,
      "grad_norm": 1.073960984254828,
      "learning_rate": 1.4431915455992416e-05,
      "loss": 0.5093,
      "step": 333
    },
    {
      "epoch": 0.41973805643371886,
      "grad_norm": 1.0782782404257423,
      "learning_rate": 1.4392485273571652e-05,
      "loss": 0.4834,
      "step": 334
    },
    {
      "epoch": 0.4209947572014845,
      "grad_norm": 1.1812719353147918,
      "learning_rate": 1.435297029087265e-05,
      "loss": 0.5096,
      "step": 335
    },
    {
      "epoch": 0.4222514579692501,
      "grad_norm": 1.0945216590303213,
      "learning_rate": 1.431337127076229e-05,
      "loss": 0.4992,
      "step": 336
    },
    {
      "epoch": 0.42350815873701575,
      "grad_norm": 1.124754349061893,
      "learning_rate": 1.4273688977729852e-05,
      "loss": 0.5806,
      "step": 337
    },
    {
      "epoch": 0.4247648595047814,
      "grad_norm": 1.182469605262903,
      "learning_rate": 1.4233924177872269e-05,
      "loss": 0.5119,
      "step": 338
    },
    {
      "epoch": 0.426021560272547,
      "grad_norm": 1.095989659016963,
      "learning_rate": 1.4194077638879333e-05,
      "loss": 0.5369,
      "step": 339
    },
    {
      "epoch": 0.4272782610403126,
      "grad_norm": 1.1287938362303405,
      "learning_rate": 1.4154150130018867e-05,
      "loss": 0.4763,
      "step": 340
    },
    {
      "epoch": 0.4285349618080782,
      "grad_norm": 1.1391290234474063,
      "learning_rate": 1.4114142422121879e-05,
      "loss": 0.5052,
      "step": 341
    },
    {
      "epoch": 0.42979166257584384,
      "grad_norm": 1.0672829625786657,
      "learning_rate": 1.4074055287567685e-05,
      "loss": 0.539,
      "step": 342
    },
    {
      "epoch": 0.43104836334360946,
      "grad_norm": 1.1130997359221613,
      "learning_rate": 1.4033889500268991e-05,
      "loss": 0.5033,
      "step": 343
    },
    {
      "epoch": 0.4323050641113751,
      "grad_norm": 1.11023882693686,
      "learning_rate": 1.3993645835656955e-05,
      "loss": 0.4961,
      "step": 344
    },
    {
      "epoch": 0.4335617648791407,
      "grad_norm": 1.129690617178472,
      "learning_rate": 1.3953325070666215e-05,
      "loss": 0.5302,
      "step": 345
    },
    {
      "epoch": 0.43481846564690635,
      "grad_norm": 1.1112927798263614,
      "learning_rate": 1.3912927983719888e-05,
      "loss": 0.5032,
      "step": 346
    },
    {
      "epoch": 0.436075166414672,
      "grad_norm": 1.1287505890600753,
      "learning_rate": 1.3872455354714552e-05,
      "loss": 0.5287,
      "step": 347
    },
    {
      "epoch": 0.4373318671824376,
      "grad_norm": 1.1474684694732853,
      "learning_rate": 1.3831907965005173e-05,
      "loss": 0.503,
      "step": 348
    },
    {
      "epoch": 0.43858856795020323,
      "grad_norm": 1.1271461572724835,
      "learning_rate": 1.3791286597390035e-05,
      "loss": 0.52,
      "step": 349
    },
    {
      "epoch": 0.43984526871796886,
      "grad_norm": 1.0112579946119413,
      "learning_rate": 1.375059203609562e-05,
      "loss": 0.512,
      "step": 350
    },
    {
      "epoch": 0.43984526871796886,
      "eval_loss": NaN,
      "eval_runtime": 386.8898,
      "eval_samples_per_second": 21.492,
      "eval_steps_per_second": 2.688,
      "step": 350
    },
    {
      "epoch": 0.4411019694857345,
      "grad_norm": 1.1391998689599176,
      "learning_rate": 1.370982506676147e-05,
      "loss": 0.5153,
      "step": 351
    },
    {
      "epoch": 0.4423586702535001,
      "grad_norm": 1.1522319157530978,
      "learning_rate": 1.3668986476425024e-05,
      "loss": 0.5223,
      "step": 352
    },
    {
      "epoch": 0.44361537102126575,
      "grad_norm": 1.056841423994722,
      "learning_rate": 1.362807705350641e-05,
      "loss": 0.5459,
      "step": 353
    },
    {
      "epoch": 0.4448720717890314,
      "grad_norm": 1.1383935709291246,
      "learning_rate": 1.3587097587793243e-05,
      "loss": 0.5449,
      "step": 354
    },
    {
      "epoch": 0.446128772556797,
      "grad_norm": 1.106248398688068,
      "learning_rate": 1.3546048870425356e-05,
      "loss": 0.4944,
      "step": 355
    },
    {
      "epoch": 0.44738547332456263,
      "grad_norm": 1.067289329127628,
      "learning_rate": 1.3504931693879553e-05,
      "loss": 0.5538,
      "step": 356
    },
    {
      "epoch": 0.44864217409232826,
      "grad_norm": 1.0795347423113244,
      "learning_rate": 1.3463746851954275e-05,
      "loss": 0.5306,
      "step": 357
    },
    {
      "epoch": 0.44989887486009383,
      "grad_norm": 1.1163481113252645,
      "learning_rate": 1.342249513975432e-05,
      "loss": 0.5146,
      "step": 358
    },
    {
      "epoch": 0.45115557562785946,
      "grad_norm": 1.1760911501443567,
      "learning_rate": 1.3381177353675441e-05,
      "loss": 0.5239,
      "step": 359
    },
    {
      "epoch": 0.4524122763956251,
      "grad_norm": 1.1510645603762788,
      "learning_rate": 1.3339794291389015e-05,
      "loss": 0.4986,
      "step": 360
    },
    {
      "epoch": 0.4536689771633907,
      "grad_norm": 1.0505550098423375,
      "learning_rate": 1.3298346751826624e-05,
      "loss": 0.4932,
      "step": 361
    },
    {
      "epoch": 0.45492567793115635,
      "grad_norm": 1.110458489656852,
      "learning_rate": 1.3256835535164622e-05,
      "loss": 0.499,
      "step": 362
    },
    {
      "epoch": 0.456182378698922,
      "grad_norm": 1.0429297164870697,
      "learning_rate": 1.3215261442808718e-05,
      "loss": 0.5519,
      "step": 363
    },
    {
      "epoch": 0.4574390794666876,
      "grad_norm": 1.1025779859161235,
      "learning_rate": 1.3173625277378464e-05,
      "loss": 0.498,
      "step": 364
    },
    {
      "epoch": 0.45869578023445323,
      "grad_norm": 1.0866619959147732,
      "learning_rate": 1.3131927842691793e-05,
      "loss": 0.4939,
      "step": 365
    },
    {
      "epoch": 0.45995248100221886,
      "grad_norm": 1.0657930951927579,
      "learning_rate": 1.3090169943749475e-05,
      "loss": 0.4984,
      "step": 366
    },
    {
      "epoch": 0.4612091817699845,
      "grad_norm": 1.0589212846976253,
      "learning_rate": 1.3048352386719605e-05,
      "loss": 0.4943,
      "step": 367
    },
    {
      "epoch": 0.4624658825377501,
      "grad_norm": 1.0942767373527489,
      "learning_rate": 1.3006475978922013e-05,
      "loss": 0.5145,
      "step": 368
    },
    {
      "epoch": 0.46372258330551575,
      "grad_norm": 1.123925914892006,
      "learning_rate": 1.2964541528812689e-05,
      "loss": 0.5212,
      "step": 369
    },
    {
      "epoch": 0.4649792840732814,
      "grad_norm": 1.1029621728132757,
      "learning_rate": 1.2922549845968174e-05,
      "loss": 0.5008,
      "step": 370
    },
    {
      "epoch": 0.466235984841047,
      "grad_norm": 1.1066345430120166,
      "learning_rate": 1.2880501741069931e-05,
      "loss": 0.5099,
      "step": 371
    },
    {
      "epoch": 0.46749268560881263,
      "grad_norm": 1.1178828922894442,
      "learning_rate": 1.2838398025888695e-05,
      "loss": 0.5174,
      "step": 372
    },
    {
      "epoch": 0.46874938637657826,
      "grad_norm": 1.0618894730289437,
      "learning_rate": 1.2796239513268796e-05,
      "loss": 0.5266,
      "step": 373
    },
    {
      "epoch": 0.4700060871443439,
      "grad_norm": 1.1442577048190856,
      "learning_rate": 1.275402701711248e-05,
      "loss": 0.5406,
      "step": 374
    },
    {
      "epoch": 0.4712627879121095,
      "grad_norm": 1.5555245911074627,
      "learning_rate": 1.2711761352364172e-05,
      "loss": 0.526,
      "step": 375
    },
    {
      "epoch": 0.4725194886798751,
      "grad_norm": 3.2281540462224814,
      "learning_rate": 1.2669443334994768e-05,
      "loss": 0.5058,
      "step": 376
    },
    {
      "epoch": 0.4737761894476407,
      "grad_norm": 1.230186696498868,
      "learning_rate": 1.262707378198587e-05,
      "loss": 0.5189,
      "step": 377
    },
    {
      "epoch": 0.47503289021540634,
      "grad_norm": 1.1067717727956337,
      "learning_rate": 1.2584653511314012e-05,
      "loss": 0.5499,
      "step": 378
    },
    {
      "epoch": 0.476289590983172,
      "grad_norm": 1.2096032452146201,
      "learning_rate": 1.2542183341934873e-05,
      "loss": 0.4961,
      "step": 379
    },
    {
      "epoch": 0.4775462917509376,
      "grad_norm": 1.1760777197898722,
      "learning_rate": 1.2499664093767458e-05,
      "loss": 0.5352,
      "step": 380
    },
    {
      "epoch": 0.47880299251870323,
      "grad_norm": 1.1829467616609262,
      "learning_rate": 1.245709658767829e-05,
      "loss": 0.5039,
      "step": 381
    },
    {
      "epoch": 0.48005969328646886,
      "grad_norm": 1.093348347620876,
      "learning_rate": 1.241448164546553e-05,
      "loss": 0.5063,
      "step": 382
    },
    {
      "epoch": 0.4813163940542345,
      "grad_norm": 1.0813316534886166,
      "learning_rate": 1.2371820089843145e-05,
      "loss": 0.504,
      "step": 383
    },
    {
      "epoch": 0.4825730948220001,
      "grad_norm": 1.0559642834186367,
      "learning_rate": 1.2329112744425e-05,
      "loss": 0.4968,
      "step": 384
    },
    {
      "epoch": 0.48382979558976574,
      "grad_norm": 1.1119456528103215,
      "learning_rate": 1.2286360433708976e-05,
      "loss": 0.5623,
      "step": 385
    },
    {
      "epoch": 0.48508649635753137,
      "grad_norm": 1.143973204715742,
      "learning_rate": 1.2243563983061029e-05,
      "loss": 0.4909,
      "step": 386
    },
    {
      "epoch": 0.486343197125297,
      "grad_norm": 1.0989207521976259,
      "learning_rate": 1.2200724218699284e-05,
      "loss": 0.5342,
      "step": 387
    },
    {
      "epoch": 0.48759989789306263,
      "grad_norm": 1.097054041408541,
      "learning_rate": 1.2157841967678064e-05,
      "loss": 0.4762,
      "step": 388
    },
    {
      "epoch": 0.48885659866082826,
      "grad_norm": 1.0459339979949123,
      "learning_rate": 1.2114918057871928e-05,
      "loss": 0.4651,
      "step": 389
    },
    {
      "epoch": 0.4901132994285939,
      "grad_norm": 1.0847962784136453,
      "learning_rate": 1.2071953317959692e-05,
      "loss": 0.5305,
      "step": 390
    },
    {
      "epoch": 0.4913700001963595,
      "grad_norm": 1.1246107275670814,
      "learning_rate": 1.202894857740843e-05,
      "loss": 0.524,
      "step": 391
    },
    {
      "epoch": 0.49262670096412514,
      "grad_norm": 1.0117888555208185,
      "learning_rate": 1.1985904666457455e-05,
      "loss": 0.4387,
      "step": 392
    },
    {
      "epoch": 0.49388340173189077,
      "grad_norm": 1.072130705469674,
      "learning_rate": 1.19428224161023e-05,
      "loss": 0.4996,
      "step": 393
    },
    {
      "epoch": 0.4951401024996564,
      "grad_norm": 1.0949029023305772,
      "learning_rate": 1.1899702658078663e-05,
      "loss": 0.5001,
      "step": 394
    },
    {
      "epoch": 0.49639680326742197,
      "grad_norm": 1.0307577287604857,
      "learning_rate": 1.1856546224846354e-05,
      "loss": 0.5561,
      "step": 395
    },
    {
      "epoch": 0.4976535040351876,
      "grad_norm": 1.0754967872326906,
      "learning_rate": 1.181335394957324e-05,
      "loss": 0.4659,
      "step": 396
    },
    {
      "epoch": 0.4989102048029532,
      "grad_norm": 1.078728824641922,
      "learning_rate": 1.1770126666119133e-05,
      "loss": 0.5014,
      "step": 397
    },
    {
      "epoch": 0.5001669055707189,
      "grad_norm": 1.0499497992004496,
      "learning_rate": 1.1726865209019709e-05,
      "loss": 0.4716,
      "step": 398
    },
    {
      "epoch": 0.5014236063384845,
      "grad_norm": 1.0777776407348791,
      "learning_rate": 1.1683570413470384e-05,
      "loss": 0.4928,
      "step": 399
    },
    {
      "epoch": 0.5026803071062501,
      "grad_norm": 1.0574879518909284,
      "learning_rate": 1.1640243115310219e-05,
      "loss": 0.4814,
      "step": 400
    },
    {
      "epoch": 0.5026803071062501,
      "eval_loss": NaN,
      "eval_runtime": 389.4493,
      "eval_samples_per_second": 21.351,
      "eval_steps_per_second": 2.67,
      "step": 400
    },
|
    {
      "epoch": 0.5039370078740157,
      "grad_norm": 1.0747572780095613,
      "learning_rate": 1.1596884151005743e-05,
      "loss": 0.5086,
      "step": 401
    },
    {
      "epoch": 0.5051937086417814,
      "grad_norm": 1.104818199875437,
      "learning_rate": 1.155349435763483e-05,
      "loss": 0.5385,
      "step": 402
    },
    {
      "epoch": 0.506450409409547,
      "grad_norm": 1.0738096255710292,
      "learning_rate": 1.1510074572870533e-05,
      "loss": 0.4624,
      "step": 403
    },
    {
      "epoch": 0.5077071101773126,
      "grad_norm": 1.0669378834870606,
      "learning_rate": 1.1466625634964911e-05,
      "loss": 0.5015,
      "step": 404
    },
    {
      "epoch": 0.5089638109450783,
      "grad_norm": 1.0058479382503636,
      "learning_rate": 1.1423148382732854e-05,
      "loss": 0.4808,
      "step": 405
    },
    {
      "epoch": 0.5102205117128439,
      "grad_norm": 1.0558622246582776,
      "learning_rate": 1.1379643655535869e-05,
      "loss": 0.4676,
      "step": 406
    },
    {
      "epoch": 0.5114772124806095,
      "grad_norm": 1.0593950117803592,
      "learning_rate": 1.1336112293265896e-05,
      "loss": 0.5675,
      "step": 407
    },
    {
      "epoch": 0.5127339132483751,
      "grad_norm": 1.0819776501101257,
      "learning_rate": 1.1292555136329082e-05,
      "loss": 0.523,
      "step": 408
    },
    {
      "epoch": 0.5139906140161408,
      "grad_norm": 1.1503702106834164,
      "learning_rate": 1.1248973025629567e-05,
      "loss": 0.573,
      "step": 409
    },
    {
      "epoch": 0.5152473147839064,
      "grad_norm": 1.1896682820272197,
      "learning_rate": 1.1205366802553231e-05,
      "loss": 0.5443,
      "step": 410
    },
    {
      "epoch": 0.516504015551672,
      "grad_norm": 1.051710377423096,
      "learning_rate": 1.1161737308951473e-05,
      "loss": 0.5213,
      "step": 411
    },
    {
      "epoch": 0.5177607163194377,
      "grad_norm": 1.0876393327574223,
      "learning_rate": 1.111808538712494e-05,
      "loss": 0.4783,
      "step": 412
    },
    {
      "epoch": 0.5190174170872033,
      "grad_norm": 1.1011922768783085,
      "learning_rate": 1.1074411879807271e-05,
      "loss": 0.5381,
      "step": 413
    },
    {
      "epoch": 0.5202741178549689,
      "grad_norm": 1.0360462896838518,
      "learning_rate": 1.1030717630148839e-05,
      "loss": 0.4827,
      "step": 414
    },
    {
      "epoch": 0.5215308186227345,
      "grad_norm": 1.0634457362167398,
      "learning_rate": 1.0987003481700456e-05,
      "loss": 0.468,
      "step": 415
    },
    {
      "epoch": 0.5227875193905002,
      "grad_norm": 1.063398934605818,
      "learning_rate": 1.0943270278397097e-05,
      "loss": 0.5022,
      "step": 416
    },
    {
      "epoch": 0.5240442201582658,
      "grad_norm": 1.1236585460615724,
      "learning_rate": 1.0899518864541607e-05,
      "loss": 0.4927,
      "step": 417
    },
    {
      "epoch": 0.5253009209260314,
      "grad_norm": 1.1131356127327812,
      "learning_rate": 1.08557500847884e-05,
      "loss": 0.482,
      "step": 418
    },
    {
      "epoch": 0.526557621693797,
      "grad_norm": 1.074020533914174,
      "learning_rate": 1.0811964784127145e-05,
      "loss": 0.4989,
      "step": 419
    },
    {
      "epoch": 0.5278143224615627,
      "grad_norm": 1.10409363319172,
      "learning_rate": 1.076816380786647e-05,
      "loss": 0.5241,
      "step": 420
    },
    {
      "epoch": 0.5290710232293283,
      "grad_norm": 1.122732579577094,
      "learning_rate": 1.0724348001617626e-05,
      "loss": 0.5156,
      "step": 421
    },
    {
      "epoch": 0.5303277239970938,
      "grad_norm": 1.0617618240477706,
      "learning_rate": 1.0680518211278169e-05,
      "loss": 0.48,
      "step": 422
    },
    {
      "epoch": 0.5315844247648595,
      "grad_norm": 1.0479305127381242,
      "learning_rate": 1.063667528301563e-05,
      "loss": 0.4979,
      "step": 423
    },
    {
      "epoch": 0.5328411255326251,
      "grad_norm": 0.9841907268191958,
      "learning_rate": 1.0592820063251177e-05,
      "loss": 0.4794,
      "step": 424
    },
    {
      "epoch": 0.5340978263003907,
      "grad_norm": 1.0319821042091273,
      "learning_rate": 1.0548953398643276e-05,
      "loss": 0.5059,
      "step": 425
    },
    {
      "epoch": 0.5353545270681563,
      "grad_norm": 1.0138228640258524,
      "learning_rate": 1.0505076136071342e-05,
      "loss": 0.4951,
      "step": 426
    },
    {
      "epoch": 0.536611227835922,
      "grad_norm": 1.1594538762080069,
      "learning_rate": 1.0461189122619394e-05,
      "loss": 0.4968,
      "step": 427
    },
    {
      "epoch": 0.5378679286036876,
      "grad_norm": 1.007644935103111,
      "learning_rate": 1.0417293205559694e-05,
      "loss": 0.5192,
      "step": 428
    },
    {
      "epoch": 0.5391246293714532,
      "grad_norm": 1.0681339563091352,
      "learning_rate": 1.0373389232336404e-05,
      "loss": 0.516,
      "step": 429
    },
    {
      "epoch": 0.5403813301392189,
      "grad_norm": 1.0071232530991345,
      "learning_rate": 1.0329478050549208e-05,
      "loss": 0.4812,
      "step": 430
    },
    {
      "epoch": 0.5416380309069845,
      "grad_norm": 1.0658183171395332,
      "learning_rate": 1.0285560507936962e-05,
      "loss": 0.4668,
      "step": 431
    },
    {
      "epoch": 0.5428947316747501,
      "grad_norm": 0.97242755575935,
      "learning_rate": 1.0241637452361323e-05,
      "loss": 0.5024,
      "step": 432
    },
    {
      "epoch": 0.5441514324425157,
      "grad_norm": 1.1113207771581286,
      "learning_rate": 1.0197709731790375e-05,
      "loss": 0.4847,
      "step": 433
    },
    {
      "epoch": 0.5454081332102814,
      "grad_norm": 1.0305552020476478,
      "learning_rate": 1.0153778194282269e-05,
      "loss": 0.5019,
      "step": 434
    },
    {
      "epoch": 0.546664833978047,
      "grad_norm": 1.0370594881922448,
      "learning_rate": 1.0109843687968837e-05,
      "loss": 0.4958,
      "step": 435
    },
    {
      "epoch": 0.5479215347458126,
      "grad_norm": 1.01930158227936,
      "learning_rate": 1.0065907061039234e-05,
      "loss": 0.5096,
      "step": 436
    },
    {
      "epoch": 0.5491782355135783,
      "grad_norm": 0.9871603297074679,
      "learning_rate": 1.0021969161723555e-05,
      "loss": 0.4685,
      "step": 437
    },
    {
      "epoch": 0.5504349362813439,
      "grad_norm": 1.0920379317965205,
      "learning_rate": 9.97803083827645e-06,
      "loss": 0.4644,
      "step": 438
    },
    {
      "epoch": 0.5516916370491095,
      "grad_norm": 1.054600973465295,
      "learning_rate": 9.934092938960769e-06,
      "loss": 0.4893,
      "step": 439
    },
    {
      "epoch": 0.5529483378168751,
      "grad_norm": 1.0201582460505152,
      "learning_rate": 9.890156312031165e-06,
      "loss": 0.4955,
      "step": 440
    },
    {
      "epoch": 0.5542050385846408,
      "grad_norm": 0.9716441699106971,
      "learning_rate": 9.846221805717734e-06,
      "loss": 0.495,
      "step": 441
    },
    {
      "epoch": 0.5554617393524064,
      "grad_norm": 1.082312481961198,
      "learning_rate": 9.802290268209627e-06,
      "loss": 0.5257,
      "step": 442
    },
    {
      "epoch": 0.556718440120172,
      "grad_norm": 1.0097621423303396,
      "learning_rate": 9.75836254763868e-06,
      "loss": 0.5063,
      "step": 443
    },
    {
      "epoch": 0.5579751408879376,
      "grad_norm": 1.0275627453990908,
      "learning_rate": 9.71443949206304e-06,
      "loss": 0.5461,
      "step": 444
    },
    {
      "epoch": 0.5592318416557033,
      "grad_norm": 0.990883428276697,
      "learning_rate": 9.670521949450793e-06,
      "loss": 0.4691,
      "step": 445
    },
    {
      "epoch": 0.5604885424234689,
      "grad_norm": 1.3593079122891867,
      "learning_rate": 9.6266107676636e-06,
      "loss": 0.4809,
      "step": 446
    },
    {
      "epoch": 0.5617452431912345,
      "grad_norm": 1.0402512025062964,
      "learning_rate": 9.58270679444031e-06,
      "loss": 0.5249,
      "step": 447
    },
    {
      "epoch": 0.5630019439590002,
      "grad_norm": 1.0374899415597172,
      "learning_rate": 9.538810877380611e-06,
      "loss": 0.4911,
      "step": 448
    },
    {
      "epoch": 0.5642586447267658,
      "grad_norm": 1.0181963135605545,
      "learning_rate": 9.49492386392866e-06,
      "loss": 0.5074,
      "step": 449
    },
    {
      "epoch": 0.5655153454945314,
      "grad_norm": 1.2455466094666068,
      "learning_rate": 9.451046601356725e-06,
      "loss": 0.5031,
      "step": 450
    },
    {
      "epoch": 0.5655153454945314,
      "eval_loss": NaN,
      "eval_runtime": 390.362,
      "eval_samples_per_second": 21.301,
      "eval_steps_per_second": 2.664,
      "step": 450
    },
|
    {
      "epoch": 0.566772046262297,
      "grad_norm": 1.087941249032518,
      "learning_rate": 9.407179936748827e-06,
      "loss": 0.4838,
      "step": 451
    },
    {
      "epoch": 0.5680287470300627,
      "grad_norm": 1.0487351474005773,
      "learning_rate": 9.363324716984375e-06,
      "loss": 0.6147,
      "step": 452
    },
    {
      "epoch": 0.5692854477978283,
      "grad_norm": 1.0006830147884598,
      "learning_rate": 9.319481788721833e-06,
      "loss": 0.4864,
      "step": 453
    },
    {
      "epoch": 0.5705421485655939,
      "grad_norm": 1.0804045530543167,
      "learning_rate": 9.275651998382377e-06,
      "loss": 0.465,
      "step": 454
    },
    {
      "epoch": 0.5717988493333596,
      "grad_norm": 1.0428355842758288,
      "learning_rate": 9.231836192133532e-06,
      "loss": 0.4821,
      "step": 455
    },
    {
      "epoch": 0.5730555501011252,
      "grad_norm": 1.0610803769161647,
      "learning_rate": 9.188035215872858e-06,
      "loss": 0.4867,
      "step": 456
    },
    {
      "epoch": 0.5743122508688908,
      "grad_norm": 1.0520660600408804,
      "learning_rate": 9.144249915211605e-06,
      "loss": 0.5214,
      "step": 457
    },
    {
      "epoch": 0.5755689516366564,
      "grad_norm": 1.0373635995203723,
      "learning_rate": 9.100481135458393e-06,
      "loss": 0.4943,
      "step": 458
    },
    {
      "epoch": 0.576825652404422,
      "grad_norm": 1.3517258385540856,
      "learning_rate": 9.056729721602904e-06,
      "loss": 0.6224,
      "step": 459
    },
    {
      "epoch": 0.5780823531721876,
      "grad_norm": 1.0301066331386723,
      "learning_rate": 9.012996518299547e-06,
      "loss": 0.5092,
      "step": 460
    },
    {
      "epoch": 0.5793390539399532,
      "grad_norm": 1.0099912645887554,
      "learning_rate": 8.969282369851163e-06,
      "loss": 0.4901,
      "step": 461
    },
    {
      "epoch": 0.5805957547077188,
      "grad_norm": 1.0482057682006019,
      "learning_rate": 8.92558812019273e-06,
      "loss": 0.4793,
      "step": 462
    },
    {
      "epoch": 0.5818524554754845,
      "grad_norm": 1.0475419610115655,
      "learning_rate": 8.881914612875062e-06,
      "loss": 0.48,
      "step": 463
    },
    {
      "epoch": 0.5831091562432501,
      "grad_norm": 1.0523184837700892,
      "learning_rate": 8.838262691048529e-06,
      "loss": 0.4852,
      "step": 464
    },
    {
      "epoch": 0.5843658570110157,
      "grad_norm": 1.3520200098351463,
      "learning_rate": 8.79463319744677e-06,
      "loss": 0.4807,
      "step": 465
    },
    {
      "epoch": 0.5856225577787814,
      "grad_norm": 1.070883661168547,
      "learning_rate": 8.751026974370438e-06,
      "loss": 0.5023,
      "step": 466
    },
    {
      "epoch": 0.586879258546547,
      "grad_norm": 1.064553688268609,
      "learning_rate": 8.70744486367092e-06,
      "loss": 0.4924,
      "step": 467
    },
    {
      "epoch": 0.5881359593143126,
      "grad_norm": 1.0759932963506342,
      "learning_rate": 8.66388770673411e-06,
      "loss": 0.4971,
      "step": 468
    },
    {
      "epoch": 0.5893926600820782,
      "grad_norm": 0.9719685034220976,
      "learning_rate": 8.620356344464135e-06,
      "loss": 0.4932,
      "step": 469
    },
    {
      "epoch": 0.5906493608498439,
      "grad_norm": 1.0215732980684333,
      "learning_rate": 8.576851617267151e-06,
      "loss": 0.4901,
      "step": 470
    },
    {
      "epoch": 0.5919060616176095,
      "grad_norm": 1.0057794928079793,
      "learning_rate": 8.533374365035089e-06,
      "loss": 0.4755,
      "step": 471
    },
    {
      "epoch": 0.5931627623853751,
      "grad_norm": 1.0731603783900507,
      "learning_rate": 8.489925427129469e-06,
      "loss": 0.5194,
      "step": 472
    },
    {
      "epoch": 0.5944194631531408,
      "grad_norm": 1.0413553281269248,
      "learning_rate": 8.446505642365174e-06,
      "loss": 0.4651,
      "step": 473
    },
    {
      "epoch": 0.5956761639209064,
      "grad_norm": 1.0439372619946545,
      "learning_rate": 8.40311584899426e-06,
      "loss": 0.5009,
      "step": 474
    },
    {
      "epoch": 0.596932864688672,
      "grad_norm": 1.079306190387013,
      "learning_rate": 8.359756884689785e-06,
      "loss": 0.4643,
      "step": 475
    },
    {
      "epoch": 0.5981895654564376,
      "grad_norm": 0.9998675795140389,
      "learning_rate": 8.316429586529616e-06,
      "loss": 0.4811,
      "step": 476
    },
    {
      "epoch": 0.5994462662242033,
      "grad_norm": 0.9612157155286085,
      "learning_rate": 8.273134790980295e-06,
      "loss": 0.4732,
      "step": 477
    },
    {
      "epoch": 0.6007029669919689,
      "grad_norm": 1.0216579545060833,
      "learning_rate": 8.22987333388087e-06,
      "loss": 0.4878,
      "step": 478
    },
    {
      "epoch": 0.6019596677597345,
      "grad_norm": 1.035592271636439,
      "learning_rate": 8.186646050426763e-06,
      "loss": 0.4763,
      "step": 479
    },
    {
      "epoch": 0.6032163685275002,
      "grad_norm": 0.9266009986946163,
      "learning_rate": 8.143453775153646e-06,
      "loss": 0.4962,
      "step": 480
    },
    {
      "epoch": 0.6044730692952658,
      "grad_norm": 1.0063210029903829,
      "learning_rate": 8.100297341921342e-06,
      "loss": 0.5457,
      "step": 481
    },
    {
      "epoch": 0.6057297700630314,
      "grad_norm": 1.0212142908210302,
      "learning_rate": 8.057177583897704e-06,
      "loss": 0.4623,
      "step": 482
    },
    {
      "epoch": 0.606986470830797,
      "grad_norm": 1.0319718233335433,
      "learning_rate": 8.014095333542548e-06,
      "loss": 0.5095,
      "step": 483
    },
    {
      "epoch": 0.6082431715985627,
      "grad_norm": 0.9974523217893733,
      "learning_rate": 7.971051422591571e-06,
      "loss": 0.5192,
      "step": 484
    },
    {
      "epoch": 0.6094998723663283,
      "grad_norm": 0.9326473530788999,
      "learning_rate": 7.928046682040311e-06,
      "loss": 0.4574,
      "step": 485
    },
    {
      "epoch": 0.6107565731340939,
      "grad_norm": 0.9393968466161859,
      "learning_rate": 7.885081942128074e-06,
      "loss": 0.5029,
      "step": 486
    },
    {
      "epoch": 0.6120132739018596,
      "grad_norm": 0.9680572001368599,
      "learning_rate": 7.84215803232194e-06,
      "loss": 0.514,
      "step": 487
    },
    {
      "epoch": 0.6132699746696252,
      "grad_norm": 1.0704719257655635,
      "learning_rate": 7.79927578130072e-06,
      "loss": 0.4928,
      "step": 488
    },
    {
      "epoch": 0.6145266754373908,
      "grad_norm": 1.0200776137947951,
      "learning_rate": 7.756436016938973e-06,
      "loss": 0.4869,
      "step": 489
    },
    {
      "epoch": 0.6157833762051564,
      "grad_norm": 0.9387956884468813,
      "learning_rate": 7.713639566291028e-06,
      "loss": 0.4799,
      "step": 490
    },
    {
      "epoch": 0.6170400769729221,
      "grad_norm": 0.9312805823610483,
      "learning_rate": 7.670887255575003e-06,
      "loss": 0.4931,
      "step": 491
    },
    {
      "epoch": 0.6182967777406877,
      "grad_norm": 0.9295388110852509,
      "learning_rate": 7.628179910156859e-06,
      "loss": 0.5234,
      "step": 492
    },
    {
      "epoch": 0.6195534785084533,
      "grad_norm": 0.9969654532505003,
      "learning_rate": 7.585518354534473e-06,
      "loss": 0.5349,
      "step": 493
    },
    {
      "epoch": 0.620810179276219,
      "grad_norm": 0.969918008367211,
      "learning_rate": 7.542903412321714e-06,
      "loss": 0.4843,
      "step": 494
    },
    {
      "epoch": 0.6220668800439845,
      "grad_norm": 0.9449769047029168,
      "learning_rate": 7.500335906232544e-06,
      "loss": 0.4639,
      "step": 495
    },
    {
      "epoch": 0.6233235808117501,
      "grad_norm": 1.033777318960197,
      "learning_rate": 7.4578166580651335e-06,
      "loss": 0.504,
      "step": 496
    },
    {
      "epoch": 0.6245802815795157,
      "grad_norm": 0.9914352689303235,
      "learning_rate": 7.4153464886859925e-06,
      "loss": 0.4981,
      "step": 497
    },
    {
      "epoch": 0.6258369823472814,
      "grad_norm": 1.056953990111743,
      "learning_rate": 7.372926218014131e-06,
      "loss": 0.516,
      "step": 498
    },
    {
      "epoch": 0.627093683115047,
      "grad_norm": 0.9687789789603087,
      "learning_rate": 7.330556665005235e-06,
      "loss": 0.4599,
      "step": 499
    },
    {
      "epoch": 0.6283503838828126,
      "grad_norm": 1.0066721178900888,
      "learning_rate": 7.2882386476358304e-06,
      "loss": 0.4614,
      "step": 500
    },
    {
      "epoch": 0.6283503838828126,
      "eval_loss": NaN,
      "eval_runtime": 384.2451,
      "eval_samples_per_second": 21.64,
      "eval_steps_per_second": 2.707,
      "step": 500
    },
|
    {
      "epoch": 0.6296070846505782,
      "grad_norm": 1.0360193648866638,
      "learning_rate": 7.2459729828875256e-06,
      "loss": 0.4916,
      "step": 501
    },
    {
      "epoch": 0.6308637854183439,
      "grad_norm": 0.9139912569370758,
      "learning_rate": 7.203760486731204e-06,
      "loss": 0.4688,
      "step": 502
    },
    {
      "epoch": 0.6321204861861095,
      "grad_norm": 1.0077884286770724,
      "learning_rate": 7.161601974111308e-06,
      "loss": 0.4936,
      "step": 503
    },
    {
      "epoch": 0.6333771869538751,
      "grad_norm": 0.9993486667429677,
      "learning_rate": 7.119498258930073e-06,
      "loss": 0.5179,
      "step": 504
    },
    {
      "epoch": 0.6346338877216408,
      "grad_norm": 0.9964058480599178,
      "learning_rate": 7.0774501540318305e-06,
      "loss": 0.4745,
      "step": 505
    },
    {
      "epoch": 0.6358905884894064,
      "grad_norm": 0.9646168801150186,
      "learning_rate": 7.035458471187312e-06,
      "loss": 0.4865,
      "step": 506
    },
    {
      "epoch": 0.637147289257172,
      "grad_norm": 0.9977812292191052,
      "learning_rate": 6.993524021077989e-06,
      "loss": 0.5161,
      "step": 507
    },
    {
      "epoch": 0.6384039900249376,
      "grad_norm": 0.9788000489126177,
      "learning_rate": 6.951647613280397e-06,
      "loss": 0.5361,
      "step": 508
    },
    {
      "epoch": 0.6396606907927033,
      "grad_norm": 0.9186321053151645,
      "learning_rate": 6.909830056250527e-06,
      "loss": 0.4871,
      "step": 509
    },
    {
      "epoch": 0.6409173915604689,
      "grad_norm": 1.0035904562057332,
      "learning_rate": 6.868072157308213e-06,
      "loss": 0.5202,
      "step": 510
    },
    {
      "epoch": 0.6421740923282345,
      "grad_norm": 0.9810776358670968,
      "learning_rate": 6.826374722621536e-06,
      "loss": 0.4599,
      "step": 511
    },
    {
      "epoch": 0.6434307930960002,
      "grad_norm": 0.9283679926638487,
      "learning_rate": 6.784738557191284e-06,
      "loss": 0.4628,
      "step": 512
    },
    {
      "epoch": 0.6446874938637658,
      "grad_norm": 0.9994992253986084,
      "learning_rate": 6.7431644648353785e-06,
      "loss": 0.4674,
      "step": 513
    },
    {
      "epoch": 0.6459441946315314,
      "grad_norm": 0.9106352041140137,
      "learning_rate": 6.701653248173382e-06,
      "loss": 0.4625,
      "step": 514
    },
    {
      "epoch": 0.647200895399297,
      "grad_norm": 0.9500901417881573,
      "learning_rate": 6.660205708610987e-06,
      "loss": 0.4797,
      "step": 515
    },
    {
      "epoch": 0.6484575961670627,
      "grad_norm": 0.9472535122755693,
      "learning_rate": 6.618822646324563e-06,
      "loss": 0.4669,
      "step": 516
    },
    {
      "epoch": 0.6497142969348283,
      "grad_norm": 0.9589653557097556,
      "learning_rate": 6.577504860245684e-06,
      "loss": 0.5408,
      "step": 517
    },
    {
      "epoch": 0.6509709977025939,
      "grad_norm": 0.96979303506713,
      "learning_rate": 6.536253148045726e-06,
      "loss": 0.4897,
      "step": 518
    },
    {
      "epoch": 0.6522276984703596,
      "grad_norm": 0.9496625953614718,
      "learning_rate": 6.495068306120452e-06,
      "loss": 0.4896,
      "step": 519
    },
    {
      "epoch": 0.6534843992381252,
      "grad_norm": 0.9999476061446844,
      "learning_rate": 6.453951129574644e-06,
      "loss": 0.5023,
      "step": 520
    },
    {
      "epoch": 0.6547411000058908,
      "grad_norm": 0.9223335871200387,
      "learning_rate": 6.41290241220676e-06,
      "loss": 0.5464,
      "step": 521
    },
    {
      "epoch": 0.6559978007736564,
      "grad_norm": 1.0030864529913075,
      "learning_rate": 6.3719229464935915e-06,
      "loss": 0.4778,
      "step": 522
    },
    {
      "epoch": 0.6572545015414221,
      "grad_norm": 0.9309808406108107,
      "learning_rate": 6.331013523574978e-06,
      "loss": 0.5015,
      "step": 523
    },
    {
      "epoch": 0.6585112023091877,
      "grad_norm": 1.1823052168866466,
      "learning_rate": 6.290174933238531e-06,
      "loss": 0.4782,
      "step": 524
    },
    {
      "epoch": 0.6597679030769533,
      "grad_norm": 0.9878321294333653,
      "learning_rate": 6.249407963904381e-06,
      "loss": 0.5033,
      "step": 525
    },
    {
      "epoch": 0.661024603844719,
      "grad_norm": 0.9465968966854122,
      "learning_rate": 6.208713402609968e-06,
      "loss": 0.4765,
      "step": 526
    },
    {
      "epoch": 0.6622813046124846,
      "grad_norm": 1.042662157579056,
      "learning_rate": 6.168092034994832e-06,
      "loss": 0.4847,
      "step": 527
    },
    {
      "epoch": 0.6635380053802502,
      "grad_norm": 0.9634576640584912,
      "learning_rate": 6.127544645285448e-06,
      "loss": 0.4634,
      "step": 528
    },
    {
      "epoch": 0.6647947061480158,
      "grad_norm": 0.9603382195784586,
      "learning_rate": 6.087072016280111e-06,
      "loss": 0.4902,
      "step": 529
    },
    {
      "epoch": 0.6660514069157815,
      "grad_norm": 0.9598750622234282,
      "learning_rate": 6.046674929333787e-06,
      "loss": 0.4806,
      "step": 530
    },
    {
      "epoch": 0.6673081076835471,
      "grad_norm": 0.9029178527016823,
      "learning_rate": 6.006354164343047e-06,
      "loss": 0.4577,
      "step": 531
    },
    {
      "epoch": 0.6685648084513126,
      "grad_norm": 0.9306648630657135,
      "learning_rate": 5.96611049973101e-06,
      "loss": 0.578,
      "step": 532
    },
    {
      "epoch": 0.6698215092190782,
      "grad_norm": 0.9745393471796585,
      "learning_rate": 5.925944712432317e-06,
      "loss": 0.4582,
      "step": 533
    },
    {
      "epoch": 0.6710782099868439,
      "grad_norm": 0.9854308021827322,
      "learning_rate": 5.885857577878122e-06,
      "loss": 0.4728,
      "step": 534
    },
    {
      "epoch": 0.6723349107546095,
      "grad_norm": 0.9505828662992784,
      "learning_rate": 5.845849869981137e-06,
      "loss": 0.4732,
      "step": 535
    },
    {
      "epoch": 0.6735916115223751,
      "grad_norm": 0.971008498361879,
      "learning_rate": 5.8059223611206716e-06,
      "loss": 0.4782,
      "step": 536
    },
    {
      "epoch": 0.6748483122901408,
      "grad_norm": 1.0199269663151482,
      "learning_rate": 5.766075822127735e-06,
      "loss": 0.4664,
      "step": 537
    },
    {
      "epoch": 0.6761050130579064,
      "grad_norm": 0.9414292961876951,
      "learning_rate": 5.726311022270152e-06,
      "loss": 0.4869,
      "step": 538
    },
    {
      "epoch": 0.677361713825672,
      "grad_norm": 0.9637472122488508,
      "learning_rate": 5.686628729237713e-06,
      "loss": 0.4853,
      "step": 539
    },
    {
      "epoch": 0.6786184145934376,
      "grad_norm": 1.004859916641192,
      "learning_rate": 5.647029709127355e-06,
      "loss": 0.4755,
      "step": 540
    },
    {
      "epoch": 0.6798751153612033,
      "grad_norm": 1.0313280249469097,
      "learning_rate": 5.6075147264283526e-06,
      "loss": 0.4728,
      "step": 541
    },
    {
      "epoch": 0.6811318161289689,
      "grad_norm": 0.9536505641573378,
      "learning_rate": 5.5680845440075885e-06,
      "loss": 0.473,
      "step": 542
    },
    {
      "epoch": 0.6823885168967345,
      "grad_norm": 0.9540350587248954,
      "learning_rate": 5.528739923094806e-06,
      "loss": 0.4657,
      "step": 543
    },
    {
      "epoch": 0.6836452176645001,
      "grad_norm": 0.9072196968401911,
      "learning_rate": 5.4894816232679195e-06,
      "loss": 0.4543,
      "step": 544
    },
    {
      "epoch": 0.6849019184322658,
      "grad_norm": 0.9814435512849559,
      "learning_rate": 5.450310402438353e-06,
      "loss": 0.4897,
      "step": 545
    },
    {
      "epoch": 0.6861586192000314,
      "grad_norm": 0.9601361725013989,
      "learning_rate": 5.4112270168363854e-06,
      "loss": 0.4749,
      "step": 546
    },
    {
      "epoch": 0.687415319967797,
      "grad_norm": 0.9663745304412944,
      "learning_rate": 5.3722322209966024e-06,
      "loss": 0.4848,
      "step": 547
    },
    {
      "epoch": 0.6886720207355627,
      "grad_norm": 1.0061563886612799,
      "learning_rate": 5.333326767743263e-06,
      "loss": 0.4673,
      "step": 548
    },
    {
      "epoch": 0.6899287215033283,
      "grad_norm": 0.9439604050992346,
      "learning_rate": 5.294511408175825e-06,
      "loss": 0.4853,
      "step": 549
    },
    {
      "epoch": 0.6911854222710939,
      "grad_norm": 0.962910340515467,
      "learning_rate": 5.2557868916543996e-06,
      "loss": 0.4791,
      "step": 550
    },
    {
      "epoch": 0.6911854222710939,
      "eval_loss": NaN,
      "eval_runtime": 395.5216,
      "eval_samples_per_second": 21.023,
      "eval_steps_per_second": 2.629,
      "step": 550
    },
|
    {
      "epoch": 0.6924421230388595,
      "grad_norm": 0.9707818718943069,
      "learning_rate": 5.217153965785315e-06,
      "loss": 0.5109,
      "step": 551
    },
    {
      "epoch": 0.6936988238066252,
      "grad_norm": 0.9595264837099737,
      "learning_rate": 5.178613376406672e-06,
      "loss": 0.4675,
      "step": 552
    },
    {
      "epoch": 0.6949555245743908,
      "grad_norm": 0.9611450529414026,
      "learning_rate": 5.14016586757394e-06,
      "loss": 0.4814,
      "step": 553
    },
    {
      "epoch": 0.6962122253421564,
      "grad_norm": 1.0045400911821167,
      "learning_rate": 5.1018121815456045e-06,
      "loss": 0.5006,
      "step": 554
    },
    {
      "epoch": 0.6974689261099221,
      "grad_norm": 0.964170058570595,
      "learning_rate": 5.063553058768814e-06,
      "loss": 0.4615,
      "step": 555
    },
    {
      "epoch": 0.6987256268776877,
      "grad_norm": 0.974630657567601,
      "learning_rate": 5.025389237865128e-06,
      "loss": 0.6329,
      "step": 556
    },
    {
      "epoch": 0.6999823276454533,
      "grad_norm": 0.9940742215685975,
      "learning_rate": 4.987321455616206e-06,
      "loss": 0.4603,
      "step": 557
    },
    {
      "epoch": 0.701239028413219,
      "grad_norm": 0.8894890015293643,
      "learning_rate": 4.9493504469496235e-06,
      "loss": 0.4553,
      "step": 558
    },
    {
      "epoch": 0.7024957291809846,
      "grad_norm": 0.9992618220443336,
      "learning_rate": 4.911476944924651e-06,
      "loss": 0.4864,
      "step": 559
    },
    {
      "epoch": 0.7037524299487502,
      "grad_norm": 0.9141872027503535,
      "learning_rate": 4.873701680718146e-06,
      "loss": 0.5114,
      "step": 560
    },
    {
      "epoch": 0.7050091307165158,
      "grad_norm": 0.9326203191415026,
      "learning_rate": 4.836025383610382e-06,
      "loss": 0.4764,
      "step": 561
    },
    {
      "epoch": 0.7062658314842815,
      "grad_norm": 0.925599543126751,
      "learning_rate": 4.798448780971013e-06,
      "loss": 0.4841,
      "step": 562
    },
    {
      "epoch": 0.7075225322520471,
      "grad_norm": 0.9315828509380828,
      "learning_rate": 4.7609725982450176e-06,
      "loss": 0.4739,
      "step": 563
    },
    {
      "epoch": 0.7087792330198127,
      "grad_norm": 0.934793953759224,
      "learning_rate": 4.7235975589386715e-06,
      "loss": 0.4464,
      "step": 564
    },
    {
      "epoch": 0.7100359337875783,
      "grad_norm": 0.9341913967846289,
      "learning_rate": 4.686324384605629e-06,
      "loss": 0.4559,
      "step": 565
    },
    {
      "epoch": 0.711292634555344,
      "grad_norm": 0.9828397495391764,
      "learning_rate": 4.649153794832939e-06,
      "loss": 0.5193,
      "step": 566
    },
    {
      "epoch": 0.7125493353231096,
      "grad_norm": 0.9608848681418946,
      "learning_rate": 4.612086507227186e-06,
      "loss": 0.4675,
      "step": 567
    },
    {
      "epoch": 0.7138060360908752,
      "grad_norm": 0.9250968302486778,
      "learning_rate": 4.5751232374006304e-06,
      "loss": 0.4469,
      "step": 568
    },
    {
      "epoch": 0.7150627368586407,
      "grad_norm": 0.8931095869951399,
      "learning_rate": 4.538264698957387e-06,
      "loss": 0.4384,
      "step": 569
    },
    {
      "epoch": 0.7163194376264064,
      "grad_norm": 0.8957255025032255,
      "learning_rate": 4.501511603479653e-06,
      "loss": 0.4595,
      "step": 570
    },
    {
      "epoch": 0.717576138394172,
      "grad_norm": 0.9797783050556119,
      "learning_rate": 4.4648646605139605e-06,
      "loss": 0.4803,
      "step": 571
    },
    {
      "epoch": 0.7188328391619376,
      "grad_norm": 1.0477043965745412,
      "learning_rate": 4.428324577557501e-06,
      "loss": 0.5001,
      "step": 572
    },
    {
      "epoch": 0.7200895399297033,
      "grad_norm": 0.9402442503704912,
      "learning_rate": 4.391892060044435e-06,
      "loss": 0.5285,
      "step": 573
    },
    {
      "epoch": 0.7213462406974689,
      "grad_norm": 0.895533404301798,
      "learning_rate": 4.355567811332311e-06,
      "loss": 0.4249,
      "step": 574
    },
    {
      "epoch": 0.7226029414652345,
      "grad_norm": 0.9375812495309651,
      "learning_rate": 4.319352532688444e-06,
      "loss": 0.4864,
      "step": 575
    },
    {
      "epoch": 0.7238596422330001,
      "grad_norm": 0.9535540256167436,
      "learning_rate": 4.283246923276411e-06,
      "loss": 0.451,
      "step": 576
    },
    {
      "epoch": 0.7251163430007658,
      "grad_norm": 0.9843964347322175,
      "learning_rate": 4.247251680142542e-06,
      "loss": 0.4934,
      "step": 577
    },
    {
      "epoch": 0.7263730437685314,
      "grad_norm": 0.902113121251565,
      "learning_rate": 4.211367498202456e-06,
      "loss": 0.4859,
      "step": 578
    },
    {
      "epoch": 0.727629744536297,
      "grad_norm": 0.9071019213314944,
      "learning_rate": 4.175595070227655e-06,
      "loss": 0.463,
      "step": 579
    },
    {
      "epoch": 0.7288864453040627,
      "grad_norm": 0.9445861043217729,
      "learning_rate": 4.13993508683214e-06,
      "loss": 0.4639,
      "step": 580
    },
    {
      "epoch": 0.7301431460718283,
      "grad_norm": 0.9529191091817809,
      "learning_rate": 4.1043882364590895e-06,
      "loss": 0.458,
      "step": 581
    },
    {
      "epoch": 0.7313998468395939,
      "grad_norm": 0.943614064462381,
      "learning_rate": 4.068955205367559e-06,
      "loss": 0.4501,
      "step": 582
    },
    {
      "epoch": 0.7326565476073595,
      "grad_norm": 0.9501101229235286,
      "learning_rate": 4.033636677619242e-06,
      "loss": 0.4507,
      "step": 583
    },
    {
      "epoch": 0.7339132483751252,
      "grad_norm": 0.9696808926218615,
      "learning_rate": 3.998433335065251e-06,
      "loss": 0.5016,
      "step": 584
    },
    {
      "epoch": 0.7351699491428908,
      "grad_norm": 0.8737082824243725,
      "learning_rate": 3.96334585733296e-06,
      "loss": 0.418,
      "step": 585
    },
    {
      "epoch": 0.7364266499106564,
      "grad_norm": 0.9176906083149247,
      "learning_rate": 3.9283749218128885e-06,
      "loss": 0.479,
      "step": 586
    },
    {
      "epoch": 0.7376833506784221,
      "grad_norm": 0.9192555275886508,
      "learning_rate": 3.893521203645618e-06,
      "loss": 0.49,
      "step": 587
    },
    {
      "epoch": 0.7389400514461877,
      "grad_norm": 0.9259761814325044,
      "learning_rate": 3.858785375708764e-06,
      "loss": 0.4658,
      "step": 588
    },
    {
      "epoch": 0.7401967522139533,
      "grad_norm": 0.8762077783379093,
      "learning_rate": 3.824168108603971e-06,
      "loss": 0.4825,
      "step": 589
    },
    {
      "epoch": 0.7414534529817189,
      "grad_norm": 0.9928947391135122,
      "learning_rate": 3.7896700706439826e-06,
      "loss": 0.5519,
      "step": 590
    },
    {
      "epoch": 0.7427101537494846,
      "grad_norm": 0.9049374842785678,
      "learning_rate": 3.7552919278397335e-06,
      "loss": 0.4518,
      "step": 591
    },
    {
      "epoch": 0.7439668545172502,
      "grad_norm": 0.8936620428913575,
      "learning_rate": 3.7210343438874917e-06,
      "loss": 0.4341,
      "step": 592
    },
    {
      "epoch": 0.7452235552850158,
      "grad_norm": 0.9409320634262343,
      "learning_rate": 3.6868979801560443e-06,
      "loss": 0.4719,
      "step": 593
    },
    {
      "epoch": 0.7464802560527815,
      "grad_norm": 0.9851785740451936,
      "learning_rate": 3.6528834956739224e-06,
      "loss": 0.429,
      "step": 594
    },
    {
      "epoch": 0.7477369568205471,
      "grad_norm": 0.9952385017039359,
      "learning_rate": 3.6189915471166927e-06,
      "loss": 0.474,
      "step": 595
    },
    {
      "epoch": 0.7489936575883127,
      "grad_norm": 0.9175118141037356,
      "learning_rate": 3.5852227887942713e-06,
      "loss": 0.4453,
      "step": 596
    },
    {
      "epoch": 0.7502503583560783,
      "grad_norm": 0.8948744735918709,
      "learning_rate": 3.5515778726382967e-06,
      "loss": 0.491,
      "step": 597
    },
    {
      "epoch": 0.751507059123844,
      "grad_norm": 0.9493043335059733,
      "learning_rate": 3.518057448189527e-06,
      "loss": 0.4812,
      "step": 598
    },
    {
      "epoch": 0.7527637598916096,
      "grad_norm": 0.9526119414704524,
      "learning_rate": 3.4846621625853248e-06,
      "loss": 0.4758,
      "step": 599
    },
    {
      "epoch": 0.7540204606593752,
      "grad_norm": 0.9375798827152386,
      "learning_rate": 3.4513926605471504e-06,
      "loss": 0.4703,
      "step": 600
    },
    {
      "epoch": 0.7540204606593752,
      "eval_loss": NaN,
      "eval_runtime": 391.2661,
      "eval_samples_per_second": 21.252,
      "eval_steps_per_second": 2.658,
      "step": 600
    },
|
    {
      "epoch": 0.7552771614271409,
      "grad_norm": 0.9276644067982343,
      "learning_rate": 3.4182495843681117e-06,
      "loss": 0.461,
      "step": 601
    },
    {
      "epoch": 0.7565338621949065,
      "grad_norm": 0.9573184146956152,
      "learning_rate": 3.385233573900576e-06,
      "loss": 0.4812,
      "step": 602
    },
    {
      "epoch": 0.7577905629626721,
      "grad_norm": 0.9005304918861501,
      "learning_rate": 3.3523452665438004e-06,
      "loss": 0.4603,
      "step": 603
    },
    {
      "epoch": 0.7590472637304377,
      "grad_norm": 0.8844651813155415,
      "learning_rate": 3.3195852972316435e-06,
      "loss": 0.4783,
      "step": 604
    },
    {
      "epoch": 0.7603039644982033,
      "grad_norm": 0.9053279526971507,
      "learning_rate": 3.2869542984202974e-06,
      "loss": 0.5253,
      "step": 605
    },
    {
      "epoch": 0.7615606652659689,
      "grad_norm": 0.995160552063608,
      "learning_rate": 3.254452900076083e-06,
      "loss": 0.5082,
      "step": 606
    },
    {
      "epoch": 0.7628173660337345,
      "grad_norm": 0.9879909586608571,
      "learning_rate": 3.2220817296632845e-06,
      "loss": 0.4499,
      "step": 607
    },
    {
      "epoch": 0.7640740668015001,
      "grad_norm": 0.9345160999737937,
      "learning_rate": 3.1898414121320277e-06,
      "loss": 0.4915,
      "step": 608
    },
    {
      "epoch": 0.7653307675692658,
      "grad_norm": 0.9546540611891579,
      "learning_rate": 3.1577325699062424e-06,
      "loss": 0.4864,
      "step": 609
    },
    {
      "epoch": 0.7665874683370314,
      "grad_norm": 0.9216552084428411,
      "learning_rate": 3.125755822871607e-06,
      "loss": 0.5028,
      "step": 610
    },
    {
      "epoch": 0.767844169104797,
      "grad_norm": 0.921600882945863,
      "learning_rate": 3.093911788363617e-06,
      "loss": 0.4773,
      "step": 611
    },
    {
      "epoch": 0.7691008698725627,
      "grad_norm": 0.9429114879803641,
      "learning_rate": 3.062201081155637e-06,
      "loss": 0.4828,
      "step": 612
    },
    {
      "epoch": 0.7703575706403283,
      "grad_norm": 0.9468558457454733,
      "learning_rate": 3.0306243134470668e-06,
      "loss": 0.5299,
      "step": 613
    },
    {
      "epoch": 0.7716142714080939,
      "grad_norm": 0.8680737752062002,
      "learning_rate": 2.9991820948514795e-06,
      "loss": 0.4608,
      "step": 614
    },
    {
      "epoch": 0.7728709721758595,
      "grad_norm": 0.9257236575469265,
      "learning_rate": 2.9678750323848893e-06,
      "loss": 0.4786,
      "step": 615
    },
    {
      "epoch": 0.7741276729436252,
      "grad_norm": 0.9656830199257891,
      "learning_rate": 2.936703730454017e-06,
      "loss": 0.4358,
      "step": 616
    },
    {
      "epoch": 0.7753843737113908,
      "grad_norm": 0.9907979170488217,
      "learning_rate": 2.90566879084461e-06,
      "loss": 0.5215,
      "step": 617
    },
    {
      "epoch": 0.7766410744791564,
      "grad_norm": 0.8950799317069913,
      "learning_rate": 2.8747708127098593e-06,
      "loss": 0.4717,
      "step": 618
    },
    {
      "epoch": 0.777897775246922,
      "grad_norm": 0.9505878825468562,
      "learning_rate": 2.8440103925587904e-06,
      "loss": 0.4566,
      "step": 619
    },
    {
      "epoch": 0.7791544760146877,
      "grad_norm": 0.969358314749328,
      "learning_rate": 2.813388124244778e-06,
      "loss": 0.4492,
      "step": 620
    },
    {
      "epoch": 0.7804111767824533,
      "grad_norm": 0.9785261324929386,
      "learning_rate": 2.7829045989540594e-06,
      "loss": 0.4496,
      "step": 621
    },
    {
      "epoch": 0.7816678775502189,
      "grad_norm": 0.9531236242065506,
      "learning_rate": 2.7525604051943512e-06,
      "loss": 0.5504,
      "step": 622
    },
    {
      "epoch": 0.7829245783179846,
      "grad_norm": 0.9826298336215402,
      "learning_rate": 2.7223561287834467e-06,
      "loss": 0.4586,
      "step": 623
    },
    {
      "epoch": 0.7841812790857502,
      "grad_norm": 0.9317085993058872,
      "learning_rate": 2.692292352837942e-06,
      "loss": 0.4672,
      "step": 624
    },
    {
      "epoch": 0.7854379798535158,
      "grad_norm": 1.0787276642963513,
      "learning_rate": 2.662369657761963e-06,
      "loss": 0.4608,
      "step": 625
    },
    {
      "epoch": 0.7866946806212815,
      "grad_norm": 0.9372652077711696,
      "learning_rate": 2.6325886212359496e-06,
      "loss": 0.4377,
      "step": 626
    },
    {
      "epoch": 0.7879513813890471,
      "grad_norm": 0.9921327711014636,
      "learning_rate": 2.602949818205539e-06,
      "loss": 0.4584,
      "step": 627
    },
    {
      "epoch": 0.7892080821568127,
      "grad_norm": 0.947977203839485,
      "learning_rate": 2.5734538208704197e-06,
      "loss": 0.4787,
      "step": 628
    },
    {
      "epoch": 0.7904647829245783,
      "grad_norm": 0.8656699847159132,
      "learning_rate": 2.5441011986733165e-06,
      "loss": 0.4662,
      "step": 629
    },
    {
      "epoch": 0.791721483692344,
      "grad_norm": 0.8459597720147866,
      "learning_rate": 2.514892518288988e-06,
      "loss": 0.4475,
      "step": 630
    },
    {
      "epoch": 0.7929781844601096,
      "grad_norm": 0.9446896479383777,
      "learning_rate": 2.485828343613288e-06,
      "loss": 0.4766,
      "step": 631
    },
    {
      "epoch": 0.7942348852278752,
      "grad_norm": 0.9122011845063069,
      "learning_rate": 2.456909235752276e-06,
      "loss": 0.4502,
      "step": 632
    },
    {
      "epoch": 0.7954915859956408,
      "grad_norm": 0.9193582934450675,
      "learning_rate": 2.4281357530113804e-06,
      "loss": 0.4823,
      "step": 633
    },
    {
      "epoch": 0.7967482867634065,
      "grad_norm": 0.897269277270419,
      "learning_rate": 2.399508450884631e-06,
      "loss": 0.4513,
      "step": 634
    },
    {
      "epoch": 0.7980049875311721,
      "grad_norm": 0.9475888045070303,
      "learning_rate": 2.3710278820439313e-06,
      "loss": 0.5161,
      "step": 635
    },
    {
      "epoch": 0.7992616882989377,
      "grad_norm": 0.9443257123202231,
      "learning_rate": 2.3426945963283853e-06,
      "loss": 0.486,
      "step": 636
    },
    {
      "epoch": 0.8005183890667034,
      "grad_norm": 0.9502264882194557,
      "learning_rate": 2.3145091407336785e-06,
      "loss": 0.4578,
      "step": 637
    },
    {
      "epoch": 0.801775089834469,
      "grad_norm": 0.9187557661433506,
      "learning_rate": 2.2864720594015288e-06,
      "loss": 0.4871,
      "step": 638
    },
    {
      "epoch": 0.8030317906022346,
      "grad_norm": 0.8862615742754856,
      "learning_rate": 2.2585838936091753e-06,
      "loss": 0.4635,
      "step": 639
    },
    {
      "epoch": 0.8042884913700002,
      "grad_norm": 0.9044746570210405,
      "learning_rate": 2.230845181758928e-06,
      "loss": 0.4614,
      "step": 640
    },
    {
      "epoch": 0.8055451921377659,
      "grad_norm": 0.9371401414038523,
      "learning_rate": 2.2032564593677773e-06,
      "loss": 0.4718,
      "step": 641
    },
    {
      "epoch": 0.8068018929055314,
      "grad_norm": 1.0205326351085469,
      "learning_rate": 2.1758182590570454e-06,
      "loss": 0.4587,
      "step": 642
    },
    {
      "epoch": 0.808058593673297,
      "grad_norm": 0.9391567214691986,
      "learning_rate": 2.148531110542118e-06,
      "loss": 0.4583,
      "step": 643
    },
    {
      "epoch": 0.8093152944410626,
      "grad_norm": 0.9423357541683243,
      "learning_rate": 2.1213955406222076e-06,
      "loss": 0.4564,
      "step": 644
    },
    {
      "epoch": 0.8105719952088283,
      "grad_norm": 0.9170267994353932,
      "learning_rate": 2.09441207317019e-06,
      "loss": 0.4501,
      "step": 645
    },
    {
      "epoch": 0.8118286959765939,
      "grad_norm": 0.9670344205389225,
      "learning_rate": 2.0675812291224796e-06,
      "loss": 0.4827,
      "step": 646
    },
    {
      "epoch": 0.8130853967443595,
      "grad_norm": 0.9406467714039654,
      "learning_rate": 2.0409035264689857e-06,
      "loss": 0.4708,
      "step": 647
    },
    {
      "epoch": 0.8143420975121252,
      "grad_norm": 0.8958190576761381,
      "learning_rate": 2.014379480243105e-06,
      "loss": 0.4483,
      "step": 648
    },
    {
      "epoch": 0.8155987982798908,
      "grad_norm": 0.9488884622008084,
      "learning_rate": 1.988009602511779e-06,
      "loss": 0.4569,
      "step": 649
    },
    {
      "epoch": 0.8168554990476564,
      "grad_norm": 0.9215947064515085,
      "learning_rate": 1.961794402365611e-06,
      "loss": 0.5286,
      "step": 650
    },
    {
      "epoch": 0.8168554990476564,
      "eval_loss": NaN,
      "eval_runtime": 392.1209,
      "eval_samples_per_second": 21.205,
      "eval_steps_per_second": 2.652,
      "step": 650
    },
|
    {
      "epoch": 0.818112199815422,
      "grad_norm": 0.9209169808681984,
      "learning_rate": 1.935734385909028e-06,
      "loss": 0.4795,
      "step": 651
    },
    {
      "epoch": 0.8193689005831877,
      "grad_norm": 0.9076583048514367,
      "learning_rate": 1.9098300562505266e-06,
      "loss": 0.4711,
      "step": 652
    },
    {
      "epoch": 0.8206256013509533,
      "grad_norm": 0.9912038417618557,
      "learning_rate": 1.8840819134929467e-06,
      "loss": 0.48,
      "step": 653
    },
    {
      "epoch": 0.8218823021187189,
      "grad_norm": 0.9324565329660811,
      "learning_rate": 1.8584904547238214e-06,
      "loss": 0.4387,
      "step": 654
    },
    {
      "epoch": 0.8231390028864846,
      "grad_norm": 0.8864965286423587,
      "learning_rate": 1.8330561740057839e-06,
      "loss": 0.4469,
      "step": 655
    },
    {
      "epoch": 0.8243957036542502,
      "grad_norm": 0.8720461851671827,
      "learning_rate": 1.8077795623670135e-06,
      "loss": 0.4531,
      "step": 656
    },
    {
      "epoch": 0.8256524044220158,
      "grad_norm": 0.8921315350992695,
      "learning_rate": 1.7826611077917843e-06,
      "loss": 0.4797,
      "step": 657
    },
    {
      "epoch": 0.8269091051897814,
      "grad_norm": 0.8844082344199783,
      "learning_rate": 1.757701295211014e-06,
      "loss": 0.4523,
      "step": 658
    },
    {
      "epoch": 0.8281658059575471,
      "grad_norm": 0.9020082915341568,
      "learning_rate": 1.7329006064929232e-06,
      "loss": 0.4772,
      "step": 659
    },
    {
      "epoch": 0.8294225067253127,
      "grad_norm": 0.9058399917219788,
      "learning_rate": 1.7082595204337183e-06,
      "loss": 0.5386,
      "step": 660
    },
    {
      "epoch": 0.8306792074930783,
      "grad_norm": 0.8817763873468203,
      "learning_rate": 1.683778512748362e-06,
      "loss": 0.4663,
      "step": 661
    },
    {
      "epoch": 0.831935908260844,
      "grad_norm": 0.9634025095210257,
      "learning_rate": 1.6594580560613782e-06,
      "loss": 0.4689,
      "step": 662
    },
    {
      "epoch": 0.8331926090286096,
      "grad_norm": 0.9633653565356368,
      "learning_rate": 1.6352986198977327e-06,
      "loss": 0.4667,
      "step": 663
    },
    {
      "epoch": 0.8344493097963752,
      "grad_norm": 0.9487086679856576,
      "learning_rate": 1.6113006706737667e-06,
      "loss": 0.5184,
      "step": 664
    },
    {
      "epoch": 0.8357060105641408,
      "grad_norm": 0.9325953615840198,
      "learning_rate": 1.587464671688187e-06,
      "loss": 0.456,
      "step": 665
    },
    {
      "epoch": 0.8369627113319065,
      "grad_norm": 1.009772295164291,
      "learning_rate": 1.563791083113142e-06,
      "loss": 0.4451,
      "step": 666
    },
    {
      "epoch": 0.8382194120996721,
      "grad_norm": 0.9419871710371429,
      "learning_rate": 1.540280361985308e-06,
      "loss": 0.5302,
      "step": 667
    },
    {
      "epoch": 0.8394761128674377,
      "grad_norm": 0.8754091668537753,
      "learning_rate": 1.5169329621970918e-06,
      "loss": 0.4655,
      "step": 668
    },
    {
      "epoch": 0.8407328136352034,
      "grad_norm": 0.8613931367520573,
      "learning_rate": 1.4937493344878474e-06,
      "loss": 0.63,
      "step": 669
    },
    {
      "epoch": 0.841989514402969,
      "grad_norm": 0.9312238100708667,
      "learning_rate": 1.4707299264351914e-06,
      "loss": 0.5655,
      "step": 670
    },
    {
      "epoch": 0.8432462151707346,
      "grad_norm": 0.9049299425860378,
      "learning_rate": 1.4478751824463543e-06,
      "loss": 0.4651,
      "step": 671
    },
    {
      "epoch": 0.8445029159385002,
      "grad_norm": 0.8788628461504958,
      "learning_rate": 1.4251855437495976e-06,
      "loss": 0.475,
      "step": 672
    },
    {
      "epoch": 0.8457596167062659,
      "grad_norm": 0.8951500164340301,
      "learning_rate": 1.4026614483857037e-06,
      "loss": 0.4673,
      "step": 673
    },
    {
      "epoch": 0.8470163174740315,
      "grad_norm": 0.9244238966775989,
      "learning_rate": 1.3803033311995072e-06,
      "loss": 0.4796,
      "step": 674
    },
    {
      "epoch": 0.8482730182417971,
      "grad_norm": 0.9749389580422814,
      "learning_rate": 1.3581116238315194e-06,
      "loss": 0.4735,
      "step": 675
    },
    {
      "epoch": 0.8495297190095628,
      "grad_norm": 0.936241513202334,
      "learning_rate": 1.336086754709569e-06,
      "loss": 0.4635,
      "step": 676
    },
    {
      "epoch": 0.8507864197773284,
      "grad_norm": 0.9584973374609863,
      "learning_rate": 1.3142291490405568e-06,
      "loss": 0.4638,
      "step": 677
    },
    {
      "epoch": 0.852043120545094,
      "grad_norm": 0.9235602347304942,
      "learning_rate": 1.2925392288022299e-06,
      "loss": 0.4699,
      "step": 678
    },
    {
      "epoch": 0.8532998213128595,
      "grad_norm": 0.8876268215636179,
      "learning_rate": 1.2710174127350362e-06,
      "loss": 0.4681,
      "step": 679
    },
    {
      "epoch": 0.8545565220806252,
      "grad_norm": 0.9354140918132684,
      "learning_rate": 1.2496641163340562e-06,
      "loss": 0.4699,
      "step": 680
    },
    {
      "epoch": 0.8558132228483908,
      "grad_norm": 0.9255534990449884,
      "learning_rate": 1.2284797518409575e-06,
      "loss": 0.4483,
      "step": 681
    },
    {
      "epoch": 0.8570699236161564,
      "grad_norm": 0.9733945197204105,
      "learning_rate": 1.2074647282360573e-06,
      "loss": 0.4697,
      "step": 682
    },
    {
      "epoch": 0.858326624383922,
      "grad_norm": 0.9031964521291095,
      "learning_rate": 1.1866194512304075e-06,
      "loss": 0.4631,
      "step": 683
    },
    {
      "epoch": 0.8595833251516877,
      "grad_norm": 0.8891842214060443,
      "learning_rate": 1.165944323257986e-06,
      "loss": 0.5275,
      "step": 684
    },
    {
      "epoch": 0.8608400259194533,
      "grad_norm": 0.9289352194308863,
      "learning_rate": 1.1454397434679022e-06,
      "loss": 0.4573,
      "step": 685
    },
    {
      "epoch": 0.8620967266872189,
      "grad_norm": 0.9246693948234718,
      "learning_rate": 1.125106107716708e-06,
      "loss": 0.4606,
      "step": 686
    },
    {
      "epoch": 0.8633534274549846,
      "grad_norm": 0.8763495664310694,
      "learning_rate": 1.10494380856075e-06,
      "loss": 0.4315,
      "step": 687
    },
    {
      "epoch": 0.8646101282227502,
      "grad_norm": 0.9677586559163779,
      "learning_rate": 1.0849532352485903e-06,
      "loss": 0.4771,
      "step": 688
    },
    {
      "epoch": 0.8658668289905158,
      "grad_norm": 0.8612887488120702,
      "learning_rate": 1.0651347737134965e-06,
      "loss": 0.4706,
      "step": 689
    },
    {
      "epoch": 0.8671235297582814,
      "grad_norm": 0.8842124304281852,
      "learning_rate": 1.0454888065659775e-06,
      "loss": 0.4548,
      "step": 690
    },
    {
      "epoch": 0.8683802305260471,
      "grad_norm": 0.9277463249641559,
      "learning_rate": 1.0260157130864178e-06,
      "loss": 0.4567,
      "step": 691
    },
    {
      "epoch": 0.8696369312938127,
      "grad_norm": 0.8437870865194692,
      "learning_rate": 1.0067158692177325e-06,
      "loss": 0.4597,
      "step": 692
    },
    {
      "epoch": 0.8708936320615783,
      "grad_norm": 0.8643510843583194,
      "learning_rate": 9.87589647558135e-07,
      "loss": 0.4256,
      "step": 693
    },
    {
      "epoch": 0.872150332829344,
      "grad_norm": 0.9295154059632877,
      "learning_rate": 9.686374173539147e-07,
      "loss": 0.4861,
      "step": 694
    },
    {
      "epoch": 0.8734070335971096,
      "grad_norm": 0.9448636463228528,
      "learning_rate": 9.49859544492332e-07,
      "loss": 0.4711,
      "step": 695
    },
    {
      "epoch": 0.8746637343648752,
      "grad_norm": 0.9093250850860881,
      "learning_rate": 9.312563914945461e-07,
      "loss": 0.4696,
      "step": 696
    },
    {
      "epoch": 0.8759204351326408,
      "grad_norm": 0.9002792388799274,
      "learning_rate": 9.128283175086106e-07,
      "loss": 0.4656,
      "step": 697
    },
    {
      "epoch": 0.8771771359004065,
      "grad_norm": 0.8559481875479032,
      "learning_rate": 8.945756783025528e-07,
      "loss": 0.4335,
      "step": 698
    },
    {
      "epoch": 0.8784338366681721,
      "grad_norm": 0.8796349450465737,
      "learning_rate": 8.76498826257488e-07,
      "loss": 0.4561,
      "step": 699
    },
    {
      "epoch": 0.8796905374359377,
      "grad_norm": 0.8726954083378391,
      "learning_rate": 8.585981103608343e-07,
      "loss": 0.4993,
      "step": 700
    },
    {
      "epoch": 0.8796905374359377,
      "eval_loss": NaN,
      "eval_runtime": 391.2376,
      "eval_samples_per_second": 21.253,
      "eval_steps_per_second": 2.658,
      "step": 700
    },
|
{ |
|
"epoch": 0.8809472382037034, |
|
"grad_norm": 1.4368360063954035, |
|
"learning_rate": 8.40873876199565e-07, |
|
"loss": 0.4788, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.882203938971469, |
|
"grad_norm": 0.9404327931258755, |
|
"learning_rate": 8.233264659535367e-07, |
|
"loss": 0.4502, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.8834606397392346, |
|
"grad_norm": 0.8946470160723485, |
|
"learning_rate": 8.059562183888903e-07, |
|
"loss": 0.4608, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.8847173405070002, |
|
"grad_norm": 0.9041054772329139, |
|
"learning_rate": 7.887634688515e-07, |
|
"loss": 0.4506, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.8859740412747659, |
|
"grad_norm": 0.8915805709724289, |
|
"learning_rate": 7.71748549260507e-07, |
|
"loss": 0.4424, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.8872307420425315, |
|
"grad_norm": 0.9307904793059376, |
|
"learning_rate": 7.549117881019141e-07, |
|
"loss": 0.4603, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.8884874428102971, |
|
"grad_norm": 0.9166282118331757, |
|
"learning_rate": 7.382535104222366e-07, |
|
"loss": 0.4669, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.8897441435780628, |
|
"grad_norm": 0.9726682302204435, |
|
"learning_rate": 7.21774037822226e-07, |
|
"loss": 0.4575, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.8910008443458284, |
|
"grad_norm": 0.9024738036393012, |
|
"learning_rate": 7.054736884506718e-07, |
|
"loss": 0.4584, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.892257545113594, |
|
"grad_norm": 0.9450717020332904, |
|
"learning_rate": 6.8935277699825e-07, |
|
"loss": 0.4478, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.8935142458813596, |
|
"grad_norm": 0.8734170696346301, |
|
"learning_rate": 6.734116146914516e-07, |
|
"loss": 0.4811, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.8947709466491253, |
|
"grad_norm": 0.9009909790129396, |
|
"learning_rate": 6.576505092865748e-07, |
|
"loss": 0.4695, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.8960276474168909, |
|
"grad_norm": 0.9465203253593445, |
|
"learning_rate": 6.420697650637753e-07, |
|
"loss": 0.4469, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.8972843481846565, |
|
"grad_norm": 0.9084056997490763, |
|
"learning_rate": 6.266696828212071e-07, |
|
"loss": 0.4467, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.898541048952422, |
|
"grad_norm": 0.9510351940546182, |
|
"learning_rate": 6.114505598692011e-07, |
|
"loss": 0.4538, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.8997977497201877, |
|
"grad_norm": 0.8690165913406015, |
|
"learning_rate": 5.964126900245359e-07, |
|
"loss": 0.4542, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.9010544504879533, |
|
"grad_norm": 0.8525566457571607, |
|
"learning_rate": 5.815563636047539e-07, |
|
"loss": 0.4526, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.9023111512557189, |
|
"grad_norm": 0.9287838751774835, |
|
"learning_rate": 5.668818674225684e-07, |
|
"loss": 0.4444, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.9035678520234846, |
|
"grad_norm": 0.960597993996023, |
|
"learning_rate": 5.523894847803235e-07, |
|
"loss": 0.4688, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.9048245527912502, |
|
"grad_norm": 0.8867015416832417, |
|
"learning_rate": 5.380794954645141e-07, |
|
"loss": 0.4369, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.9060812535590158, |
|
"grad_norm": 0.9250364567041895, |
|
"learning_rate": 5.23952175740402e-07, |
|
"loss": 0.4728, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.9073379543267814, |
|
"grad_norm": 0.8538358598265039, |
|
"learning_rate": 5.100077983466667e-07, |
|
"loss": 0.454, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.9085946550945471, |
|
"grad_norm": 0.8588765866461067, |
|
"learning_rate": 4.962466324901483e-07, |
|
"loss": 0.4488, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.9098513558623127, |
|
"grad_norm": 0.8879641890521957, |
|
"learning_rate": 4.826689438406495e-07, |
|
"loss": 0.4538, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.9111080566300783, |
|
"grad_norm": 0.868954790800622, |
|
"learning_rate": 4.6927499452580574e-07, |
|
"loss": 0.4258, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.912364757397844, |
|
"grad_norm": 0.869194697723244, |
|
"learning_rate": 4.5606504312602384e-07, |
|
"loss": 0.4428, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.9136214581656096, |
|
"grad_norm": 0.9689587398952784, |
|
"learning_rate": 4.4303934466948804e-07, |
|
"loss": 0.4461, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.9148781589333752, |
|
"grad_norm": 0.9714671146836645, |
|
"learning_rate": 4.3019815062724567e-07, |
|
"loss": 0.4523, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.9161348597011408, |
|
"grad_norm": 0.9098783404381591, |
|
"learning_rate": 4.1754170890833777e-07, |
|
"loss": 0.5114, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.9173915604689065, |
|
"grad_norm": 0.903763846717378, |
|
"learning_rate": 4.0507026385502747e-07, |
|
"loss": 0.4558, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.9186482612366721, |
|
"grad_norm": 0.8589002511680877, |
|
"learning_rate": 3.9278405623806914e-07, |
|
"loss": 0.4463, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.9199049620044377, |
|
"grad_norm": 0.9149570489401863, |
|
"learning_rate": 3.806833232520746e-07, |
|
"loss": 0.4665, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.9211616627722033, |
|
"grad_norm": 0.857139001990913, |
|
"learning_rate": 3.687682985109209e-07, |
|
"loss": 0.4847, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.922418363539969, |
|
"grad_norm": 0.9093058792535011, |
|
"learning_rate": 3.5703921204324863e-07, |
|
"loss": 0.5461, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.9236750643077346, |
|
"grad_norm": 0.8979631581554675, |
|
"learning_rate": 3.454962902880199e-07, |
|
"loss": 0.4589, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.9249317650755002, |
|
"grad_norm": 0.8828698198732804, |
|
"learning_rate": 3.3413975609013713e-07, |
|
"loss": 0.4622, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.9261884658432659, |
|
"grad_norm": 0.8945978464715129, |
|
"learning_rate": 3.2296982869616134e-07, |
|
"loss": 0.4247, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.9274451666110315, |
|
"grad_norm": 0.8897356645887511, |
|
"learning_rate": 3.1198672375005403e-07, |
|
"loss": 0.465, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.9287018673787971, |
|
"grad_norm": 0.8698017477642928, |
|
"learning_rate": 3.0119065328903517e-07, |
|
"loss": 0.4426, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.9299585681465627, |
|
"grad_norm": 0.8856161042535935, |
|
"learning_rate": 2.905818257394799e-07, |
|
"loss": 0.4489, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.9312152689143284, |
|
"grad_norm": 0.8551132648021337, |
|
"learning_rate": 2.801604459128926e-07, |
|
"loss": 0.4546, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.932471969682094, |
|
"grad_norm": 0.8755685457795177, |
|
"learning_rate": 2.6992671500196134e-07, |
|
"loss": 0.4767, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.9337286704498596, |
|
"grad_norm": 1.0076154176090533, |
|
"learning_rate": 2.5988083057666534e-07, |
|
"loss": 0.5176, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.9349853712176253, |
|
"grad_norm": 0.8773948389601972, |
|
"learning_rate": 2.5002298658046484e-07, |
|
"loss": 0.4665, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.9362420719853909, |
|
"grad_norm": 0.949607330031333, |
|
"learning_rate": 2.4035337332655504e-07, |
|
"loss": 0.4865, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.9374987727531565, |
|
"grad_norm": 0.8727367626946688, |
|
"learning_rate": 2.308721774941991e-07, |
|
"loss": 0.4755, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.9387554735209221, |
|
"grad_norm": 0.950145974990549, |
|
"learning_rate": 2.2157958212510877e-07, |
|
"loss": 0.4658, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.9400121742886878, |
|
"grad_norm": 0.8949527330544057, |
|
"learning_rate": 2.124757666199273e-07, |
|
"loss": 0.4715, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.9412688750564534, |
|
"grad_norm": 0.857039311943978, |
|
"learning_rate": 2.035609067347566e-07, |
|
"loss": 0.4846, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.942525575824219, |
|
"grad_norm": 0.9246764854504824, |
|
"learning_rate": 1.9483517457776436e-07, |
|
"loss": 0.5387, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.942525575824219, |
|
"eval_loss": NaN, |
|
"eval_runtime": 392.0537, |
|
"eval_samples_per_second": 21.209, |
|
"eval_steps_per_second": 2.653, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.9437822765919847, |
|
"grad_norm": 0.9246699749677241, |
|
"learning_rate": 1.8629873860586567e-07, |
|
"loss": 0.4915, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.9450389773597502, |
|
"grad_norm": 0.9273150008131176, |
|
"learning_rate": 1.7795176362146783e-07, |
|
"loss": 0.4394, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.9462956781275158, |
|
"grad_norm": 0.8721007270250941, |
|
"learning_rate": 1.6979441076928837e-07, |
|
"loss": 0.5125, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.9475523788952814, |
|
"grad_norm": 0.8233991769635042, |
|
"learning_rate": 1.6182683753324435e-07, |
|
"loss": 0.4622, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.9488090796630471, |
|
"grad_norm": 0.88887926972337, |
|
"learning_rate": 1.5404919773341576e-07, |
|
"loss": 0.46, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.9500657804308127, |
|
"grad_norm": 0.9151783514520999, |
|
"learning_rate": 1.464616415230702e-07, |
|
"loss": 0.4769, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.9513224811985783, |
|
"grad_norm": 0.9233338402724911, |
|
"learning_rate": 1.3906431538576626e-07, |
|
"loss": 0.4511, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.952579181966344, |
|
"grad_norm": 0.876720746013767, |
|
"learning_rate": 1.3185736213252808e-07, |
|
"loss": 0.4623, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.9538358827341096, |
|
"grad_norm": 0.8808110259800377, |
|
"learning_rate": 1.2484092089908307e-07, |
|
"loss": 0.455, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.9550925835018752, |
|
"grad_norm": 0.9127364361814908, |
|
"learning_rate": 1.1801512714318286e-07, |
|
"loss": 0.4659, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.9563492842696408, |
|
"grad_norm": 0.9250866501406244, |
|
"learning_rate": 1.113801126419789e-07, |
|
"loss": 0.4097, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.9576059850374065, |
|
"grad_norm": 0.9039383706969123, |
|
"learning_rate": 1.0493600548948879e-07, |
|
"loss": 0.4372, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.9588626858051721, |
|
"grad_norm": 0.9023237019743435, |
|
"learning_rate": 9.8682930094115e-08, |
|
"loss": 0.4524, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.9601193865729377, |
|
"grad_norm": 0.9011103800480463, |
|
"learning_rate": 9.262100717624678e-08, |
|
"loss": 0.4342, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.9613760873407033, |
|
"grad_norm": 0.9408044434789575, |
|
"learning_rate": 8.675035376593088e-08, |
|
"loss": 0.4699, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.962632788108469, |
|
"grad_norm": 0.8157071127061982, |
|
"learning_rate": 8.107108320060675e-08, |
|
"loss": 0.4388, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.9638894888762346, |
|
"grad_norm": 0.9614204495709833, |
|
"learning_rate": 7.558330512292378e-08, |
|
"loss": 0.5011, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.9651461896440002, |
|
"grad_norm": 0.9164749653087414, |
|
"learning_rate": 7.028712547862526e-08, |
|
"loss": 0.4922, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.9664028904117659, |
|
"grad_norm": 0.8941273006248021, |
|
"learning_rate": 6.51826465144978e-08, |
|
"loss": 0.5093, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.9676595911795315, |
|
"grad_norm": 0.9345588962411722, |
|
"learning_rate": 6.026996677640062e-08, |
|
"loss": 0.4654, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.9689162919472971, |
|
"grad_norm": 0.8714785559856224, |
|
"learning_rate": 5.5549181107362734e-08, |
|
"loss": 0.4459, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.9701729927150627, |
|
"grad_norm": 0.8661748110904719, |
|
"learning_rate": 5.102038064575099e-08, |
|
"loss": 0.426, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.9714296934828284, |
|
"grad_norm": 0.8998862817234337, |
|
"learning_rate": 4.6683652823513725e-08, |
|
"loss": 0.4808, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.972686394250594, |
|
"grad_norm": 0.9253080744252631, |
|
"learning_rate": 4.253908136448881e-08, |
|
"loss": 0.4777, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.9739430950183596, |
|
"grad_norm": 0.8884730525355212, |
|
"learning_rate": 3.858674628278825e-08, |
|
"loss": 0.4445, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.9751997957861253, |
|
"grad_norm": 0.8920492529443076, |
|
"learning_rate": 3.482672388125719e-08, |
|
"loss": 0.5067, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.9764564965538909, |
|
"grad_norm": 0.913098536113785, |
|
"learning_rate": 3.125908674999289e-08, |
|
"loss": 0.4704, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.9777131973216565, |
|
"grad_norm": 0.8585074814418806, |
|
"learning_rate": 2.7883903764953647e-08, |
|
"loss": 0.4383, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.9789698980894221, |
|
"grad_norm": 0.8563259132871073, |
|
"learning_rate": 2.470124008661978e-08, |
|
"loss": 0.486, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.9802265988571878, |
|
"grad_norm": 1.0110036318343043, |
|
"learning_rate": 2.171115715874139e-08, |
|
"loss": 0.4784, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.9814832996249534, |
|
"grad_norm": 0.9352424779414596, |
|
"learning_rate": 1.8913712707149255e-08, |
|
"loss": 0.4732, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.982740000392719, |
|
"grad_norm": 0.904141538390942, |
|
"learning_rate": 1.630896073864352e-08, |
|
"loss": 0.483, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.9839967011604847, |
|
"grad_norm": 0.8991415167948376, |
|
"learning_rate": 1.3896951539945635e-08, |
|
"loss": 0.467, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.9852534019282503, |
|
"grad_norm": 0.8724840641479233, |
|
"learning_rate": 1.1677731676733584e-08, |
|
"loss": 0.4614, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.9865101026960159, |
|
"grad_norm": 0.8862179253481299, |
|
"learning_rate": 9.651343992740369e-09, |
|
"loss": 0.4452, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.9877668034637815, |
|
"grad_norm": 0.877231477260857, |
|
"learning_rate": 7.817827608924689e-09, |
|
"loss": 0.4913, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.9890235042315472, |
|
"grad_norm": 0.9256633248853225, |
|
"learning_rate": 6.1772179227181926e-09, |
|
"loss": 0.4854, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.9902802049993128, |
|
"grad_norm": 0.8460307202589556, |
|
"learning_rate": 4.7295466073427055e-09, |
|
"loss": 0.4562, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.9915369057670783, |
|
"grad_norm": 0.9076188371683147, |
|
"learning_rate": 3.474841611197377e-09, |
|
"loss": 0.4736, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.9927936065348439, |
|
"grad_norm": 0.8533095890208698, |
|
"learning_rate": 2.4131271573191172e-09, |
|
"loss": 0.452, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.9940503073026096, |
|
"grad_norm": 0.8331866413387022, |
|
"learning_rate": 1.5444237429140806e-09, |
|
"loss": 0.4387, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.9953070080703752, |
|
"grad_norm": 0.8888758833913929, |
|
"learning_rate": 8.687481389657582e-10, |
|
"loss": 0.4634, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.9965637088381408, |
|
"grad_norm": 0.9295696536473508, |
|
"learning_rate": 3.861133899063507e-10, |
|
"loss": 0.4674, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.9978204096059065, |
|
"grad_norm": 0.9051031551266895, |
|
"learning_rate": 9.652881336696951e-11, |
|
"loss": 0.4903, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.9990771103736721, |
|
"grad_norm": 0.9088993808171332, |
|
"learning_rate": 0.0, |
|
"loss": 0.4562, |
|
"step": 795 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 795, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1331628437667840.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|