{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 732,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 9.08570438897306,
      "learning_rate": 2.702702702702703e-07,
      "loss": 1.1396,
      "step": 1
    },
    {
      "epoch": 0.01,
      "grad_norm": 9.679962246384667,
      "learning_rate": 1.3513513513513515e-06,
      "loss": 1.1779,
      "step": 5
    },
    {
      "epoch": 0.01,
      "grad_norm": 10.102509592240429,
      "learning_rate": 2.702702702702703e-06,
      "loss": 1.1318,
      "step": 10
    },
    {
      "epoch": 0.02,
      "grad_norm": 2.9319421528072973,
      "learning_rate": 4.0540540540540545e-06,
      "loss": 1.0028,
      "step": 15
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.3759757212785226,
      "learning_rate": 5.405405405405406e-06,
      "loss": 0.926,
      "step": 20
    },
    {
      "epoch": 0.03,
      "grad_norm": 0.9504372792893874,
      "learning_rate": 6.7567567567567575e-06,
      "loss": 0.9044,
      "step": 25
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.2271132587175222,
      "learning_rate": 8.108108108108109e-06,
      "loss": 0.8775,
      "step": 30
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.8247319038566833,
      "learning_rate": 9.45945945945946e-06,
      "loss": 0.868,
      "step": 35
    },
    {
      "epoch": 0.05,
      "grad_norm": 0.6839207898039522,
      "learning_rate": 1.0810810810810812e-05,
      "loss": 0.8626,
      "step": 40
    },
    {
      "epoch": 0.06,
      "grad_norm": 0.6459463481231297,
      "learning_rate": 1.2162162162162164e-05,
      "loss": 0.8454,
      "step": 45
    },
    {
      "epoch": 0.07,
      "grad_norm": 0.5903349665251353,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 0.8437,
      "step": 50
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5948338223404456,
      "learning_rate": 1.4864864864864865e-05,
      "loss": 0.8353,
      "step": 55
    },
    {
      "epoch": 0.08,
      "grad_norm": 0.5895070475596026,
      "learning_rate": 1.6216216216216218e-05,
      "loss": 0.8222,
      "step": 60
    },
    {
      "epoch": 0.09,
      "grad_norm": 0.6066852387492704,
      "learning_rate": 1.756756756756757e-05,
      "loss": 0.8144,
      "step": 65
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.602310053600098,
      "learning_rate": 1.891891891891892e-05,
      "loss": 0.821,
      "step": 70
    },
    {
      "epoch": 0.1,
      "grad_norm": 0.5621279417664072,
      "learning_rate": 1.999988602302209e-05,
      "loss": 0.8133,
      "step": 75
    },
    {
      "epoch": 0.11,
      "grad_norm": 0.6630630012402627,
      "learning_rate": 1.9995897101594454e-05,
      "loss": 0.8334,
      "step": 80
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.6077906778917731,
      "learning_rate": 1.99862119291555e-05,
      "loss": 0.804,
      "step": 85
    },
    {
      "epoch": 0.12,
      "grad_norm": 0.5618472092387999,
      "learning_rate": 1.997083602488702e-05,
      "loss": 0.8193,
      "step": 90
    },
    {
      "epoch": 0.13,
      "grad_norm": 0.6333601185724956,
      "learning_rate": 1.994977815088504e-05,
      "loss": 0.7945,
      "step": 95
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.6001887764474764,
      "learning_rate": 1.9923050307166655e-05,
      "loss": 0.8079,
      "step": 100
    },
    {
      "epoch": 0.14,
      "grad_norm": 0.6114725172341102,
      "learning_rate": 1.989066772483171e-05,
      "loss": 0.8063,
      "step": 105
    },
    {
      "epoch": 0.15,
      "grad_norm": 0.6410224419207078,
      "learning_rate": 1.9852648857383224e-05,
      "loss": 0.8079,
      "step": 110
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.6155008789371026,
      "learning_rate": 1.9809015370211505e-05,
      "loss": 0.8012,
      "step": 115
    },
    {
      "epoch": 0.16,
      "grad_norm": 0.5701298765644415,
      "learning_rate": 1.9759792128247922e-05,
      "loss": 0.7858,
      "step": 120
    },
    {
      "epoch": 0.17,
      "grad_norm": 0.5769994850814212,
      "learning_rate": 1.9705007181795416e-05,
      "loss": 0.7949,
      "step": 125
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.5989802045117304,
      "learning_rate": 1.964469175054377e-05,
      "loss": 0.7912,
      "step": 130
    },
    {
      "epoch": 0.18,
      "grad_norm": 0.6158332171463875,
      "learning_rate": 1.9578880205778793e-05,
      "loss": 0.8,
      "step": 135
    },
    {
      "epoch": 0.19,
      "grad_norm": 0.6045880045462032,
      "learning_rate": 1.950761005079556e-05,
      "loss": 0.7831,
      "step": 140
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.6022037882647541,
      "learning_rate": 1.9430921899526786e-05,
      "loss": 0.7967,
      "step": 145
    },
    {
      "epoch": 0.2,
      "grad_norm": 0.5703057672919921,
      "learning_rate": 1.934885945339865e-05,
      "loss": 0.7851,
      "step": 150
    },
    {
      "epoch": 0.21,
      "grad_norm": 0.5660007442989872,
      "learning_rate": 1.9261469476427122e-05,
      "loss": 0.7699,
      "step": 155
    },
    {
      "epoch": 0.22,
      "grad_norm": 0.6052031318507304,
      "learning_rate": 1.916880176856909e-05,
      "loss": 0.787,
      "step": 160
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.5691776960805929,
      "learning_rate": 1.907090913734341e-05,
      "loss": 0.7851,
      "step": 165
    },
    {
      "epoch": 0.23,
      "grad_norm": 0.6074889597948341,
      "learning_rate": 1.896784736773805e-05,
      "loss": 0.7753,
      "step": 170
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.5860864302531827,
      "learning_rate": 1.885967519042054e-05,
      "loss": 0.7871,
      "step": 175
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.569482390256513,
      "learning_rate": 1.8746454248269777e-05,
      "loss": 0.7601,
      "step": 180
    },
    {
      "epoch": 0.25,
      "grad_norm": 0.5854282191729411,
      "learning_rate": 1.862824906124826e-05,
      "loss": 0.7819,
      "step": 185
    },
    {
      "epoch": 0.26,
      "grad_norm": 0.6428086559905823,
      "learning_rate": 1.850512698963485e-05,
      "loss": 0.7831,
      "step": 190
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.5698221365710079,
      "learning_rate": 1.8377158195638877e-05,
      "loss": 0.7604,
      "step": 195
    },
    {
      "epoch": 0.27,
      "grad_norm": 0.7373917549594062,
      "learning_rate": 1.8244415603417603e-05,
      "loss": 0.7587,
      "step": 200
    },
    {
      "epoch": 0.28,
      "grad_norm": 0.585209211283403,
      "learning_rate": 1.8106974857519737e-05,
      "loss": 0.7804,
      "step": 205
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.6168183287266534,
      "learning_rate": 1.7964914279778716e-05,
      "loss": 0.7797,
      "step": 210
    },
    {
      "epoch": 0.29,
      "grad_norm": 0.625316831394029,
      "learning_rate": 1.78183148246803e-05,
      "loss": 0.7583,
      "step": 215
    },
    {
      "epoch": 0.3,
      "grad_norm": 0.5564471122778261,
      "learning_rate": 1.7667260033229953e-05,
      "loss": 0.7512,
      "step": 220
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.5890010216830359,
      "learning_rate": 1.751183598534625e-05,
      "loss": 0.7528,
      "step": 225
    },
    {
      "epoch": 0.31,
      "grad_norm": 0.6009170802635413,
      "learning_rate": 1.7352131250807466e-05,
      "loss": 0.7626,
      "step": 230
    },
    {
      "epoch": 0.32,
      "grad_norm": 0.5792443910657085,
      "learning_rate": 1.7188236838779297e-05,
      "loss": 0.7632,
      "step": 235
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.5716562571066842,
      "learning_rate": 1.702024614595248e-05,
      "loss": 0.7551,
      "step": 240
    },
    {
      "epoch": 0.33,
      "grad_norm": 0.5703410316230326,
      "learning_rate": 1.6848254903319866e-05,
      "loss": 0.7427,
      "step": 245
    },
    {
      "epoch": 0.34,
      "grad_norm": 0.6094207116793442,
      "learning_rate": 1.6672361121623238e-05,
      "loss": 0.7943,
      "step": 250
    },
    {
      "epoch": 0.35,
      "grad_norm": 0.5915494337632395,
      "learning_rate": 1.6492665035501048e-05,
      "loss": 0.7373,
      "step": 255
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.5958263340568929,
      "learning_rate": 1.6309269046368777e-05,
      "loss": 0.7523,
      "step": 260
    },
    {
      "epoch": 0.36,
      "grad_norm": 0.6569542499394689,
      "learning_rate": 1.612227766406461e-05,
      "loss": 0.755,
      "step": 265
    },
    {
      "epoch": 0.37,
      "grad_norm": 0.5775231523651057,
      "learning_rate": 1.5931797447293553e-05,
      "loss": 0.7292,
      "step": 270
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.6322564996445303,
      "learning_rate": 1.5737936942904025e-05,
      "loss": 0.7476,
      "step": 275
    },
    {
      "epoch": 0.38,
      "grad_norm": 0.5522985173213607,
      "learning_rate": 1.554080662403144e-05,
      "loss": 0.7427,
      "step": 280
    },
    {
      "epoch": 0.39,
      "grad_norm": 0.6314561016931608,
      "learning_rate": 1.5340518827144145e-05,
      "loss": 0.7602,
      "step": 285
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.5865596324052452,
      "learning_rate": 1.5137187688027437e-05,
      "loss": 0.7496,
      "step": 290
    },
    {
      "epoch": 0.4,
      "grad_norm": 0.5697597752265093,
      "learning_rate": 1.4930929076742317e-05,
      "loss": 0.7392,
      "step": 295
    },
    {
      "epoch": 0.41,
      "grad_norm": 0.6129791987688828,
      "learning_rate": 1.4721860531595868e-05,
      "loss": 0.7537,
      "step": 300
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.5897396431474848,
      "learning_rate": 1.451010119216102e-05,
      "loss": 0.7583,
      "step": 305
    },
    {
      "epoch": 0.42,
      "grad_norm": 0.5710567280058013,
      "learning_rate": 1.4295771731383799e-05,
      "loss": 0.7275,
      "step": 310
    },
    {
      "epoch": 0.43,
      "grad_norm": 0.5953176828826594,
      "learning_rate": 1.4078994286816768e-05,
      "loss": 0.7453,
      "step": 315
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.6242032112691839,
      "learning_rate": 1.3859892391017867e-05,
      "loss": 0.7644,
      "step": 320
    },
    {
      "epoch": 0.44,
      "grad_norm": 0.6014365104233058,
      "learning_rate": 1.3638590901154276e-05,
      "loss": 0.7532,
      "step": 325
    },
    {
      "epoch": 0.45,
      "grad_norm": 0.5726135101483418,
      "learning_rate": 1.341521592785145e-05,
      "loss": 0.7663,
      "step": 330
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.6061053656685103,
      "learning_rate": 1.3189894763327851e-05,
      "loss": 0.7476,
      "step": 335
    },
    {
      "epoch": 0.46,
      "grad_norm": 0.5910382927751472,
      "learning_rate": 1.2962755808856341e-05,
      "loss": 0.7418,
      "step": 340
    },
    {
      "epoch": 0.47,
      "grad_norm": 0.6192359895872668,
      "learning_rate": 1.2733928501593587e-05,
      "loss": 0.7322,
      "step": 345
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.5754476677307057,
      "learning_rate": 1.2503543240819127e-05,
      "loss": 0.7505,
      "step": 350
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.5954914603467452,
      "learning_rate": 1.227173131362619e-05,
      "loss": 0.7316,
      "step": 355
    },
    {
      "epoch": 0.49,
      "grad_norm": 0.636714901981685,
      "learning_rate": 1.2038624820106572e-05,
      "loss": 0.7483,
      "step": 360
    },
    {
      "epoch": 0.5,
      "grad_norm": 0.5956828212931418,
      "learning_rate": 1.1804356598072223e-05,
      "loss": 0.7141,
      "step": 365
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.6438375663233106,
      "learning_rate": 1.1569060147356441e-05,
      "loss": 0.7589,
      "step": 370
    },
    {
      "epoch": 0.51,
      "grad_norm": 0.5911783265386276,
      "learning_rate": 1.133286955373779e-05,
      "loss": 0.7485,
      "step": 375
    },
    {
      "epoch": 0.52,
      "grad_norm": 0.5615839698644924,
      "learning_rate": 1.1095919412530136e-05,
      "loss": 0.75,
      "step": 380
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.5906964619382736,
      "learning_rate": 1.0858344751882304e-05,
      "loss": 0.7089,
      "step": 385
    },
    {
      "epoch": 0.53,
      "grad_norm": 0.622095376177676,
      "learning_rate": 1.0620280955831088e-05,
      "loss": 0.7418,
      "step": 390
    },
    {
      "epoch": 0.54,
      "grad_norm": 0.5629629729406987,
      "learning_rate": 1.038186368715145e-05,
      "loss": 0.7148,
      "step": 395
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.5596961155705783,
      "learning_rate": 1.0143228810047877e-05,
      "loss": 0.7226,
      "step": 400
    },
    {
      "epoch": 0.55,
      "grad_norm": 0.5949479844701526,
      "learning_rate": 9.904512312730948e-06,
      "loss": 0.7446,
      "step": 405
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.5503557697571823,
      "learning_rate": 9.665850229923258e-06,
      "loss": 0.7294,
      "step": 410
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.5646735246212238,
      "learning_rate": 9.4273785653388e-06,
      "loss": 0.7399,
      "step": 415
    },
    {
      "epoch": 0.57,
      "grad_norm": 0.6166612666135994,
      "learning_rate": 9.189233214180057e-06,
      "loss": 0.7464,
      "step": 420
    },
    {
      "epoch": 0.58,
      "grad_norm": 0.6607877653922463,
      "learning_rate": 8.951549885696889e-06,
      "loss": 0.7104,
      "step": 425
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.5828440632986862,
      "learning_rate": 8.714464025851428e-06,
      "loss": 0.748,
      "step": 430
    },
    {
      "epoch": 0.59,
      "grad_norm": 0.6692052000569471,
      "learning_rate": 8.478110740132971e-06,
      "loss": 0.7194,
      "step": 435
    },
    {
      "epoch": 0.6,
      "grad_norm": 0.5925544656331848,
      "learning_rate": 8.242624716566928e-06,
      "loss": 0.7414,
      "step": 440
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.5681970472775137,
      "learning_rate": 8.008140148961642e-06,
      "loss": 0.7266,
      "step": 445
    },
    {
      "epoch": 0.61,
      "grad_norm": 0.6022566260644191,
      "learning_rate": 7.774790660436857e-06,
      "loss": 0.7458,
      "step": 450
    },
    {
      "epoch": 0.62,
      "grad_norm": 0.6280077977218962,
      "learning_rate": 7.542709227277396e-06,
      "loss": 0.7265,
      "step": 455
    },
    {
      "epoch": 0.63,
      "grad_norm": 0.5662044508467975,
      "learning_rate": 7.312028103155426e-06,
      "loss": 0.7296,
      "step": 460
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.5875195935745249,
      "learning_rate": 7.0828787437645455e-06,
      "loss": 0.7666,
      "step": 465
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.5878339828851088,
      "learning_rate": 6.8553917319085676e-06,
      "loss": 0.7478,
      "step": 470
    },
    {
      "epoch": 0.65,
      "grad_norm": 0.552328272875581,
      "learning_rate": 6.629696703087755e-06,
      "loss": 0.7237,
      "step": 475
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.6071223669952857,
      "learning_rate": 6.405922271624874e-06,
      "loss": 0.7357,
      "step": 480
    },
    {
      "epoch": 0.66,
      "grad_norm": 0.6060821252643572,
      "learning_rate": 6.184195957373176e-06,
      "loss": 0.7178,
      "step": 485
    },
    {
      "epoch": 0.67,
      "grad_norm": 0.543636900637841,
      "learning_rate": 5.964644113048079e-06,
      "loss": 0.7264,
      "step": 490
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.603596293760061,
      "learning_rate": 5.74739185222394e-06,
      "loss": 0.7415,
      "step": 495
    },
    {
      "epoch": 0.68,
      "grad_norm": 0.5558735174394915,
      "learning_rate": 5.532562978036964e-06,
      "loss": 0.7431,
      "step": 500
    },
    {
      "epoch": 0.69,
      "grad_norm": 0.5937971276012349,
      "learning_rate": 5.320279912634907e-06,
      "loss": 0.7316,
      "step": 505
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.5786141927192617,
      "learning_rate": 5.110663627413695e-06,
      "loss": 0.713,
      "step": 510
    },
    {
      "epoch": 0.7,
      "grad_norm": 0.5809275145865697,
      "learning_rate": 4.903833574080825e-06,
      "loss": 0.7273,
      "step": 515
    },
    {
      "epoch": 0.71,
      "grad_norm": 0.571032903937194,
      "learning_rate": 4.6999076165847214e-06,
      "loss": 0.7128,
      "step": 520
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.6308749156115376,
      "learning_rate": 4.499001963948929e-06,
      "loss": 0.727,
      "step": 525
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.5475000263845851,
      "learning_rate": 4.301231104049359e-06,
      "loss": 0.7236,
      "step": 530
    },
    {
      "epoch": 0.73,
      "grad_norm": 0.5451654896042127,
      "learning_rate": 4.106707738372357e-06,
      "loss": 0.717,
      "step": 535
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.5895012216093011,
      "learning_rate": 3.915542717790759e-06,
      "loss": 0.7253,
      "step": 540
    },
    {
      "epoch": 0.74,
      "grad_norm": 0.5991508019426129,
      "learning_rate": 3.727844979394526e-06,
      "loss": 0.7108,
      "step": 545
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.6024026652126097,
      "learning_rate": 3.543721484411976e-06,
      "loss": 0.7148,
      "step": 550
    },
    {
      "epoch": 0.76,
      "grad_norm": 0.5479626437376189,
      "learning_rate": 3.3632771572569878e-06,
      "loss": 0.7205,
      "step": 555
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.5637951714589972,
      "learning_rate": 3.1866148257368666e-06,
      "loss": 0.7291,
      "step": 560
    },
    {
      "epoch": 0.77,
      "grad_norm": 0.5971328138449835,
      "learning_rate": 3.0138351624550165e-06,
      "loss": 0.7242,
      "step": 565
    },
    {
      "epoch": 0.78,
      "grad_norm": 0.5566178272277884,
      "learning_rate": 2.845036627441755e-06,
      "loss": 0.725,
      "step": 570
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.5747018707425353,
      "learning_rate": 2.6803154120460007e-06,
      "loss": 0.7232,
      "step": 575
    },
    {
      "epoch": 0.79,
      "grad_norm": 0.583278019846911,
      "learning_rate": 2.5197653841197546e-06,
      "loss": 0.7593,
      "step": 580
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.5792687056211038,
      "learning_rate": 2.3634780345266805e-06,
      "loss": 0.7263,
      "step": 585
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.5936774049714754,
      "learning_rate": 2.211542425005223e-06,
      "loss": 0.7105,
      "step": 590
    },
    {
      "epoch": 0.81,
      "grad_norm": 0.5618344372585139,
      "learning_rate": 2.064045137415982e-06,
      "loss": 0.7148,
      "step": 595
    },
    {
      "epoch": 0.82,
      "grad_norm": 0.5753081059010117,
      "learning_rate": 1.9210702244022616e-06,
      "loss": 0.7313,
      "step": 600
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.5418687310157559,
      "learning_rate": 1.7826991614919264e-06,
      "loss": 0.7245,
      "step": 605
    },
    {
      "epoch": 0.83,
      "grad_norm": 0.5828322063852037,
      "learning_rate": 1.6490108006678495e-06,
      "loss": 0.7527,
      "step": 610
    },
    {
      "epoch": 0.84,
      "grad_norm": 0.5878067843611324,
      "learning_rate": 1.5200813254334013e-06,
      "loss": 0.7213,
      "step": 615
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.5816156229837385,
      "learning_rate": 1.3959842073986085e-06,
      "loss": 0.7134,
      "step": 620
    },
    {
      "epoch": 0.85,
      "grad_norm": 0.5962667792304962,
      "learning_rate": 1.2767901644116943e-06,
      "loss": 0.7233,
      "step": 625
    },
    {
      "epoch": 0.86,
      "grad_norm": 0.5399112934132381,
      "learning_rate": 1.1625671202598875e-06,
      "loss": 0.7179,
      "step": 630
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.5397855641716054,
      "learning_rate": 1.0533801659624531e-06,
      "loss": 0.7114,
      "step": 635
    },
    {
      "epoch": 0.87,
      "grad_norm": 0.5936653550047086,
      "learning_rate": 9.492915226779809e-07,
      "loss": 0.7175,
      "step": 640
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.6099602078388837,
      "learning_rate": 8.503605062471187e-07,
      "loss": 0.7119,
      "step": 645
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.6177660757674084,
      "learning_rate": 7.566434933909006e-07,
      "loss": 0.7132,
      "step": 650
    },
    {
      "epoch": 0.89,
      "grad_norm": 0.5217732163513131,
      "learning_rate": 6.681938895839746e-07,
      "loss": 0.7128,
      "step": 655
    },
    {
      "epoch": 0.9,
      "grad_norm": 0.5808565758298766,
      "learning_rate": 5.850620986210198e-07,
      "loss": 0.6972,
      "step": 660
    },
    {
      "epoch": 0.91,
      "grad_norm": 0.5829050029411651,
      "learning_rate": 5.072954938936925e-07,
      "loss": 0.7253,
      "step": 665
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.5481607356326386,
      "learning_rate": 4.3493839139447716e-07,
      "loss": 0.7222,
      "step": 670
    },
    {
      "epoch": 0.92,
      "grad_norm": 0.5652882598658524,
      "learning_rate": 3.6803202446282217e-07,
      "loss": 0.6845,
      "step": 675
    },
    {
      "epoch": 0.93,
      "grad_norm": 0.5796535522251339,
      "learning_rate": 3.0661452028795335e-07,
      "loss": 0.7148,
      "step": 680
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.5887255027538941,
      "learning_rate": 2.507208781817638e-07,
      "loss": 0.7224,
      "step": 685
    },
    {
      "epoch": 0.94,
      "grad_norm": 0.5592326454997533,
      "learning_rate": 2.0038294963413251e-07,
      "loss": 0.7113,
      "step": 690
    },
    {
      "epoch": 0.95,
      "grad_norm": 0.5776817470975002,
      "learning_rate": 1.556294201620734e-07,
      "loss": 0.7033,
      "step": 695
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.5696597587226961,
      "learning_rate": 1.1648579296304252e-07,
      "loss": 0.7277,
      "step": 700
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.5641386210392492,
      "learning_rate": 8.297437438170797e-08,
      "loss": 0.7256,
      "step": 705
    },
    {
      "epoch": 0.97,
      "grad_norm": 0.5889080994491116,
      "learning_rate": 5.51142611984834e-08,
      "loss": 0.7279,
      "step": 710
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.5665974446341169,
      "learning_rate": 3.2921329747056527e-08,
      "loss": 0.7311,
      "step": 715
    },
    {
      "epoch": 0.98,
      "grad_norm": 0.5822891522453822,
      "learning_rate": 1.6408226867118404e-08,
      "loss": 0.7131,
      "step": 720
    },
    {
      "epoch": 0.99,
      "grad_norm": 0.5631956425423463,
      "learning_rate": 5.584362697453882e-09,
      "loss": 0.7071,
      "step": 725
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.563567530638776,
      "learning_rate": 4.5590531348227443e-10,
      "loss": 0.7119,
      "step": 730
    },
    {
      "epoch": 1.0,
      "eval_loss": 0.7456573247909546,
      "eval_runtime": 9.4968,
      "eval_samples_per_second": 52.649,
      "eval_steps_per_second": 1.685,
      "step": 732
    },
    {
      "epoch": 1.0,
      "step": 732,
      "total_flos": 106029000622080.0,
      "train_loss": 0.7605658245216953,
      "train_runtime": 6107.4706,
      "train_samples_per_second": 15.333,
      "train_steps_per_second": 0.12
    }
  ],
  "logging_steps": 5,
  "max_steps": 732,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "total_flos": 106029000622080.0,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}