{
  "best_metric": 0.9921853502169998,
  "best_model_checkpoint": "C:\\uczonko_clear\\transformery\\models_mobilevitv2_eyes_VIT\\checkpoint-20720",
  "epoch": 3.4835238735709484,
  "eval_steps": 2960,
  "global_step": 20720,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008406186953597848,
      "grad_norm": 0.8845431208610535,
      "learning_rate": 8.44451950684006e-09,
      "loss": 0.6941,
      "step": 50
    },
    {
      "epoch": 0.016812373907195696,
      "grad_norm": 0.8216766715049744,
      "learning_rate": 1.688903901368012e-08,
      "loss": 0.6963,
      "step": 100
    },
    {
      "epoch": 0.025218560860793545,
      "grad_norm": 0.9512827396392822,
      "learning_rate": 2.533355852052018e-08,
      "loss": 0.6934,
      "step": 150
    },
    {
      "epoch": 0.03362474781439139,
      "grad_norm": 0.8841463923454285,
      "learning_rate": 3.377807802736024e-08,
      "loss": 0.6933,
      "step": 200
    },
    {
      "epoch": 0.04203093476798924,
      "grad_norm": 0.9045664668083191,
      "learning_rate": 4.22225975342003e-08,
      "loss": 0.6961,
      "step": 250
    },
    {
      "epoch": 0.05043712172158709,
      "grad_norm": 0.7906067967414856,
      "learning_rate": 5.066711704104036e-08,
      "loss": 0.6944,
      "step": 300
    },
    {
      "epoch": 0.05884330867518493,
      "grad_norm": 0.7868759036064148,
      "learning_rate": 5.911163654788042e-08,
      "loss": 0.6974,
      "step": 350
    },
    {
      "epoch": 0.06724949562878278,
      "grad_norm": 0.7998577356338501,
      "learning_rate": 6.755615605472048e-08,
      "loss": 0.696,
      "step": 400
    },
    {
      "epoch": 0.07565568258238063,
      "grad_norm": 0.8735635280609131,
      "learning_rate": 7.600067556156055e-08,
      "loss": 0.695,
      "step": 450
    },
    {
      "epoch": 0.08406186953597848,
      "grad_norm": 0.824541449546814,
      "learning_rate": 8.44451950684006e-08,
      "loss": 0.6972,
      "step": 500
    },
    {
      "epoch": 0.09246805648957633,
      "grad_norm": 0.9033031463623047,
      "learning_rate": 9.288971457524067e-08,
      "loss": 0.6973,
      "step": 550
    },
    {
      "epoch": 0.10087424344317418,
      "grad_norm": 0.923117995262146,
      "learning_rate": 1.0133423408208072e-07,
      "loss": 0.6959,
      "step": 600
    },
    {
      "epoch": 0.10928043039677203,
      "grad_norm": 0.8658111095428467,
      "learning_rate": 1.0977875358892079e-07,
      "loss": 0.6967,
      "step": 650
    },
    {
      "epoch": 0.11768661735036987,
      "grad_norm": 1.0212489366531372,
      "learning_rate": 1.1822327309576084e-07,
      "loss": 0.6961,
      "step": 700
    },
    {
      "epoch": 0.12609280430396771,
      "grad_norm": 0.8621204495429993,
      "learning_rate": 1.266677926026009e-07,
      "loss": 0.6964,
      "step": 750
    },
    {
      "epoch": 0.13449899125756556,
      "grad_norm": 0.8873909711837769,
      "learning_rate": 1.3511231210944096e-07,
      "loss": 0.693,
      "step": 800
    },
    {
      "epoch": 0.14290517821116341,
      "grad_norm": 0.7807337641716003,
      "learning_rate": 1.4355683161628102e-07,
      "loss": 0.6945,
      "step": 850
    },
    {
      "epoch": 0.15131136516476126,
      "grad_norm": 0.8349995017051697,
      "learning_rate": 1.520013511231211e-07,
      "loss": 0.6956,
      "step": 900
    },
    {
      "epoch": 0.1597175521183591,
      "grad_norm": 0.8659362196922302,
      "learning_rate": 1.6044587062996115e-07,
      "loss": 0.6931,
      "step": 950
    },
    {
      "epoch": 0.16812373907195696,
      "grad_norm": 0.888361394405365,
      "learning_rate": 1.688903901368012e-07,
      "loss": 0.697,
      "step": 1000
    },
    {
      "epoch": 0.1765299260255548,
      "grad_norm": 0.8722349405288696,
      "learning_rate": 1.7733490964364126e-07,
      "loss": 0.6939,
      "step": 1050
    },
    {
      "epoch": 0.18493611297915266,
      "grad_norm": 0.8624858856201172,
      "learning_rate": 1.8577942915048134e-07,
      "loss": 0.6944,
      "step": 1100
    },
    {
      "epoch": 0.1933422999327505,
      "grad_norm": 0.846449613571167,
      "learning_rate": 1.942239486573214e-07,
      "loss": 0.6933,
      "step": 1150
    },
    {
      "epoch": 0.20174848688634836,
      "grad_norm": 0.7982395887374878,
      "learning_rate": 2.0266846816416145e-07,
      "loss": 0.6928,
      "step": 1200
    },
    {
      "epoch": 0.2101546738399462,
      "grad_norm": 0.8489773273468018,
      "learning_rate": 2.111129876710015e-07,
      "loss": 0.693,
      "step": 1250
    },
    {
      "epoch": 0.21856086079354406,
      "grad_norm": 0.8371855616569519,
      "learning_rate": 2.1955750717784158e-07,
      "loss": 0.6945,
      "step": 1300
    },
    {
      "epoch": 0.2269670477471419,
      "grad_norm": 0.7753377556800842,
      "learning_rate": 2.2800202668468163e-07,
      "loss": 0.689,
      "step": 1350
    },
    {
      "epoch": 0.23537323470073973,
      "grad_norm": 0.8692731261253357,
      "learning_rate": 2.364465461915217e-07,
      "loss": 0.6912,
      "step": 1400
    },
    {
      "epoch": 0.24377942165433758,
      "grad_norm": 0.8294754028320312,
      "learning_rate": 2.4489106569836174e-07,
      "loss": 0.6956,
      "step": 1450
    },
    {
      "epoch": 0.25218560860793543,
      "grad_norm": 0.8770684003829956,
      "learning_rate": 2.533355852052018e-07,
      "loss": 0.6913,
      "step": 1500
    },
    {
      "epoch": 0.2605917955615333,
      "grad_norm": 0.8920065760612488,
      "learning_rate": 2.6178010471204185e-07,
      "loss": 0.6929,
      "step": 1550
    },
    {
      "epoch": 0.26899798251513113,
      "grad_norm": 0.8197062015533447,
      "learning_rate": 2.7022462421888193e-07,
      "loss": 0.6891,
      "step": 1600
    },
    {
      "epoch": 0.277404169468729,
      "grad_norm": 0.8367999196052551,
      "learning_rate": 2.7866914372572196e-07,
      "loss": 0.6929,
      "step": 1650
    },
    {
      "epoch": 0.28581035642232683,
      "grad_norm": 0.7874473929405212,
      "learning_rate": 2.8711366323256204e-07,
      "loss": 0.6946,
      "step": 1700
    },
    {
      "epoch": 0.2942165433759247,
      "grad_norm": 0.8376381397247314,
      "learning_rate": 2.955581827394021e-07,
      "loss": 0.6917,
      "step": 1750
    },
    {
      "epoch": 0.3026227303295225,
      "grad_norm": 0.8306110501289368,
      "learning_rate": 3.040027022462422e-07,
      "loss": 0.689,
      "step": 1800
    },
    {
      "epoch": 0.3110289172831204,
      "grad_norm": 0.8515573740005493,
      "learning_rate": 3.124472217530822e-07,
      "loss": 0.6919,
      "step": 1850
    },
    {
      "epoch": 0.3194351042367182,
      "grad_norm": 0.8318465352058411,
      "learning_rate": 3.208917412599223e-07,
      "loss": 0.6925,
      "step": 1900
    },
    {
      "epoch": 0.3278412911903161,
      "grad_norm": 1.0014280080795288,
      "learning_rate": 3.2933626076676233e-07,
      "loss": 0.6888,
      "step": 1950
    },
    {
      "epoch": 0.3362474781439139,
      "grad_norm": 0.9011471271514893,
      "learning_rate": 3.377807802736024e-07,
      "loss": 0.6891,
      "step": 2000
    },
    {
      "epoch": 0.3446536650975118,
      "grad_norm": 0.8618181347846985,
      "learning_rate": 3.4622529978044244e-07,
      "loss": 0.6871,
      "step": 2050
    },
    {
      "epoch": 0.3530598520511096,
      "grad_norm": 0.834791898727417,
      "learning_rate": 3.546698192872825e-07,
      "loss": 0.6896,
      "step": 2100
    },
    {
      "epoch": 0.3614660390047075,
      "grad_norm": 0.8285929560661316,
      "learning_rate": 3.631143387941226e-07,
      "loss": 0.6902,
      "step": 2150
    },
    {
      "epoch": 0.3698722259583053,
      "grad_norm": 0.8645830154418945,
      "learning_rate": 3.715588583009627e-07,
      "loss": 0.6876,
      "step": 2200
    },
    {
      "epoch": 0.3782784129119032,
      "grad_norm": 0.8188508152961731,
      "learning_rate": 3.800033778078027e-07,
      "loss": 0.6896,
      "step": 2250
    },
    {
      "epoch": 0.386684599865501,
      "grad_norm": 0.8208332061767578,
      "learning_rate": 3.884478973146428e-07,
      "loss": 0.6857,
      "step": 2300
    },
    {
      "epoch": 0.39509078681909887,
      "grad_norm": 0.8408875465393066,
      "learning_rate": 3.968924168214828e-07,
      "loss": 0.6894,
      "step": 2350
    },
    {
      "epoch": 0.4034969737726967,
      "grad_norm": 0.8612744808197021,
      "learning_rate": 4.053369363283229e-07,
      "loss": 0.6872,
      "step": 2400
    },
    {
      "epoch": 0.41190316072629457,
      "grad_norm": 0.9161643385887146,
      "learning_rate": 4.137814558351629e-07,
      "loss": 0.6874,
      "step": 2450
    },
    {
      "epoch": 0.4203093476798924,
      "grad_norm": 0.8540963530540466,
      "learning_rate": 4.22225975342003e-07,
      "loss": 0.6888,
      "step": 2500
    },
    {
      "epoch": 0.42871553463349027,
      "grad_norm": 0.9273486137390137,
      "learning_rate": 4.306704948488431e-07,
      "loss": 0.6905,
      "step": 2550
    },
    {
      "epoch": 0.4371217215870881,
      "grad_norm": 0.7874332666397095,
      "learning_rate": 4.3911501435568316e-07,
      "loss": 0.6863,
      "step": 2600
    },
    {
      "epoch": 0.44552790854068597,
      "grad_norm": 0.83612459897995,
      "learning_rate": 4.475595338625232e-07,
      "loss": 0.6877,
      "step": 2650
    },
    {
      "epoch": 0.4539340954942838,
      "grad_norm": 0.9220020771026611,
      "learning_rate": 4.5600405336936327e-07,
      "loss": 0.6878,
      "step": 2700
    },
    {
      "epoch": 0.46234028244788167,
      "grad_norm": 0.9280387759208679,
      "learning_rate": 4.644485728762033e-07,
      "loss": 0.682,
      "step": 2750
    },
    {
      "epoch": 0.47074646940147946,
      "grad_norm": 0.8182604312896729,
      "learning_rate": 4.728930923830434e-07,
      "loss": 0.6835,
      "step": 2800
    },
    {
      "epoch": 0.4791526563550773,
      "grad_norm": 0.7758776545524597,
      "learning_rate": 4.813376118898834e-07,
      "loss": 0.6811,
      "step": 2850
    },
    {
      "epoch": 0.48755884330867516,
      "grad_norm": 0.9325575232505798,
      "learning_rate": 4.897821313967235e-07,
      "loss": 0.6806,
      "step": 2900
    },
    {
      "epoch": 0.495965030262273,
      "grad_norm": 0.8952744007110596,
      "learning_rate": 4.982266509035636e-07,
      "loss": 0.6851,
      "step": 2950
    },
    {
      "epoch": 0.4976462676529926,
      "eval_accuracy": 0.5783783783783784,
      "eval_f1_macro": 0.577083139496245,
      "eval_loss": 0.6811313629150391,
      "eval_precision": 0.5781996700999453,
      "eval_recall": 0.5791102231359357,
      "eval_runtime": 48.5841,
      "eval_samples_per_second": 79.964,
      "eval_steps_per_second": 10.003,
      "step": 2960
    },
    {
      "epoch": 0.5043712172158709,
      "grad_norm": 0.8315622210502625,
      "learning_rate": 5.066711704104036e-07,
      "loss": 0.6814,
      "step": 3000
    },
    {
      "epoch": 0.5127774041694687,
      "grad_norm": 0.9562221169471741,
      "learning_rate": 5.151156899172436e-07,
      "loss": 0.6823,
      "step": 3050
    },
    {
      "epoch": 0.5211835911230666,
      "grad_norm": 0.8448233008384705,
      "learning_rate": 5.235602094240837e-07,
      "loss": 0.683,
      "step": 3100
    },
    {
      "epoch": 0.5295897780766644,
      "grad_norm": 0.848648726940155,
      "learning_rate": 5.320047289309239e-07,
      "loss": 0.6798,
      "step": 3150
    },
    {
      "epoch": 0.5379959650302623,
      "grad_norm": 0.8812288641929626,
      "learning_rate": 5.404492484377639e-07,
      "loss": 0.6822,
      "step": 3200
    },
    {
      "epoch": 0.5464021519838601,
      "grad_norm": 0.8234968781471252,
      "learning_rate": 5.488937679446039e-07,
      "loss": 0.6793,
      "step": 3250
    },
    {
      "epoch": 0.554808338937458,
      "grad_norm": 0.9725661277770996,
      "learning_rate": 5.573382874514439e-07,
      "loss": 0.6784,
      "step": 3300
    },
    {
      "epoch": 0.5632145258910558,
      "grad_norm": 0.8935129046440125,
      "learning_rate": 5.657828069582841e-07,
      "loss": 0.6756,
      "step": 3350
    },
    {
      "epoch": 0.5716207128446537,
      "grad_norm": 0.8144574761390686,
      "learning_rate": 5.742273264651241e-07,
      "loss": 0.6759,
      "step": 3400
    },
    {
      "epoch": 0.5800268997982515,
      "grad_norm": 0.8048451542854309,
      "learning_rate": 5.826718459719642e-07,
      "loss": 0.6756,
      "step": 3450
    },
    {
      "epoch": 0.5884330867518494,
      "grad_norm": 0.9894602298736572,
      "learning_rate": 5.911163654788042e-07,
      "loss": 0.672,
      "step": 3500
    },
    {
      "epoch": 0.5968392737054472,
      "grad_norm": 0.8449563980102539,
      "learning_rate": 5.995608849856443e-07,
      "loss": 0.6751,
      "step": 3550
    },
    {
      "epoch": 0.605245460659045,
      "grad_norm": 0.8340707421302795,
      "learning_rate": 6.080054044924844e-07,
      "loss": 0.6739,
      "step": 3600
    },
    {
      "epoch": 0.6136516476126429,
      "grad_norm": 0.8228305578231812,
      "learning_rate": 6.164499239993244e-07,
      "loss": 0.6733,
      "step": 3650
    },
    {
      "epoch": 0.6220578345662408,
      "grad_norm": 0.7999180555343628,
      "learning_rate": 6.248944435061644e-07,
      "loss": 0.6712,
      "step": 3700
    },
    {
      "epoch": 0.6304640215198386,
      "grad_norm": 0.7966693639755249,
      "learning_rate": 6.333389630130045e-07,
      "loss": 0.6705,
      "step": 3750
    },
    {
      "epoch": 0.6388702084734365,
      "grad_norm": 0.978801429271698,
      "learning_rate": 6.417834825198446e-07,
      "loss": 0.668,
      "step": 3800
    },
    {
      "epoch": 0.6472763954270343,
      "grad_norm": 0.8549861311912537,
      "learning_rate": 6.502280020266847e-07,
      "loss": 0.6707,
      "step": 3850
    },
    {
      "epoch": 0.6556825823806322,
      "grad_norm": 0.8314603567123413,
      "learning_rate": 6.586725215335247e-07,
      "loss": 0.6687,
      "step": 3900
    },
    {
      "epoch": 0.66408876933423,
      "grad_norm": 0.9379679560661316,
      "learning_rate": 6.671170410403648e-07,
      "loss": 0.6678,
      "step": 3950
    },
    {
      "epoch": 0.6724949562878278,
      "grad_norm": 0.8202654719352722,
      "learning_rate": 6.755615605472048e-07,
      "loss": 0.6703,
      "step": 4000
    },
    {
      "epoch": 0.6809011432414257,
      "grad_norm": 0.8476347327232361,
      "learning_rate": 6.840060800540449e-07,
      "loss": 0.6658,
      "step": 4050
    },
    {
      "epoch": 0.6893073301950235,
      "grad_norm": 0.8811922669410706,
      "learning_rate": 6.924505995608849e-07,
      "loss": 0.6619,
      "step": 4100
    },
    {
      "epoch": 0.6977135171486214,
      "grad_norm": 0.8713632225990295,
      "learning_rate": 7.008951190677251e-07,
      "loss": 0.663,
      "step": 4150
    },
    {
      "epoch": 0.7061197041022192,
      "grad_norm": 0.9455136656761169,
      "learning_rate": 7.09339638574565e-07,
      "loss": 0.6621,
      "step": 4200
    },
    {
      "epoch": 0.7145258910558171,
      "grad_norm": 0.7871012687683105,
      "learning_rate": 7.177841580814051e-07,
      "loss": 0.663,
      "step": 4250
    },
    {
      "epoch": 0.722932078009415,
      "grad_norm": 0.8988749980926514,
      "learning_rate": 7.262286775882452e-07,
      "loss": 0.6567,
      "step": 4300
    },
    {
      "epoch": 0.7313382649630128,
      "grad_norm": 0.8420932292938232,
      "learning_rate": 7.346731970950853e-07,
      "loss": 0.6591,
      "step": 4350
    },
    {
      "epoch": 0.7397444519166106,
      "grad_norm": 0.902152955532074,
      "learning_rate": 7.431177166019254e-07,
      "loss": 0.6549,
      "step": 4400
    },
    {
      "epoch": 0.7481506388702085,
      "grad_norm": 0.8646785020828247,
      "learning_rate": 7.515622361087653e-07,
      "loss": 0.6544,
      "step": 4450
    },
    {
      "epoch": 0.7565568258238063,
      "grad_norm": 0.9260663986206055,
      "learning_rate": 7.600067556156054e-07,
      "loss": 0.6503,
      "step": 4500
    },
    {
      "epoch": 0.7649630127774042,
      "grad_norm": 0.9004981517791748,
      "learning_rate": 7.684512751224455e-07,
      "loss": 0.6488,
      "step": 4550
    },
    {
      "epoch": 0.773369199731002,
      "grad_norm": 0.8738273978233337,
      "learning_rate": 7.768957946292856e-07,
      "loss": 0.6515,
      "step": 4600
    },
    {
      "epoch": 0.7817753866845999,
      "grad_norm": 0.8965081572532654,
      "learning_rate": 7.853403141361256e-07,
      "loss": 0.6495,
      "step": 4650
    },
    {
      "epoch": 0.7901815736381977,
      "grad_norm": 0.982996940612793,
      "learning_rate": 7.937848336429656e-07,
      "loss": 0.6415,
      "step": 4700
    },
    {
      "epoch": 0.7985877605917956,
      "grad_norm": 0.86506187915802,
      "learning_rate": 8.022293531498058e-07,
      "loss": 0.6471,
      "step": 4750
    },
    {
      "epoch": 0.8069939475453934,
      "grad_norm": 0.9447119235992432,
      "learning_rate": 8.106738726566458e-07,
      "loss": 0.643,
      "step": 4800
    },
    {
      "epoch": 0.8154001344989913,
      "grad_norm": 0.9167577624320984,
      "learning_rate": 8.191183921634859e-07,
      "loss": 0.6352,
      "step": 4850
    },
    {
      "epoch": 0.8238063214525891,
      "grad_norm": 0.9060333967208862,
      "learning_rate": 8.275629116703258e-07,
      "loss": 0.6345,
      "step": 4900
    },
    {
      "epoch": 0.832212508406187,
      "grad_norm": 0.9279811978340149,
      "learning_rate": 8.36007431177166e-07,
      "loss": 0.6393,
      "step": 4950
    },
    {
      "epoch": 0.8406186953597848,
      "grad_norm": 0.8549557328224182,
      "learning_rate": 8.44451950684006e-07,
      "loss": 0.6327,
      "step": 5000
    },
    {
      "epoch": 0.8490248823133827,
      "grad_norm": 1.002295732498169,
      "learning_rate": 8.528964701908461e-07,
      "loss": 0.6261,
      "step": 5050
    },
    {
      "epoch": 0.8574310692669805,
      "grad_norm": 1.0114225149154663,
      "learning_rate": 8.613409896976862e-07,
      "loss": 0.6294,
      "step": 5100
    },
    {
      "epoch": 0.8658372562205784,
      "grad_norm": 0.8996625542640686,
      "learning_rate": 8.697855092045262e-07,
      "loss": 0.6232,
      "step": 5150
    },
    {
      "epoch": 0.8742434431741762,
      "grad_norm": 1.0029444694519043,
      "learning_rate": 8.782300287113663e-07,
      "loss": 0.6174,
      "step": 5200
    },
    {
      "epoch": 0.8826496301277741,
      "grad_norm": 1.0238999128341675,
      "learning_rate": 8.866745482182063e-07,
      "loss": 0.6166,
      "step": 5250
    },
    {
      "epoch": 0.8910558170813719,
      "grad_norm": 0.8762253522872925,
      "learning_rate": 8.951190677250464e-07,
      "loss": 0.6126,
      "step": 5300
    },
    {
      "epoch": 0.8994620040349698,
      "grad_norm": 1.0304447412490845,
      "learning_rate": 9.035635872318865e-07,
      "loss": 0.6138,
      "step": 5350
    },
    {
      "epoch": 0.9078681909885676,
      "grad_norm": 0.9571165442466736,
      "learning_rate": 9.120081067387265e-07,
      "loss": 0.6088,
      "step": 5400
    },
    {
      "epoch": 0.9162743779421655,
      "grad_norm": 0.9421870708465576,
      "learning_rate": 9.204526262455666e-07,
      "loss": 0.599,
      "step": 5450
    },
    {
      "epoch": 0.9246805648957633,
      "grad_norm": 1.0978301763534546,
      "learning_rate": 9.288971457524066e-07,
      "loss": 0.599,
      "step": 5500
    },
    {
      "epoch": 0.9330867518493612,
      "grad_norm": 0.9784364104270935,
      "learning_rate": 9.373416652592468e-07,
      "loss": 0.5962,
      "step": 5550
    },
    {
      "epoch": 0.9414929388029589,
      "grad_norm": 1.0564428567886353,
      "learning_rate": 9.457861847660867e-07,
      "loss": 0.5941,
      "step": 5600
    },
    {
      "epoch": 0.9498991257565568,
      "grad_norm": 1.0474001169204712,
      "learning_rate": 9.542307042729268e-07,
      "loss": 0.5885,
      "step": 5650
    },
    {
      "epoch": 0.9583053127101546,
      "grad_norm": 0.9988018274307251,
      "learning_rate": 9.626752237797668e-07,
      "loss": 0.5812,
      "step": 5700
    },
    {
      "epoch": 0.9667114996637525,
      "grad_norm": 1.0413827896118164,
      "learning_rate": 9.71119743286607e-07,
      "loss": 0.5786,
      "step": 5750
    },
    {
      "epoch": 0.9751176866173503,
      "grad_norm": 0.9909247159957886,
      "learning_rate": 9.79564262793447e-07,
      "loss": 0.5716,
      "step": 5800
    },
    {
      "epoch": 0.9835238735709482,
      "grad_norm": 1.0758306980133057,
      "learning_rate": 9.880087823002871e-07,
      "loss": 0.5659,
      "step": 5850
    },
    {
      "epoch": 0.991930060524546,
      "grad_norm": 1.275826096534729,
      "learning_rate": 9.964533018071271e-07,
      "loss": 0.5626,
      "step": 5900
    },
    {
      "epoch": 0.9952925353059852,
      "eval_accuracy": 0.896010296010296,
      "eval_f1_macro": 0.8950428029885591,
      "eval_loss": 0.5504117608070374,
      "eval_precision": 0.8942931136930143,
      "eval_recall": 0.8960280806062683,
      "eval_runtime": 43.2959,
      "eval_samples_per_second": 89.731,
      "eval_steps_per_second": 11.225,
      "step": 5920
    },
    {
      "epoch": 1.0003362474781439,
      "grad_norm": 1.089690089225769,
      "learning_rate": 9.994585410481898e-07,
      "loss": 0.5589,
      "step": 5950
    },
    {
      "epoch": 1.0087424344317417,
      "grad_norm": 1.1122716665267944,
      "learning_rate": 9.985249911312757e-07,
      "loss": 0.5511,
      "step": 6000
    },
    {
      "epoch": 1.0171486213853396,
      "grad_norm": 0.9658188223838806,
      "learning_rate": 9.975914412143618e-07,
      "loss": 0.5472,
      "step": 6050
    },
    {
      "epoch": 1.0255548083389374,
      "grad_norm": 1.1076239347457886,
      "learning_rate": 9.966578912974476e-07,
      "loss": 0.5412,
      "step": 6100
    },
    {
      "epoch": 1.0339609952925353,
      "grad_norm": 1.0050643682479858,
      "learning_rate": 9.957243413805335e-07,
      "loss": 0.54,
      "step": 6150
    },
    {
      "epoch": 1.0423671822461331,
      "grad_norm": 1.0132673978805542,
      "learning_rate": 9.947907914636196e-07,
      "loss": 0.53,
      "step": 6200
    },
    {
      "epoch": 1.050773369199731,
      "grad_norm": 1.1316572427749634,
      "learning_rate": 9.938572415467055e-07,
      "loss": 0.5202,
      "step": 6250
    },
    {
      "epoch": 1.0591795561533288,
      "grad_norm": 1.0512237548828125,
      "learning_rate": 9.929236916297914e-07,
      "loss": 0.5176,
      "step": 6300
    },
    {
      "epoch": 1.0675857431069267,
      "grad_norm": 1.1535313129425049,
      "learning_rate": 9.919901417128772e-07,
      "loss": 0.5136,
      "step": 6350
    },
    {
      "epoch": 1.0759919300605245,
      "grad_norm": 1.1831073760986328,
      "learning_rate": 9.910565917959633e-07,
      "loss": 0.4998,
      "step": 6400
    },
    {
      "epoch": 1.0843981170141224,
      "grad_norm": 1.0638761520385742,
      "learning_rate": 9.901230418790492e-07,
      "loss": 0.497,
      "step": 6450
    },
    {
      "epoch": 1.0928043039677202,
      "grad_norm": 1.163748025894165,
      "learning_rate": 9.891894919621353e-07,
      "loss": 0.4922,
      "step": 6500
    },
    {
      "epoch": 1.101210490921318,
      "grad_norm": 1.2004588842391968,
      "learning_rate": 9.882559420452212e-07,
      "loss": 0.4895,
      "step": 6550
    },
    {
      "epoch": 1.109616677874916,
      "grad_norm": 1.098446011543274,
      "learning_rate": 9.87322392128307e-07,
      "loss": 0.4787,
      "step": 6600
    },
    {
      "epoch": 1.1180228648285138,
      "grad_norm": 1.1584326028823853,
      "learning_rate": 9.86388842211393e-07,
      "loss": 0.4672,
      "step": 6650
    },
    {
      "epoch": 1.1264290517821116,
      "grad_norm": 1.1275545358657837,
      "learning_rate": 9.85455292294479e-07,
      "loss": 0.4679,
      "step": 6700
    },
    {
      "epoch": 1.1348352387357095,
      "grad_norm": 1.060410976409912,
      "learning_rate": 9.845217423775649e-07,
      "loss": 0.4581,
      "step": 6750
    },
    {
      "epoch": 1.1432414256893073,
      "grad_norm": 1.1001862287521362,
      "learning_rate": 9.835881924606508e-07,
      "loss": 0.4562,
      "step": 6800
    },
    {
      "epoch": 1.1516476126429052,
      "grad_norm": 1.188932180404663,
      "learning_rate": 9.826546425437368e-07,
      "loss": 0.4485,
      "step": 6850
    },
    {
      "epoch": 1.160053799596503,
      "grad_norm": 1.1793055534362793,
      "learning_rate": 9.817210926268227e-07,
      "loss": 0.4478,
      "step": 6900
    },
    {
      "epoch": 1.1684599865501009,
      "grad_norm": 1.2268601655960083,
      "learning_rate": 9.807875427099086e-07,
      "loss": 0.4321,
      "step": 6950
    },
    {
      "epoch": 1.1768661735036987,
      "grad_norm": 1.1301724910736084,
      "learning_rate": 9.798539927929947e-07,
      "loss": 0.4263,
      "step": 7000
    },
    {
      "epoch": 1.1852723604572966,
      "grad_norm": 1.115373134613037,
      "learning_rate": 9.789204428760806e-07,
      "loss": 0.4154,
      "step": 7050
    },
    {
      "epoch": 1.1936785474108944,
      "grad_norm": 1.180468201637268,
      "learning_rate": 9.779868929591664e-07,
      "loss": 0.4096,
      "step": 7100
    },
    {
      "epoch": 1.2020847343644923,
      "grad_norm": 1.213911533355713,
      "learning_rate": 9.770533430422523e-07,
      "loss": 0.4114,
      "step": 7150
    },
    {
      "epoch": 1.21049092131809,
      "grad_norm": 1.1898268461227417,
      "learning_rate": 9.761197931253384e-07,
      "loss": 0.3947,
      "step": 7200
    },
    {
      "epoch": 1.218897108271688,
      "grad_norm": 1.1514030694961548,
      "learning_rate": 9.751862432084243e-07,
      "loss": 0.3934,
      "step": 7250
    },
    {
      "epoch": 1.2273032952252858,
      "grad_norm": 1.1464074850082397,
      "learning_rate": 9.742526932915104e-07,
      "loss": 0.3822,
      "step": 7300
    },
    {
      "epoch": 1.2357094821788837,
      "grad_norm": 1.1897680759429932,
      "learning_rate": 9.733191433745962e-07,
      "loss": 0.3709,
      "step": 7350
    },
    {
      "epoch": 1.2441156691324815,
      "grad_norm": 1.1825538873672485,
      "learning_rate": 9.723855934576821e-07,
      "loss": 0.3695,
      "step": 7400
    },
    {
      "epoch": 1.2525218560860794,
      "grad_norm": 1.147258996963501,
      "learning_rate": 9.71452043540768e-07,
      "loss": 0.3603,
      "step": 7450
    },
    {
      "epoch": 1.2609280430396772,
      "grad_norm": 1.1026544570922852,
      "learning_rate": 9.70518493623854e-07,
      "loss": 0.3577,
      "step": 7500
    },
    {
      "epoch": 1.269334229993275,
      "grad_norm": 1.1945158243179321,
      "learning_rate": 9.6958494370694e-07,
      "loss": 0.3534,
      "step": 7550
    },
    {
      "epoch": 1.277740416946873,
      "grad_norm": 1.169705867767334,
      "learning_rate": 9.686513937900258e-07,
      "loss": 0.3448,
      "step": 7600
    },
    {
      "epoch": 1.2861466039004708,
      "grad_norm": 1.123337984085083,
      "learning_rate": 9.67717843873112e-07,
      "loss": 0.3431,
      "step": 7650
    },
    {
      "epoch": 1.2945527908540686,
      "grad_norm": 1.1492093801498413,
      "learning_rate": 9.667842939561978e-07,
      "loss": 0.3297,
      "step": 7700
    },
    {
      "epoch": 1.3029589778076665,
      "grad_norm": 0.9801061749458313,
      "learning_rate": 9.658507440392837e-07,
      "loss": 0.3255,
      "step": 7750
    },
    {
      "epoch": 1.3113651647612643,
      "grad_norm": 1.135079264640808,
      "learning_rate": 9.649171941223698e-07,
      "loss": 0.3241,
      "step": 7800
    },
    {
      "epoch": 1.3197713517148622,
      "grad_norm": 1.1149753332138062,
      "learning_rate": 9.639836442054556e-07,
      "loss": 0.3149,
      "step": 7850
    },
    {
      "epoch": 1.32817753866846,
      "grad_norm": 1.078354835510254,
      "learning_rate": 9.630500942885415e-07,
      "loss": 0.3143,
      "step": 7900
    },
    {
      "epoch": 1.3365837256220578,
      "grad_norm": 1.1154347658157349,
      "learning_rate": 9.621165443716274e-07,
      "loss": 0.301,
      "step": 7950
    },
    {
      "epoch": 1.3449899125756557,
      "grad_norm": 1.1075221300125122,
      "learning_rate": 9.611829944547135e-07,
      "loss": 0.2939,
      "step": 8000
    },
    {
      "epoch": 1.3533960995292535,
      "grad_norm": 1.0937137603759766,
      "learning_rate": 9.602494445377994e-07,
      "loss": 0.294,
      "step": 8050
    },
    {
      "epoch": 1.3618022864828514,
      "grad_norm": 1.0870780944824219,
      "learning_rate": 9.593158946208854e-07,
      "loss": 0.2864,
      "step": 8100
    },
    {
      "epoch": 1.3702084734364492,
      "grad_norm": 1.1460663080215454,
      "learning_rate": 9.583823447039713e-07,
      "loss": 0.274,
      "step": 8150
    },
    {
      "epoch": 1.378614660390047,
      "grad_norm": 1.0148537158966064,
      "learning_rate": 9.574487947870572e-07,
      "loss": 0.2727,
      "step": 8200
    },
    {
      "epoch": 1.387020847343645,
      "grad_norm": 1.0275702476501465,
      "learning_rate": 9.56515244870143e-07,
      "loss": 0.2613,
      "step": 8250
    },
    {
      "epoch": 1.3954270342972428,
      "grad_norm": 1.1357285976409912,
      "learning_rate": 9.555816949532292e-07,
      "loss": 0.2539,
      "step": 8300
    },
    {
      "epoch": 1.4038332212508406,
      "grad_norm": 1.0728079080581665,
      "learning_rate": 9.54648145036315e-07,
      "loss": 0.2538,
      "step": 8350
    },
    {
      "epoch": 1.4122394082044385,
      "grad_norm": 1.0374382734298706,
      "learning_rate": 9.53714595119401e-07,
      "loss": 0.2482,
      "step": 8400
    },
    {
      "epoch": 1.4206455951580363,
      "grad_norm": 1.0923174619674683,
      "learning_rate": 9.527810452024869e-07,
      "loss": 0.2392,
      "step": 8450
    },
    {
      "epoch": 1.4290517821116342,
      "grad_norm": 0.9895720481872559,
      "learning_rate": 9.518474952855729e-07,
      "loss": 0.238,
      "step": 8500
    },
    {
      "epoch": 1.437457969065232,
      "grad_norm": 1.0930739641189575,
      "learning_rate": 9.509139453686588e-07,
      "loss": 0.236,
      "step": 8550
    },
    {
      "epoch": 1.44586415601883,
      "grad_norm": 1.1996747255325317,
      "learning_rate": 9.499803954517448e-07,
      "loss": 0.2307,
      "step": 8600
    },
    {
      "epoch": 1.4542703429724277,
      "grad_norm": 0.921042263507843,
      "learning_rate": 9.490468455348307e-07,
      "loss": 0.2186,
      "step": 8650
    },
    {
      "epoch": 1.4626765299260256,
      "grad_norm": 0.9537034630775452,
      "learning_rate": 9.481132956179166e-07,
      "loss": 0.2251,
      "step": 8700
    },
    {
      "epoch": 1.4710827168796234,
      "grad_norm": 0.9689446091651917,
      "learning_rate": 9.471797457010026e-07,
      "loss": 0.2106,
      "step": 8750
    },
    {
      "epoch": 1.4794889038332213,
      "grad_norm": 0.9194216132164001,
      "learning_rate": 9.462461957840885e-07,
      "loss": 0.2043,
      "step": 8800
    },
    {
      "epoch": 1.4878950907868191,
      "grad_norm": 0.9571210145950317,
      "learning_rate": 9.453126458671745e-07,
      "loss": 0.2002,
      "step": 8850
    },
    {
      "epoch": 1.4929388029589779,
      "eval_accuracy": 0.9861003861003861,
      "eval_f1_macro": 0.985912214812837,
      "eval_loss": 0.18007032573223114,
      "eval_precision": 0.9869196757553427,
      "eval_recall": 0.9850439724663349,
      "eval_runtime": 42.4212,
      "eval_samples_per_second": 91.582,
      "eval_steps_per_second": 11.457,
      "step": 8880
    },
    {
      "epoch": 1.496301277740417,
      "grad_norm": 0.7686983346939087,
      "learning_rate": 9.443790959502604e-07,
      "loss": 0.1963,
      "step": 8900
    },
    {
      "epoch": 1.5047074646940148,
      "grad_norm": 0.8870165348052979,
      "learning_rate": 9.434455460333464e-07,
      "loss": 0.1893,
      "step": 8950
    },
    {
      "epoch": 1.5131136516476127,
      "grad_norm": 0.8588102459907532,
      "learning_rate": 9.425119961164323e-07,
      "loss": 0.1831,
      "step": 9000
    },
    {
      "epoch": 1.5215198386012105,
      "grad_norm": 1.1222652196884155,
      "learning_rate": 9.415784461995182e-07,
      "loss": 0.1823,
      "step": 9050
    },
    {
      "epoch": 1.5299260255548084,
      "grad_norm": 1.2664000988006592,
      "learning_rate": 9.406448962826042e-07,
      "loss": 0.1858,
      "step": 9100
    },
    {
      "epoch": 1.5383322125084062,
      "grad_norm": 0.8765432834625244,
      "learning_rate": 9.397113463656901e-07,
      "loss": 0.1734,
      "step": 9150
    },
    {
      "epoch": 1.546738399462004,
      "grad_norm": 0.9564241170883179,
      "learning_rate": 9.387777964487761e-07,
      "loss": 0.162,
      "step": 9200
    },
    {
      "epoch": 1.555144586415602,
      "grad_norm": 0.8159739971160889,
      "learning_rate": 9.37844246531862e-07,
      "loss": 0.1555,
      "step": 9250
    },
    {
      "epoch": 1.5635507733691996,
      "grad_norm": 1.0021076202392578,
      "learning_rate": 9.36910696614948e-07,
      "loss": 0.1663,
      "step": 9300
    },
    {
      "epoch": 1.5719569603227974,
      "grad_norm": 1.0942935943603516,
      "learning_rate": 9.359771466980338e-07,
      "loss": 0.1516,
      "step": 9350
    },
    {
      "epoch": 1.5803631472763953,
      "grad_norm": 0.9106407761573792,
      "learning_rate": 9.350435967811199e-07,
      "loss": 0.1462,
      "step": 9400
    },
    {
      "epoch": 1.5887693342299931,
      "grad_norm": 1.2029844522476196,
      "learning_rate": 9.341100468642058e-07,
      "loss": 0.1334,
      "step": 9450
    },
    {
      "epoch": 1.597175521183591,
      "grad_norm": 1.1830437183380127,
      "learning_rate": 9.331764969472917e-07,
      "loss": 0.1511,
      "step": 9500
    },
    {
      "epoch": 1.6055817081371888,
      "grad_norm": 1.024003267288208,
      "learning_rate": 9.322429470303777e-07,
      "loss": 0.137,
      "step": 9550
    },
    {
      "epoch": 1.6139878950907867,
      "grad_norm": 0.6936477422714233,
      "learning_rate": 9.313093971134635e-07,
      "loss": 0.1309,
      "step": 9600
    },
    {
      "epoch": 1.6223940820443845,
      "grad_norm": 0.9332903027534485,
      "learning_rate": 9.303758471965496e-07,
      "loss": 0.1332,
      "step": 9650
    },
    {
      "epoch": 1.6308002689979824,
      "grad_norm": 0.7311134338378906,
      "learning_rate": 9.294422972796355e-07,
      "loss": 0.1273,
      "step": 9700
    },
    {
      "epoch": 1.6392064559515802,
      "grad_norm": 0.897352397441864,
      "learning_rate": 9.285087473627215e-07,
      "loss": 0.1277,
      "step": 9750
    },
    {
      "epoch": 1.647612642905178,
      "grad_norm": 0.9234735369682312,
      "learning_rate": 9.275751974458074e-07,
      "loss": 0.1285,
      "step": 9800
    },
    {
      "epoch": 1.656018829858776,
      "grad_norm": 1.1619858741760254,
      "learning_rate": 9.266416475288933e-07,
      "loss": 0.1311,
      "step": 9850
    },
    {
      "epoch": 1.6644250168123738,
      "grad_norm": 0.6384488940238953,
      "learning_rate": 9.257080976119793e-07,
      "loss": 0.1165,
      "step": 9900
    },
    {
      "epoch": 1.6728312037659716,
      "grad_norm": 0.5864728689193726,
      "learning_rate": 9.247745476950652e-07,
      "loss": 0.1023,
      "step": 9950
    },
    {
      "epoch": 1.6812373907195695,
      "grad_norm": 1.1280192136764526,
      "learning_rate": 9.238409977781512e-07,
      "loss": 0.11,
      "step": 10000
    },
    {
      "epoch": 1.6896435776731673,
      "grad_norm": 0.5544498562812805,
      "learning_rate": 9.229074478612371e-07,
      "loss": 0.1006,
      "step": 10050
    },
    {
      "epoch": 1.6980497646267652,
      "grad_norm": 1.138969898223877,
      "learning_rate": 9.21973897944323e-07,
      "loss": 0.0998,
      "step": 10100
    },
    {
      "epoch": 1.706455951580363,
      "grad_norm": 0.6470067501068115,
      "learning_rate": 9.210403480274089e-07,
      "loss": 0.0967,
      "step": 10150
    },
    {
      "epoch": 1.7148621385339609,
      "grad_norm": 0.8513116240501404,
      "learning_rate": 9.20106798110495e-07,
      "loss": 0.0955,
      "step": 10200
    },
    {
      "epoch": 1.7232683254875587,
      "grad_norm": 1.3393694162368774,
      "learning_rate": 9.191732481935809e-07,
      "loss": 0.0884,
      "step": 10250
    },
    {
      "epoch": 1.7316745124411566,
      "grad_norm": 0.6969456076622009,
      "learning_rate": 9.182396982766669e-07,
      "loss": 0.1072,
      "step": 10300
    },
    {
      "epoch": 1.7400806993947544,
      "grad_norm": 0.5059545636177063,
      "learning_rate": 9.173061483597527e-07,
      "loss": 0.092,
      "step": 10350
    },
    {
      "epoch": 1.7484868863483523,
      "grad_norm": 0.7818930745124817,
      "learning_rate": 9.163725984428386e-07,
      "loss": 0.0852,
      "step": 10400
    },
    {
      "epoch": 1.75689307330195,
      "grad_norm": 0.7932471632957458,
      "learning_rate": 9.154390485259247e-07,
      "loss": 0.0774,
      "step": 10450
    },
    {
      "epoch": 1.765299260255548,
      "grad_norm": 0.9375746846199036,
      "learning_rate": 9.145054986090106e-07,
      "loss": 0.0913,
      "step": 10500
    },
    {
      "epoch": 1.7737054472091458,
      "grad_norm": 1.0718626976013184,
      "learning_rate": 9.135719486920966e-07,
      "loss": 0.0749,
      "step": 10550
    },
    {
      "epoch": 1.7821116341627437,
      "grad_norm": 1.1264452934265137,
      "learning_rate": 9.126383987751824e-07,
      "loss": 0.08,
      "step": 10600
    },
    {
      "epoch": 1.7905178211163415,
      "grad_norm": 0.40607750415802,
      "learning_rate": 9.117048488582684e-07,
      "loss": 0.0802,
      "step": 10650
    },
    {
      "epoch": 1.7989240080699394,
      "grad_norm": 0.9266816973686218,
      "learning_rate": 9.107712989413544e-07,
      "loss": 0.0702,
      "step": 10700
    },
    {
      "epoch": 1.8073301950235372,
      "grad_norm": 0.9419146180152893,
      "learning_rate": 9.098377490244403e-07,
      "loss": 0.0783,
      "step": 10750
    },
    {
      "epoch": 1.815736381977135,
      "grad_norm": 0.6174508333206177,
      "learning_rate": 9.089041991075263e-07,
      "loss": 0.0647,
      "step": 10800
    },
    {
      "epoch": 1.824142568930733,
      "grad_norm": 0.5224350690841675,
      "learning_rate": 9.079706491906121e-07,
      "loss": 0.0778,
      "step": 10850
    },
    {
      "epoch": 1.8325487558843307,
      "grad_norm": 0.6590968370437622,
      "learning_rate": 9.070370992736981e-07,
      "loss": 0.084,
      "step": 10900
    },
    {
      "epoch": 1.8409549428379286,
      "grad_norm": 0.5679555535316467,
      "learning_rate": 9.06103549356784e-07,
      "loss": 0.0684,
      "step": 10950
    },
    {
      "epoch": 1.8493611297915264,
      "grad_norm": 0.3743615746498108,
      "learning_rate": 9.051699994398701e-07,
      "loss": 0.0696,
      "step": 11000
    },
    {
      "epoch": 1.8577673167451243,
      "grad_norm": 0.7687448263168335,
      "learning_rate": 9.04236449522956e-07,
      "loss": 0.0666,
      "step": 11050
    },
    {
      "epoch": 1.8661735036987221,
      "grad_norm": 0.38414329290390015,
      "learning_rate": 9.033028996060419e-07,
      "loss": 0.066,
      "step": 11100
    },
    {
      "epoch": 1.87457969065232,
      "grad_norm": 0.33975300192832947,
      "learning_rate": 9.023693496891278e-07,
      "loss": 0.0727,
      "step": 11150
    },
    {
      "epoch": 1.8829858776059178,
      "grad_norm": 0.3808116018772125,
      "learning_rate": 9.014357997722137e-07,
      "loss": 0.0765,
      "step": 11200
    },
    {
      "epoch": 1.8913920645595157,
      "grad_norm": 0.3306853175163269,
      "learning_rate": 9.005022498552998e-07,
      "loss": 0.067,
      "step": 11250
    },
    {
      "epoch": 1.8997982515131135,
      "grad_norm": 0.5410125851631165,
      "learning_rate": 8.995686999383857e-07,
      "loss": 0.0642,
      "step": 11300
    },
    {
      "epoch": 1.9082044384667114,
      "grad_norm": 1.0305054187774658,
      "learning_rate": 8.986351500214716e-07,
      "loss": 0.05,
      "step": 11350
    },
    {
      "epoch": 1.9166106254203092,
      "grad_norm": 1.6848655939102173,
      "learning_rate": 8.977016001045575e-07,
      "loss": 0.0551,
      "step": 11400
    },
    {
      "epoch": 1.925016812373907,
      "grad_norm": 0.6348687410354614,
      "learning_rate": 8.967680501876435e-07,
      "loss": 0.0561,
      "step": 11450
    },
    {
      "epoch": 1.933422999327505,
      "grad_norm": 0.3955049216747284,
      "learning_rate": 8.958345002707295e-07,
      "loss": 0.0606,
      "step": 11500
    },
    {
      "epoch": 1.9418291862811028,
      "grad_norm": 1.238265872001648,
      "learning_rate": 8.949009503538155e-07,
      "loss": 0.0568,
      "step": 11550
    },
    {
      "epoch": 1.9502353732347006,
      "grad_norm": 1.927749752998352,
      "learning_rate": 8.939674004369013e-07,
      "loss": 0.0641,
      "step": 11600
    },
    {
      "epoch": 1.9586415601882985,
      "grad_norm": 0.2765219509601593,
      "learning_rate": 8.930338505199872e-07,
      "loss": 0.0613,
      "step": 11650
    },
    {
      "epoch": 1.9670477471418963,
      "grad_norm": 1.0269273519515991,
      "learning_rate": 8.921003006030732e-07,
      "loss": 0.0641,
      "step": 11700
    },
    {
      "epoch": 1.9754539340954942,
      "grad_norm": 0.43984729051589966,
      "learning_rate": 8.911667506861591e-07,
      "loss": 0.0605,
      "step": 11750
    },
    {
      "epoch": 1.983860121049092,
      "grad_norm": 0.6223533153533936,
      "learning_rate": 8.902332007692452e-07,
      "loss": 0.053,
      "step": 11800
    },
    {
      "epoch": 1.9905850706119703,
      "eval_accuracy": 0.9889317889317889,
      "eval_f1_macro": 0.9887785465935663,
      "eval_loss": 0.04083893820643425,
      "eval_precision": 0.989975319388231,
      "eval_recall": 0.9877686245554091,
      "eval_runtime": 43.4219,
      "eval_samples_per_second": 89.471,
      "eval_steps_per_second": 11.193,
      "step": 11840
    },
    {
      "epoch": 1.99226630800269,
      "grad_norm": 0.1801561415195465,
      "learning_rate": 8.89299650852331e-07,
      "loss": 0.0457,
      "step": 11850
    },
    {
      "epoch": 2.0006724949562877,
      "grad_norm": 1.5527310371398926,
      "learning_rate": 8.88366100935417e-07,
      "loss": 0.0628,
      "step": 11900
    },
    {
      "epoch": 2.0090786819098856,
      "grad_norm": 1.5352983474731445,
      "learning_rate": 8.874325510185029e-07,
      "loss": 0.0528,
      "step": 11950
    },
    {
      "epoch": 2.0174848688634834,
      "grad_norm": 2.4047458171844482,
      "learning_rate": 8.864990011015888e-07,
      "loss": 0.0597,
      "step": 12000
    },
    {
      "epoch": 2.0258910558170813,
      "grad_norm": 0.8849202990531921,
      "learning_rate": 8.855654511846749e-07,
      "loss": 0.0524,
      "step": 12050
    },
    {
      "epoch": 2.034297242770679,
      "grad_norm": 0.38976189494132996,
      "learning_rate": 8.846319012677607e-07,
      "loss": 0.0406,
      "step": 12100
    },
    {
      "epoch": 2.042703429724277,
      "grad_norm": 0.31227657198905945,
      "learning_rate": 8.836983513508467e-07,
      "loss": 0.0467,
      "step": 12150
    },
    {
      "epoch": 2.051109616677875,
      "grad_norm": 1.8127578496932983,
      "learning_rate": 8.827648014339326e-07,
      "loss": 0.0689,
      "step": 12200
    },
    {
      "epoch": 2.0595158036314727,
      "grad_norm": 0.23175351321697235,
      "learning_rate": 8.818312515170186e-07,
      "loss": 0.0423,
      "step": 12250
    },
    {
      "epoch": 2.0679219905850705,
      "grad_norm": 1.7162238359451294,
      "learning_rate": 8.808977016001046e-07,
      "loss": 0.0372,
      "step": 12300
    },
    {
      "epoch": 2.0763281775386684,
      "grad_norm": 0.2673948407173157,
      "learning_rate": 8.799641516831905e-07,
      "loss": 0.0425,
      "step": 12350
    },
    {
      "epoch": 2.0847343644922662,
      "grad_norm": 0.3450436294078827,
      "learning_rate": 8.790306017662764e-07,
      "loss": 0.0517,
      "step": 12400
    },
    {
      "epoch": 2.093140551445864,
      "grad_norm": 0.45672616362571716,
      "learning_rate": 8.780970518493623e-07,
      "loss": 0.0582,
      "step": 12450
    },
    {
      "epoch": 2.101546738399462,
      "grad_norm": 0.250683069229126,
      "learning_rate": 8.771635019324483e-07,
      "loss": 0.034,
      "step": 12500
    },
    {
      "epoch": 2.10995292535306,
      "grad_norm": 0.5667222142219543,
      "learning_rate": 8.762299520155342e-07,
      "loss": 0.0377,
      "step": 12550
    },
    {
      "epoch": 2.1183591123066576,
      "grad_norm": 0.15519775450229645,
      "learning_rate": 8.752964020986202e-07,
      "loss": 0.0532,
      "step": 12600
    },
    {
      "epoch": 2.1267652992602555,
      "grad_norm": 0.5342125296592712,
      "learning_rate": 8.743628521817061e-07,
      "loss": 0.0338,
      "step": 12650
    },
    {
      "epoch": 2.1351714862138533,
      "grad_norm": 0.21783240139484406,
      "learning_rate": 8.734293022647921e-07,
      "loss": 0.0524,
      "step": 12700
    },
    {
      "epoch": 2.143577673167451,
      "grad_norm": 0.16794736683368683,
      "learning_rate": 8.72495752347878e-07,
      "loss": 0.0625,
      "step": 12750
    },
    {
      "epoch": 2.151983860121049,
      "grad_norm": 0.715691089630127,
      "learning_rate": 8.715622024309638e-07,
      "loss": 0.0434,
      "step": 12800
    },
    {
      "epoch": 2.160390047074647,
      "grad_norm": 1.9928004741668701,
      "learning_rate": 8.706286525140499e-07,
      "loss": 0.0598,
      "step": 12850
    },
    {
      "epoch": 2.1687962340282447,
      "grad_norm": 1.6334397792816162,
      "learning_rate": 8.696951025971358e-07,
      "loss": 0.0445,
      "step": 12900
    },
    {
      "epoch": 2.1772024209818426,
      "grad_norm": 0.14497284591197968,
      "learning_rate": 8.687615526802218e-07,
      "loss": 0.0468,
      "step": 12950
    },
    {
      "epoch": 2.1856086079354404,
      "grad_norm": 1.2945809364318848,
      "learning_rate": 8.678280027633077e-07,
      "loss": 0.0367,
      "step": 13000
    },
    {
      "epoch": 2.1940147948890383,
      "grad_norm": 0.4334909915924072,
      "learning_rate": 8.668944528463937e-07,
      "loss": 0.0467,
      "step": 13050
    },
    {
      "epoch": 2.202420981842636,
      "grad_norm": 1.6940721273422241,
      "learning_rate": 8.659609029294796e-07,
      "loss": 0.0569,
      "step": 13100
    },
    {
      "epoch": 2.210827168796234,
      "grad_norm": 0.13102596998214722,
      "learning_rate": 8.650273530125656e-07,
      "loss": 0.0453,
      "step": 13150
    },
    {
      "epoch": 2.219233355749832,
      "grad_norm": 0.4492725729942322,
      "learning_rate": 8.640938030956515e-07,
      "loss": 0.0508,
      "step": 13200
    },
    {
      "epoch": 2.2276395427034297,
      "grad_norm": 1.6728957891464233,
      "learning_rate": 8.631602531787374e-07,
      "loss": 0.0433,
      "step": 13250
    },
    {
      "epoch": 2.2360457296570275,
      "grad_norm": 0.1410200595855713,
      "learning_rate": 8.622267032618234e-07,
      "loss": 0.0457,
      "step": 13300
    },
    {
      "epoch": 2.2444519166106254,
      "grad_norm": 0.4576292634010315,
      "learning_rate": 8.612931533449092e-07,
      "loss": 0.0604,
      "step": 13350
    },
    {
      "epoch": 2.2528581035642232,
      "grad_norm": 2.419782876968384,
      "learning_rate": 8.603596034279953e-07,
      "loss": 0.0373,
      "step": 13400
    },
    {
      "epoch": 2.261264290517821,
      "grad_norm": 0.6388083696365356,
      "learning_rate": 8.594260535110812e-07,
      "loss": 0.051,
      "step": 13450
    },
    {
      "epoch": 2.269670477471419,
      "grad_norm": 1.2558374404907227,
      "learning_rate": 8.584925035941672e-07,
      "loss": 0.0382,
      "step": 13500
    },
    {
      "epoch": 2.2780766644250168,
      "grad_norm": 0.6086406707763672,
      "learning_rate": 8.57558953677253e-07,
      "loss": 0.0486,
      "step": 13550
    },
    {
      "epoch": 2.2864828513786146,
      "grad_norm": 1.564790964126587,
      "learning_rate": 8.56625403760339e-07,
      "loss": 0.0343,
      "step": 13600
    },
    {
      "epoch": 2.2948890383322125,
      "grad_norm": 0.11007804423570633,
      "learning_rate": 8.55691853843425e-07,
      "loss": 0.0488,
      "step": 13650
    },
    {
      "epoch": 2.3032952252858103,
      "grad_norm": 0.0976046770811081,
      "learning_rate": 8.547583039265109e-07,
      "loss": 0.0364,
      "step": 13700
    },
    {
      "epoch": 2.311701412239408,
      "grad_norm": 0.08952467888593674,
      "learning_rate": 8.538247540095969e-07,
      "loss": 0.0334,
      "step": 13750
    },
    {
      "epoch": 2.320107599193006,
      "grad_norm": 0.25883930921554565,
      "learning_rate": 8.528912040926828e-07,
      "loss": 0.0483,
      "step": 13800
    },
    {
      "epoch": 2.328513786146604,
      "grad_norm": 0.1525714248418808,
      "learning_rate": 8.519576541757687e-07,
      "loss": 0.0432,
      "step": 13850
    },
    {
      "epoch": 2.3369199731002017,
      "grad_norm": 0.18361321091651917,
      "learning_rate": 8.510241042588547e-07,
      "loss": 0.049,
      "step": 13900
    },
    {
      "epoch": 2.3453261600537996,
      "grad_norm": 0.21892070770263672,
      "learning_rate": 8.500905543419407e-07,
      "loss": 0.0312,
      "step": 13950
    },
    {
      "epoch": 2.3537323470073974,
      "grad_norm": 3.5728206634521484,
      "learning_rate": 8.491570044250266e-07,
      "loss": 0.039,
      "step": 14000
    },
    {
      "epoch": 2.3621385339609953,
      "grad_norm": 0.6285228729248047,
      "learning_rate": 8.482234545081125e-07,
      "loss": 0.0432,
      "step": 14050
    },
    {
      "epoch": 2.370544720914593,
      "grad_norm": 0.3574727475643158,
      "learning_rate": 8.472899045911984e-07,
      "loss": 0.0464,
      "step": 14100
    },
    {
      "epoch": 2.378950907868191,
      "grad_norm": 0.6059629321098328,
      "learning_rate": 8.463563546742843e-07,
      "loss": 0.0466,
      "step": 14150
    },
    {
      "epoch": 2.387357094821789,
      "grad_norm": 0.6901473999023438,
      "learning_rate": 8.454228047573704e-07,
      "loss": 0.052,
      "step": 14200
    },
    {
      "epoch": 2.3957632817753867,
      "grad_norm": 0.47388774156570435,
      "learning_rate": 8.444892548404563e-07,
      "loss": 0.0329,
      "step": 14250
    },
    {
      "epoch": 2.4041694687289845,
      "grad_norm": 0.12275730073451996,
      "learning_rate": 8.435557049235423e-07,
      "loss": 0.0395,
      "step": 14300
    },
    {
      "epoch": 2.4125756556825824,
      "grad_norm": 0.14599856734275818,
      "learning_rate": 8.426221550066281e-07,
      "loss": 0.0402,
      "step": 14350
    },
    {
      "epoch": 2.42098184263618,
      "grad_norm": 2.370673179626465,
      "learning_rate": 8.416886050897141e-07,
      "loss": 0.0596,
      "step": 14400
    },
    {
      "epoch": 2.429388029589778,
      "grad_norm": 1.1168391704559326,
      "learning_rate": 8.407550551728001e-07,
      "loss": 0.0439,
      "step": 14450
    },
    {
      "epoch": 2.437794216543376,
      "grad_norm": 1.3855054378509521,
      "learning_rate": 8.39821505255886e-07,
      "loss": 0.0477,
      "step": 14500
    },
    {
      "epoch": 2.4462004034969738,
      "grad_norm": 0.08173301815986633,
      "learning_rate": 8.38887955338972e-07,
      "loss": 0.0467,
      "step": 14550
    },
    {
      "epoch": 2.4546065904505716,
      "grad_norm": 1.462754487991333,
      "learning_rate": 8.379544054220578e-07,
      "loss": 0.0466,
      "step": 14600
    },
    {
      "epoch": 2.4630127774041695,
      "grad_norm": 0.187517911195755,
      "learning_rate": 8.370208555051438e-07,
      "loss": 0.0539,
      "step": 14650
    },
    {
      "epoch": 2.4714189643577673,
      "grad_norm": 0.17454290390014648,
      "learning_rate": 8.360873055882298e-07,
      "loss": 0.0375,
      "step": 14700
    },
    {
      "epoch": 2.479825151311365,
      "grad_norm": 2.0886340141296387,
      "learning_rate": 8.351537556713158e-07,
      "loss": 0.0486,
      "step": 14750
    },
    {
      "epoch": 2.488231338264963,
      "grad_norm": 1.7701690196990967,
      "learning_rate": 8.342202057544017e-07,
      "loss": 0.0395,
      "step": 14800
    },
    {
      "epoch": 2.488231338264963,
      "eval_accuracy": 0.9904761904761905,
      "eval_f1_macro": 0.9903547100746427,
      "eval_loss": 0.027853745967149734,
      "eval_precision": 0.9909244803912348,
      "eval_recall": 0.9898341320283959,
      "eval_runtime": 51.6999,
      "eval_samples_per_second": 75.145,
      "eval_steps_per_second": 9.4,
      "step": 14800
    },
    {
      "epoch": 2.496637525218561,
      "grad_norm": 0.22243212163448334,
      "learning_rate": 8.332866558374876e-07,
      "loss": 0.046,
      "step": 14850
    },
    {
      "epoch": 2.5050437121721587,
      "grad_norm": 0.09879665821790695,
      "learning_rate": 8.323531059205735e-07,
      "loss": 0.0334,
      "step": 14900
    },
    {
      "epoch": 2.5134498991257566,
      "grad_norm": 0.11353790014982224,
      "learning_rate": 8.314195560036594e-07,
      "loss": 0.0349,
      "step": 14950
    },
    {
      "epoch": 2.5218560860793544,
      "grad_norm": 0.0835421159863472,
      "learning_rate": 8.304860060867455e-07,
      "loss": 0.0373,
      "step": 15000
    },
    {
      "epoch": 2.5302622730329523,
      "grad_norm": 1.0338134765625,
      "learning_rate": 8.295524561698314e-07,
      "loss": 0.0508,
      "step": 15050
    },
    {
      "epoch": 2.53866845998655,
      "grad_norm": 0.06706652790307999,
      "learning_rate": 8.286189062529173e-07,
      "loss": 0.0413,
      "step": 15100
    },
    {
      "epoch": 2.547074646940148,
      "grad_norm": 1.6149780750274658,
      "learning_rate": 8.276853563360032e-07,
      "loss": 0.0496,
      "step": 15150
    },
    {
      "epoch": 2.555480833893746,
      "grad_norm": 1.550213098526001,
      "learning_rate": 8.267518064190892e-07,
      "loss": 0.0402,
      "step": 15200
    },
    {
      "epoch": 2.5638870208473437,
      "grad_norm": 0.07133010029792786,
      "learning_rate": 8.258182565021752e-07,
      "loss": 0.0445,
      "step": 15250
    },
    {
      "epoch": 2.5722932078009415,
      "grad_norm": 0.6747908592224121,
      "learning_rate": 8.24884706585261e-07,
      "loss": 0.0294,
      "step": 15300
    },
    {
      "epoch": 2.5806993947545394,
      "grad_norm": 0.6118685007095337,
      "learning_rate": 8.23951156668347e-07,
      "loss": 0.0483,
      "step": 15350
    },
    {
      "epoch": 2.589105581708137,
      "grad_norm": 0.09292344748973846,
      "learning_rate": 8.230176067514329e-07,
      "loss": 0.0438,
      "step": 15400
    },
    {
      "epoch": 2.597511768661735,
      "grad_norm": 0.11092889308929443,
      "learning_rate": 8.220840568345189e-07,
      "loss": 0.0258,
      "step": 15450
    },
    {
      "epoch": 2.605917955615333,
      "grad_norm": 0.08764071017503738,
      "learning_rate": 8.211505069176049e-07,
      "loss": 0.0413,
      "step": 15500
    },
    {
      "epoch": 2.6143241425689308,
      "grad_norm": 2.178694248199463,
      "learning_rate": 8.202169570006909e-07,
      "loss": 0.0451,
      "step": 15550
    },
    {
      "epoch": 2.6227303295225286,
      "grad_norm": 0.3444342315196991,
      "learning_rate": 8.192834070837767e-07,
      "loss": 0.0367,
      "step": 15600
    },
    {
      "epoch": 2.6311365164761265,
      "grad_norm": 0.07718443125486374,
      "learning_rate": 8.183498571668627e-07,
      "loss": 0.0448,
      "step": 15650
    },
    {
      "epoch": 2.6395427034297243,
      "grad_norm": 0.07826303690671921,
      "learning_rate": 8.174163072499486e-07,
      "loss": 0.032,
      "step": 15700
    },
    {
      "epoch": 2.647948890383322,
      "grad_norm": 0.1438082605600357,
      "learning_rate": 8.164827573330345e-07,
      "loss": 0.0327,
      "step": 15750
    },
    {
      "epoch": 2.65635507733692,
      "grad_norm": 0.10826120525598526,
      "learning_rate": 8.155492074161206e-07,
      "loss": 0.0353,
      "step": 15800
    },
    {
      "epoch": 2.664761264290518,
      "grad_norm": 0.1938948631286621,
      "learning_rate": 8.146156574992064e-07,
      "loss": 0.043,
      "step": 15850
    },
    {
      "epoch": 2.6731674512441157,
      "grad_norm": 0.17752999067306519,
      "learning_rate": 8.136821075822924e-07,
      "loss": 0.0353,
      "step": 15900
    },
    {
      "epoch": 2.6815736381977135,
      "grad_norm": 0.35069605708122253,
      "learning_rate": 8.127485576653783e-07,
      "loss": 0.0319,
      "step": 15950
    },
    {
      "epoch": 2.6899798251513114,
      "grad_norm": 2.336815357208252,
      "learning_rate": 8.118150077484643e-07,
      "loss": 0.0469,
      "step": 16000
    },
    {
      "epoch": 2.6983860121049092,
      "grad_norm": 0.4466639757156372,
      "learning_rate": 8.108814578315503e-07,
      "loss": 0.0181,
      "step": 16050
    },
    {
      "epoch": 2.706792199058507,
      "grad_norm": 0.8735927939414978,
      "learning_rate": 8.099479079146362e-07,
      "loss": 0.0483,
      "step": 16100
    },
    {
      "epoch": 2.715198386012105,
      "grad_norm": 0.12205011397600174,
      "learning_rate": 8.090143579977221e-07,
      "loss": 0.0369,
      "step": 16150
    },
    {
      "epoch": 2.723604572965703,
      "grad_norm": 0.057348959147930145,
      "learning_rate": 8.08080808080808e-07,
      "loss": 0.032,
      "step": 16200
    },
    {
      "epoch": 2.7320107599193006,
      "grad_norm": 0.13915815949440002,
      "learning_rate": 8.07147258163894e-07,
      "loss": 0.0548,
      "step": 16250
    },
    {
      "epoch": 2.7404169468728985,
      "grad_norm": 0.1121092364192009,
      "learning_rate": 8.0621370824698e-07,
      "loss": 0.0478,
      "step": 16300
    },
    {
      "epoch": 2.7488231338264963,
      "grad_norm": 0.6932018995285034,
      "learning_rate": 8.052801583300659e-07,
      "loss": 0.0235,
      "step": 16350
    },
    {
      "epoch": 2.757229320780094,
      "grad_norm": 0.2540469765663147,
      "learning_rate": 8.043466084131518e-07,
      "loss": 0.0343,
      "step": 16400
    },
    {
      "epoch": 2.765635507733692,
      "grad_norm": 0.09147974848747253,
      "learning_rate": 8.034130584962378e-07,
      "loss": 0.0294,
      "step": 16450
    },
    {
      "epoch": 2.77404169468729,
      "grad_norm": 0.055779941380023956,
      "learning_rate": 8.024795085793237e-07,
      "loss": 0.0306,
      "step": 16500
    },
    {
      "epoch": 2.7824478816408877,
      "grad_norm": 0.17213129997253418,
      "learning_rate": 8.015459586624095e-07,
      "loss": 0.0403,
      "step": 16550
    },
    {
      "epoch": 2.7908540685944856,
      "grad_norm": 0.25217875838279724,
      "learning_rate": 8.006124087454956e-07,
      "loss": 0.0427,
      "step": 16600
    },
    {
      "epoch": 2.7992602555480834,
      "grad_norm": 0.2783985137939453,
      "learning_rate": 7.996788588285815e-07,
      "loss": 0.0348,
      "step": 16650
    },
    {
      "epoch": 2.8076664425016813,
      "grad_norm": 2.88033390045166,
      "learning_rate": 7.987453089116675e-07,
      "loss": 0.0361,
      "step": 16700
    },
    {
      "epoch": 2.816072629455279,
      "grad_norm": 0.35371074080467224,
      "learning_rate": 7.978117589947534e-07,
      "loss": 0.0426,
      "step": 16750
    },
    {
      "epoch": 2.824478816408877,
      "grad_norm": 0.18328945338726044,
      "learning_rate": 7.968782090778394e-07,
      "loss": 0.0187,
      "step": 16800
    },
    {
      "epoch": 2.832885003362475,
      "grad_norm": 0.04678039625287056,
      "learning_rate": 7.959446591609253e-07,
      "loss": 0.027,
      "step": 16850
    },
    {
      "epoch": 2.8412911903160727,
      "grad_norm": 0.12014785408973694,
      "learning_rate": 7.950111092440113e-07,
      "loss": 0.0312,
      "step": 16900
    },
    {
      "epoch": 2.8496973772696705,
      "grad_norm": 0.2731820046901703,
      "learning_rate": 7.940775593270972e-07,
      "loss": 0.0215,
      "step": 16950
    },
    {
      "epoch": 2.8581035642232684,
      "grad_norm": 0.047622449696063995,
      "learning_rate": 7.931440094101831e-07,
      "loss": 0.0367,
      "step": 17000
    },
    {
      "epoch": 2.8665097511768662,
      "grad_norm": 0.49464407563209534,
      "learning_rate": 7.92210459493269e-07,
      "loss": 0.0299,
      "step": 17050
    },
    {
      "epoch": 2.874915938130464,
      "grad_norm": 1.0074212551116943,
      "learning_rate": 7.91276909576355e-07,
      "loss": 0.0309,
      "step": 17100
    },
    {
      "epoch": 2.883322125084062,
      "grad_norm": 3.322622776031494,
      "learning_rate": 7.90343359659441e-07,
      "loss": 0.0387,
      "step": 17150
    },
    {
      "epoch": 2.89172831203766,
      "grad_norm": 0.40377694368362427,
      "learning_rate": 7.894098097425269e-07,
      "loss": 0.0278,
      "step": 17200
    },
    {
      "epoch": 2.9001344989912576,
      "grad_norm": 0.057058703154325485,
      "learning_rate": 7.884762598256129e-07,
      "loss": 0.0322,
      "step": 17250
    },
    {
      "epoch": 2.9085406859448555,
      "grad_norm": 0.05330061540007591,
      "learning_rate": 7.875427099086987e-07,
      "loss": 0.0306,
      "step": 17300
    },
    {
      "epoch": 2.9169468728984533,
      "grad_norm": 0.21193920075893402,
      "learning_rate": 7.866091599917846e-07,
      "loss": 0.0433,
      "step": 17350
    },
    {
      "epoch": 2.925353059852051,
      "grad_norm": 0.16045591235160828,
      "learning_rate": 7.856756100748707e-07,
      "loss": 0.0284,
      "step": 17400
    },
    {
      "epoch": 2.933759246805649,
      "grad_norm": 0.6420087814331055,
      "learning_rate": 7.847420601579566e-07,
      "loss": 0.0209,
      "step": 17450
    },
    {
      "epoch": 2.942165433759247,
      "grad_norm": 0.0893048495054245,
      "learning_rate": 7.838085102410426e-07,
      "loss": 0.03,
      "step": 17500
    },
    {
      "epoch": 2.9505716207128447,
      "grad_norm": 0.07634163647890091,
      "learning_rate": 7.828749603241284e-07,
      "loss": 0.0392,
      "step": 17550
    },
    {
      "epoch": 2.9589778076664426,
      "grad_norm": 1.3213460445404053,
      "learning_rate": 7.819414104072144e-07,
      "loss": 0.0381,
      "step": 17600
    },
    {
      "epoch": 2.9673839946200404,
      "grad_norm": 1.400268793106079,
      "learning_rate": 7.810078604903004e-07,
      "loss": 0.021,
      "step": 17650
    },
    {
      "epoch": 2.9757901815736383,
      "grad_norm": 0.06816738098859787,
      "learning_rate": 7.800743105733864e-07,
      "loss": 0.0307,
      "step": 17700
    },
    {
      "epoch": 2.984196368527236,
      "grad_norm": 1.563502311706543,
      "learning_rate": 7.791407606564723e-07,
      "loss": 0.0225,
      "step": 17750
    },
    {
      "epoch": 2.9858776059179557,
      "eval_accuracy": 0.9904761904761905,
|
|
"eval_f1_macro": 0.9903524417560845,
|
|
"eval_loss": 0.02756033092737198,
|
|
"eval_precision": 0.9910546886131568,
|
|
"eval_recall": 0.9897223308061431,
|
|
"eval_runtime": 42.5014,
|
|
"eval_samples_per_second": 91.409,
|
|
"eval_steps_per_second": 11.435,
|
|
"step": 17760
|
|
},
|
|
{
|
|
"epoch": 2.992602555480834,
|
|
"grad_norm": 0.03866572305560112,
|
|
"learning_rate": 7.782072107395581e-07,
|
|
"loss": 0.0315,
|
|
"step": 17800
|
|
},
|
|
{
|
|
"epoch": 3.001008742434432,
|
|
"grad_norm": 0.14597171545028687,
|
|
"learning_rate": 7.772736608226441e-07,
|
|
"loss": 0.0337,
|
|
"step": 17850
|
|
},
|
|
{
|
|
"epoch": 3.0094149293880297,
|
|
"grad_norm": 0.047286901623010635,
|
|
"learning_rate": 7.7634011090573e-07,
|
|
"loss": 0.0345,
|
|
"step": 17900
|
|
},
|
|
{
|
|
"epoch": 3.0178211163416275,
|
|
"grad_norm": 0.0630001500248909,
|
|
"learning_rate": 7.754065609888161e-07,
|
|
"loss": 0.0369,
|
|
"step": 17950
|
|
},
|
|
{
|
|
"epoch": 3.0262273032952254,
|
|
"grad_norm": 0.1186026781797409,
|
|
"learning_rate": 7.74473011071902e-07,
|
|
"loss": 0.033,
|
|
"step": 18000
|
|
},
|
|
{
|
|
"epoch": 3.0346334902488232,
|
|
"grad_norm": 0.048053912818431854,
|
|
"learning_rate": 7.73539461154988e-07,
|
|
"loss": 0.025,
|
|
"step": 18050
|
|
},
|
|
{
|
|
"epoch": 3.043039677202421,
|
|
"grad_norm": 0.08836133778095245,
|
|
"learning_rate": 7.726059112380738e-07,
|
|
"loss": 0.0333,
|
|
"step": 18100
|
|
},
|
|
{
|
|
"epoch": 3.051445864156019,
|
|
"grad_norm": 0.12423136085271835,
|
|
"learning_rate": 7.716723613211598e-07,
|
|
"loss": 0.036,
|
|
"step": 18150
|
|
},
|
|
{
|
|
"epoch": 3.0598520511096168,
|
|
"grad_norm": 1.3503236770629883,
|
|
"learning_rate": 7.707388114042458e-07,
|
|
"loss": 0.032,
|
|
"step": 18200
|
|
},
|
|
{
|
|
"epoch": 3.0682582380632146,
|
|
"grad_norm": 1.4914406538009644,
|
|
"learning_rate": 7.698052614873317e-07,
|
|
"loss": 0.0194,
|
|
"step": 18250
|
|
},
|
|
{
|
|
"epoch": 3.0766644250168125,
|
|
"grad_norm": 0.1584930419921875,
|
|
"learning_rate": 7.688717115704177e-07,
|
|
"loss": 0.0352,
|
|
"step": 18300
|
|
},
|
|
{
|
|
"epoch": 3.0850706119704103,
|
|
"grad_norm": 4.718121528625488,
|
|
"learning_rate": 7.679381616535035e-07,
|
|
"loss": 0.0534,
|
|
"step": 18350
|
|
},
|
|
{
|
|
"epoch": 3.093476798924008,
|
|
"grad_norm": 0.11320281028747559,
|
|
"learning_rate": 7.670046117365895e-07,
|
|
"loss": 0.026,
|
|
"step": 18400
|
|
},
|
|
{
|
|
"epoch": 3.101882985877606,
|
|
"grad_norm": 4.923747539520264,
|
|
"learning_rate": 7.660710618196755e-07,
|
|
"loss": 0.0385,
|
|
"step": 18450
|
|
},
|
|
{
|
|
"epoch": 3.110289172831204,
|
|
"grad_norm": 0.5125885009765625,
|
|
"learning_rate": 7.651375119027615e-07,
|
|
"loss": 0.0333,
|
|
"step": 18500
|
|
},
|
|
{
|
|
"epoch": 3.1186953597848017,
|
|
"grad_norm": 1.254514455795288,
|
|
"learning_rate": 7.642039619858473e-07,
|
|
"loss": 0.0384,
|
|
"step": 18550
|
|
},
|
|
{
|
|
"epoch": 3.1271015467383996,
|
|
"grad_norm": 0.09596805274486542,
|
|
"learning_rate": 7.632704120689332e-07,
|
|
"loss": 0.0363,
|
|
"step": 18600
|
|
},
|
|
{
|
|
"epoch": 3.1355077336919974,
|
|
"grad_norm": 2.396228313446045,
|
|
"learning_rate": 7.623368621520192e-07,
|
|
"loss": 0.0274,
|
|
"step": 18650
|
|
},
|
|
{
|
|
"epoch": 3.1439139206455953,
|
|
"grad_norm": 0.04524122551083565,
|
|
"learning_rate": 7.614033122351051e-07,
|
|
"loss": 0.0374,
|
|
"step": 18700
|
|
},
|
|
{
|
|
"epoch": 3.152320107599193,
|
|
"grad_norm": 0.03596815839409828,
|
|
"learning_rate": 7.604697623181912e-07,
|
|
"loss": 0.0208,
|
|
"step": 18750
|
|
},
|
|
{
|
|
"epoch": 3.160726294552791,
|
|
"grad_norm": 0.04798245429992676,
|
|
"learning_rate": 7.59536212401277e-07,
|
|
"loss": 0.0426,
|
|
"step": 18800
|
|
},
|
|
{
|
|
"epoch": 3.169132481506389,
|
|
"grad_norm": 0.06811300665140152,
|
|
"learning_rate": 7.58602662484363e-07,
|
|
"loss": 0.0176,
|
|
"step": 18850
|
|
},
|
|
{
|
|
"epoch": 3.1775386684599867,
|
|
"grad_norm": 0.2457869052886963,
|
|
"learning_rate": 7.576691125674489e-07,
|
|
"loss": 0.0254,
|
|
"step": 18900
|
|
},
|
|
{
|
|
"epoch": 3.1859448554135845,
|
|
"grad_norm": 0.7892419695854187,
|
|
"learning_rate": 7.567355626505349e-07,
|
|
"loss": 0.0394,
|
|
"step": 18950
|
|
},
|
|
{
|
|
"epoch": 3.1943510423671824,
|
|
"grad_norm": 2.816732883453369,
|
|
"learning_rate": 7.558020127336209e-07,
|
|
"loss": 0.0394,
|
|
"step": 19000
|
|
},
|
|
{
|
|
"epoch": 3.20275722932078,
|
|
"grad_norm": 0.4110700786113739,
|
|
"learning_rate": 7.548684628167067e-07,
|
|
"loss": 0.0264,
|
|
"step": 19050
|
|
},
|
|
{
|
|
"epoch": 3.211163416274378,
|
|
"grad_norm": 1.199163794517517,
|
|
"learning_rate": 7.539349128997927e-07,
|
|
"loss": 0.0415,
|
|
"step": 19100
|
|
},
|
|
{
|
|
"epoch": 3.219569603227976,
|
|
"grad_norm": 0.17944256961345673,
|
|
"learning_rate": 7.530013629828786e-07,
|
|
"loss": 0.0236,
|
|
"step": 19150
|
|
},
|
|
{
|
|
"epoch": 3.2279757901815738,
|
|
"grad_norm": 2.5870165824890137,
|
|
"learning_rate": 7.520678130659646e-07,
|
|
"loss": 0.0253,
|
|
"step": 19200
|
|
},
|
|
{
|
|
"epoch": 3.2363819771351716,
|
|
"grad_norm": 1.1989127397537231,
|
|
"learning_rate": 7.511342631490506e-07,
|
|
"loss": 0.0339,
|
|
"step": 19250
|
|
},
|
|
{
|
|
"epoch": 3.2447881640887695,
|
|
"grad_norm": 2.1989920139312744,
|
|
"learning_rate": 7.502007132321366e-07,
|
|
"loss": 0.032,
|
|
"step": 19300
|
|
},
|
|
{
|
|
"epoch": 3.2531943510423673,
|
|
"grad_norm": 3.3766326904296875,
|
|
"learning_rate": 7.492671633152224e-07,
|
|
"loss": 0.0338,
|
|
"step": 19350
|
|
},
|
|
{
|
|
"epoch": 3.261600537995965,
|
|
"grad_norm": 2.154630184173584,
|
|
"learning_rate": 7.483336133983084e-07,
|
|
"loss": 0.0433,
|
|
"step": 19400
|
|
},
|
|
{
|
|
"epoch": 3.270006724949563,
|
|
"grad_norm": 0.17257541418075562,
|
|
"learning_rate": 7.474000634813943e-07,
|
|
"loss": 0.0241,
|
|
"step": 19450
|
|
},
|
|
{
|
|
"epoch": 3.278412911903161,
|
|
"grad_norm": 0.35731831192970276,
|
|
"learning_rate": 7.464665135644802e-07,
|
|
"loss": 0.0314,
|
|
"step": 19500
|
|
},
|
|
{
|
|
"epoch": 3.2868190988567587,
|
|
"grad_norm": 0.7590059041976929,
|
|
"learning_rate": 7.455329636475663e-07,
|
|
"loss": 0.0325,
|
|
"step": 19550
|
|
},
|
|
{
|
|
"epoch": 3.2952252858103566,
|
|
"grad_norm": 0.07474666833877563,
|
|
"learning_rate": 7.445994137306521e-07,
|
|
"loss": 0.0267,
|
|
"step": 19600
|
|
},
|
|
{
|
|
"epoch": 3.3036314727639544,
|
|
"grad_norm": 0.6764310002326965,
|
|
"learning_rate": 7.436658638137381e-07,
|
|
"loss": 0.0409,
|
|
"step": 19650
|
|
},
|
|
{
|
|
"epoch": 3.3120376597175523,
|
|
"grad_norm": 0.17073293030261993,
|
|
"learning_rate": 7.42732313896824e-07,
|
|
"loss": 0.0352,
|
|
"step": 19700
|
|
},
|
|
{
|
|
"epoch": 3.32044384667115,
|
|
"grad_norm": 0.4134461581707001,
|
|
"learning_rate": 7.4179876397991e-07,
|
|
"loss": 0.045,
|
|
"step": 19750
|
|
},
|
|
{
|
|
"epoch": 3.328850033624748,
|
|
"grad_norm": 0.05611399933695793,
|
|
"learning_rate": 7.40865214062996e-07,
|
|
"loss": 0.0395,
|
|
"step": 19800
|
|
},
|
|
{
|
|
"epoch": 3.337256220578346,
|
|
"grad_norm": 0.23517532646656036,
|
|
"learning_rate": 7.399316641460819e-07,
|
|
"loss": 0.032,
|
|
"step": 19850
|
|
},
|
|
{
|
|
"epoch": 3.3456624075319437,
|
|
"grad_norm": 1.1617560386657715,
|
|
"learning_rate": 7.389981142291678e-07,
|
|
"loss": 0.0255,
|
|
"step": 19900
|
|
},
|
|
{
|
|
"epoch": 3.3540685944855415,
|
|
"grad_norm": 1.7898571491241455,
|
|
"learning_rate": 7.380645643122537e-07,
|
|
"loss": 0.0396,
|
|
"step": 19950
|
|
},
|
|
{
|
|
"epoch": 3.3624747814391394,
|
|
"grad_norm": 0.30372563004493713,
|
|
"learning_rate": 7.371310143953397e-07,
|
|
"loss": 0.0273,
|
|
"step": 20000
|
|
},
|
|
{
|
|
"epoch": 3.370880968392737,
|
|
"grad_norm": 0.03882971778512001,
|
|
"learning_rate": 7.361974644784256e-07,
|
|
"loss": 0.0311,
|
|
"step": 20050
|
|
},
|
|
{
|
|
"epoch": 3.379287155346335,
|
|
"grad_norm": 0.027714738622307777,
|
|
"learning_rate": 7.352639145615116e-07,
|
|
"loss": 0.0233,
|
|
"step": 20100
|
|
},
|
|
{
|
|
"epoch": 3.387693342299933,
|
|
"grad_norm": 0.06837920099496841,
|
|
"learning_rate": 7.343303646445975e-07,
|
|
"loss": 0.0258,
|
|
"step": 20150
|
|
},
|
|
{
|
|
"epoch": 3.3960995292535308,
|
|
"grad_norm": 0.06077512726187706,
|
|
"learning_rate": 7.333968147276835e-07,
|
|
"loss": 0.0365,
|
|
"step": 20200
|
|
},
|
|
{
|
|
"epoch": 3.4045057162071286,
|
|
"grad_norm": 3.514616012573242,
|
|
"learning_rate": 7.324632648107694e-07,
|
|
"loss": 0.0353,
|
|
"step": 20250
|
|
},
|
|
{
|
|
"epoch": 3.4129119031607265,
|
|
"grad_norm": 0.13428473472595215,
|
|
"learning_rate": 7.315297148938552e-07,
|
|
"loss": 0.0283,
|
|
"step": 20300
|
|
},
|
|
{
|
|
"epoch": 3.4213180901143243,
|
|
"grad_norm": 0.05296952277421951,
|
|
"learning_rate": 7.305961649769413e-07,
|
|
"loss": 0.0328,
|
|
"step": 20350
|
|
},
|
|
{
|
|
"epoch": 3.429724277067922,
|
|
"grad_norm": 0.42664337158203125,
|
|
"learning_rate": 7.296626150600272e-07,
|
|
"loss": 0.0279,
|
|
"step": 20400
|
|
},
|
|
{
|
|
"epoch": 3.43813046402152,
|
|
"grad_norm": 0.5672262907028198,
|
|
"learning_rate": 7.287290651431132e-07,
|
|
"loss": 0.0333,
|
|
"step": 20450
|
|
},
|
|
{
|
|
"epoch": 3.446536650975118,
|
|
"grad_norm": 0.08966855704784393,
|
|
"learning_rate": 7.277955152261991e-07,
|
|
"loss": 0.0185,
|
|
"step": 20500
|
|
},
|
|
{
|
|
"epoch": 3.4549428379287157,
|
|
"grad_norm": 0.08157237619161606,
|
|
"learning_rate": 7.26861965309285e-07,
|
|
"loss": 0.0249,
|
|
"step": 20550
|
|
},
|
|
{
|
|
"epoch": 3.4633490248823136,
|
|
"grad_norm": 0.05281166359782219,
|
|
"learning_rate": 7.25928415392371e-07,
|
|
"loss": 0.0354,
|
|
"step": 20600
|
|
},
|
|
{
|
|
"epoch": 3.4717552118359114,
|
|
"grad_norm": 0.03640764206647873,
|
|
"learning_rate": 7.24994865475457e-07,
|
|
"loss": 0.0238,
|
|
"step": 20650
|
|
},
|
|
{
|
|
"epoch": 3.4801613987895093,
|
|
"grad_norm": 0.10095292329788208,
|
|
"learning_rate": 7.240613155585429e-07,
|
|
"loss": 0.0291,
|
|
"step": 20700
|
|
},
|
|
{
|
|
"epoch": 3.4835238735709484,
|
|
"eval_accuracy": 0.9922779922779923,
|
|
"eval_f1_macro": 0.9921853502169998,
|
|
"eval_loss": 0.0231217909604311,
|
|
"eval_precision": 0.9923553160253439,
|
|
"eval_recall": 0.9920202883023748,
|
|
"eval_runtime": 41.9983,
|
|
"eval_samples_per_second": 92.504,
|
|
"eval_steps_per_second": 11.572,
|
|
"step": 20720
|
|
}
|
|
],
|
|
"logging_steps": 50,
|
|
"max_steps": 59480,
|
|
"num_input_tokens_seen": 0,
|
|
"num_train_epochs": 10,
|
|
"save_steps": 2960,
|
|
"stateful_callbacks": {
|
|
"TrainerControl": {
|
|
"args": {
|
|
"should_epoch_stop": false,
|
|
"should_evaluate": false,
|
|
"should_log": false,
|
|
"should_save": true,
|
|
"should_training_stop": false
|
|
},
|
|
"attributes": {}
|
|
}
|
|
},
|
|
"total_flos": 3.433256024715952e+18,
|
|
"train_batch_size": 32,
|
|
"trial_name": null,
|
|
"trial_params": null
|
|
}
|
|
|