{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9995119570522206,
  "eval_steps": 500,
  "global_step": 1024,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009760858955588092,
      "grad_norm": 23.035247556232232,
      "learning_rate": 9.70873786407767e-08,
      "loss": 1.305,
      "step": 1
    },
    {
      "epoch": 0.004880429477794046,
      "grad_norm": 20.719836042073574,
      "learning_rate": 4.854368932038835e-07,
      "loss": 1.3146,
      "step": 5
    },
    {
      "epoch": 0.009760858955588092,
      "grad_norm": 9.316004282384458,
      "learning_rate": 9.70873786407767e-07,
      "loss": 1.2051,
      "step": 10
    },
    {
      "epoch": 0.014641288433382138,
      "grad_norm": 8.495855354939422,
      "learning_rate": 1.4563106796116506e-06,
      "loss": 1.0417,
      "step": 15
    },
    {
      "epoch": 0.019521717911176184,
      "grad_norm": 3.0384642525762553,
      "learning_rate": 1.941747572815534e-06,
      "loss": 0.9234,
      "step": 20
    },
    {
      "epoch": 0.02440214738897023,
      "grad_norm": 2.435241261554877,
      "learning_rate": 2.427184466019418e-06,
      "loss": 0.873,
      "step": 25
    },
    {
      "epoch": 0.029282576866764276,
      "grad_norm": 2.2514226333854612,
      "learning_rate": 2.912621359223301e-06,
      "loss": 0.8389,
      "step": 30
    },
    {
      "epoch": 0.03416300634455832,
      "grad_norm": 2.2291597467335396,
      "learning_rate": 3.398058252427185e-06,
      "loss": 0.8266,
      "step": 35
    },
    {
      "epoch": 0.03904343582235237,
      "grad_norm": 2.3180250181670745,
      "learning_rate": 3.883495145631068e-06,
      "loss": 0.8014,
      "step": 40
    },
    {
      "epoch": 0.043923865300146414,
      "grad_norm": 2.242281011137039,
      "learning_rate": 4.368932038834952e-06,
      "loss": 0.7868,
      "step": 45
    },
    {
      "epoch": 0.04880429477794046,
      "grad_norm": 2.265455274846197,
      "learning_rate": 4.854368932038836e-06,
      "loss": 0.7727,
      "step": 50
    },
    {
      "epoch": 0.053684724255734506,
      "grad_norm": 2.513657587709498,
      "learning_rate": 5.3398058252427185e-06,
      "loss": 0.7657,
      "step": 55
    },
    {
      "epoch": 0.05856515373352855,
      "grad_norm": 2.5260021282408194,
      "learning_rate": 5.825242718446602e-06,
      "loss": 0.7469,
      "step": 60
    },
    {
      "epoch": 0.0634455832113226,
      "grad_norm": 2.37579367638948,
      "learning_rate": 6.310679611650487e-06,
      "loss": 0.7318,
      "step": 65
    },
    {
      "epoch": 0.06832601268911664,
      "grad_norm": 2.3867971053464965,
      "learning_rate": 6.79611650485437e-06,
      "loss": 0.7206,
      "step": 70
    },
    {
      "epoch": 0.07320644216691069,
      "grad_norm": 2.4935586404236156,
      "learning_rate": 7.2815533980582534e-06,
      "loss": 0.7054,
      "step": 75
    },
    {
      "epoch": 0.07808687164470474,
      "grad_norm": 2.294194399695781,
      "learning_rate": 7.766990291262136e-06,
      "loss": 0.7024,
      "step": 80
    },
    {
      "epoch": 0.08296730112249878,
      "grad_norm": 2.6104379893467917,
      "learning_rate": 8.25242718446602e-06,
      "loss": 0.6972,
      "step": 85
    },
    {
      "epoch": 0.08784773060029283,
      "grad_norm": 2.279295024137011,
      "learning_rate": 8.737864077669904e-06,
      "loss": 0.6811,
      "step": 90
    },
    {
      "epoch": 0.09272816007808687,
      "grad_norm": 2.1438545371438047,
      "learning_rate": 9.223300970873788e-06,
      "loss": 0.6929,
      "step": 95
    },
    {
      "epoch": 0.09760858955588092,
      "grad_norm": 2.0963423668390213,
      "learning_rate": 9.708737864077671e-06,
      "loss": 0.684,
      "step": 100
    },
    {
      "epoch": 0.10248901903367497,
      "grad_norm": 2.346237075039726,
      "learning_rate": 9.999883646674445e-06,
      "loss": 0.6836,
      "step": 105
    },
    {
      "epoch": 0.10736944851146901,
      "grad_norm": 2.079088926479871,
      "learning_rate": 9.998574733951775e-06,
      "loss": 0.6777,
      "step": 110
    },
    {
      "epoch": 0.11224987798926306,
      "grad_norm": 2.012614649795243,
      "learning_rate": 9.995811848851807e-06,
      "loss": 0.6796,
      "step": 115
    },
    {
      "epoch": 0.1171303074670571,
      "grad_norm": 2.4597764317331383,
      "learning_rate": 9.991595795035352e-06,
      "loss": 0.6677,
      "step": 120
    },
    {
      "epoch": 0.12201073694485115,
      "grad_norm": 2.0190253268907354,
      "learning_rate": 9.985927798857143e-06,
      "loss": 0.6604,
      "step": 125
    },
    {
      "epoch": 0.1268911664226452,
      "grad_norm": 2.056019595630264,
      "learning_rate": 9.978809509009121e-06,
      "loss": 0.671,
      "step": 130
    },
    {
      "epoch": 0.13177159590043924,
      "grad_norm": 1.9920289467971395,
      "learning_rate": 9.970242996040865e-06,
      "loss": 0.6619,
      "step": 135
    },
    {
      "epoch": 0.1366520253782333,
      "grad_norm": 2.427476265620817,
      "learning_rate": 9.960230751757318e-06,
      "loss": 0.653,
      "step": 140
    },
    {
      "epoch": 0.14153245485602733,
      "grad_norm": 2.138699924734492,
      "learning_rate": 9.948775688493974e-06,
      "loss": 0.6428,
      "step": 145
    },
    {
      "epoch": 0.14641288433382138,
      "grad_norm": 2.2832062237916007,
      "learning_rate": 9.93588113826975e-06,
      "loss": 0.6556,
      "step": 150
    },
    {
      "epoch": 0.15129331381161543,
      "grad_norm": 2.330757441488492,
      "learning_rate": 9.921550851817774e-06,
      "loss": 0.6597,
      "step": 155
    },
    {
      "epoch": 0.15617374328940947,
      "grad_norm": 2.028862838643975,
      "learning_rate": 9.905788997494377e-06,
      "loss": 0.6449,
      "step": 160
    },
    {
      "epoch": 0.16105417276720352,
      "grad_norm": 2.097525756087534,
      "learning_rate": 9.888600160066627e-06,
      "loss": 0.6469,
      "step": 165
    },
    {
      "epoch": 0.16593460224499756,
      "grad_norm": 2.238936294256646,
      "learning_rate": 9.869989339378706e-06,
      "loss": 0.6345,
      "step": 170
    },
    {
      "epoch": 0.1708150317227916,
      "grad_norm": 2.3275889632266225,
      "learning_rate": 9.849961948897582e-06,
      "loss": 0.6361,
      "step": 175
    },
    {
      "epoch": 0.17569546120058566,
      "grad_norm": 2.1824511863577096,
      "learning_rate": 9.828523814138344e-06,
      "loss": 0.6286,
      "step": 180
    },
    {
      "epoch": 0.1805758906783797,
      "grad_norm": 1.9316765699674343,
      "learning_rate": 9.8056811709697e-06,
      "loss": 0.6312,
      "step": 185
    },
    {
      "epoch": 0.18545632015617375,
      "grad_norm": 1.913481112213078,
      "learning_rate": 9.781440663800099e-06,
      "loss": 0.6252,
      "step": 190
    },
    {
      "epoch": 0.1903367496339678,
      "grad_norm": 2.126509994727766,
      "learning_rate": 9.755809343645021e-06,
      "loss": 0.6197,
      "step": 195
    },
    {
      "epoch": 0.19521717911176184,
      "grad_norm": 2.0612294747506437,
      "learning_rate": 9.728794666076004e-06,
      "loss": 0.6095,
      "step": 200
    },
    {
      "epoch": 0.2000976085895559,
      "grad_norm": 1.957338282859279,
      "learning_rate": 9.700404489051974e-06,
      "loss": 0.6161,
      "step": 205
    },
    {
      "epoch": 0.20497803806734993,
      "grad_norm": 2.0078755538466955,
      "learning_rate": 9.670647070633554e-06,
      "loss": 0.6091,
      "step": 210
    },
    {
      "epoch": 0.20985846754514398,
      "grad_norm": 2.023099319748942,
      "learning_rate": 9.639531066580979e-06,
      "loss": 0.6085,
      "step": 215
    },
    {
      "epoch": 0.21473889702293802,
      "grad_norm": 2.0118211094195773,
      "learning_rate": 9.607065527836324e-06,
      "loss": 0.5993,
      "step": 220
    },
    {
      "epoch": 0.21961932650073207,
      "grad_norm": 2.1563303821214626,
      "learning_rate": 9.573259897890794e-06,
      "loss": 0.5958,
      "step": 225
    },
    {
      "epoch": 0.22449975597852612,
      "grad_norm": 2.0414228158229792,
      "learning_rate": 9.538124010037832e-06,
      "loss": 0.5886,
      "step": 230
    },
    {
      "epoch": 0.22938018545632016,
      "grad_norm": 2.0788427448618,
      "learning_rate": 9.501668084512827e-06,
      "loss": 0.5985,
      "step": 235
    },
    {
      "epoch": 0.2342606149341142,
      "grad_norm": 2.1797519504265885,
      "learning_rate": 9.46390272552028e-06,
      "loss": 0.5928,
      "step": 240
    },
    {
      "epoch": 0.23914104441190825,
      "grad_norm": 2.0188184151034383,
      "learning_rate": 9.424838918149285e-06,
      "loss": 0.5938,
      "step": 245
    },
    {
      "epoch": 0.2440214738897023,
      "grad_norm": 2.010370103702029,
      "learning_rate": 9.384488025178214e-06,
      "loss": 0.5916,
      "step": 250
    },
    {
      "epoch": 0.24890190336749635,
      "grad_norm": 2.0376138426044377,
      "learning_rate": 9.342861783769535e-06,
      "loss": 0.5977,
      "step": 255
    },
    {
      "epoch": 0.2537823328452904,
      "grad_norm": 1.97689791600861,
      "learning_rate": 9.29997230205575e-06,
      "loss": 0.585,
      "step": 260
    },
    {
      "epoch": 0.25866276232308444,
      "grad_norm": 2.0361234487708573,
      "learning_rate": 9.2558320556174e-06,
      "loss": 0.5753,
      "step": 265
    },
    {
      "epoch": 0.2635431918008785,
      "grad_norm": 1.979410664780559,
      "learning_rate": 9.210453883854204e-06,
      "loss": 0.5898,
      "step": 270
    },
    {
      "epoch": 0.26842362127867253,
      "grad_norm": 1.9815660852967445,
      "learning_rate": 9.163850986250375e-06,
      "loss": 0.5836,
      "step": 275
    },
    {
      "epoch": 0.2733040507564666,
      "grad_norm": 2.2880133483104625,
      "learning_rate": 9.11603691853518e-06,
      "loss": 0.5663,
      "step": 280
    },
    {
      "epoch": 0.2781844802342606,
      "grad_norm": 1.9874397029111008,
      "learning_rate": 9.067025588739889e-06,
      "loss": 0.5658,
      "step": 285
    },
    {
      "epoch": 0.28306490971205467,
      "grad_norm": 1.9026409986400121,
      "learning_rate": 9.016831253152244e-06,
      "loss": 0.5689,
      "step": 290
    },
    {
      "epoch": 0.2879453391898487,
      "grad_norm": 2.241058211758017,
      "learning_rate": 8.96546851216962e-06,
      "loss": 0.5557,
      "step": 295
    },
    {
      "epoch": 0.29282576866764276,
      "grad_norm": 1.9994798416425632,
      "learning_rate": 8.912952306052109e-06,
      "loss": 0.5628,
      "step": 300
    },
    {
      "epoch": 0.2977061981454368,
      "grad_norm": 1.9657323697266753,
      "learning_rate": 8.859297910576732e-06,
      "loss": 0.5556,
      "step": 305
    },
    {
      "epoch": 0.30258662762323085,
      "grad_norm": 2.275704776988123,
      "learning_rate": 8.804520932594061e-06,
      "loss": 0.547,
      "step": 310
    },
    {
      "epoch": 0.3074670571010249,
      "grad_norm": 2.02689211581186,
      "learning_rate": 8.748637305488537e-06,
      "loss": 0.5481,
      "step": 315
    },
    {
      "epoch": 0.31234748657881894,
      "grad_norm": 1.9315973089401948,
      "learning_rate": 8.691663284543812e-06,
      "loss": 0.5489,
      "step": 320
    },
    {
      "epoch": 0.317227916056613,
      "grad_norm": 1.9190728935534196,
      "learning_rate": 8.633615442214452e-06,
      "loss": 0.5476,
      "step": 325
    },
    {
      "epoch": 0.32210834553440704,
      "grad_norm": 2.326687095328215,
      "learning_rate": 8.574510663305388e-06,
      "loss": 0.535,
      "step": 330
    },
    {
      "epoch": 0.3269887750122011,
      "grad_norm": 2.0644871195721652,
      "learning_rate": 8.514366140060504e-06,
      "loss": 0.5503,
      "step": 335
    },
    {
      "epoch": 0.33186920448999513,
      "grad_norm": 2.1583645554795226,
      "learning_rate": 8.453199367161804e-06,
      "loss": 0.5323,
      "step": 340
    },
    {
      "epoch": 0.3367496339677892,
      "grad_norm": 2.108056214865082,
      "learning_rate": 8.391028136640604e-06,
      "loss": 0.5268,
      "step": 345
    },
    {
      "epoch": 0.3416300634455832,
      "grad_norm": 1.9847999461871777,
      "learning_rate": 8.32787053270223e-06,
      "loss": 0.5234,
      "step": 350
    },
    {
      "epoch": 0.34651049292337727,
      "grad_norm": 2.032846767263459,
      "learning_rate": 8.263744926465744e-06,
      "loss": 0.5389,
      "step": 355
    },
    {
      "epoch": 0.3513909224011713,
      "grad_norm": 2.066277009423882,
      "learning_rate": 8.198669970620177e-06,
      "loss": 0.5194,
      "step": 360
    },
    {
      "epoch": 0.35627135187896536,
      "grad_norm": 2.4408622009476364,
      "learning_rate": 8.13266459399891e-06,
      "loss": 0.5279,
      "step": 365
    },
    {
      "epoch": 0.3611517813567594,
      "grad_norm": 2.0939994519934997,
      "learning_rate": 8.065747996073681e-06,
      "loss": 0.5176,
      "step": 370
    },
    {
      "epoch": 0.36603221083455345,
      "grad_norm": 2.1186198571951773,
      "learning_rate": 7.997939641369909e-06,
      "loss": 0.5132,
      "step": 375
    },
    {
      "epoch": 0.3709126403123475,
      "grad_norm": 1.894201627794718,
      "learning_rate": 7.929259253804903e-06,
      "loss": 0.521,
      "step": 380
    },
    {
      "epoch": 0.37579306979014154,
      "grad_norm": 2.083317144453098,
      "learning_rate": 7.859726810950606e-06,
      "loss": 0.5084,
      "step": 385
    },
    {
      "epoch": 0.3806734992679356,
      "grad_norm": 1.8916170318680645,
      "learning_rate": 7.789362538222585e-06,
      "loss": 0.5127,
      "step": 390
    },
    {
      "epoch": 0.38555392874572963,
      "grad_norm": 2.208287429807416,
      "learning_rate": 7.718186902996912e-06,
      "loss": 0.5082,
      "step": 395
    },
    {
      "epoch": 0.3904343582235237,
      "grad_norm": 1.9376050467874282,
      "learning_rate": 7.646220608656662e-06,
      "loss": 0.5033,
      "step": 400
    },
    {
      "epoch": 0.3953147877013177,
      "grad_norm": 1.9494765756044568,
      "learning_rate": 7.573484588569775e-06,
      "loss": 0.5083,
      "step": 405
    },
    {
      "epoch": 0.4001952171791118,
      "grad_norm": 1.9102990648958087,
      "learning_rate": 7.500000000000001e-06,
      "loss": 0.5063,
      "step": 410
    },
    {
      "epoch": 0.4050756466569058,
      "grad_norm": 1.997919784615126,
      "learning_rate": 7.425788217952744e-06,
      "loss": 0.494,
      "step": 415
    },
    {
      "epoch": 0.40995607613469986,
      "grad_norm": 2.080884858986304,
      "learning_rate": 7.350870828957547e-06,
      "loss": 0.4985,
      "step": 420
    },
    {
      "epoch": 0.4148365056124939,
      "grad_norm": 1.9539843013372942,
      "learning_rate": 7.27526962478906e-06,
      "loss": 0.4887,
      "step": 425
    },
    {
      "epoch": 0.41971693509028796,
      "grad_norm": 2.072970721277029,
      "learning_rate": 7.1990065961283075e-06,
      "loss": 0.497,
      "step": 430
    },
    {
      "epoch": 0.424597364568082,
      "grad_norm": 1.9942120864124357,
      "learning_rate": 7.122103926166096e-06,
      "loss": 0.4841,
      "step": 435
    },
    {
      "epoch": 0.42947779404587605,
      "grad_norm": 1.9812280986457487,
      "learning_rate": 7.044583984150425e-06,
      "loss": 0.4762,
      "step": 440
    },
    {
      "epoch": 0.4343582235236701,
      "grad_norm": 2.0123859041578678,
      "learning_rate": 6.9664693188797776e-06,
      "loss": 0.478,
      "step": 445
    },
    {
      "epoch": 0.43923865300146414,
      "grad_norm": 1.8650862463799345,
      "learning_rate": 6.887782652144186e-06,
      "loss": 0.4784,
      "step": 450
    },
    {
      "epoch": 0.4441190824792582,
      "grad_norm": 1.9432343558958571,
      "learning_rate": 6.808546872115976e-06,
      "loss": 0.4727,
      "step": 455
    },
    {
      "epoch": 0.44899951195705223,
      "grad_norm": 1.9279715713548145,
      "learning_rate": 6.728785026692113e-06,
      "loss": 0.4753,
      "step": 460
    },
    {
      "epoch": 0.4538799414348463,
      "grad_norm": 1.954814041702271,
      "learning_rate": 6.648520316790102e-06,
      "loss": 0.4561,
      "step": 465
    },
    {
      "epoch": 0.4587603709126403,
      "grad_norm": 1.8912197483130344,
      "learning_rate": 6.567776089599339e-06,
      "loss": 0.4712,
      "step": 470
    },
    {
      "epoch": 0.46364080039043437,
      "grad_norm": 1.8511135512437755,
      "learning_rate": 6.486575831789974e-06,
      "loss": 0.4564,
      "step": 475
    },
    {
      "epoch": 0.4685212298682284,
      "grad_norm": 2.0169192033711063,
      "learning_rate": 6.404943162681144e-06,
      "loss": 0.4612,
      "step": 480
    },
    {
      "epoch": 0.47340165934602246,
      "grad_norm": 1.9398646592145217,
      "learning_rate": 6.322901827370659e-06,
      "loss": 0.4622,
      "step": 485
    },
    {
      "epoch": 0.4782820888238165,
      "grad_norm": 1.9832620948402848,
      "learning_rate": 6.240475689828087e-06,
      "loss": 0.4589,
      "step": 490
    },
    {
      "epoch": 0.48316251830161056,
      "grad_norm": 2.0795997220341325,
      "learning_rate": 6.1576887259532695e-06,
      "loss": 0.447,
      "step": 495
    },
    {
      "epoch": 0.4880429477794046,
      "grad_norm": 1.8116820288469397,
      "learning_rate": 6.074565016602263e-06,
      "loss": 0.4519,
      "step": 500
    },
    {
      "epoch": 0.49292337725719865,
      "grad_norm": 1.943068708736876,
      "learning_rate": 5.991128740582774e-06,
      "loss": 0.453,
      "step": 505
    },
    {
      "epoch": 0.4978038067349927,
      "grad_norm": 2.0127161873905948,
      "learning_rate": 5.907404167621087e-06,
      "loss": 0.4551,
      "step": 510
    },
    {
      "epoch": 0.5026842362127867,
      "grad_norm": 1.9262937498205195,
      "learning_rate": 5.823415651302545e-06,
      "loss": 0.4526,
      "step": 515
    },
    {
      "epoch": 0.5075646656905808,
      "grad_norm": 1.890004605269585,
      "learning_rate": 5.739187621987649e-06,
      "loss": 0.4471,
      "step": 520
    },
    {
      "epoch": 0.5124450951683748,
      "grad_norm": 1.986812970174512,
      "learning_rate": 5.654744579705815e-06,
      "loss": 0.4318,
      "step": 525
    },
    {
      "epoch": 0.5173255246461689,
      "grad_norm": 2.074431055050664,
      "learning_rate": 5.570111087028868e-06,
      "loss": 0.4449,
      "step": 530
    },
    {
      "epoch": 0.5222059541239629,
      "grad_norm": 1.8815049244771753,
      "learning_rate": 5.4853117619263496e-06,
      "loss": 0.4407,
      "step": 535
    },
    {
      "epoch": 0.527086383601757,
      "grad_norm": 1.9186809916757779,
      "learning_rate": 5.4003712706047055e-06,
      "loss": 0.4457,
      "step": 540
    },
    {
      "epoch": 0.531966813079551,
      "grad_norm": 1.9444758137939766,
      "learning_rate": 5.315314320332438e-06,
      "loss": 0.4343,
      "step": 545
    },
    {
      "epoch": 0.5368472425573451,
      "grad_norm": 2.1154813361539424,
      "learning_rate": 5.230165652253329e-06,
      "loss": 0.4301,
      "step": 550
    },
    {
      "epoch": 0.541727672035139,
      "grad_norm": 1.8462242497921442,
      "learning_rate": 5.144950034189798e-06,
      "loss": 0.4348,
      "step": 555
    },
    {
      "epoch": 0.5466081015129332,
      "grad_norm": 1.8891533773792688,
      "learning_rate": 5.059692253438495e-06,
      "loss": 0.4238,
      "step": 560
    },
    {
      "epoch": 0.5514885309907271,
      "grad_norm": 1.8943839327223533,
      "learning_rate": 4.97441710956025e-06,
      "loss": 0.4278,
      "step": 565
    },
    {
      "epoch": 0.5563689604685212,
      "grad_norm": 1.8842523650803675,
      "learning_rate": 4.8891494071664315e-06,
      "loss": 0.4381,
      "step": 570
    },
    {
      "epoch": 0.5612493899463152,
      "grad_norm": 1.8611349476498642,
      "learning_rate": 4.803913948703845e-06,
      "loss": 0.4057,
      "step": 575
    },
    {
      "epoch": 0.5661298194241093,
      "grad_norm": 2.250944612674544,
      "learning_rate": 4.71873552724027e-06,
      "loss": 0.4102,
      "step": 580
    },
    {
      "epoch": 0.5710102489019033,
      "grad_norm": 1.846182706672498,
      "learning_rate": 4.633638919252712e-06,
      "loss": 0.4123,
      "step": 585
    },
    {
      "epoch": 0.5758906783796974,
      "grad_norm": 1.9576170198699478,
      "learning_rate": 4.548648877420481e-06,
      "loss": 0.4126,
      "step": 590
    },
    {
      "epoch": 0.5807711078574914,
      "grad_norm": 2.0956436020765166,
      "learning_rate": 4.463790123425209e-06,
      "loss": 0.4059,
      "step": 595
    },
    {
      "epoch": 0.5856515373352855,
      "grad_norm": 1.957295844436941,
      "learning_rate": 4.379087340759861e-06,
      "loss": 0.416,
      "step": 600
    },
    {
      "epoch": 0.5905319668130795,
      "grad_norm": 1.9288351907184336,
      "learning_rate": 4.294565167548866e-06,
      "loss": 0.4088,
      "step": 605
    },
    {
      "epoch": 0.5954123962908736,
      "grad_norm": 1.8695034228231788,
      "learning_rate": 4.2102481893814504e-06,
      "loss": 0.4021,
      "step": 610
    },
    {
      "epoch": 0.6002928257686676,
      "grad_norm": 1.932666429763739,
      "learning_rate": 4.1261609321602406e-06,
      "loss": 0.4066,
      "step": 615
    },
    {
      "epoch": 0.6051732552464617,
      "grad_norm": 1.863599655356967,
      "learning_rate": 4.042327854967231e-06,
      "loss": 0.4029,
      "step": 620
    },
    {
      "epoch": 0.6100536847242557,
      "grad_norm": 1.9017066401572869,
      "learning_rate": 3.958773342949196e-06,
      "loss": 0.4014,
      "step": 625
    },
    {
      "epoch": 0.6149341142020498,
      "grad_norm": 1.8354296840477746,
      "learning_rate": 3.875521700224598e-06,
      "loss": 0.3996,
      "step": 630
    },
    {
      "epoch": 0.6198145436798438,
      "grad_norm": 1.9012134040742121,
      "learning_rate": 3.7925971428140655e-06,
      "loss": 0.3963,
      "step": 635
    },
    {
      "epoch": 0.6246949731576379,
      "grad_norm": 1.8307728966391115,
      "learning_rate": 3.71002379159651e-06,
      "loss": 0.3815,
      "step": 640
    },
    {
      "epoch": 0.6295754026354319,
      "grad_norm": 1.8463068672322085,
      "learning_rate": 3.627825665292899e-06,
      "loss": 0.3866,
      "step": 645
    },
    {
      "epoch": 0.634455832113226,
      "grad_norm": 1.8799977224296363,
      "learning_rate": 3.546026673479755e-06,
      "loss": 0.3862,
      "step": 650
    },
    {
      "epoch": 0.63933626159102,
      "grad_norm": 1.9499227663945158,
      "learning_rate": 3.464650609634403e-06,
      "loss": 0.3834,
      "step": 655
    },
    {
      "epoch": 0.6442166910688141,
      "grad_norm": 2.085562927435089,
      "learning_rate": 3.383721144213985e-06,
      "loss": 0.3885,
      "step": 660
    },
    {
      "epoch": 0.6490971205466081,
      "grad_norm": 1.8062059990982582,
      "learning_rate": 3.3032618177702546e-06,
      "loss": 0.3882,
      "step": 665
    },
    {
      "epoch": 0.6539775500244022,
      "grad_norm": 1.8689683106850827,
      "learning_rate": 3.2232960341021703e-06,
      "loss": 0.3836,
      "step": 670
    },
    {
      "epoch": 0.6588579795021962,
      "grad_norm": 1.7607073017774915,
      "learning_rate": 3.1438470534482547e-06,
      "loss": 0.3864,
      "step": 675
    },
    {
      "epoch": 0.6637384089799903,
      "grad_norm": 1.8221640618653892,
      "learning_rate": 3.064937985720717e-06,
      "loss": 0.3851,
      "step": 680
    },
    {
      "epoch": 0.6686188384577842,
      "grad_norm": 1.7866065439568548,
      "learning_rate": 2.9865917837833025e-06,
      "loss": 0.3791,
      "step": 685
    },
    {
      "epoch": 0.6734992679355783,
      "grad_norm": 1.854981360240035,
      "learning_rate": 2.9088312367748257e-06,
      "loss": 0.3767,
      "step": 690
    },
    {
      "epoch": 0.6783796974133723,
      "grad_norm": 1.8356520705100887,
      "learning_rate": 2.8316789634803223e-06,
      "loss": 0.3791,
      "step": 695
    },
    {
      "epoch": 0.6832601268911664,
      "grad_norm": 1.9624888900737716,
      "learning_rate": 2.75515740575176e-06,
      "loss": 0.3754,
      "step": 700
    },
    {
      "epoch": 0.6881405563689604,
      "grad_norm": 1.8570794747458175,
      "learning_rate": 2.6792888219802017e-06,
      "loss": 0.3732,
      "step": 705
    },
    {
      "epoch": 0.6930209858467545,
      "grad_norm": 1.8773607924105495,
      "learning_rate": 2.604095280621354e-06,
      "loss": 0.3641,
      "step": 710
    },
    {
      "epoch": 0.6979014153245485,
      "grad_norm": 1.9091813518590455,
      "learning_rate": 2.529598653776349e-06,
      "loss": 0.3683,
      "step": 715
    },
    {
      "epoch": 0.7027818448023426,
      "grad_norm": 1.8992343259387405,
      "learning_rate": 2.4558206108296394e-06,
      "loss": 0.3701,
      "step": 720
    },
    {
      "epoch": 0.7076622742801366,
      "grad_norm": 1.8563794906563806,
      "learning_rate": 2.3827826121458713e-06,
      "loss": 0.3725,
      "step": 725
    },
    {
      "epoch": 0.7125427037579307,
      "grad_norm": 1.7665178047594396,
      "learning_rate": 2.3105059028275467e-06,
      "loss": 0.3704,
      "step": 730
    },
    {
      "epoch": 0.7174231332357247,
      "grad_norm": 1.8625985682622697,
      "learning_rate": 2.2390115065352974e-06,
      "loss": 0.3695,
      "step": 735
    },
    {
      "epoch": 0.7223035627135188,
      "grad_norm": 1.9454683490909128,
      "learning_rate": 2.16832021937259e-06,
      "loss": 0.3728,
      "step": 740
    },
    {
      "epoch": 0.7271839921913128,
      "grad_norm": 1.8998464205788768,
      "learning_rate": 2.0984526038366005e-06,
      "loss": 0.3584,
      "step": 745
    },
    {
      "epoch": 0.7320644216691069,
      "grad_norm": 1.7833634293472909,
      "learning_rate": 2.0294289828370506e-06,
      "loss": 0.3655,
      "step": 750
    },
    {
      "epoch": 0.7369448511469009,
      "grad_norm": 1.7357199941063493,
      "learning_rate": 1.9612694337847334e-06,
      "loss": 0.3607,
      "step": 755
    },
    {
      "epoch": 0.741825280624695,
      "grad_norm": 1.822672774536763,
      "learning_rate": 1.8939937827514509e-06,
      "loss": 0.3558,
      "step": 760
    },
    {
      "epoch": 0.746705710102489,
      "grad_norm": 1.8562189172640455,
      "learning_rate": 1.8276215987030489e-06,
      "loss": 0.3627,
      "step": 765
    },
    {
      "epoch": 0.7515861395802831,
      "grad_norm": 1.778360536639837,
      "learning_rate": 1.7621721878072601e-06,
      "loss": 0.3565,
      "step": 770
    },
    {
      "epoch": 0.7564665690580771,
      "grad_norm": 1.7353609171885467,
      "learning_rate": 1.6976645878179677e-06,
      "loss": 0.3564,
      "step": 775
    },
    {
      "epoch": 0.7613469985358712,
      "grad_norm": 1.724352647878197,
      "learning_rate": 1.6341175625375554e-06,
      "loss": 0.3542,
      "step": 780
    },
    {
      "epoch": 0.7662274280136652,
      "grad_norm": 1.702190896366777,
      "learning_rate": 1.5715495963589434e-06,
      "loss": 0.359,
      "step": 785
    },
    {
      "epoch": 0.7711078574914593,
      "grad_norm": 1.791687835303583,
      "learning_rate": 1.509978888888894e-06,
      "loss": 0.3505,
      "step": 790
    },
    {
      "epoch": 0.7759882869692533,
      "grad_norm": 1.9174372654606664,
      "learning_rate": 1.4494233496541548e-06,
      "loss": 0.3523,
      "step": 795
    },
    {
      "epoch": 0.7808687164470474,
      "grad_norm": 1.90072457106355,
      "learning_rate": 1.3899005928919901e-06,
      "loss": 0.3546,
      "step": 800
    },
    {
      "epoch": 0.7857491459248414,
      "grad_norm": 1.657112918762303,
      "learning_rate": 1.3314279324265922e-06,
      "loss": 0.3539,
      "step": 805
    },
    {
      "epoch": 0.7906295754026355,
      "grad_norm": 1.749712238075224,
      "learning_rate": 1.2740223766328813e-06,
      "loss": 0.3414,
      "step": 810
    },
    {
      "epoch": 0.7955100048804294,
      "grad_norm": 1.9679950016790901,
      "learning_rate": 1.2177006234891548e-06,
      "loss": 0.3506,
      "step": 815
    },
    {
      "epoch": 0.8003904343582235,
      "grad_norm": 2.1432874187201594,
      "learning_rate": 1.1624790557200255e-06,
      "loss": 0.3444,
      "step": 820
    },
    {
      "epoch": 0.8052708638360175,
      "grad_norm": 1.8338877374043903,
      "learning_rate": 1.1083737360310487e-06,
      "loss": 0.3451,
      "step": 825
    },
    {
      "epoch": 0.8101512933138116,
      "grad_norm": 1.7170366478986214,
      "learning_rate": 1.0554004024364573e-06,
      "loss": 0.3399,
      "step": 830
    },
    {
      "epoch": 0.8150317227916056,
      "grad_norm": 1.680152272001201,
      "learning_rate": 1.0035744636813188e-06,
      "loss": 0.3351,
      "step": 835
    },
    {
      "epoch": 0.8199121522693997,
      "grad_norm": 1.9673664630690946,
      "learning_rate": 9.529109947594834e-07,
      "loss": 0.34,
      "step": 840
    },
    {
      "epoch": 0.8247925817471937,
      "grad_norm": 1.7733447956263555,
      "learning_rate": 9.034247325286122e-07,
      "loss": 0.3334,
      "step": 845
    },
    {
      "epoch": 0.8296730112249878,
      "grad_norm": 1.6571893614203954,
      "learning_rate": 8.551300714235494e-07,
      "loss": 0.3401,
      "step": 850
    },
    {
      "epoch": 0.8345534407027818,
      "grad_norm": 1.6136229922890815,
      "learning_rate": 8.080410592693183e-07,
      "loss": 0.3342,
      "step": 855
    },
    {
      "epoch": 0.8394338701805759,
      "grad_norm": 1.8811832308333971,
      "learning_rate": 7.621713931949181e-07,
      "loss": 0.3466,
      "step": 860
    },
    {
      "epoch": 0.8443142996583699,
      "grad_norm": 1.690925430698955,
      "learning_rate": 7.175344156491432e-07,
      "loss": 0.3402,
      "step": 865
    },
    {
      "epoch": 0.849194729136164,
      "grad_norm": 1.7672934144690045,
      "learning_rate": 6.741431105195623e-07,
      "loss": 0.3421,
      "step": 870
    },
    {
      "epoch": 0.854075158613958,
      "grad_norm": 1.6317452879722867,
      "learning_rate": 6.32010099355806e-07,
      "loss": 0.3387,
      "step": 875
    },
    {
      "epoch": 0.8589555880917521,
      "grad_norm": 1.6773249666989778,
      "learning_rate": 5.911476376982333e-07,
      "loss": 0.3291,
      "step": 880
    },
    {
      "epoch": 0.8638360175695461,
      "grad_norm": 1.8539252747080448,
      "learning_rate": 5.515676115130819e-07,
      "loss": 0.3439,
      "step": 885
    },
    {
      "epoch": 0.8687164470473402,
      "grad_norm": 1.6252654529292938,
      "learning_rate": 5.132815337351038e-07,
      "loss": 0.3333,
      "step": 890
    },
    {
      "epoch": 0.8735968765251342,
      "grad_norm": 1.7572937209892616,
      "learning_rate": 4.763005409187155e-07,
      "loss": 0.3346,
      "step": 895
    },
    {
      "epoch": 0.8784773060029283,
      "grad_norm": 1.6547399559546787,
      "learning_rate": 4.406353899986221e-07,
      "loss": 0.3293,
      "step": 900
    },
    {
      "epoch": 0.8833577354807223,
      "grad_norm": 1.7444267110302711,
      "learning_rate": 4.06296455160875e-07,
      "loss": 0.3384,
      "step": 905
    },
    {
      "epoch": 0.8882381649585164,
      "grad_norm": 1.6099424270875666,
      "learning_rate": 3.732937248252472e-07,
      "loss": 0.3335,
      "step": 910
    },
    {
      "epoch": 0.8931185944363104,
      "grad_norm": 1.7062101914063132,
      "learning_rate": 3.416367987398345e-07,
      "loss": 0.3346,
      "step": 915
    },
    {
      "epoch": 0.8979990239141045,
      "grad_norm": 1.611954806755092,
      "learning_rate": 3.113348851887038e-07,
      "loss": 0.3288,
      "step": 920
    },
    {
      "epoch": 0.9028794533918985,
      "grad_norm": 1.7251802193314405,
      "learning_rate": 2.8239679831341126e-07,
      "loss": 0.3299,
      "step": 925
    },
    {
      "epoch": 0.9077598828696926,
      "grad_norm": 1.6429002170577425,
      "learning_rate": 2.548309555491674e-07,
      "loss": 0.3381,
      "step": 930
    },
    {
      "epoch": 0.9126403123474865,
      "grad_norm": 1.9456711393571784,
      "learning_rate": 2.2864537517639618e-07,
      "loss": 0.3336,
      "step": 935
    },
    {
      "epoch": 0.9175207418252807,
      "grad_norm": 1.6656697539121814,
      "learning_rate": 2.038476739883982e-07,
      "loss": 0.3319,
      "step": 940
    },
    {
      "epoch": 0.9224011713030746,
      "grad_norm": 1.6161650401186363,
      "learning_rate": 1.804450650757972e-07,
      "loss": 0.3268,
      "step": 945
    },
    {
      "epoch": 0.9272816007808687,
      "grad_norm": 1.7642679481071266,
      "learning_rate": 1.5844435572841544e-07,
      "loss": 0.3325,
      "step": 950
    },
    {
      "epoch": 0.9321620302586627,
      "grad_norm": 1.6788640143992795,
      "learning_rate": 1.3785194545518965e-07,
      "loss": 0.3209,
      "step": 955
    },
    {
      "epoch": 0.9370424597364568,
      "grad_norm": 1.6412136858855984,
      "learning_rate": 1.1867382412269257e-07,
      "loss": 0.3279,
      "step": 960
    },
    {
      "epoch": 0.9419228892142508,
      "grad_norm": 1.660638704825638,
      "learning_rate": 1.0091557021282283e-07,
      "loss": 0.3246,
      "step": 965
    },
    {
      "epoch": 0.9468033186920449,
      "grad_norm": 1.676587421925541,
      "learning_rate": 8.458234920014685e-08,
      "loss": 0.3319,
      "step": 970
    },
    {
      "epoch": 0.9516837481698389,
      "grad_norm": 1.6537486653683027,
      "learning_rate": 6.967891204937737e-08,
      "loss": 0.3358,
      "step": 975
    },
    {
      "epoch": 0.956564177647633,
      "grad_norm": 1.6606920687073268,
      "learning_rate": 5.620959383343061e-08,
      "loss": 0.3252,
      "step": 980
    },
    {
      "epoch": 0.961444607125427,
      "grad_norm": 1.6627144649185686,
      "learning_rate": 4.417831247244819e-08,
      "loss": 0.3277,
      "step": 985
    },
    {
      "epoch": 0.9663250366032211,
      "grad_norm": 1.624001266110359,
      "learning_rate": 3.3588567594161625e-08,
      "loss": 0.3192,
      "step": 990
    },
    {
      "epoch": 0.9712054660810151,
      "grad_norm": 1.672812217323314,
      "learning_rate": 2.4443439515933754e-08,
      "loss": 0.3281,
      "step": 995
    },
    {
      "epoch": 0.9760858955588092,
      "grad_norm": 1.6582644783125504,
      "learning_rate": 1.6745588348758836e-08,
      "loss": 0.3276,
      "step": 1000
    },
    {
      "epoch": 0.9809663250366032,
      "grad_norm": 1.7011718232749053,
      "learning_rate": 1.0497253223502035e-08,
      "loss": 0.3202,
      "step": 1005
    },
    {
      "epoch": 0.9858467545143973,
      "grad_norm": 1.6469486959612,
      "learning_rate": 5.700251639581544e-09,
      "loss": 0.3212,
      "step": 1010
    },
    {
      "epoch": 0.9907271839921913,
      "grad_norm": 1.6927823132524529,
      "learning_rate": 2.355978936303127e-09,
      "loss": 0.3298,
      "step": 1015
    },
    {
      "epoch": 0.9956076134699854,
      "grad_norm": 1.6684894053484163,
      "learning_rate": 4.6540788698534735e-10,
      "loss": 0.3303,
      "step": 1020
    },
    {
      "epoch": 0.9995119570522206,
      "eval_loss": 0.3325524628162384,
      "eval_runtime": 96.5862,
      "eval_samples_per_second": 3.127,
      "eval_steps_per_second": 0.787,
      "step": 1024
    },
    {
      "epoch": 0.9995119570522206,
      "step": 1024,
      "total_flos": 214352422502400.0,
      "train_loss": 0.0,
      "train_runtime": 0.0086,
      "train_samples_per_second": 3817651.704,
      "train_steps_per_second": 119261.58
    }
  ],
  "logging_steps": 5,
  "max_steps": 1024,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 214352422502400.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}