{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9915764139590855,
  "eval_steps": 25,
  "global_step": 103,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009626955475330927,
      "grad_norm": 76.00764465332031,
      "learning_rate": 9.997674418116759e-06,
      "loss": 10.061,
      "step": 1
    },
    {
      "epoch": 0.019253910950661854,
      "grad_norm": 68.18009185791016,
      "learning_rate": 9.99069983579947e-06,
      "loss": 9.9795,
      "step": 2
    },
    {
      "epoch": 0.02888086642599278,
      "grad_norm": 46.29243850708008,
      "learning_rate": 9.979082741033047e-06,
      "loss": 9.5387,
      "step": 3
    },
    {
      "epoch": 0.03850782190132371,
      "grad_norm": Infinity,
      "learning_rate": 9.979082741033047e-06,
      "loss": 9.6956,
      "step": 4
    },
    {
      "epoch": 0.048134777376654635,
      "grad_norm": 55.74219512939453,
      "learning_rate": 9.96283394041954e-06,
      "loss": 9.7291,
      "step": 5
    },
    {
      "epoch": 0.05776173285198556,
      "grad_norm": Infinity,
      "learning_rate": 9.96283394041954e-06,
      "loss": 9.7559,
      "step": 6
    },
    {
      "epoch": 0.06738868832731648,
      "grad_norm": 74.81196594238281,
      "learning_rate": 9.941968549125481e-06,
      "loss": 9.8749,
      "step": 7
    },
    {
      "epoch": 0.07701564380264742,
      "grad_norm": 93.73626708984375,
      "learning_rate": 9.916505976821262e-06,
      "loss": 9.9976,
      "step": 8
    },
    {
      "epoch": 0.08664259927797834,
      "grad_norm": 66.88408660888672,
      "learning_rate": 9.886469909625624e-06,
      "loss": 9.8316,
      "step": 9
    },
    {
      "epoch": 0.09626955475330927,
      "grad_norm": 51.5136833190918,
      "learning_rate": 9.851888288072053e-06,
      "loss": 9.653,
      "step": 10
    },
    {
      "epoch": 0.10589651022864019,
      "grad_norm": 58.80715560913086,
      "learning_rate": 9.81279328111758e-06,
      "loss": 9.6294,
      "step": 11
    },
    {
      "epoch": 0.11552346570397112,
      "grad_norm": 49.53478240966797,
      "learning_rate": 9.769221256218165e-06,
      "loss": 9.5226,
      "step": 12
    },
    {
      "epoch": 0.12515042117930206,
      "grad_norm": 50.32080841064453,
      "learning_rate": 9.721212745498493e-06,
      "loss": 9.4886,
      "step": 13
    },
    {
      "epoch": 0.13477737665463296,
      "grad_norm": 72.49040222167969,
      "learning_rate": 9.66881240804768e-06,
      "loss": 9.7044,
      "step": 14
    },
    {
      "epoch": 0.1444043321299639,
      "grad_norm": 65.08924865722656,
      "learning_rate": 9.612068988375898e-06,
      "loss": 9.596,
      "step": 15
    },
    {
      "epoch": 0.15403128760529483,
      "grad_norm": 51.38800811767578,
      "learning_rate": 9.551035271070665e-06,
      "loss": 9.4843,
      "step": 16
    },
    {
      "epoch": 0.16365824308062576,
      "grad_norm": 65.1738510131836,
      "learning_rate": 9.485768031694872e-06,
      "loss": 9.5644,
      "step": 17
    },
    {
      "epoch": 0.17328519855595667,
      "grad_norm": 71.97135162353516,
      "learning_rate": 9.416327983972304e-06,
      "loss": 9.574,
      "step": 18
    },
    {
      "epoch": 0.1829121540312876,
      "grad_norm": 61.20840835571289,
      "learning_rate": 9.342779723309746e-06,
      "loss": 9.4448,
      "step": 19
    },
    {
      "epoch": 0.19253910950661854,
      "grad_norm": 53.58602523803711,
      "learning_rate": 9.26519166670821e-06,
      "loss": 9.3692,
      "step": 20
    },
    {
      "epoch": 0.20216606498194944,
      "grad_norm": 46.41659927368164,
      "learning_rate": 9.183635989119211e-06,
      "loss": 9.3402,
      "step": 21
    },
    {
      "epoch": 0.21179302045728038,
      "grad_norm": 57.2279052734375,
      "learning_rate": 9.098188556305262e-06,
      "loss": 9.3307,
      "step": 22
    },
    {
      "epoch": 0.2214199759326113,
      "grad_norm": 69.78079223632812,
      "learning_rate": 9.008928854267054e-06,
      "loss": 9.3439,
      "step": 23
    },
    {
      "epoch": 0.23104693140794225,
      "grad_norm": 98.2174301147461,
      "learning_rate": 8.91593991530297e-06,
      "loss": 9.3648,
      "step": 24
    },
    {
      "epoch": 0.24067388688327315,
      "grad_norm": 62.788814544677734,
      "learning_rate": 8.819308240769726e-06,
      "loss": 9.2964,
      "step": 25
    },
    {
      "epoch": 0.24067388688327315,
      "eval_clap": 0.23925480246543884,
      "eval_loss": 3.6824278831481934,
      "eval_runtime": 551.8966,
      "eval_samples_per_second": 0.058,
      "eval_steps_per_second": 0.058,
      "step": 25
    },
    {
      "epoch": 0.2503008423586041,
      "grad_norm": 67.70923614501953,
      "learning_rate": 8.71912372061598e-06,
      "loss": 9.2203,
      "step": 26
    },
    {
      "epoch": 0.259927797833935,
      "grad_norm": 55.661197662353516,
      "learning_rate": 8.615479549763756e-06,
      "loss": 9.1849,
      "step": 27
    },
    {
      "epoch": 0.2695547533092659,
      "grad_norm": 50.55519104003906,
      "learning_rate": 8.508472141415468e-06,
      "loss": 9.1337,
      "step": 28
    },
    {
      "epoch": 0.27918170878459686,
      "grad_norm": 44.694305419921875,
      "learning_rate": 8.398201037367202e-06,
      "loss": 9.1279,
      "step": 29
    },
    {
      "epoch": 0.2888086642599278,
      "grad_norm": 45.560523986816406,
      "learning_rate": 8.284768815411693e-06,
      "loss": 9.1314,
      "step": 30
    },
    {
      "epoch": 0.29843561973525873,
      "grad_norm": 49.15139389038086,
      "learning_rate": 8.168280993917078e-06,
      "loss": 9.0014,
      "step": 31
    },
    {
      "epoch": 0.30806257521058966,
      "grad_norm": 80.88079833984375,
      "learning_rate": 8.048845933670274e-06,
      "loss": 9.0433,
      "step": 32
    },
    {
      "epoch": 0.3176895306859206,
      "grad_norm": 56.16194534301758,
      "learning_rate": 7.92657473707621e-06,
      "loss": 8.9263,
      "step": 33
    },
    {
      "epoch": 0.32731648616125153,
      "grad_norm": 90.87415313720703,
      "learning_rate": 7.801581144806752e-06,
      "loss": 8.9342,
      "step": 34
    },
    {
      "epoch": 0.3369434416365824,
      "grad_norm": 71.51280212402344,
      "learning_rate": 7.673981429995372e-06,
      "loss": 8.9876,
      "step": 35
    },
    {
      "epoch": 0.34657039711191334,
      "grad_norm": 74.96561431884766,
      "learning_rate": 7.5438942900761035e-06,
      "loss": 8.9411,
      "step": 36
    },
    {
      "epoch": 0.3561973525872443,
      "grad_norm": 65.20281982421875,
      "learning_rate": 7.411440736367281e-06,
      "loss": 8.8483,
      "step": 37
    },
    {
      "epoch": 0.3658243080625752,
      "grad_norm": 88.80401611328125,
      "learning_rate": 7.276743981502856e-06,
      "loss": 8.854,
      "step": 38
    },
    {
      "epoch": 0.37545126353790614,
      "grad_norm": 57.629215240478516,
      "learning_rate": 7.139929324815965e-06,
      "loss": 8.7488,
      "step": 39
    },
    {
      "epoch": 0.3850782190132371,
      "grad_norm": 68.3960952758789,
      "learning_rate": 7.00112403578139e-06,
      "loss": 8.6557,
      "step": 40
    },
    {
      "epoch": 0.394705174488568,
      "grad_norm": Infinity,
      "learning_rate": 7.00112403578139e-06,
      "loss": 8.6279,
      "step": 41
    },
    {
      "epoch": 0.4043321299638989,
      "grad_norm": 60.18490982055664,
      "learning_rate": 6.860457235625322e-06,
      "loss": 8.7681,
      "step": 42
    },
    {
      "epoch": 0.4139590854392298,
      "grad_norm": 47.897037506103516,
      "learning_rate": 6.7180597772125665e-06,
      "loss": 8.6985,
      "step": 43
    },
    {
      "epoch": 0.42358604091456076,
      "grad_norm": 67.03247833251953,
      "learning_rate": 6.574064123322925e-06,
      "loss": 8.8191,
      "step": 44
    },
    {
      "epoch": 0.4332129963898917,
      "grad_norm": 81.16341400146484,
      "learning_rate": 6.42860422342998e-06,
      "loss": 8.4126,
      "step": 45
    },
    {
      "epoch": 0.4428399518652226,
      "grad_norm": 60.89366912841797,
      "learning_rate": 6.281815389096903e-06,
      "loss": 8.5561,
      "step": 46
    },
    {
      "epoch": 0.45246690734055356,
      "grad_norm": 65.52647399902344,
      "learning_rate": 6.133834168105206e-06,
      "loss": 8.4911,
      "step": 47
    },
    {
      "epoch": 0.4620938628158845,
      "grad_norm": 51.18867111206055,
      "learning_rate": 5.9847982174335314e-06,
      "loss": 8.5119,
      "step": 48
    },
    {
      "epoch": 0.4717208182912154,
      "grad_norm": 82.5035171508789,
      "learning_rate": 5.834846175204612e-06,
      "loss": 8.416,
      "step": 49
    },
    {
      "epoch": 0.4813477737665463,
      "grad_norm": 51.8901252746582,
      "learning_rate": 5.684117531719552e-06,
      "loss": 8.452,
      "step": 50
    },
    {
      "epoch": 0.4813477737665463,
      "eval_clap": 0.22033847868442535,
      "eval_loss": 3.7089993953704834,
      "eval_runtime": 552.5298,
      "eval_samples_per_second": 0.058,
      "eval_steps_per_second": 0.058,
      "step": 50
    },
    {
      "epoch": 0.49097472924187724,
      "grad_norm": 52.55012893676758,
      "learning_rate": 5.532752499699381e-06,
      "loss": 8.4129,
      "step": 51
    },
    {
      "epoch": 0.5006016847172082,
      "grad_norm": 51.49845886230469,
      "learning_rate": 5.380891883854591e-06,
      "loss": 8.4629,
      "step": 52
    },
    {
      "epoch": 0.5102286401925391,
      "grad_norm": 61.926048278808594,
      "learning_rate": 5.228676949903974e-06,
      "loss": 8.3679,
      "step": 53
    },
    {
      "epoch": 0.51985559566787,
      "grad_norm": 52.02131652832031,
      "learning_rate": 5.07624929316463e-06,
      "loss": 8.4213,
      "step": 54
    },
    {
      "epoch": 0.529482551143201,
      "grad_norm": 57.23763656616211,
      "learning_rate": 4.923750706835371e-06,
      "loss": 8.436,
      "step": 55
    },
    {
      "epoch": 0.5391095066185319,
      "grad_norm": 42.910003662109375,
      "learning_rate": 4.771323050096028e-06,
      "loss": 8.4824,
      "step": 56
    },
    {
      "epoch": 0.5487364620938628,
      "grad_norm": 66.24418640136719,
      "learning_rate": 4.619108116145411e-06,
      "loss": 8.1537,
      "step": 57
    },
    {
      "epoch": 0.5583634175691937,
      "grad_norm": 40.68498229980469,
      "learning_rate": 4.467247500300621e-06,
      "loss": 8.437,
      "step": 58
    },
    {
      "epoch": 0.5679903730445247,
      "grad_norm": 42.7538948059082,
      "learning_rate": 4.31588246828045e-06,
      "loss": 8.3363,
      "step": 59
    },
    {
      "epoch": 0.5776173285198556,
      "grad_norm": 57.252750396728516,
      "learning_rate": 4.165153824795391e-06,
      "loss": 8.0644,
      "step": 60
    },
    {
      "epoch": 0.5872442839951865,
      "grad_norm": 67.81254577636719,
      "learning_rate": 4.015201782566471e-06,
      "loss": 8.0197,
      "step": 61
    },
    {
      "epoch": 0.5968712394705175,
      "grad_norm": 52.997032165527344,
      "learning_rate": 3.866165831894796e-06,
      "loss": 8.2462,
      "step": 62
    },
    {
      "epoch": 0.6064981949458483,
      "grad_norm": 52.59479522705078,
      "learning_rate": 3.7181846109031007e-06,
      "loss": 8.1063,
      "step": 63
    },
    {
      "epoch": 0.6161251504211793,
      "grad_norm": 49.231109619140625,
      "learning_rate": 3.5713957765700224e-06,
      "loss": 8.1307,
      "step": 64
    },
    {
      "epoch": 0.6257521058965102,
      "grad_norm": 88.22990417480469,
      "learning_rate": 3.425935876677077e-06,
      "loss": 7.6548,
      "step": 65
    },
    {
      "epoch": 0.6353790613718412,
      "grad_norm": 38.582950592041016,
      "learning_rate": 3.2819402227874364e-06,
      "loss": 8.2685,
      "step": 66
    },
    {
      "epoch": 0.6450060168471721,
      "grad_norm": 44.78527069091797,
      "learning_rate": 3.1395427643746802e-06,
      "loss": 8.1258,
      "step": 67
    },
    {
      "epoch": 0.6546329723225031,
      "grad_norm": 53.792606353759766,
      "learning_rate": 2.99887596421861e-06,
      "loss": 8.0466,
      "step": 68
    },
    {
      "epoch": 0.6642599277978339,
      "grad_norm": 54.07836151123047,
      "learning_rate": 2.860070675184036e-06,
      "loss": 7.8841,
      "step": 69
    },
    {
      "epoch": 0.6738868832731648,
      "grad_norm": 59.08965301513672,
      "learning_rate": 2.7232560184971437e-06,
      "loss": 7.8315,
      "step": 70
    },
    {
      "epoch": 0.6835138387484958,
      "grad_norm": 60.553993225097656,
      "learning_rate": 2.588559263632719e-06,
      "loss": 7.9891,
      "step": 71
    },
    {
      "epoch": 0.6931407942238267,
      "grad_norm": 29.405763626098633,
      "learning_rate": 2.4561057099238973e-06,
      "loss": 8.3634,
      "step": 72
    },
    {
      "epoch": 0.7027677496991577,
      "grad_norm": 53.22356033325195,
      "learning_rate": 2.3260185700046295e-06,
      "loss": 7.921,
      "step": 73
    },
    {
      "epoch": 0.7123947051744886,
      "grad_norm": 33.783748626708984,
      "learning_rate": 2.1984188551932513e-06,
      "loss": 8.2299,
      "step": 74
    },
    {
      "epoch": 0.7220216606498195,
      "grad_norm": 34.38785934448242,
      "learning_rate": 2.0734252629237892e-06,
      "loss": 8.2681,
      "step": 75
    },
    {
      "epoch": 0.7220216606498195,
      "eval_clap": 0.19338133931159973,
      "eval_loss": 3.7505452632904053,
      "eval_runtime": 552.0559,
      "eval_samples_per_second": 0.058,
      "eval_steps_per_second": 0.058,
      "step": 75
    },
    {
      "epoch": 0.7316486161251504,
      "grad_norm": 60.097808837890625,
      "learning_rate": 1.9511540663297284e-06,
      "loss": 7.715,
      "step": 76
    },
    {
      "epoch": 0.7412755716004813,
      "grad_norm": 37.17088317871094,
      "learning_rate": 1.8317190060829242e-06,
      "loss": 8.0466,
      "step": 77
    },
    {
      "epoch": 0.7509025270758123,
      "grad_norm": 38.48884963989258,
      "learning_rate": 1.7152311845883096e-06,
      "loss": 8.0262,
      "step": 78
    },
    {
      "epoch": 0.7605294825511432,
      "grad_norm": 41.10515594482422,
      "learning_rate": 1.601798962632799e-06,
      "loss": 7.9378,
      "step": 79
    },
    {
      "epoch": 0.7701564380264742,
      "grad_norm": 53.396087646484375,
      "learning_rate": 1.491527858584535e-06,
      "loss": 7.7085,
      "step": 80
    },
    {
      "epoch": 0.779783393501805,
      "grad_norm": 40.815067291259766,
      "learning_rate": 1.3845204502362442e-06,
      "loss": 7.9538,
      "step": 81
    },
    {
      "epoch": 0.789410348977136,
      "grad_norm": 35.33771896362305,
      "learning_rate": 1.28087627938402e-06,
      "loss": 8.1486,
      "step": 82
    },
    {
      "epoch": 0.7990373044524669,
      "grad_norm": 49.30295944213867,
      "learning_rate": 1.1806917592302763e-06,
      "loss": 7.9905,
      "step": 83
    },
    {
      "epoch": 0.8086642599277978,
      "grad_norm": 53.45514678955078,
      "learning_rate": 1.0840600846970333e-06,
      "loss": 7.7255,
      "step": 84
    },
    {
      "epoch": 0.8182912154031288,
      "grad_norm": 59.27412796020508,
      "learning_rate": 9.91071145732948e-07,
      "loss": 7.8194,
      "step": 85
    },
    {
      "epoch": 0.8279181708784596,
      "grad_norm": 39.87610626220703,
      "learning_rate": 9.018114436947373e-07,
      "loss": 7.9528,
      "step": 86
    },
    {
      "epoch": 0.8375451263537906,
      "grad_norm": 74.84903717041016,
      "learning_rate": 8.163640108807897e-07,
      "loss": 7.5929,
      "step": 87
    },
    {
      "epoch": 0.8471720818291215,
      "grad_norm": 36.163143157958984,
      "learning_rate": 7.348083332917927e-07,
      "loss": 8.0817,
      "step": 88
    },
    {
      "epoch": 0.8567990373044525,
      "grad_norm": 75.65863037109375,
      "learning_rate": 6.572202766902569e-07,
      "loss": 7.7245,
      "step": 89
    },
    {
      "epoch": 0.8664259927797834,
      "grad_norm": 34.32239532470703,
      "learning_rate": 5.836720160276971e-07,
      "loss": 8.0978,
      "step": 90
    },
    {
      "epoch": 0.8760529482551144,
      "grad_norm": 47.71330642700195,
      "learning_rate": 5.1423196830513e-07,
      "loss": 7.9117,
      "step": 91
    },
    {
      "epoch": 0.8856799037304453,
      "grad_norm": 60.308658599853516,
      "learning_rate": 4.4896472892933693e-07,
      "loss": 7.5789,
      "step": 92
    },
    {
      "epoch": 0.8953068592057761,
      "grad_norm": 32.330928802490234,
      "learning_rate": 3.8793101162410417e-07,
      "loss": 8.0885,
      "step": 93
    },
    {
      "epoch": 0.9049338146811071,
      "grad_norm": 66.5735855102539,
      "learning_rate": 3.3118759195232273e-07,
      "loss": 7.4854,
      "step": 94
    },
    {
      "epoch": 0.914560770156438,
      "grad_norm": 35.20449447631836,
      "learning_rate": 2.787872545015069e-07,
      "loss": 8.0211,
      "step": 95
    },
    {
      "epoch": 0.924187725631769,
      "grad_norm": 37.92588424682617,
      "learning_rate": 2.307787437818365e-07,
      "loss": 8.1618,
      "step": 96
    },
    {
      "epoch": 0.9338146811070999,
      "grad_norm": 61.763824462890625,
      "learning_rate": 1.8720671888242058e-07,
      "loss": 7.7689,
      "step": 97
    },
    {
      "epoch": 0.9434416365824309,
      "grad_norm": 42.408660888671875,
      "learning_rate": 1.4811171192794628e-07,
      "loss": 8.0395,
      "step": 98
    },
    {
      "epoch": 0.9530685920577617,
      "grad_norm": 53.46616744995117,
      "learning_rate": 1.1353009037437523e-07,
      "loss": 7.6451,
      "step": 99
    },
    {
      "epoch": 0.9626955475330926,
      "grad_norm": 34.209293365478516,
      "learning_rate": 8.34940231787379e-08,
      "loss": 8.0758,
      "step": 100
    },
    {
      "epoch": 0.9626955475330926,
      "eval_clap": 0.20374518632888794,
      "eval_loss": 3.7648110389709473,
      "eval_runtime": 557.5953,
      "eval_samples_per_second": 0.057,
      "eval_steps_per_second": 0.057,
      "step": 100
    },
    {
      "epoch": 0.9723225030084236,
      "grad_norm": 63.23237228393555,
      "learning_rate": 5.803145087451945e-08,
      "loss": 7.5566,
      "step": 101
    },
    {
      "epoch": 0.9819494584837545,
      "grad_norm": 52.794029235839844,
      "learning_rate": 3.716605958046071e-08,
      "loss": 7.7234,
      "step": 102
    },
    {
      "epoch": 0.9915764139590855,
      "grad_norm": 46.99095916748047,
      "learning_rate": 2.0917258966953735e-08,
      "loss": 7.7302,
      "step": 103
    },
    {
      "epoch": 0.9915764139590855,
      "step": 103,
      "total_flos": 168544032748224.0,
      "train_loss": 8.598366718847775,
      "train_runtime": 2954.9231,
      "train_samples_per_second": 0.281,
      "train_steps_per_second": 0.035
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 103,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 168544032748224.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}