{
  "best_metric": 11.5,
  "best_model_checkpoint": "miner_id_24/checkpoint-50",
  "epoch": 0.09770395701025891,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0004885197850512946,
      "grad_norm": 0.00014923018170520663,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 46.0,
      "step": 1
    },
    {
      "epoch": 0.0004885197850512946,
      "eval_loss": 11.5,
      "eval_runtime": 21.8085,
      "eval_samples_per_second": 158.103,
      "eval_steps_per_second": 79.052,
      "step": 1
    },
    {
      "epoch": 0.0009770395701025891,
      "grad_norm": 0.00010189843305852264,
      "learning_rate": 6.666666666666667e-06,
      "loss": 46.0,
      "step": 2
    },
    {
      "epoch": 0.0014655593551538837,
      "grad_norm": 0.00011581017315620556,
      "learning_rate": 1e-05,
      "loss": 46.0,
      "step": 3
    },
    {
      "epoch": 0.0019540791402051783,
      "grad_norm": 9.342029079562053e-05,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 46.0,
      "step": 4
    },
    {
      "epoch": 0.002442598925256473,
      "grad_norm": 0.0001268885243916884,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 46.0,
      "step": 5
    },
    {
      "epoch": 0.0029311187103077674,
      "grad_norm": 0.0001576822978677228,
      "learning_rate": 2e-05,
      "loss": 46.0,
      "step": 6
    },
    {
      "epoch": 0.003419638495359062,
      "grad_norm": 0.00015863515727687627,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 46.0,
      "step": 7
    },
    {
      "epoch": 0.0039081582804103565,
      "grad_norm": 0.00016007298836484551,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 46.0,
      "step": 8
    },
    {
      "epoch": 0.004396678065461651,
      "grad_norm": 0.00016516570758540183,
      "learning_rate": 3e-05,
      "loss": 46.0,
      "step": 9
    },
    {
      "epoch": 0.004885197850512946,
      "grad_norm": 0.00019161883392371237,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 46.0,
      "step": 10
    },
    {
      "epoch": 0.00537371763556424,
      "grad_norm": 0.00019221696129534394,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 46.0,
      "step": 11
    },
    {
      "epoch": 0.005862237420615535,
      "grad_norm": 0.00019196397624909878,
      "learning_rate": 4e-05,
      "loss": 46.0,
      "step": 12
    },
    {
      "epoch": 0.006350757205666829,
      "grad_norm": 0.00018356094369664788,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 46.0,
      "step": 13
    },
    {
      "epoch": 0.006839276990718124,
      "grad_norm": 0.0001760963787091896,
      "learning_rate": 4.666666666666667e-05,
      "loss": 46.0,
      "step": 14
    },
    {
      "epoch": 0.0073277967757694185,
      "grad_norm": 0.0001812873815651983,
      "learning_rate": 5e-05,
      "loss": 46.0,
      "step": 15
    },
    {
      "epoch": 0.007816316560820713,
      "grad_norm": 0.00020668996148742735,
      "learning_rate": 5.333333333333333e-05,
      "loss": 46.0,
      "step": 16
    },
    {
      "epoch": 0.008304836345872008,
      "grad_norm": 0.0001919849164551124,
      "learning_rate": 5.666666666666667e-05,
      "loss": 46.0,
      "step": 17
    },
    {
      "epoch": 0.008793356130923302,
      "grad_norm": 0.00018678826745599508,
      "learning_rate": 6e-05,
      "loss": 46.0,
      "step": 18
    },
    {
      "epoch": 0.009281875915974597,
      "grad_norm": 0.00017623617895878851,
      "learning_rate": 6.333333333333333e-05,
      "loss": 46.0,
      "step": 19
    },
    {
      "epoch": 0.009770395701025891,
      "grad_norm": 0.00020370143465697765,
      "learning_rate": 6.666666666666667e-05,
      "loss": 46.0,
      "step": 20
    },
    {
      "epoch": 0.010258915486077186,
      "grad_norm": 0.0001907388650579378,
      "learning_rate": 7e-05,
      "loss": 46.0,
      "step": 21
    },
    {
      "epoch": 0.01074743527112848,
      "grad_norm": 0.00018769825692288578,
      "learning_rate": 7.333333333333333e-05,
      "loss": 46.0,
      "step": 22
    },
    {
      "epoch": 0.011235955056179775,
      "grad_norm": 0.00017153348017018288,
      "learning_rate": 7.666666666666667e-05,
      "loss": 46.0,
      "step": 23
    },
    {
      "epoch": 0.01172447484123107,
      "grad_norm": 0.00022031365369912237,
      "learning_rate": 8e-05,
      "loss": 46.0,
      "step": 24
    },
    {
      "epoch": 0.012212994626282364,
      "grad_norm": 0.0002413343609077856,
      "learning_rate": 8.333333333333334e-05,
      "loss": 46.0,
      "step": 25
    },
    {
      "epoch": 0.012701514411333659,
      "grad_norm": 0.0002299462357768789,
      "learning_rate": 8.666666666666667e-05,
      "loss": 46.0,
      "step": 26
    },
    {
      "epoch": 0.013190034196384953,
      "grad_norm": 0.0002480004623066634,
      "learning_rate": 9e-05,
      "loss": 46.0,
      "step": 27
    },
    {
      "epoch": 0.013678553981436248,
      "grad_norm": 0.0002981836150866002,
      "learning_rate": 9.333333333333334e-05,
      "loss": 46.0,
      "step": 28
    },
    {
      "epoch": 0.014167073766487542,
      "grad_norm": 0.0003038592985831201,
      "learning_rate": 9.666666666666667e-05,
      "loss": 46.0,
      "step": 29
    },
    {
      "epoch": 0.014655593551538837,
      "grad_norm": 0.00028463880880735815,
      "learning_rate": 0.0001,
      "loss": 46.0,
      "step": 30
    },
    {
      "epoch": 0.015144113336590131,
      "grad_norm": 0.00029816399910487235,
      "learning_rate": 9.999146252290264e-05,
      "loss": 46.0,
      "step": 31
    },
    {
      "epoch": 0.015632633121641426,
      "grad_norm": 0.00033598695881664753,
      "learning_rate": 9.996585300715116e-05,
      "loss": 46.0,
      "step": 32
    },
    {
      "epoch": 0.01612115290669272,
      "grad_norm": 0.0003494895063340664,
      "learning_rate": 9.99231801983717e-05,
      "loss": 46.0,
      "step": 33
    },
    {
      "epoch": 0.016609672691744015,
      "grad_norm": 0.0003391339269001037,
      "learning_rate": 9.986345866928941e-05,
      "loss": 46.0,
      "step": 34
    },
    {
      "epoch": 0.01709819247679531,
      "grad_norm": 0.0003875583643093705,
      "learning_rate": 9.978670881475172e-05,
      "loss": 46.0,
      "step": 35
    },
    {
      "epoch": 0.017586712261846604,
      "grad_norm": 0.00042881048284471035,
      "learning_rate": 9.96929568447637e-05,
      "loss": 46.0,
      "step": 36
    },
    {
      "epoch": 0.0180752320468979,
      "grad_norm": 0.0004535310436040163,
      "learning_rate": 9.958223477553714e-05,
      "loss": 46.0,
      "step": 37
    },
    {
      "epoch": 0.018563751831949193,
      "grad_norm": 0.0004276837280485779,
      "learning_rate": 9.94545804185573e-05,
      "loss": 46.0,
      "step": 38
    },
    {
      "epoch": 0.019052271617000488,
      "grad_norm": 0.0004478572809603065,
      "learning_rate": 9.931003736767013e-05,
      "loss": 46.0,
      "step": 39
    },
    {
      "epoch": 0.019540791402051783,
      "grad_norm": 0.0004176196234766394,
      "learning_rate": 9.91486549841951e-05,
      "loss": 46.0,
      "step": 40
    },
    {
      "epoch": 0.020029311187103077,
      "grad_norm": 0.0005269350949674845,
      "learning_rate": 9.89704883800683e-05,
      "loss": 46.0,
      "step": 41
    },
    {
      "epoch": 0.02051783097215437,
      "grad_norm": 0.0005566150066442788,
      "learning_rate": 9.877559839902184e-05,
      "loss": 46.0,
      "step": 42
    },
    {
      "epoch": 0.021006350757205666,
      "grad_norm": 0.0005373305757530034,
      "learning_rate": 9.85640515958057e-05,
      "loss": 46.0,
      "step": 43
    },
    {
      "epoch": 0.02149487054225696,
      "grad_norm": 0.0006445915205404162,
      "learning_rate": 9.833592021345937e-05,
      "loss": 46.0,
      "step": 44
    },
    {
      "epoch": 0.021983390327308255,
      "grad_norm": 0.0006316164508461952,
      "learning_rate": 9.809128215864097e-05,
      "loss": 46.0,
      "step": 45
    },
    {
      "epoch": 0.02247191011235955,
      "grad_norm": 0.0006566172814927995,
      "learning_rate": 9.783022097502204e-05,
      "loss": 46.0,
      "step": 46
    },
    {
      "epoch": 0.022960429897410845,
      "grad_norm": 0.000673891045153141,
      "learning_rate": 9.755282581475769e-05,
      "loss": 46.0,
      "step": 47
    },
    {
      "epoch": 0.02344894968246214,
      "grad_norm": 0.0007460052729584277,
      "learning_rate": 9.725919140804099e-05,
      "loss": 46.0,
      "step": 48
    },
    {
      "epoch": 0.023937469467513434,
      "grad_norm": 0.0008335084421560168,
      "learning_rate": 9.694941803075283e-05,
      "loss": 46.0,
      "step": 49
    },
    {
      "epoch": 0.024425989252564728,
      "grad_norm": 0.0007683219737373292,
      "learning_rate": 9.662361147021779e-05,
      "loss": 46.0,
      "step": 50
    },
    {
      "epoch": 0.024425989252564728,
      "eval_loss": 11.5,
      "eval_runtime": 21.8811,
      "eval_samples_per_second": 157.579,
      "eval_steps_per_second": 78.79,
      "step": 50
    },
    {
      "epoch": 0.024914509037616023,
      "grad_norm": 0.0005438526277430356,
      "learning_rate": 9.628188298907782e-05,
      "loss": 46.0,
      "step": 51
    },
    {
      "epoch": 0.025403028822667317,
      "grad_norm": 0.000465790944872424,
      "learning_rate": 9.592434928729616e-05,
      "loss": 46.0,
      "step": 52
    },
    {
      "epoch": 0.025891548607718612,
      "grad_norm": 0.0005006449646316469,
      "learning_rate": 9.555113246230442e-05,
      "loss": 46.0,
      "step": 53
    },
    {
      "epoch": 0.026380068392769906,
      "grad_norm": 0.0004962133825756609,
      "learning_rate": 9.516235996730645e-05,
      "loss": 46.0,
      "step": 54
    },
    {
      "epoch": 0.0268685881778212,
      "grad_norm": 0.0005621546879410744,
      "learning_rate": 9.475816456775313e-05,
      "loss": 46.0,
      "step": 55
    },
    {
      "epoch": 0.027357107962872496,
      "grad_norm": 0.0008538339752703905,
      "learning_rate": 9.43386842960031e-05,
      "loss": 46.0,
      "step": 56
    },
    {
      "epoch": 0.02784562774792379,
      "grad_norm": 0.0008000246016308665,
      "learning_rate": 9.39040624041849e-05,
      "loss": 46.0,
      "step": 57
    },
    {
      "epoch": 0.028334147532975085,
      "grad_norm": 0.0008223172626458108,
      "learning_rate": 9.345444731527642e-05,
      "loss": 46.0,
      "step": 58
    },
    {
      "epoch": 0.02882266731802638,
      "grad_norm": 0.0008595731924287975,
      "learning_rate": 9.298999257241863e-05,
      "loss": 46.0,
      "step": 59
    },
    {
      "epoch": 0.029311187103077674,
      "grad_norm": 0.0009680544608272612,
      "learning_rate": 9.251085678648072e-05,
      "loss": 46.0,
      "step": 60
    },
    {
      "epoch": 0.02979970688812897,
      "grad_norm": 0.0008582666050642729,
      "learning_rate": 9.201720358189464e-05,
      "loss": 46.0,
      "step": 61
    },
    {
      "epoch": 0.030288226673180263,
      "grad_norm": 0.0009988963138312101,
      "learning_rate": 9.150920154077754e-05,
      "loss": 46.0,
      "step": 62
    },
    {
      "epoch": 0.030776746458231558,
      "grad_norm": 0.0010351977543905377,
      "learning_rate": 9.098702414536107e-05,
      "loss": 46.0,
      "step": 63
    },
    {
      "epoch": 0.03126526624328285,
      "grad_norm": 0.000999148585833609,
      "learning_rate": 9.045084971874738e-05,
      "loss": 46.0,
      "step": 64
    },
    {
      "epoch": 0.03175378602833415,
      "grad_norm": 0.0010952550219371915,
      "learning_rate": 8.9900861364012e-05,
      "loss": 46.0,
      "step": 65
    },
    {
      "epoch": 0.03224230581338544,
      "grad_norm": 0.001108844648115337,
      "learning_rate": 8.933724690167417e-05,
      "loss": 46.0,
      "step": 66
    },
    {
      "epoch": 0.032730825598436736,
      "grad_norm": 0.0010663546854630113,
      "learning_rate": 8.876019880555649e-05,
      "loss": 46.0,
      "step": 67
    },
    {
      "epoch": 0.03321934538348803,
      "grad_norm": 0.0011809360003098845,
      "learning_rate": 8.816991413705516e-05,
      "loss": 46.0,
      "step": 68
    },
    {
      "epoch": 0.033707865168539325,
      "grad_norm": 0.00114791642408818,
      "learning_rate": 8.756659447784368e-05,
      "loss": 46.0,
      "step": 69
    },
    {
      "epoch": 0.03419638495359062,
      "grad_norm": 0.001080791698768735,
      "learning_rate": 8.695044586103296e-05,
      "loss": 46.0,
      "step": 70
    },
    {
      "epoch": 0.034684904738641914,
      "grad_norm": 0.0010704582091420889,
      "learning_rate": 8.632167870081121e-05,
      "loss": 46.0,
      "step": 71
    },
    {
      "epoch": 0.03517342452369321,
      "grad_norm": 0.0011543328873813152,
      "learning_rate": 8.568050772058762e-05,
      "loss": 46.0,
      "step": 72
    },
    {
      "epoch": 0.0356619443087445,
      "grad_norm": 0.0013566854177042842,
      "learning_rate": 8.502715187966455e-05,
      "loss": 46.0,
      "step": 73
    },
    {
      "epoch": 0.0361504640937958,
      "grad_norm": 0.0013899672776460648,
      "learning_rate": 8.436183429846313e-05,
      "loss": 46.0,
      "step": 74
    },
    {
      "epoch": 0.03663898387884709,
      "grad_norm": 0.0013129275757819414,
      "learning_rate": 8.368478218232787e-05,
      "loss": 46.0,
      "step": 75
    },
    {
      "epoch": 0.03712750366389839,
      "grad_norm": 0.0012440407881513238,
      "learning_rate": 8.299622674393614e-05,
      "loss": 46.0,
      "step": 76
    },
    {
      "epoch": 0.03761602344894968,
      "grad_norm": 0.0013878497993573546,
      "learning_rate": 8.229640312433937e-05,
      "loss": 46.0,
      "step": 77
    },
    {
      "epoch": 0.038104543234000976,
      "grad_norm": 0.0014280028408393264,
      "learning_rate": 8.158555031266254e-05,
      "loss": 46.0,
      "step": 78
    },
    {
      "epoch": 0.03859306301905227,
      "grad_norm": 0.001614846638403833,
      "learning_rate": 8.086391106448965e-05,
      "loss": 46.0,
      "step": 79
    },
    {
      "epoch": 0.039081582804103565,
      "grad_norm": 0.0015674001770094037,
      "learning_rate": 8.013173181896283e-05,
      "loss": 46.0,
      "step": 80
    },
    {
      "epoch": 0.03957010258915486,
      "grad_norm": 0.001519041950814426,
      "learning_rate": 7.938926261462366e-05,
      "loss": 46.0,
      "step": 81
    },
    {
      "epoch": 0.040058622374206154,
      "grad_norm": 0.0017371849389746785,
      "learning_rate": 7.863675700402526e-05,
      "loss": 46.0,
      "step": 82
    },
    {
      "epoch": 0.04054714215925745,
      "grad_norm": 0.0015264844987541437,
      "learning_rate": 7.787447196714427e-05,
      "loss": 46.0,
      "step": 83
    },
    {
      "epoch": 0.04103566194430874,
      "grad_norm": 0.0015574233839288354,
      "learning_rate": 7.710266782362247e-05,
      "loss": 46.0,
      "step": 84
    },
    {
      "epoch": 0.04152418172936004,
      "grad_norm": 0.0017922529950737953,
      "learning_rate": 7.63216081438678e-05,
      "loss": 46.0,
      "step": 85
    },
    {
      "epoch": 0.04201270151441133,
      "grad_norm": 0.0015132039552554488,
      "learning_rate": 7.553155965904535e-05,
      "loss": 46.0,
      "step": 86
    },
    {
      "epoch": 0.04250122129946263,
      "grad_norm": 0.0014118703547865152,
      "learning_rate": 7.473279216998895e-05,
      "loss": 46.0,
      "step": 87
    },
    {
      "epoch": 0.04298974108451392,
      "grad_norm": 0.0015360661782324314,
      "learning_rate": 7.392557845506432e-05,
      "loss": 46.0,
      "step": 88
    },
    {
      "epoch": 0.043478260869565216,
      "grad_norm": 0.0016589147271588445,
      "learning_rate": 7.311019417701566e-05,
      "loss": 46.0,
      "step": 89
    },
    {
      "epoch": 0.04396678065461651,
      "grad_norm": 0.0018973118858411908,
      "learning_rate": 7.228691778882693e-05,
      "loss": 46.0,
      "step": 90
    },
    {
      "epoch": 0.044455300439667805,
      "grad_norm": 0.0018632501596584916,
      "learning_rate": 7.145603043863045e-05,
      "loss": 46.0,
      "step": 91
    },
    {
      "epoch": 0.0449438202247191,
      "grad_norm": 0.0018421127460896969,
      "learning_rate": 7.061781587369519e-05,
      "loss": 46.0,
      "step": 92
    },
    {
      "epoch": 0.045432340009770394,
      "grad_norm": 0.001867462880909443,
      "learning_rate": 6.977256034352712e-05,
      "loss": 46.0,
      "step": 93
    },
    {
      "epoch": 0.04592085979482169,
      "grad_norm": 0.0016661660047248006,
      "learning_rate": 6.892055250211552e-05,
      "loss": 46.0,
      "step": 94
    },
    {
      "epoch": 0.046409379579872984,
      "grad_norm": 0.0020030655432492495,
      "learning_rate": 6.806208330935766e-05,
      "loss": 46.0,
      "step": 95
    },
    {
      "epoch": 0.04689789936492428,
      "grad_norm": 0.0022019976750016212,
      "learning_rate": 6.719744593169641e-05,
      "loss": 46.0,
      "step": 96
    },
    {
      "epoch": 0.04738641914997557,
      "grad_norm": 0.0023582875728607178,
      "learning_rate": 6.632693564200416e-05,
      "loss": 46.0,
      "step": 97
    },
    {
      "epoch": 0.04787493893502687,
      "grad_norm": 0.0020912224426865578,
      "learning_rate": 6.545084971874738e-05,
      "loss": 46.0,
      "step": 98
    },
    {
      "epoch": 0.04836345872007816,
      "grad_norm": 0.0023314449936151505,
      "learning_rate": 6.456948734446624e-05,
      "loss": 46.0,
      "step": 99
    },
    {
      "epoch": 0.048851978505129456,
      "grad_norm": 0.002223197603598237,
      "learning_rate": 6.368314950360415e-05,
      "loss": 46.0,
      "step": 100
    },
    {
      "epoch": 0.048851978505129456,
      "eval_loss": 11.5,
      "eval_runtime": 21.7472,
      "eval_samples_per_second": 158.549,
      "eval_steps_per_second": 79.275,
      "step": 100
    },
    {
      "epoch": 0.04934049829018075,
      "grad_norm": 0.0019651062320917845,
      "learning_rate": 6.279213887972179e-05,
      "loss": 46.0,
      "step": 101
    },
    {
      "epoch": 0.049829018075232046,
      "grad_norm": 0.0010920295026153326,
      "learning_rate": 6.189675975213094e-05,
      "loss": 46.0,
      "step": 102
    },
    {
      "epoch": 0.05031753786028334,
      "grad_norm": 0.001620462746359408,
      "learning_rate": 6.099731789198344e-05,
      "loss": 46.0,
      "step": 103
    },
    {
      "epoch": 0.050806057645334635,
      "grad_norm": 0.0015201811911538243,
      "learning_rate": 6.009412045785051e-05,
      "loss": 46.0,
      "step": 104
    },
    {
      "epoch": 0.05129457743038593,
      "grad_norm": 0.0017807093681767583,
      "learning_rate": 5.918747589082853e-05,
      "loss": 46.0,
      "step": 105
    },
    {
      "epoch": 0.051783097215437224,
      "grad_norm": 0.0019678312819451094,
      "learning_rate": 5.82776938092065e-05,
      "loss": 46.0,
      "step": 106
    },
    {
      "epoch": 0.05227161700048852,
      "grad_norm": 0.0021614041179418564,
      "learning_rate": 5.736508490273188e-05,
      "loss": 46.0,
      "step": 107
    },
    {
      "epoch": 0.05276013678553981,
      "grad_norm": 0.00198457227088511,
      "learning_rate": 5.644996082651017e-05,
      "loss": 46.0,
      "step": 108
    },
    {
      "epoch": 0.05324865657059111,
      "grad_norm": 0.001815136638469994,
      "learning_rate": 5.553263409457504e-05,
      "loss": 46.0,
      "step": 109
    },
    {
      "epoch": 0.0537371763556424,
      "grad_norm": 0.0023733500856906176,
      "learning_rate": 5.4613417973165106e-05,
      "loss": 46.0,
      "step": 110
    },
    {
      "epoch": 0.0542256961406937,
      "grad_norm": 0.0020339249167591333,
      "learning_rate": 5.3692626373743706e-05,
      "loss": 46.0,
      "step": 111
    },
    {
      "epoch": 0.05471421592574499,
      "grad_norm": 0.002384207909926772,
      "learning_rate": 5.27705737457985e-05,
      "loss": 46.0,
      "step": 112
    },
    {
      "epoch": 0.055202735710796286,
      "grad_norm": 0.0023456341587007046,
      "learning_rate": 5.184757496945726e-05,
      "loss": 46.0,
      "step": 113
    },
    {
      "epoch": 0.05569125549584758,
      "grad_norm": 0.002359037986025214,
      "learning_rate": 5.092394524795649e-05,
      "loss": 46.0,
      "step": 114
    },
    {
      "epoch": 0.056179775280898875,
      "grad_norm": 0.0022823396138846874,
      "learning_rate": 5e-05,
      "loss": 46.0,
      "step": 115
    },
    {
      "epoch": 0.05666829506595017,
      "grad_norm": 0.002569975098595023,
      "learning_rate": 4.907605475204352e-05,
      "loss": 46.0,
      "step": 116
    },
    {
      "epoch": 0.057156814851001464,
      "grad_norm": 0.0023908009752631187,
      "learning_rate": 4.8152425030542766e-05,
      "loss": 46.0,
      "step": 117
    },
    {
      "epoch": 0.05764533463605276,
      "grad_norm": 0.002633164869621396,
      "learning_rate": 4.72294262542015e-05,
      "loss": 46.0,
      "step": 118
    },
    {
      "epoch": 0.05813385442110405,
      "grad_norm": 0.0026206020265817642,
      "learning_rate": 4.6307373626256306e-05,
      "loss": 46.0,
      "step": 119
    },
    {
      "epoch": 0.05862237420615535,
      "grad_norm": 0.002555572660639882,
      "learning_rate": 4.5386582026834906e-05,
      "loss": 46.0,
      "step": 120
    },
    {
      "epoch": 0.05911089399120664,
      "grad_norm": 0.0024891262874007225,
      "learning_rate": 4.446736590542497e-05,
      "loss": 46.0,
      "step": 121
    },
    {
      "epoch": 0.05959941377625794,
      "grad_norm": 0.002794750966131687,
      "learning_rate": 4.3550039173489845e-05,
      "loss": 46.0,
      "step": 122
    },
    {
      "epoch": 0.06008793356130923,
      "grad_norm": 0.002416015602648258,
      "learning_rate": 4.2634915097268115e-05,
      "loss": 46.0,
      "step": 123
    },
    {
      "epoch": 0.060576453346360526,
      "grad_norm": 0.0028717226814478636,
      "learning_rate": 4.1722306190793495e-05,
      "loss": 46.0,
      "step": 124
    },
    {
      "epoch": 0.06106497313141182,
      "grad_norm": 0.00271116872318089,
      "learning_rate": 4.0812524109171476e-05,
      "loss": 46.0,
      "step": 125
    },
    {
      "epoch": 0.061553492916463115,
      "grad_norm": 0.0025964034721255302,
      "learning_rate": 3.99058795421495e-05,
      "loss": 46.0,
      "step": 126
    },
    {
      "epoch": 0.06204201270151441,
      "grad_norm": 0.002626827685162425,
      "learning_rate": 3.9002682108016585e-05,
      "loss": 46.0,
      "step": 127
    },
    {
      "epoch": 0.0625305324865657,
      "grad_norm": 0.002837597858160734,
      "learning_rate": 3.8103240247869075e-05,
      "loss": 46.0,
      "step": 128
    },
    {
      "epoch": 0.063019052271617,
      "grad_norm": 0.0029369723051786423,
      "learning_rate": 3.720786112027822e-05,
      "loss": 46.0,
      "step": 129
    },
    {
      "epoch": 0.0635075720566683,
      "grad_norm": 0.003107696305960417,
      "learning_rate": 3.631685049639586e-05,
      "loss": 46.0,
      "step": 130
    },
    {
      "epoch": 0.06399609184171959,
      "grad_norm": 0.003282048273831606,
      "learning_rate": 3.543051265553377e-05,
      "loss": 46.0,
      "step": 131
    },
    {
      "epoch": 0.06448461162677088,
      "grad_norm": 0.003092048689723015,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 46.0,
      "step": 132
    },
    {
      "epoch": 0.06497313141182218,
      "grad_norm": 0.002742452546954155,
      "learning_rate": 3.367306435799584e-05,
      "loss": 46.0,
      "step": 133
    },
    {
      "epoch": 0.06546165119687347,
      "grad_norm": 0.002884424291551113,
      "learning_rate": 3.2802554068303596e-05,
      "loss": 46.0,
      "step": 134
    },
    {
      "epoch": 0.06595017098192477,
      "grad_norm": 0.003205804852768779,
      "learning_rate": 3.1937916690642356e-05,
      "loss": 46.0,
      "step": 135
    },
    {
      "epoch": 0.06643869076697606,
      "grad_norm": 0.0032421215437352657,
      "learning_rate": 3.107944749788449e-05,
      "loss": 46.0,
      "step": 136
    },
    {
      "epoch": 0.06692721055202736,
      "grad_norm": 0.002821906702592969,
      "learning_rate": 3.0227439656472877e-05,
      "loss": 46.0,
      "step": 137
    },
    {
      "epoch": 0.06741573033707865,
      "grad_norm": 0.0034650960005819798,
      "learning_rate": 2.9382184126304834e-05,
      "loss": 46.0,
      "step": 138
    },
    {
      "epoch": 0.06790425012212994,
      "grad_norm": 0.0033117257989943027,
      "learning_rate": 2.8543969561369556e-05,
      "loss": 46.0,
      "step": 139
    },
    {
      "epoch": 0.06839276990718124,
      "grad_norm": 0.0030436816159635782,
      "learning_rate": 2.771308221117309e-05,
      "loss": 46.0,
      "step": 140
    },
    {
      "epoch": 0.06888128969223253,
      "grad_norm": 0.0032486796844750643,
      "learning_rate": 2.688980582298435e-05,
      "loss": 46.0,
      "step": 141
    },
    {
      "epoch": 0.06936980947728383,
      "grad_norm": 0.003315830836072564,
      "learning_rate": 2.607442154493568e-05,
      "loss": 46.0,
      "step": 142
    },
    {
      "epoch": 0.06985832926233512,
      "grad_norm": 0.0029912549071013927,
      "learning_rate": 2.5267207830011068e-05,
      "loss": 46.0,
      "step": 143
    },
    {
      "epoch": 0.07034684904738642,
      "grad_norm": 0.003400547197088599,
      "learning_rate": 2.446844034095466e-05,
      "loss": 46.0,
      "step": 144
    },
    {
      "epoch": 0.07083536883243771,
      "grad_norm": 0.0037027092184871435,
      "learning_rate": 2.3678391856132204e-05,
      "loss": 46.0,
      "step": 145
    },
    {
      "epoch": 0.071323888617489,
      "grad_norm": 0.0033600705210119486,
      "learning_rate": 2.2897332176377528e-05,
      "loss": 46.0,
      "step": 146
    },
    {
      "epoch": 0.0718124084025403,
      "grad_norm": 0.0032807497773319483,
      "learning_rate": 2.2125528032855724e-05,
      "loss": 46.0,
      "step": 147
    },
    {
      "epoch": 0.0723009281875916,
      "grad_norm": 0.0033261869102716446,
      "learning_rate": 2.136324299597474e-05,
      "loss": 46.0,
      "step": 148
    },
    {
      "epoch": 0.07278944797264289,
      "grad_norm": 0.004576267208904028,
      "learning_rate": 2.061073738537635e-05,
      "loss": 46.0,
      "step": 149
    },
    {
      "epoch": 0.07327796775769418,
      "grad_norm": 0.003643499920144677,
      "learning_rate": 1.9868268181037185e-05,
      "loss": 46.0,
      "step": 150
    },
    {
      "epoch": 0.07327796775769418,
      "eval_loss": 11.5,
      "eval_runtime": 21.7893,
      "eval_samples_per_second": 158.243,
      "eval_steps_per_second": 79.122,
      "step": 150
    },
    {
      "epoch": 0.07376648754274548,
      "grad_norm": 0.0026295215357095003,
      "learning_rate": 1.9136088935510362e-05,
      "loss": 46.0,
      "step": 151
    },
    {
      "epoch": 0.07425500732779677,
      "grad_norm": 0.0019322294974699616,
      "learning_rate": 1.8414449687337464e-05,
      "loss": 46.0,
      "step": 152
    },
    {
      "epoch": 0.07474352711284807,
      "grad_norm": 0.0024281959049403667,
      "learning_rate": 1.7703596875660645e-05,
      "loss": 46.0,
      "step": 153
    },
    {
      "epoch": 0.07523204689789936,
      "grad_norm": 0.00209889211691916,
      "learning_rate": 1.700377325606388e-05,
      "loss": 46.0,
      "step": 154
    },
    {
      "epoch": 0.07572056668295066,
      "grad_norm": 0.0027790747117251158,
      "learning_rate": 1.631521781767214e-05,
      "loss": 46.0,
      "step": 155
    },
    {
      "epoch": 0.07620908646800195,
      "grad_norm": 0.0029743951745331287,
      "learning_rate": 1.5638165701536868e-05,
      "loss": 46.0,
      "step": 156
    },
    {
      "epoch": 0.07669760625305325,
      "grad_norm": 0.003012306522578001,
      "learning_rate": 1.4972848120335453e-05,
      "loss": 46.0,
      "step": 157
    },
    {
      "epoch": 0.07718612603810454,
      "grad_norm": 0.0030355937778949738,
      "learning_rate": 1.4319492279412388e-05,
      "loss": 46.0,
      "step": 158
    },
    {
      "epoch": 0.07767464582315584,
      "grad_norm": 0.002729633590206504,
      "learning_rate": 1.3678321299188801e-05,
      "loss": 46.0,
      "step": 159
    },
    {
      "epoch": 0.07816316560820713,
      "grad_norm": 0.003061029827222228,
      "learning_rate": 1.3049554138967051e-05,
      "loss": 46.0,
      "step": 160
    },
    {
      "epoch": 0.07865168539325842,
      "grad_norm": 0.0034317467361688614,
      "learning_rate": 1.2433405522156332e-05,
      "loss": 46.0,
      "step": 161
    },
    {
      "epoch": 0.07914020517830972,
      "grad_norm": 0.0037723646964877844,
      "learning_rate": 1.183008586294485e-05,
      "loss": 46.0,
      "step": 162
    },
    {
      "epoch": 0.07962872496336101,
      "grad_norm": 0.003198886290192604,
      "learning_rate": 1.1239801194443506e-05,
      "loss": 46.0,
      "step": 163
    },
    {
      "epoch": 0.08011724474841231,
      "grad_norm": 0.0031450283713638783,
      "learning_rate": 1.066275309832584e-05,
      "loss": 46.0,
      "step": 164
    },
    {
      "epoch": 0.0806057645334636,
      "grad_norm": 0.003614857094362378,
      "learning_rate": 1.0099138635988026e-05,
      "loss": 46.0,
      "step": 165
    },
    {
      "epoch": 0.0810942843185149,
      "grad_norm": 0.0034642990212887526,
      "learning_rate": 9.549150281252633e-06,
      "loss": 46.0,
      "step": 166
    },
    {
      "epoch": 0.08158280410356619,
      "grad_norm": 0.003436095081269741,
      "learning_rate": 9.012975854638949e-06,
      "loss": 46.0,
      "step": 167
    },
    {
      "epoch": 0.08207132388861749,
      "grad_norm": 0.0033289180137217045,
      "learning_rate": 8.490798459222476e-06,
      "loss": 46.0,
      "step": 168
    },
    {
      "epoch": 0.08255984367366878,
      "grad_norm": 0.0033829372841864824,
      "learning_rate": 7.982796418105371e-06,
      "loss": 46.0,
      "step": 169
    },
    {
      "epoch": 0.08304836345872008,
      "grad_norm": 0.003401255002245307,
      "learning_rate": 7.489143213519301e-06,
      "loss": 46.0,
      "step": 170
    },
    {
      "epoch": 0.08353688324377137,
      "grad_norm": 0.0036590571980923414,
      "learning_rate": 7.010007427581378e-06,
      "loss": 46.0,
      "step": 171
    },
    {
      "epoch": 0.08402540302882266,
      "grad_norm": 0.003637349233031273,
      "learning_rate": 6.5455526847235825e-06,
      "loss": 46.0,
      "step": 172
    },
    {
      "epoch": 0.08451392281387396,
      "grad_norm": 0.003337201429530978,
      "learning_rate": 6.0959375958151045e-06,
      "loss": 46.0,
      "step": 173
    },
    {
      "epoch": 0.08500244259892525,
      "grad_norm": 0.0034019306767731905,
      "learning_rate": 5.6613157039969055e-06,
      "loss": 46.0,
      "step": 174
    },
    {
      "epoch": 0.08549096238397655,
      "grad_norm": 0.0034504553768783808,
      "learning_rate": 5.241835432246889e-06,
      "loss": 46.0,
      "step": 175
    },
    {
      "epoch": 0.08597948216902784,
      "grad_norm": 0.003214376512914896,
      "learning_rate": 4.837640032693558e-06,
      "loss": 46.0,
      "step": 176
    },
    {
      "epoch": 0.08646800195407914,
      "grad_norm": 0.0036590604577213526,
      "learning_rate": 4.448867537695578e-06,
      "loss": 46.0,
      "step": 177
    },
    {
      "epoch": 0.08695652173913043,
      "grad_norm": 0.0033399956300854683,
      "learning_rate": 4.075650712703849e-06,
      "loss": 46.0,
      "step": 178
    },
    {
      "epoch": 0.08744504152418173,
      "grad_norm": 0.0034210023004561663,
      "learning_rate": 3.71811701092219e-06,
      "loss": 46.0,
      "step": 179
    },
    {
      "epoch": 0.08793356130923302,
      "grad_norm": 0.003220006823539734,
      "learning_rate": 3.376388529782215e-06,
      "loss": 46.0,
      "step": 180
    },
    {
      "epoch": 0.08842208109428432,
      "grad_norm": 0.003696272848173976,
      "learning_rate": 3.0505819692471792e-06,
      "loss": 46.0,
      "step": 181
    },
    {
      "epoch": 0.08891060087933561,
      "grad_norm": 0.003575393231585622,
      "learning_rate": 2.7408085919590264e-06,
      "loss": 46.0,
      "step": 182
    },
    {
      "epoch": 0.0893991206643869,
      "grad_norm": 0.003938958048820496,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 46.0,
      "step": 183
    },
    {
      "epoch": 0.0898876404494382,
      "grad_norm": 0.0039198389276862144,
      "learning_rate": 2.1697790249779636e-06,
      "loss": 46.0,
      "step": 184
    },
    {
      "epoch": 0.0903761602344895,
      "grad_norm": 0.0035615130327641964,
      "learning_rate": 1.908717841359048e-06,
      "loss": 46.0,
      "step": 185
    },
    {
      "epoch": 0.09086468001954079,
      "grad_norm": 0.0030337281059473753,
      "learning_rate": 1.6640797865406288e-06,
      "loss": 46.0,
      "step": 186
    },
    {
      "epoch": 0.09135319980459208,
      "grad_norm": 0.003380219452083111,
      "learning_rate": 1.4359484041943038e-06,
      "loss": 46.0,
      "step": 187
    },
    {
      "epoch": 0.09184171958964338,
      "grad_norm": 0.0038383265491575003,
      "learning_rate": 1.2244016009781701e-06,
      "loss": 46.0,
      "step": 188
    },
    {
      "epoch": 0.09233023937469467,
      "grad_norm": 0.003710967255756259,
      "learning_rate": 1.0295116199317057e-06,
      "loss": 46.0,
      "step": 189
    },
    {
      "epoch": 0.09281875915974597,
      "grad_norm": 0.003858968149870634,
      "learning_rate": 8.513450158049108e-07,
      "loss": 46.0,
      "step": 190
    },
    {
      "epoch": 0.09330727894479726,
      "grad_norm": 0.00442510237917304,
      "learning_rate": 6.899626323298713e-07,
      "loss": 46.0,
      "step": 191
    },
    {
      "epoch": 0.09379579872984856,
      "grad_norm": 0.0036304136738181114,
      "learning_rate": 5.454195814427021e-07,
      "loss": 46.0,
      "step": 192
    },
    {
      "epoch": 0.09428431851489985,
      "grad_norm": 0.003316792892292142,
      "learning_rate": 4.177652244628627e-07,
      "loss": 46.0,
      "step": 193
    },
    {
      "epoch": 0.09477283829995115,
      "grad_norm": 0.00361361145041883,
      "learning_rate": 3.0704315523631953e-07,
      "loss": 46.0,
      "step": 194
    },
    {
      "epoch": 0.09526135808500244,
      "grad_norm": 0.004398842807859182,
      "learning_rate": 2.1329118524827662e-07,
      "loss": 46.0,
      "step": 195
    },
    {
      "epoch": 0.09574987787005373,
      "grad_norm": 0.004400990903377533,
      "learning_rate": 1.3654133071059893e-07,
      "loss": 46.0,
      "step": 196
    },
    {
      "epoch": 0.09623839765510503,
      "grad_norm": 0.004007602110505104,
      "learning_rate": 7.681980162830282e-08,
      "loss": 46.0,
      "step": 197
    },
    {
      "epoch": 0.09672691744015632,
      "grad_norm": 0.004391137044876814,
      "learning_rate": 3.4146992848854695e-08,
      "loss": 46.0,
      "step": 198
    },
    {
      "epoch": 0.09721543722520762,
      "grad_norm": 0.004615367855876684,
      "learning_rate": 8.537477097364522e-09,
      "loss": 46.0,
      "step": 199
    },
    {
      "epoch": 0.09770395701025891,
      "grad_norm": 0.003925880882889032,
      "learning_rate": 0.0,
      "loss": 46.0,
      "step": 200
    },
    {
      "epoch": 0.09770395701025891,
      "eval_loss": 11.5,
      "eval_runtime": 21.7902,
      "eval_samples_per_second": 158.236,
      "eval_steps_per_second": 79.118,
      "step": 200
    }
  ],
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 3 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 32721235869696.0, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|