{ |
|
"best_metric": 0.2649017870426178, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.20714655618850336, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0010357327809425167, |
|
"grad_norm": 2.934951066970825, |
|
"learning_rate": 3.3333333333333333e-06, |
|
"loss": 1.3458, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0010357327809425167, |
|
"eval_loss": 2.4307608604431152, |
|
"eval_runtime": 131.1348, |
|
"eval_samples_per_second": 12.399, |
|
"eval_steps_per_second": 6.2, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0020714655618850335, |
|
"grad_norm": 3.631234884262085, |
|
"learning_rate": 6.666666666666667e-06, |
|
"loss": 1.5473, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0031071983428275505, |
|
"grad_norm": 3.7098679542541504, |
|
"learning_rate": 1e-05, |
|
"loss": 1.7358, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.004142931123770067, |
|
"grad_norm": 4.27931547164917, |
|
"learning_rate": 1.3333333333333333e-05, |
|
"loss": 1.3322, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.005178663904712584, |
|
"grad_norm": 4.007849216461182, |
|
"learning_rate": 1.6666666666666667e-05, |
|
"loss": 1.5253, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.006214396685655101, |
|
"grad_norm": 5.52664041519165, |
|
"learning_rate": 2e-05, |
|
"loss": 1.8545, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0072501294665976174, |
|
"grad_norm": 3.834798574447632, |
|
"learning_rate": 2.3333333333333336e-05, |
|
"loss": 1.3665, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.008285862247540134, |
|
"grad_norm": 3.7450332641601562, |
|
"learning_rate": 2.6666666666666667e-05, |
|
"loss": 1.3339, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.009321595028482652, |
|
"grad_norm": 3.6901252269744873, |
|
"learning_rate": 3e-05, |
|
"loss": 1.4489, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.010357327809425169, |
|
"grad_norm": 3.269052505493164, |
|
"learning_rate": 3.3333333333333335e-05, |
|
"loss": 0.9925, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.011393060590367685, |
|
"grad_norm": 3.4667394161224365, |
|
"learning_rate": 3.6666666666666666e-05, |
|
"loss": 1.0799, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.012428793371310202, |
|
"grad_norm": 4.5746917724609375, |
|
"learning_rate": 4e-05, |
|
"loss": 1.0442, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.013464526152252718, |
|
"grad_norm": 3.5450432300567627, |
|
"learning_rate": 4.3333333333333334e-05, |
|
"loss": 0.9699, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.014500258933195235, |
|
"grad_norm": 2.999580144882202, |
|
"learning_rate": 4.666666666666667e-05, |
|
"loss": 0.8588, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.015535991714137753, |
|
"grad_norm": 2.0789074897766113, |
|
"learning_rate": 5e-05, |
|
"loss": 0.7789, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.016571724495080268, |
|
"grad_norm": 2.5798442363739014, |
|
"learning_rate": 5.333333333333333e-05, |
|
"loss": 0.7985, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.017607457276022784, |
|
"grad_norm": 2.789106607437134, |
|
"learning_rate": 5.666666666666667e-05, |
|
"loss": 0.8539, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.018643190056965304, |
|
"grad_norm": 2.157308578491211, |
|
"learning_rate": 6e-05, |
|
"loss": 0.7339, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01967892283790782, |
|
"grad_norm": 2.024956464767456, |
|
"learning_rate": 6.333333333333333e-05, |
|
"loss": 0.6393, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.020714655618850338, |
|
"grad_norm": 2.095165967941284, |
|
"learning_rate": 6.666666666666667e-05, |
|
"loss": 0.7112, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.021750388399792854, |
|
"grad_norm": 2.395766258239746, |
|
"learning_rate": 7e-05, |
|
"loss": 0.5803, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02278612118073537, |
|
"grad_norm": 1.9139028787612915, |
|
"learning_rate": 7.333333333333333e-05, |
|
"loss": 0.5602, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.023821853961677887, |
|
"grad_norm": 1.8153733015060425, |
|
"learning_rate": 7.666666666666667e-05, |
|
"loss": 0.5852, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.024857586742620404, |
|
"grad_norm": 1.7927237749099731, |
|
"learning_rate": 8e-05, |
|
"loss": 0.5787, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02589331952356292, |
|
"grad_norm": 2.2322211265563965, |
|
"learning_rate": 8.333333333333334e-05, |
|
"loss": 0.7009, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.026929052304505437, |
|
"grad_norm": 1.8694380521774292, |
|
"learning_rate": 8.666666666666667e-05, |
|
"loss": 0.6042, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.027964785085447953, |
|
"grad_norm": 1.5141125917434692, |
|
"learning_rate": 9e-05, |
|
"loss": 0.5847, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02900051786639047, |
|
"grad_norm": 1.4388060569763184, |
|
"learning_rate": 9.333333333333334e-05, |
|
"loss": 0.533, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.03003625064733299, |
|
"grad_norm": 1.4760385751724243, |
|
"learning_rate": 9.666666666666667e-05, |
|
"loss": 0.4269, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.031071983428275506, |
|
"grad_norm": 1.4737350940704346, |
|
"learning_rate": 0.0001, |
|
"loss": 0.3929, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.03210771620921802, |
|
"grad_norm": 1.3517489433288574, |
|
"learning_rate": 9.999146252290264e-05, |
|
"loss": 0.4369, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.033143448990160536, |
|
"grad_norm": 1.7029956579208374, |
|
"learning_rate": 9.996585300715116e-05, |
|
"loss": 0.453, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.03417918177110305, |
|
"grad_norm": 2.4142861366271973, |
|
"learning_rate": 9.99231801983717e-05, |
|
"loss": 0.5166, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.03521491455204557, |
|
"grad_norm": 1.5307400226593018, |
|
"learning_rate": 9.986345866928941e-05, |
|
"loss": 0.4085, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.03625064733298809, |
|
"grad_norm": 1.6694791316986084, |
|
"learning_rate": 9.978670881475172e-05, |
|
"loss": 0.2848, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03728638011393061, |
|
"grad_norm": 0.9046441316604614, |
|
"learning_rate": 9.96929568447637e-05, |
|
"loss": 0.1912, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.038322112894873125, |
|
"grad_norm": 1.6716495752334595, |
|
"learning_rate": 9.958223477553714e-05, |
|
"loss": 0.632, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03935784567581564, |
|
"grad_norm": 2.2129456996917725, |
|
"learning_rate": 9.94545804185573e-05, |
|
"loss": 0.8857, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.04039357845675816, |
|
"grad_norm": 3.774207592010498, |
|
"learning_rate": 9.931003736767013e-05, |
|
"loss": 1.4254, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.041429311237700675, |
|
"grad_norm": 1.758363127708435, |
|
"learning_rate": 9.91486549841951e-05, |
|
"loss": 0.8625, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.04246504401864319, |
|
"grad_norm": 1.6825367212295532, |
|
"learning_rate": 9.89704883800683e-05, |
|
"loss": 0.6659, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.04350077679958571, |
|
"grad_norm": 1.9193270206451416, |
|
"learning_rate": 9.877559839902184e-05, |
|
"loss": 0.5724, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.044536509580528225, |
|
"grad_norm": 1.610196828842163, |
|
"learning_rate": 9.85640515958057e-05, |
|
"loss": 0.5503, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.04557224236147074, |
|
"grad_norm": 1.7814068794250488, |
|
"learning_rate": 9.833592021345937e-05, |
|
"loss": 0.4417, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.04660797514241326, |
|
"grad_norm": 1.386198878288269, |
|
"learning_rate": 9.809128215864097e-05, |
|
"loss": 0.3452, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.047643707923355774, |
|
"grad_norm": 1.408173680305481, |
|
"learning_rate": 9.783022097502204e-05, |
|
"loss": 0.259, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.04867944070429829, |
|
"grad_norm": 4.415549278259277, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 0.9239, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.04971517348524081, |
|
"grad_norm": 3.1557812690734863, |
|
"learning_rate": 9.725919140804099e-05, |
|
"loss": 0.8021, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.050750906266183324, |
|
"grad_norm": 3.0184805393218994, |
|
"learning_rate": 9.694941803075283e-05, |
|
"loss": 0.6193, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.05178663904712584, |
|
"grad_norm": 2.46248197555542, |
|
"learning_rate": 9.662361147021779e-05, |
|
"loss": 0.848, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05178663904712584, |
|
"eval_loss": 0.8538514971733093, |
|
"eval_runtime": 133.2166, |
|
"eval_samples_per_second": 12.206, |
|
"eval_steps_per_second": 6.103, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.05282237182806836, |
|
"grad_norm": 5.02744197845459, |
|
"learning_rate": 9.628188298907782e-05, |
|
"loss": 0.9884, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.05385810460901087, |
|
"grad_norm": 2.4663264751434326, |
|
"learning_rate": 9.592434928729616e-05, |
|
"loss": 0.8416, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.05489383738995339, |
|
"grad_norm": 1.7390788793563843, |
|
"learning_rate": 9.555113246230442e-05, |
|
"loss": 0.8593, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.055929570170895906, |
|
"grad_norm": 1.908245325088501, |
|
"learning_rate": 9.516235996730645e-05, |
|
"loss": 0.732, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.05696530295183842, |
|
"grad_norm": 2.3819148540496826, |
|
"learning_rate": 9.475816456775313e-05, |
|
"loss": 0.6403, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.05800103573278094, |
|
"grad_norm": 1.402185082435608, |
|
"learning_rate": 9.43386842960031e-05, |
|
"loss": 0.6708, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.059036768513723456, |
|
"grad_norm": 1.2513192892074585, |
|
"learning_rate": 9.39040624041849e-05, |
|
"loss": 0.6277, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.06007250129466598, |
|
"grad_norm": 1.1688061952590942, |
|
"learning_rate": 9.345444731527642e-05, |
|
"loss": 0.5229, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.061108234075608496, |
|
"grad_norm": 1.108825922012329, |
|
"learning_rate": 9.298999257241863e-05, |
|
"loss": 0.5915, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.06214396685655101, |
|
"grad_norm": 1.0653815269470215, |
|
"learning_rate": 9.251085678648072e-05, |
|
"loss": 0.6964, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.06317969963749352, |
|
"grad_norm": 1.674397587776184, |
|
"learning_rate": 9.201720358189464e-05, |
|
"loss": 0.5143, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.06421543241843604, |
|
"grad_norm": 0.9258373379707336, |
|
"learning_rate": 9.150920154077754e-05, |
|
"loss": 0.4886, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.06525116519937856, |
|
"grad_norm": 0.8897579908370972, |
|
"learning_rate": 9.098702414536107e-05, |
|
"loss": 0.4374, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.06628689798032107, |
|
"grad_norm": 0.9600769281387329, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 0.4138, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.06732263076126359, |
|
"grad_norm": 0.7555741667747498, |
|
"learning_rate": 8.9900861364012e-05, |
|
"loss": 0.35, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.0683583635422061, |
|
"grad_norm": 1.175614595413208, |
|
"learning_rate": 8.933724690167417e-05, |
|
"loss": 0.5386, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.06939409632314862, |
|
"grad_norm": 0.9770200252532959, |
|
"learning_rate": 8.876019880555649e-05, |
|
"loss": 0.4241, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.07042982910409114, |
|
"grad_norm": 1.079787254333496, |
|
"learning_rate": 8.816991413705516e-05, |
|
"loss": 0.4143, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.07146556188503367, |
|
"grad_norm": 0.921312689781189, |
|
"learning_rate": 8.756659447784368e-05, |
|
"loss": 0.3022, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.07250129466597618, |
|
"grad_norm": 0.830862283706665, |
|
"learning_rate": 8.695044586103296e-05, |
|
"loss": 0.2061, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.0735370274469187, |
|
"grad_norm": 0.9453504085540771, |
|
"learning_rate": 8.632167870081121e-05, |
|
"loss": 0.283, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.07457276022786122, |
|
"grad_norm": 1.1474324464797974, |
|
"learning_rate": 8.568050772058762e-05, |
|
"loss": 0.4113, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.07560849300880373, |
|
"grad_norm": 1.249191164970398, |
|
"learning_rate": 8.502715187966455e-05, |
|
"loss": 0.5516, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.07664422578974625, |
|
"grad_norm": 1.1734344959259033, |
|
"learning_rate": 8.436183429846313e-05, |
|
"loss": 0.5408, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.07767995857068877, |
|
"grad_norm": 1.2528908252716064, |
|
"learning_rate": 8.368478218232787e-05, |
|
"loss": 0.5155, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.07871569135163128, |
|
"grad_norm": 0.936934232711792, |
|
"learning_rate": 8.299622674393614e-05, |
|
"loss": 0.2787, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.0797514241325738, |
|
"grad_norm": 0.708920419216156, |
|
"learning_rate": 8.229640312433937e-05, |
|
"loss": 0.3339, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.08078715691351632, |
|
"grad_norm": 1.2183189392089844, |
|
"learning_rate": 8.158555031266254e-05, |
|
"loss": 0.5208, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.08182288969445883, |
|
"grad_norm": 1.1501481533050537, |
|
"learning_rate": 8.086391106448965e-05, |
|
"loss": 0.408, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.08285862247540135, |
|
"grad_norm": 0.823788046836853, |
|
"learning_rate": 8.013173181896283e-05, |
|
"loss": 0.3337, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.08389435525634387, |
|
"grad_norm": 0.9144259095191956, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 0.3301, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.08493008803728638, |
|
"grad_norm": 0.7938789129257202, |
|
"learning_rate": 7.863675700402526e-05, |
|
"loss": 0.234, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.0859658208182289, |
|
"grad_norm": 0.8019108772277832, |
|
"learning_rate": 7.787447196714427e-05, |
|
"loss": 0.2253, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.08700155359917142, |
|
"grad_norm": 0.9795384407043457, |
|
"learning_rate": 7.710266782362247e-05, |
|
"loss": 0.3915, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.08803728638011393, |
|
"grad_norm": 1.122196912765503, |
|
"learning_rate": 7.63216081438678e-05, |
|
"loss": 0.4886, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.08907301916105645, |
|
"grad_norm": 0.8640231490135193, |
|
"learning_rate": 7.553155965904535e-05, |
|
"loss": 0.2448, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.09010875194199897, |
|
"grad_norm": 2.5851869583129883, |
|
"learning_rate": 7.473279216998895e-05, |
|
"loss": 0.3743, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.09114448472294148, |
|
"grad_norm": 0.8353838920593262, |
|
"learning_rate": 7.392557845506432e-05, |
|
"loss": 0.3504, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.092180217503884, |
|
"grad_norm": 1.0095223188400269, |
|
"learning_rate": 7.311019417701566e-05, |
|
"loss": 0.3531, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.09321595028482652, |
|
"grad_norm": 1.6652759313583374, |
|
"learning_rate": 7.228691778882693e-05, |
|
"loss": 0.4985, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.09425168306576903, |
|
"grad_norm": 1.2830318212509155, |
|
"learning_rate": 7.145603043863045e-05, |
|
"loss": 0.5003, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.09528741584671155, |
|
"grad_norm": 0.8330721855163574, |
|
"learning_rate": 7.061781587369519e-05, |
|
"loss": 0.1982, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.09632314862765406, |
|
"grad_norm": 0.6261827945709229, |
|
"learning_rate": 6.977256034352712e-05, |
|
"loss": 0.2427, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.09735888140859658, |
|
"grad_norm": 0.6739867925643921, |
|
"learning_rate": 6.892055250211552e-05, |
|
"loss": 0.2442, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.0983946141895391, |
|
"grad_norm": 0.5700361132621765, |
|
"learning_rate": 6.806208330935766e-05, |
|
"loss": 0.09, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.09943034697048161, |
|
"grad_norm": 1.3761444091796875, |
|
"learning_rate": 6.719744593169641e-05, |
|
"loss": 0.1098, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.10046607975142413, |
|
"grad_norm": 1.5139930248260498, |
|
"learning_rate": 6.632693564200416e-05, |
|
"loss": 0.5464, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.10150181253236665, |
|
"grad_norm": 0.995734691619873, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 0.4197, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.10253754531330916, |
|
"grad_norm": 1.7123396396636963, |
|
"learning_rate": 6.456948734446624e-05, |
|
"loss": 0.4591, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.10357327809425168, |
|
"grad_norm": 1.226818323135376, |
|
"learning_rate": 6.368314950360415e-05, |
|
"loss": 0.4507, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.10357327809425168, |
|
"eval_loss": 0.46107152104377747, |
|
"eval_runtime": 132.5673, |
|
"eval_samples_per_second": 12.265, |
|
"eval_steps_per_second": 6.133, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.1046090108751942, |
|
"grad_norm": 0.9650932550430298, |
|
"learning_rate": 6.279213887972179e-05, |
|
"loss": 0.6433, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.10564474365613671, |
|
"grad_norm": 1.0569595098495483, |
|
"learning_rate": 6.189675975213094e-05, |
|
"loss": 0.6046, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.10668047643707923, |
|
"grad_norm": 1.3118841648101807, |
|
"learning_rate": 6.099731789198344e-05, |
|
"loss": 0.6978, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.10771620921802175, |
|
"grad_norm": 0.9649919867515564, |
|
"learning_rate": 6.009412045785051e-05, |
|
"loss": 0.5771, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.10875194199896426, |
|
"grad_norm": 0.8630145788192749, |
|
"learning_rate": 5.918747589082853e-05, |
|
"loss": 0.4207, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.10978767477990678, |
|
"grad_norm": 0.8723099231719971, |
|
"learning_rate": 5.82776938092065e-05, |
|
"loss": 0.4919, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.1108234075608493, |
|
"grad_norm": 1.1895697116851807, |
|
"learning_rate": 5.736508490273188e-05, |
|
"loss": 0.5618, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.11185914034179181, |
|
"grad_norm": 1.1931686401367188, |
|
"learning_rate": 5.644996082651017e-05, |
|
"loss": 0.4773, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.11289487312273433, |
|
"grad_norm": 1.6731840372085571, |
|
"learning_rate": 5.553263409457504e-05, |
|
"loss": 0.4266, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.11393060590367685, |
|
"grad_norm": 1.227846622467041, |
|
"learning_rate": 5.4613417973165106e-05, |
|
"loss": 0.5589, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.11496633868461936, |
|
"grad_norm": 1.0807693004608154, |
|
"learning_rate": 5.3692626373743706e-05, |
|
"loss": 0.4194, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.11600207146556188, |
|
"grad_norm": 0.823137104511261, |
|
"learning_rate": 5.27705737457985e-05, |
|
"loss": 0.623, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.1170378042465044, |
|
"grad_norm": 0.9013914465904236, |
|
"learning_rate": 5.184757496945726e-05, |
|
"loss": 0.4202, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.11807353702744691, |
|
"grad_norm": 0.896589457988739, |
|
"learning_rate": 5.092394524795649e-05, |
|
"loss": 0.4487, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.11910926980838944, |
|
"grad_norm": 0.7383155226707458, |
|
"learning_rate": 5e-05, |
|
"loss": 0.3712, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.12014500258933196, |
|
"grad_norm": 1.5368659496307373, |
|
"learning_rate": 4.907605475204352e-05, |
|
"loss": 0.3895, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.12118073537027448, |
|
"grad_norm": 0.825655996799469, |
|
"learning_rate": 4.8152425030542766e-05, |
|
"loss": 0.3584, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.12221646815121699, |
|
"grad_norm": 0.7657477855682373, |
|
"learning_rate": 4.72294262542015e-05, |
|
"loss": 0.3234, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.12325220093215951, |
|
"grad_norm": 0.7747549414634705, |
|
"learning_rate": 4.6307373626256306e-05, |
|
"loss": 0.363, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.12428793371310203, |
|
"grad_norm": 1.066019892692566, |
|
"learning_rate": 4.5386582026834906e-05, |
|
"loss": 0.4481, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.12532366649404453, |
|
"grad_norm": 0.8845261931419373, |
|
"learning_rate": 4.446736590542497e-05, |
|
"loss": 0.2096, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.12635939927498704, |
|
"grad_norm": 0.969825267791748, |
|
"learning_rate": 4.3550039173489845e-05, |
|
"loss": 0.3525, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.12739513205592956, |
|
"grad_norm": 0.9056379199028015, |
|
"learning_rate": 4.2634915097268115e-05, |
|
"loss": 0.3636, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.12843086483687208, |
|
"grad_norm": 0.8290777206420898, |
|
"learning_rate": 4.1722306190793495e-05, |
|
"loss": 0.2886, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.1294665976178146, |
|
"grad_norm": 0.8031933903694153, |
|
"learning_rate": 4.0812524109171476e-05, |
|
"loss": 0.336, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.1305023303987571, |
|
"grad_norm": 1.104204773902893, |
|
"learning_rate": 3.99058795421495e-05, |
|
"loss": 0.3722, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.13153806317969963, |
|
"grad_norm": 0.9014912247657776, |
|
"learning_rate": 3.9002682108016585e-05, |
|
"loss": 0.3251, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.13257379596064214, |
|
"grad_norm": 0.5569969415664673, |
|
"learning_rate": 3.8103240247869075e-05, |
|
"loss": 0.2197, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.13360952874158466, |
|
"grad_norm": 0.5489453077316284, |
|
"learning_rate": 3.720786112027822e-05, |
|
"loss": 0.157, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.13464526152252718, |
|
"grad_norm": 0.700421929359436, |
|
"learning_rate": 3.631685049639586e-05, |
|
"loss": 0.2833, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.1356809943034697, |
|
"grad_norm": 0.8463655114173889, |
|
"learning_rate": 3.543051265553377e-05, |
|
"loss": 0.3084, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.1367167270844122, |
|
"grad_norm": 1.074282169342041, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 0.2022, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.13775245986535473, |
|
"grad_norm": 0.7204672694206238, |
|
"learning_rate": 3.367306435799584e-05, |
|
"loss": 0.1456, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.13878819264629724, |
|
"grad_norm": 1.4383536577224731, |
|
"learning_rate": 3.2802554068303596e-05, |
|
"loss": 0.2751, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.13982392542723976, |
|
"grad_norm": 0.6281360387802124, |
|
"learning_rate": 3.1937916690642356e-05, |
|
"loss": 0.0874, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.14085965820818228, |
|
"grad_norm": 0.6139625906944275, |
|
"learning_rate": 3.107944749788449e-05, |
|
"loss": 0.1965, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.1418953909891248, |
|
"grad_norm": 1.241881012916565, |
|
"learning_rate": 3.0227439656472877e-05, |
|
"loss": 0.2662, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.14293112377006734, |
|
"grad_norm": 0.6423341631889343, |
|
"learning_rate": 2.9382184126304834e-05, |
|
"loss": 0.1051, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.14396685655100985, |
|
"grad_norm": 1.093138337135315, |
|
"learning_rate": 2.8543969561369556e-05, |
|
"loss": 0.2853, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.14500258933195237, |
|
"grad_norm": 1.1384899616241455, |
|
"learning_rate": 2.771308221117309e-05, |
|
"loss": 0.3369, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.14603832211289489, |
|
"grad_norm": 0.7722360491752625, |
|
"learning_rate": 2.688980582298435e-05, |
|
"loss": 0.2649, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1470740548938374, |
|
"grad_norm": 1.1144746541976929, |
|
"learning_rate": 2.607442154493568e-05, |
|
"loss": 0.2119, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.14810978767477992, |
|
"grad_norm": 0.6660451292991638, |
|
"learning_rate": 2.5267207830011068e-05, |
|
"loss": 0.1605, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.14914552045572244, |
|
"grad_norm": 0.6069556474685669, |
|
"learning_rate": 2.446844034095466e-05, |
|
"loss": 0.1004, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.15018125323666495, |
|
"grad_norm": 0.9130787253379822, |
|
"learning_rate": 2.3678391856132204e-05, |
|
"loss": 0.1797, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.15121698601760747, |
|
"grad_norm": 0.6856268644332886, |
|
"learning_rate": 2.2897332176377528e-05, |
|
"loss": 0.1913, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.15225271879854999, |
|
"grad_norm": 1.281296730041504, |
|
"learning_rate": 2.2125528032855724e-05, |
|
"loss": 0.4095, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.1532884515794925, |
|
"grad_norm": 1.5272610187530518, |
|
"learning_rate": 2.136324299597474e-05, |
|
"loss": 0.5091, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.15432418436043502, |
|
"grad_norm": 1.2128320932388306, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 0.3015, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.15535991714137753, |
|
"grad_norm": 0.903167724609375, |
|
"learning_rate": 1.9868268181037185e-05, |
|
"loss": 0.3632, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.15535991714137753, |
|
"eval_loss": 0.3055230975151062, |
|
"eval_runtime": 132.6347, |
|
"eval_samples_per_second": 12.259, |
|
"eval_steps_per_second": 6.13, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.15639564992232005, |
|
"grad_norm": 1.0121864080429077, |
|
"learning_rate": 1.9136088935510362e-05, |
|
"loss": 0.5259, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.15743138270326257, |
|
"grad_norm": 1.1729909181594849, |
|
"learning_rate": 1.8414449687337464e-05, |
|
"loss": 0.6326, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.15846711548420508, |
|
"grad_norm": 0.995249330997467, |
|
"learning_rate": 1.7703596875660645e-05, |
|
"loss": 0.5665, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.1595028482651476, |
|
"grad_norm": 0.9343785643577576, |
|
"learning_rate": 1.700377325606388e-05, |
|
"loss": 0.5047, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.16053858104609012, |
|
"grad_norm": 1.207404375076294, |
|
"learning_rate": 1.631521781767214e-05, |
|
"loss": 0.5292, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.16157431382703263, |
|
"grad_norm": 1.1004372835159302, |
|
"learning_rate": 1.5638165701536868e-05, |
|
"loss": 0.4597, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.16261004660797515, |
|
"grad_norm": 0.9158421754837036, |
|
"learning_rate": 1.4972848120335453e-05, |
|
"loss": 0.4519, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.16364577938891767, |
|
"grad_norm": 0.6641129851341248, |
|
"learning_rate": 1.4319492279412388e-05, |
|
"loss": 0.3259, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.16468151216986018, |
|
"grad_norm": 0.798582136631012, |
|
"learning_rate": 1.3678321299188801e-05, |
|
"loss": 0.3704, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.1657172449508027, |
|
"grad_norm": 0.7701166868209839, |
|
"learning_rate": 1.3049554138967051e-05, |
|
"loss": 0.336, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.16675297773174522, |
|
"grad_norm": 0.9152107238769531, |
|
"learning_rate": 1.2433405522156332e-05, |
|
"loss": 0.3435, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.16778871051268773, |
|
"grad_norm": 0.9507977366447449, |
|
"learning_rate": 1.183008586294485e-05, |
|
"loss": 0.4295, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.16882444329363025, |
|
"grad_norm": 0.6636240482330322, |
|
"learning_rate": 1.1239801194443506e-05, |
|
"loss": 0.2605, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.16986017607457277, |
|
"grad_norm": 0.6697791814804077, |
|
"learning_rate": 1.066275309832584e-05, |
|
"loss": 0.2748, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.17089590885551528, |
|
"grad_norm": 0.763862669467926, |
|
"learning_rate": 1.0099138635988026e-05, |
|
"loss": 0.2329, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.1719316416364578, |
|
"grad_norm": 0.5785434246063232, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 0.219, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.17296737441740032, |
|
"grad_norm": 0.7660300135612488, |
|
"learning_rate": 9.012975854638949e-06, |
|
"loss": 0.278, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.17400310719834283, |
|
"grad_norm": 0.701310932636261, |
|
"learning_rate": 8.490798459222476e-06, |
|
"loss": 0.2762, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.17503883997928535, |
|
"grad_norm": 0.7874704003334045, |
|
"learning_rate": 7.982796418105371e-06, |
|
"loss": 0.3061, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.17607457276022787, |
|
"grad_norm": 0.586209237575531, |
|
"learning_rate": 7.489143213519301e-06, |
|
"loss": 0.2317, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.17711030554117038, |
|
"grad_norm": 0.8026313781738281, |
|
"learning_rate": 7.010007427581378e-06, |
|
"loss": 0.1928, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.1781460383221129, |
|
"grad_norm": 0.6386998295783997, |
|
"learning_rate": 6.5455526847235825e-06, |
|
"loss": 0.2347, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.17918177110305541, |
|
"grad_norm": 0.8125709295272827, |
|
"learning_rate": 6.0959375958151045e-06, |
|
"loss": 0.2274, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.18021750388399793, |
|
"grad_norm": 0.8692264556884766, |
|
"learning_rate": 5.6613157039969055e-06, |
|
"loss": 0.4774, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.18125323666494045, |
|
"grad_norm": 0.7680791616439819, |
|
"learning_rate": 5.241835432246889e-06, |
|
"loss": 0.2766, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.18228896944588296, |
|
"grad_norm": 0.8680493831634521, |
|
"learning_rate": 4.837640032693558e-06, |
|
"loss": 0.2881, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.18332470222682548, |
|
"grad_norm": 0.5676426291465759, |
|
"learning_rate": 4.448867537695578e-06, |
|
"loss": 0.2081, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.184360435007768, |
|
"grad_norm": 0.5735141038894653, |
|
"learning_rate": 4.075650712703849e-06, |
|
"loss": 0.222, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.18539616778871051, |
|
"grad_norm": 0.5580146908760071, |
|
"learning_rate": 3.71811701092219e-06, |
|
"loss": 0.1749, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.18643190056965303, |
|
"grad_norm": 0.5910290479660034, |
|
"learning_rate": 3.376388529782215e-06, |
|
"loss": 0.2142, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.18746763335059555, |
|
"grad_norm": 0.8039419651031494, |
|
"learning_rate": 3.0505819692471792e-06, |
|
"loss": 0.2259, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.18850336613153806, |
|
"grad_norm": 0.6810411214828491, |
|
"learning_rate": 2.7408085919590264e-06, |
|
"loss": 0.1636, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.18953909891248058, |
|
"grad_norm": 0.595245361328125, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 0.1396, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.1905748316934231, |
|
"grad_norm": 0.9223597645759583, |
|
"learning_rate": 2.1697790249779636e-06, |
|
"loss": 0.287, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.1916105644743656, |
|
"grad_norm": 0.7162910103797913, |
|
"learning_rate": 1.908717841359048e-06, |
|
"loss": 0.1353, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.19264629725530813, |
|
"grad_norm": 0.9694966077804565, |
|
"learning_rate": 1.6640797865406288e-06, |
|
"loss": 0.2145, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.19368203003625065, |
|
"grad_norm": 0.644223153591156, |
|
"learning_rate": 1.4359484041943038e-06, |
|
"loss": 0.1416, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.19471776281719316, |
|
"grad_norm": 1.156400442123413, |
|
"learning_rate": 1.2244016009781701e-06, |
|
"loss": 0.265, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.19575349559813568, |
|
"grad_norm": 0.7973266839981079, |
|
"learning_rate": 1.0295116199317057e-06, |
|
"loss": 0.2066, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.1967892283790782, |
|
"grad_norm": 0.8829982876777649, |
|
"learning_rate": 8.513450158049108e-07, |
|
"loss": 0.2602, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.1978249611600207, |
|
"grad_norm": 0.8796784281730652, |
|
"learning_rate": 6.899626323298713e-07, |
|
"loss": 0.3425, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.19886069394096323, |
|
"grad_norm": 0.6855463981628418, |
|
"learning_rate": 5.454195814427021e-07, |
|
"loss": 0.127, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.19989642672190575, |
|
"grad_norm": 0.5383428335189819, |
|
"learning_rate": 4.177652244628627e-07, |
|
"loss": 0.1866, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.20093215950284826, |
|
"grad_norm": 0.45419228076934814, |
|
"learning_rate": 3.0704315523631953e-07, |
|
"loss": 0.0759, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.20196789228379078, |
|
"grad_norm": 0.7053388357162476, |
|
"learning_rate": 2.1329118524827662e-07, |
|
"loss": 0.1603, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.2030036250647333, |
|
"grad_norm": 0.5925009846687317, |
|
"learning_rate": 1.3654133071059893e-07, |
|
"loss": 0.1047, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.2040393578456758, |
|
"grad_norm": 1.1599555015563965, |
|
"learning_rate": 7.681980162830282e-08, |
|
"loss": 0.4283, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.20507509062661833, |
|
"grad_norm": 1.152112603187561, |
|
"learning_rate": 3.4146992848854695e-08, |
|
"loss": 0.3806, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.20611082340756084, |
|
"grad_norm": 1.0169992446899414, |
|
"learning_rate": 8.537477097364522e-09, |
|
"loss": 0.1831, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.20714655618850336, |
|
"grad_norm": 1.3161717653274536, |
|
"learning_rate": 0.0, |
|
"loss": 0.6848, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.20714655618850336, |
|
"eval_loss": 0.2649017870426178, |
|
"eval_runtime": 133.2348, |
|
"eval_samples_per_second": 12.204, |
|
"eval_steps_per_second": 6.102, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.660993159135232e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |