{
  "best_metric": 0.22498248517513275,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.517799352750809,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0025889967637540453,
      "grad_norm": 8.612391471862793,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.8772,
      "step": 1
    },
    {
      "epoch": 0.0025889967637540453,
      "eval_loss": 1.1311349868774414,
      "eval_runtime": 63.1174,
      "eval_samples_per_second": 41.241,
      "eval_steps_per_second": 5.165,
      "step": 1
    },
    {
      "epoch": 0.005177993527508091,
      "grad_norm": 10.561990737915039,
      "learning_rate": 6.666666666666667e-06,
      "loss": 2.3642,
      "step": 2
    },
    {
      "epoch": 0.007766990291262136,
      "grad_norm": 12.674636840820312,
      "learning_rate": 1e-05,
      "loss": 2.7038,
      "step": 3
    },
    {
      "epoch": 0.010355987055016181,
      "grad_norm": 11.861933708190918,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 2.9593,
      "step": 4
    },
    {
      "epoch": 0.012944983818770227,
      "grad_norm": 10.037701606750488,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 2.8463,
      "step": 5
    },
    {
      "epoch": 0.015533980582524271,
      "grad_norm": 8.951319694519043,
      "learning_rate": 2e-05,
      "loss": 2.6833,
      "step": 6
    },
    {
      "epoch": 0.018122977346278317,
      "grad_norm": 11.811137199401855,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 2.8029,
      "step": 7
    },
    {
      "epoch": 0.020711974110032363,
      "grad_norm": 11.602544784545898,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 2.5999,
      "step": 8
    },
    {
      "epoch": 0.02330097087378641,
      "grad_norm": 10.852923393249512,
      "learning_rate": 3e-05,
      "loss": 2.3762,
      "step": 9
    },
    {
      "epoch": 0.025889967637540454,
      "grad_norm": 16.908262252807617,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 2.2558,
      "step": 10
    },
    {
      "epoch": 0.0284789644012945,
      "grad_norm": 19.876672744750977,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 1.9599,
      "step": 11
    },
    {
      "epoch": 0.031067961165048542,
      "grad_norm": 36.011985778808594,
      "learning_rate": 4e-05,
      "loss": 1.9861,
      "step": 12
    },
    {
      "epoch": 0.03365695792880259,
      "grad_norm": 37.01054000854492,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 1.9578,
      "step": 13
    },
    {
      "epoch": 0.036245954692556634,
      "grad_norm": 19.07244300842285,
      "learning_rate": 4.666666666666667e-05,
      "loss": 1.7452,
      "step": 14
    },
    {
      "epoch": 0.038834951456310676,
      "grad_norm": 11.258912086486816,
      "learning_rate": 5e-05,
      "loss": 1.473,
      "step": 15
    },
    {
      "epoch": 0.041423948220064725,
      "grad_norm": 9.571911811828613,
      "learning_rate": 5.333333333333333e-05,
      "loss": 1.4802,
      "step": 16
    },
    {
      "epoch": 0.04401294498381877,
      "grad_norm": 5.4818878173828125,
      "learning_rate": 5.666666666666667e-05,
      "loss": 1.449,
      "step": 17
    },
    {
      "epoch": 0.04660194174757282,
      "grad_norm": 3.5284886360168457,
      "learning_rate": 6e-05,
      "loss": 1.3482,
      "step": 18
    },
    {
      "epoch": 0.04919093851132686,
      "grad_norm": 4.385798931121826,
      "learning_rate": 6.333333333333333e-05,
      "loss": 1.3401,
      "step": 19
    },
    {
      "epoch": 0.05177993527508091,
      "grad_norm": 4.73576545715332,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.3967,
      "step": 20
    },
    {
      "epoch": 0.05436893203883495,
      "grad_norm": 7.08858585357666,
      "learning_rate": 7e-05,
      "loss": 1.4665,
      "step": 21
    },
    {
      "epoch": 0.056957928802589,
      "grad_norm": 5.8825225830078125,
      "learning_rate": 7.333333333333333e-05,
      "loss": 1.4479,
      "step": 22
    },
    {
      "epoch": 0.05954692556634304,
      "grad_norm": 6.359987735748291,
      "learning_rate": 7.666666666666667e-05,
      "loss": 1.4314,
      "step": 23
    },
    {
      "epoch": 0.062135922330097085,
      "grad_norm": 26.79585075378418,
      "learning_rate": 8e-05,
      "loss": 1.499,
      "step": 24
    },
    {
      "epoch": 0.06472491909385113,
      "grad_norm": 9.032891273498535,
      "learning_rate": 8.333333333333334e-05,
      "loss": 1.5914,
      "step": 25
    },
    {
      "epoch": 0.06731391585760518,
      "grad_norm": 10.370737075805664,
      "learning_rate": 8.666666666666667e-05,
      "loss": 1.5544,
      "step": 26
    },
    {
      "epoch": 0.06990291262135923,
      "grad_norm": 7.443507671356201,
      "learning_rate": 9e-05,
      "loss": 1.3836,
      "step": 27
    },
    {
      "epoch": 0.07249190938511327,
      "grad_norm": 4.631689548492432,
      "learning_rate": 9.333333333333334e-05,
      "loss": 1.2513,
      "step": 28
    },
    {
      "epoch": 0.07508090614886731,
      "grad_norm": 3.5808937549591064,
      "learning_rate": 9.666666666666667e-05,
      "loss": 1.265,
      "step": 29
    },
    {
      "epoch": 0.07766990291262135,
      "grad_norm": 3.6336581707000732,
      "learning_rate": 0.0001,
      "loss": 1.2874,
      "step": 30
    },
    {
      "epoch": 0.08025889967637541,
      "grad_norm": 3.7123985290527344,
      "learning_rate": 9.999146252290264e-05,
      "loss": 1.2587,
      "step": 31
    },
    {
      "epoch": 0.08284789644012945,
      "grad_norm": 3.7729804515838623,
      "learning_rate": 9.996585300715116e-05,
      "loss": 1.3292,
      "step": 32
    },
    {
      "epoch": 0.0854368932038835,
      "grad_norm": 6.0967206954956055,
      "learning_rate": 9.99231801983717e-05,
      "loss": 1.3803,
      "step": 33
    },
    {
      "epoch": 0.08802588996763754,
      "grad_norm": 4.336344242095947,
      "learning_rate": 9.986345866928941e-05,
      "loss": 1.328,
      "step": 34
    },
    {
      "epoch": 0.09061488673139159,
      "grad_norm": 4.434997081756592,
      "learning_rate": 9.978670881475172e-05,
      "loss": 1.2927,
      "step": 35
    },
    {
      "epoch": 0.09320388349514563,
      "grad_norm": 7.466955661773682,
      "learning_rate": 9.96929568447637e-05,
      "loss": 1.5473,
      "step": 36
    },
    {
      "epoch": 0.09579288025889968,
      "grad_norm": 6.0542497634887695,
      "learning_rate": 9.958223477553714e-05,
      "loss": 1.4992,
      "step": 37
    },
    {
      "epoch": 0.09838187702265372,
      "grad_norm": 6.534477233886719,
      "learning_rate": 9.94545804185573e-05,
      "loss": 1.3268,
      "step": 38
    },
    {
      "epoch": 0.10097087378640776,
      "grad_norm": 6.788901329040527,
      "learning_rate": 9.931003736767013e-05,
      "loss": 1.2662,
      "step": 39
    },
    {
      "epoch": 0.10355987055016182,
      "grad_norm": 4.574948310852051,
      "learning_rate": 9.91486549841951e-05,
      "loss": 1.2097,
      "step": 40
    },
    {
      "epoch": 0.10614886731391586,
      "grad_norm": 2.7755305767059326,
      "learning_rate": 9.89704883800683e-05,
      "loss": 1.1472,
      "step": 41
    },
    {
      "epoch": 0.1087378640776699,
      "grad_norm": 3.1703152656555176,
      "learning_rate": 9.877559839902184e-05,
      "loss": 1.2213,
      "step": 42
    },
    {
      "epoch": 0.11132686084142394,
      "grad_norm": 2.9166173934936523,
      "learning_rate": 9.85640515958057e-05,
      "loss": 1.1592,
      "step": 43
    },
    {
      "epoch": 0.113915857605178,
      "grad_norm": 3.054363965988159,
      "learning_rate": 9.833592021345937e-05,
      "loss": 1.1857,
      "step": 44
    },
    {
      "epoch": 0.11650485436893204,
      "grad_norm": 3.5410144329071045,
      "learning_rate": 9.809128215864097e-05,
      "loss": 1.2165,
      "step": 45
    },
    {
      "epoch": 0.11909385113268608,
      "grad_norm": 4.187359809875488,
      "learning_rate": 9.783022097502204e-05,
      "loss": 1.2585,
      "step": 46
    },
    {
      "epoch": 0.12168284789644013,
      "grad_norm": 4.440476894378662,
      "learning_rate": 9.755282581475769e-05,
      "loss": 1.258,
      "step": 47
    },
    {
      "epoch": 0.12427184466019417,
      "grad_norm": 7.523629188537598,
      "learning_rate": 9.725919140804099e-05,
      "loss": 1.2681,
      "step": 48
    },
    {
      "epoch": 0.1268608414239482,
      "grad_norm": 19.543785095214844,
      "learning_rate": 9.694941803075283e-05,
      "loss": 1.3755,
      "step": 49
    },
    {
      "epoch": 0.12944983818770225,
      "grad_norm": 14.362659454345703,
      "learning_rate": 9.662361147021779e-05,
      "loss": 1.4683,
      "step": 50
    },
    {
      "epoch": 0.12944983818770225,
      "eval_loss": 0.3648124039173126,
      "eval_runtime": 64.5567,
      "eval_samples_per_second": 40.321,
      "eval_steps_per_second": 5.05,
      "step": 50
    },
    {
      "epoch": 0.13203883495145632,
      "grad_norm": 12.063788414001465,
      "learning_rate": 9.628188298907782e-05,
      "loss": 1.4899,
      "step": 51
    },
    {
      "epoch": 0.13462783171521037,
      "grad_norm": 8.200540542602539,
      "learning_rate": 9.592434928729616e-05,
      "loss": 1.3093,
      "step": 52
    },
    {
      "epoch": 0.1372168284789644,
      "grad_norm": 3.508943796157837,
      "learning_rate": 9.555113246230442e-05,
      "loss": 1.1634,
      "step": 53
    },
    {
      "epoch": 0.13980582524271845,
      "grad_norm": 2.361328601837158,
      "learning_rate": 9.516235996730645e-05,
      "loss": 1.0916,
      "step": 54
    },
    {
      "epoch": 0.1423948220064725,
      "grad_norm": 2.5727005004882812,
      "learning_rate": 9.475816456775313e-05,
      "loss": 1.0954,
      "step": 55
    },
    {
      "epoch": 0.14498381877022654,
      "grad_norm": 3.032360792160034,
      "learning_rate": 9.43386842960031e-05,
      "loss": 1.175,
      "step": 56
    },
    {
      "epoch": 0.14757281553398058,
      "grad_norm": 2.9916622638702393,
      "learning_rate": 9.39040624041849e-05,
      "loss": 1.1121,
      "step": 57
    },
    {
      "epoch": 0.15016181229773462,
      "grad_norm": 3.048910140991211,
      "learning_rate": 9.345444731527642e-05,
      "loss": 1.0988,
      "step": 58
    },
    {
      "epoch": 0.15275080906148866,
      "grad_norm": 3.363703489303589,
      "learning_rate": 9.298999257241863e-05,
      "loss": 1.1841,
      "step": 59
    },
    {
      "epoch": 0.1553398058252427,
      "grad_norm": 3.3770105838775635,
      "learning_rate": 9.251085678648072e-05,
      "loss": 1.1814,
      "step": 60
    },
    {
      "epoch": 0.15792880258899678,
      "grad_norm": 4.700929641723633,
      "learning_rate": 9.201720358189464e-05,
      "loss": 1.2283,
      "step": 61
    },
    {
      "epoch": 0.16051779935275082,
      "grad_norm": 5.556697845458984,
      "learning_rate": 9.150920154077754e-05,
      "loss": 1.3271,
      "step": 62
    },
    {
      "epoch": 0.16310679611650486,
      "grad_norm": 5.743304252624512,
      "learning_rate": 9.098702414536107e-05,
      "loss": 1.2262,
      "step": 63
    },
    {
      "epoch": 0.1656957928802589,
      "grad_norm": 5.529282093048096,
      "learning_rate": 9.045084971874738e-05,
      "loss": 1.198,
      "step": 64
    },
    {
      "epoch": 0.16828478964401294,
      "grad_norm": 3.399848222732544,
      "learning_rate": 8.9900861364012e-05,
      "loss": 1.1249,
      "step": 65
    },
    {
      "epoch": 0.170873786407767,
      "grad_norm": 2.213207244873047,
      "learning_rate": 8.933724690167417e-05,
      "loss": 1.0662,
      "step": 66
    },
    {
      "epoch": 0.17346278317152103,
      "grad_norm": 2.491964101791382,
      "learning_rate": 8.876019880555649e-05,
      "loss": 1.0862,
      "step": 67
    },
    {
      "epoch": 0.17605177993527507,
      "grad_norm": 2.7167015075683594,
      "learning_rate": 8.816991413705516e-05,
      "loss": 1.1059,
      "step": 68
    },
    {
      "epoch": 0.1786407766990291,
      "grad_norm": 2.776489734649658,
      "learning_rate": 8.756659447784368e-05,
      "loss": 1.0858,
      "step": 69
    },
    {
      "epoch": 0.18122977346278318,
      "grad_norm": 2.965355396270752,
      "learning_rate": 8.695044586103296e-05,
      "loss": 1.125,
      "step": 70
    },
    {
      "epoch": 0.18381877022653723,
      "grad_norm": 2.7822370529174805,
      "learning_rate": 8.632167870081121e-05,
      "loss": 1.0688,
      "step": 71
    },
    {
      "epoch": 0.18640776699029127,
      "grad_norm": 4.001903533935547,
      "learning_rate": 8.568050772058762e-05,
      "loss": 1.1053,
      "step": 72
    },
    {
      "epoch": 0.1889967637540453,
      "grad_norm": 3.649930477142334,
      "learning_rate": 8.502715187966455e-05,
      "loss": 1.1806,
      "step": 73
    },
    {
      "epoch": 0.19158576051779935,
      "grad_norm": 4.8959550857543945,
      "learning_rate": 8.436183429846313e-05,
      "loss": 1.2705,
      "step": 74
    },
    {
      "epoch": 0.1941747572815534,
      "grad_norm": 7.251972198486328,
      "learning_rate": 8.368478218232787e-05,
      "loss": 1.3655,
      "step": 75
    },
    {
      "epoch": 0.19676375404530744,
      "grad_norm": 9.770423889160156,
      "learning_rate": 8.299622674393614e-05,
      "loss": 1.1064,
      "step": 76
    },
    {
      "epoch": 0.19935275080906148,
      "grad_norm": 8.65103530883789,
      "learning_rate": 8.229640312433937e-05,
      "loss": 1.1839,
      "step": 77
    },
    {
      "epoch": 0.20194174757281552,
      "grad_norm": 5.945736885070801,
      "learning_rate": 8.158555031266254e-05,
      "loss": 1.1019,
      "step": 78
    },
    {
      "epoch": 0.2045307443365696,
      "grad_norm": 3.460510730743408,
      "learning_rate": 8.086391106448965e-05,
      "loss": 1.0443,
      "step": 79
    },
    {
      "epoch": 0.20711974110032363,
      "grad_norm": 2.2635905742645264,
      "learning_rate": 8.013173181896283e-05,
      "loss": 1.0537,
      "step": 80
    },
    {
      "epoch": 0.20970873786407768,
      "grad_norm": 2.609692335128784,
      "learning_rate": 7.938926261462366e-05,
      "loss": 1.0779,
      "step": 81
    },
    {
      "epoch": 0.21229773462783172,
      "grad_norm": 2.645723819732666,
      "learning_rate": 7.863675700402526e-05,
      "loss": 1.0427,
      "step": 82
    },
    {
      "epoch": 0.21488673139158576,
      "grad_norm": 3.2835347652435303,
      "learning_rate": 7.787447196714427e-05,
      "loss": 1.1034,
      "step": 83
    },
    {
      "epoch": 0.2174757281553398,
      "grad_norm": 3.677543878555298,
      "learning_rate": 7.710266782362247e-05,
      "loss": 1.2105,
      "step": 84
    },
    {
      "epoch": 0.22006472491909385,
      "grad_norm": 3.5897769927978516,
      "learning_rate": 7.63216081438678e-05,
      "loss": 1.2181,
      "step": 85
    },
    {
      "epoch": 0.2226537216828479,
      "grad_norm": 3.5425643920898438,
      "learning_rate": 7.553155965904535e-05,
      "loss": 1.1825,
      "step": 86
    },
    {
      "epoch": 0.22524271844660193,
      "grad_norm": 4.001893997192383,
      "learning_rate": 7.473279216998895e-05,
      "loss": 1.2106,
      "step": 87
    },
    {
      "epoch": 0.227831715210356,
      "grad_norm": 3.9724557399749756,
      "learning_rate": 7.392557845506432e-05,
      "loss": 1.1139,
      "step": 88
    },
    {
      "epoch": 0.23042071197411004,
      "grad_norm": 4.996490955352783,
      "learning_rate": 7.311019417701566e-05,
      "loss": 1.1814,
      "step": 89
    },
    {
      "epoch": 0.23300970873786409,
      "grad_norm": 3.354581117630005,
      "learning_rate": 7.228691778882693e-05,
      "loss": 1.0731,
      "step": 90
    },
    {
      "epoch": 0.23559870550161813,
      "grad_norm": 1.9178783893585205,
      "learning_rate": 7.145603043863045e-05,
      "loss": 0.9357,
      "step": 91
    },
    {
      "epoch": 0.23818770226537217,
      "grad_norm": 2.0360922813415527,
      "learning_rate": 7.061781587369519e-05,
      "loss": 0.9789,
      "step": 92
    },
    {
      "epoch": 0.2407766990291262,
      "grad_norm": 2.401505470275879,
      "learning_rate": 6.977256034352712e-05,
      "loss": 1.0518,
      "step": 93
    },
    {
      "epoch": 0.24336569579288025,
      "grad_norm": 2.738330364227295,
      "learning_rate": 6.892055250211552e-05,
      "loss": 1.0681,
      "step": 94
    },
    {
      "epoch": 0.2459546925566343,
      "grad_norm": 3.1010687351226807,
      "learning_rate": 6.806208330935766e-05,
      "loss": 1.1431,
      "step": 95
    },
    {
      "epoch": 0.24854368932038834,
      "grad_norm": 3.6441831588745117,
      "learning_rate": 6.719744593169641e-05,
      "loss": 1.1438,
      "step": 96
    },
    {
      "epoch": 0.2511326860841424,
      "grad_norm": 3.7766201496124268,
      "learning_rate": 6.632693564200416e-05,
      "loss": 1.1669,
      "step": 97
    },
    {
      "epoch": 0.2537216828478964,
      "grad_norm": 4.321850299835205,
      "learning_rate": 6.545084971874738e-05,
      "loss": 1.1743,
      "step": 98
    },
    {
      "epoch": 0.2563106796116505,
      "grad_norm": 5.203691005706787,
      "learning_rate": 6.456948734446624e-05,
      "loss": 1.2916,
      "step": 99
    },
    {
      "epoch": 0.2588996763754045,
      "grad_norm": 6.3613057136535645,
      "learning_rate": 6.368314950360415e-05,
      "loss": 1.2034,
      "step": 100
    },
    {
      "epoch": 0.2588996763754045,
      "eval_loss": 0.2764929533004761,
      "eval_runtime": 67.6851,
      "eval_samples_per_second": 38.457,
      "eval_steps_per_second": 4.816,
      "step": 100
    },
    {
      "epoch": 0.2614886731391586,
      "grad_norm": 3.3049983978271484,
      "learning_rate": 6.279213887972179e-05,
      "loss": 0.9484,
      "step": 101
    },
    {
      "epoch": 0.26407766990291265,
      "grad_norm": 3.41595196723938,
      "learning_rate": 6.189675975213094e-05,
      "loss": 1.0463,
      "step": 102
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 3.7063159942626953,
      "learning_rate": 6.099731789198344e-05,
      "loss": 1.0549,
      "step": 103
    },
    {
      "epoch": 0.26925566343042073,
      "grad_norm": 3.5637729167938232,
      "learning_rate": 6.009412045785051e-05,
      "loss": 1.0105,
      "step": 104
    },
    {
      "epoch": 0.27184466019417475,
      "grad_norm": 3.321582317352295,
      "learning_rate": 5.918747589082853e-05,
      "loss": 1.017,
      "step": 105
    },
    {
      "epoch": 0.2744336569579288,
      "grad_norm": 2.760406732559204,
      "learning_rate": 5.82776938092065e-05,
      "loss": 1.0604,
      "step": 106
    },
    {
      "epoch": 0.27702265372168283,
      "grad_norm": 2.2482504844665527,
      "learning_rate": 5.736508490273188e-05,
      "loss": 0.9956,
      "step": 107
    },
    {
      "epoch": 0.2796116504854369,
      "grad_norm": 2.5008554458618164,
      "learning_rate": 5.644996082651017e-05,
      "loss": 1.0292,
      "step": 108
    },
    {
      "epoch": 0.2822006472491909,
      "grad_norm": 3.1802356243133545,
      "learning_rate": 5.553263409457504e-05,
      "loss": 1.0959,
      "step": 109
    },
    {
      "epoch": 0.284789644012945,
      "grad_norm": 4.109099388122559,
      "learning_rate": 5.4613417973165106e-05,
      "loss": 1.1156,
      "step": 110
    },
    {
      "epoch": 0.287378640776699,
      "grad_norm": 5.0009307861328125,
      "learning_rate": 5.3692626373743706e-05,
      "loss": 1.2309,
      "step": 111
    },
    {
      "epoch": 0.28996763754045307,
      "grad_norm": 5.912919521331787,
      "learning_rate": 5.27705737457985e-05,
      "loss": 1.1948,
      "step": 112
    },
    {
      "epoch": 0.29255663430420714,
      "grad_norm": 4.550836086273193,
      "learning_rate": 5.184757496945726e-05,
      "loss": 1.0999,
      "step": 113
    },
    {
      "epoch": 0.29514563106796116,
      "grad_norm": 1.984678864479065,
      "learning_rate": 5.092394524795649e-05,
      "loss": 0.8638,
      "step": 114
    },
    {
      "epoch": 0.2977346278317152,
      "grad_norm": 2.326871395111084,
      "learning_rate": 5e-05,
      "loss": 0.8946,
      "step": 115
    },
    {
      "epoch": 0.30032362459546924,
      "grad_norm": 2.4947257041931152,
      "learning_rate": 4.907605475204352e-05,
      "loss": 0.8815,
      "step": 116
    },
    {
      "epoch": 0.3029126213592233,
      "grad_norm": 2.2519426345825195,
      "learning_rate": 4.8152425030542766e-05,
      "loss": 0.9208,
      "step": 117
    },
    {
      "epoch": 0.3055016181229773,
      "grad_norm": 2.274266242980957,
      "learning_rate": 4.72294262542015e-05,
      "loss": 0.9498,
      "step": 118
    },
    {
      "epoch": 0.3080906148867314,
      "grad_norm": 2.3184547424316406,
      "learning_rate": 4.6307373626256306e-05,
      "loss": 0.9624,
      "step": 119
    },
    {
      "epoch": 0.3106796116504854,
      "grad_norm": 2.551255941390991,
      "learning_rate": 4.5386582026834906e-05,
      "loss": 0.9275,
      "step": 120
    },
    {
      "epoch": 0.3132686084142395,
      "grad_norm": 2.7774176597595215,
      "learning_rate": 4.446736590542497e-05,
      "loss": 1.0286,
      "step": 121
    },
    {
      "epoch": 0.31585760517799355,
      "grad_norm": 3.2560079097747803,
      "learning_rate": 4.3550039173489845e-05,
      "loss": 1.0915,
      "step": 122
    },
    {
      "epoch": 0.31844660194174756,
      "grad_norm": 3.729802370071411,
      "learning_rate": 4.2634915097268115e-05,
      "loss": 1.0848,
      "step": 123
    },
    {
      "epoch": 0.32103559870550163,
      "grad_norm": 4.8128886222839355,
      "learning_rate": 4.1722306190793495e-05,
      "loss": 1.2002,
      "step": 124
    },
    {
      "epoch": 0.32362459546925565,
      "grad_norm": 6.0503363609313965,
      "learning_rate": 4.0812524109171476e-05,
      "loss": 1.3538,
      "step": 125
    },
    {
      "epoch": 0.3262135922330097,
      "grad_norm": 1.4161758422851562,
      "learning_rate": 3.99058795421495e-05,
      "loss": 0.7361,
      "step": 126
    },
    {
      "epoch": 0.32880258899676373,
      "grad_norm": 1.7377864122390747,
      "learning_rate": 3.9002682108016585e-05,
      "loss": 0.8361,
      "step": 127
    },
    {
      "epoch": 0.3313915857605178,
      "grad_norm": 1.9191443920135498,
      "learning_rate": 3.8103240247869075e-05,
      "loss": 0.8595,
      "step": 128
    },
    {
      "epoch": 0.3339805825242718,
      "grad_norm": 1.9472070932388306,
      "learning_rate": 3.720786112027822e-05,
      "loss": 0.8621,
      "step": 129
    },
    {
      "epoch": 0.3365695792880259,
      "grad_norm": 1.959277629852295,
      "learning_rate": 3.631685049639586e-05,
      "loss": 0.8201,
      "step": 130
    },
    {
      "epoch": 0.33915857605177996,
      "grad_norm": 2.203312635421753,
      "learning_rate": 3.543051265553377e-05,
      "loss": 0.8951,
      "step": 131
    },
    {
      "epoch": 0.341747572815534,
      "grad_norm": 2.3594112396240234,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.9094,
      "step": 132
    },
    {
      "epoch": 0.34433656957928804,
      "grad_norm": 2.7733709812164307,
      "learning_rate": 3.367306435799584e-05,
      "loss": 1.0573,
      "step": 133
    },
    {
      "epoch": 0.34692556634304206,
      "grad_norm": 2.709257125854492,
      "learning_rate": 3.2802554068303596e-05,
      "loss": 0.8995,
      "step": 134
    },
    {
      "epoch": 0.34951456310679613,
      "grad_norm": 3.445384979248047,
      "learning_rate": 3.1937916690642356e-05,
      "loss": 1.0222,
      "step": 135
    },
    {
      "epoch": 0.35210355987055014,
      "grad_norm": 4.074124336242676,
      "learning_rate": 3.107944749788449e-05,
      "loss": 1.2069,
      "step": 136
    },
    {
      "epoch": 0.3546925566343042,
      "grad_norm": 5.069247245788574,
      "learning_rate": 3.0227439656472877e-05,
      "loss": 1.2274,
      "step": 137
    },
    {
      "epoch": 0.3572815533980582,
      "grad_norm": 4.2536211013793945,
      "learning_rate": 2.9382184126304834e-05,
      "loss": 1.0609,
      "step": 138
    },
    {
      "epoch": 0.3598705501618123,
      "grad_norm": 1.5287104845046997,
      "learning_rate": 2.8543969561369556e-05,
      "loss": 0.7689,
      "step": 139
    },
    {
      "epoch": 0.36245954692556637,
      "grad_norm": 1.7438405752182007,
      "learning_rate": 2.771308221117309e-05,
      "loss": 0.8113,
      "step": 140
    },
    {
      "epoch": 0.3650485436893204,
      "grad_norm": 1.8701605796813965,
      "learning_rate": 2.688980582298435e-05,
      "loss": 0.8464,
      "step": 141
    },
    {
      "epoch": 0.36763754045307445,
      "grad_norm": 1.9416054487228394,
      "learning_rate": 2.607442154493568e-05,
      "loss": 0.8544,
      "step": 142
    },
    {
      "epoch": 0.37022653721682847,
      "grad_norm": 1.9130077362060547,
      "learning_rate": 2.5267207830011068e-05,
      "loss": 0.8093,
      "step": 143
    },
    {
      "epoch": 0.37281553398058254,
      "grad_norm": 2.212625741958618,
      "learning_rate": 2.446844034095466e-05,
      "loss": 0.942,
      "step": 144
    },
    {
      "epoch": 0.37540453074433655,
      "grad_norm": 2.4185450077056885,
      "learning_rate": 2.3678391856132204e-05,
      "loss": 0.9415,
      "step": 145
    },
    {
      "epoch": 0.3779935275080906,
      "grad_norm": 2.5846853256225586,
      "learning_rate": 2.2897332176377528e-05,
      "loss": 0.9444,
      "step": 146
    },
    {
      "epoch": 0.38058252427184464,
      "grad_norm": 3.020327091217041,
      "learning_rate": 2.2125528032855724e-05,
      "loss": 1.0272,
      "step": 147
    },
    {
      "epoch": 0.3831715210355987,
      "grad_norm": 3.689166784286499,
      "learning_rate": 2.136324299597474e-05,
      "loss": 1.0999,
      "step": 148
    },
    {
      "epoch": 0.3857605177993528,
      "grad_norm": 4.469290256500244,
      "learning_rate": 2.061073738537635e-05,
      "loss": 1.1435,
      "step": 149
    },
    {
      "epoch": 0.3883495145631068,
      "grad_norm": 5.824495315551758,
      "learning_rate": 1.9868268181037185e-05,
      "loss": 1.2122,
      "step": 150
    },
    {
      "epoch": 0.3883495145631068,
      "eval_loss": 0.23401835560798645,
      "eval_runtime": 65.3101,
      "eval_samples_per_second": 39.856,
      "eval_steps_per_second": 4.992,
      "step": 150
    },
    {
      "epoch": 0.39093851132686086,
      "grad_norm": 1.4624581336975098,
      "learning_rate": 1.9136088935510362e-05,
      "loss": 0.7601,
      "step": 151
    },
    {
      "epoch": 0.3935275080906149,
      "grad_norm": 1.5358047485351562,
      "learning_rate": 1.8414449687337464e-05,
      "loss": 0.7365,
      "step": 152
    },
    {
      "epoch": 0.39611650485436894,
      "grad_norm": 1.8620479106903076,
      "learning_rate": 1.7703596875660645e-05,
      "loss": 0.8749,
      "step": 153
    },
    {
      "epoch": 0.39870550161812296,
      "grad_norm": 1.838742733001709,
      "learning_rate": 1.700377325606388e-05,
      "loss": 0.8029,
      "step": 154
    },
    {
      "epoch": 0.40129449838187703,
      "grad_norm": 1.8708423376083374,
      "learning_rate": 1.631521781767214e-05,
      "loss": 0.8374,
      "step": 155
    },
    {
      "epoch": 0.40388349514563104,
      "grad_norm": 1.9949666261672974,
      "learning_rate": 1.5638165701536868e-05,
      "loss": 0.7693,
      "step": 156
    },
    {
      "epoch": 0.4064724919093851,
      "grad_norm": 2.179053783416748,
      "learning_rate": 1.4972848120335453e-05,
      "loss": 0.8748,
      "step": 157
    },
    {
      "epoch": 0.4090614886731392,
      "grad_norm": 2.389301300048828,
      "learning_rate": 1.4319492279412388e-05,
      "loss": 0.9519,
      "step": 158
    },
    {
      "epoch": 0.4116504854368932,
      "grad_norm": 2.7463135719299316,
      "learning_rate": 1.3678321299188801e-05,
      "loss": 0.9744,
      "step": 159
    },
    {
      "epoch": 0.41423948220064727,
      "grad_norm": 2.8853588104248047,
      "learning_rate": 1.3049554138967051e-05,
      "loss": 0.9669,
      "step": 160
    },
    {
      "epoch": 0.4168284789644013,
      "grad_norm": 3.77809739112854,
      "learning_rate": 1.2433405522156332e-05,
      "loss": 1.0887,
      "step": 161
    },
    {
      "epoch": 0.41941747572815535,
      "grad_norm": 4.965099811553955,
      "learning_rate": 1.183008586294485e-05,
      "loss": 1.1214,
      "step": 162
    },
    {
      "epoch": 0.42200647249190937,
      "grad_norm": 3.675790548324585,
      "learning_rate": 1.1239801194443506e-05,
      "loss": 0.8787,
      "step": 163
    },
    {
      "epoch": 0.42459546925566344,
      "grad_norm": 1.4695855379104614,
      "learning_rate": 1.066275309832584e-05,
      "loss": 0.7685,
      "step": 164
    },
    {
      "epoch": 0.42718446601941745,
      "grad_norm": 1.6097335815429688,
      "learning_rate": 1.0099138635988026e-05,
      "loss": 0.7544,
      "step": 165
    },
    {
      "epoch": 0.4297734627831715,
      "grad_norm": 1.7829991579055786,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.8276,
      "step": 166
    },
    {
      "epoch": 0.4323624595469256,
      "grad_norm": 1.853384017944336,
      "learning_rate": 9.012975854638949e-06,
      "loss": 0.8276,
      "step": 167
    },
    {
      "epoch": 0.4349514563106796,
      "grad_norm": 1.9285295009613037,
      "learning_rate": 8.490798459222476e-06,
      "loss": 0.7999,
      "step": 168
    },
    {
      "epoch": 0.4375404530744337,
      "grad_norm": 1.986692190170288,
      "learning_rate": 7.982796418105371e-06,
      "loss": 0.8228,
      "step": 169
    },
    {
      "epoch": 0.4401294498381877,
      "grad_norm": 2.2278285026550293,
      "learning_rate": 7.489143213519301e-06,
      "loss": 0.8695,
      "step": 170
    },
    {
      "epoch": 0.44271844660194176,
      "grad_norm": 2.41101336479187,
      "learning_rate": 7.010007427581378e-06,
      "loss": 0.9275,
      "step": 171
    },
    {
      "epoch": 0.4453074433656958,
      "grad_norm": 2.734466791152954,
      "learning_rate": 6.5455526847235825e-06,
      "loss": 0.914,
      "step": 172
    },
    {
      "epoch": 0.44789644012944985,
      "grad_norm": 3.5805823802948,
      "learning_rate": 6.0959375958151045e-06,
      "loss": 1.076,
      "step": 173
    },
    {
      "epoch": 0.45048543689320386,
      "grad_norm": 4.150850296020508,
      "learning_rate": 5.6613157039969055e-06,
      "loss": 1.0419,
      "step": 174
    },
    {
      "epoch": 0.45307443365695793,
      "grad_norm": 5.833034992218018,
      "learning_rate": 5.241835432246889e-06,
      "loss": 1.2505,
      "step": 175
    },
    {
      "epoch": 0.455663430420712,
      "grad_norm": 1.305367112159729,
      "learning_rate": 4.837640032693558e-06,
      "loss": 0.686,
      "step": 176
    },
    {
      "epoch": 0.458252427184466,
      "grad_norm": 1.51358962059021,
      "learning_rate": 4.448867537695578e-06,
      "loss": 0.7401,
      "step": 177
    },
    {
      "epoch": 0.4608414239482201,
      "grad_norm": 1.6367487907409668,
      "learning_rate": 4.075650712703849e-06,
      "loss": 0.7588,
      "step": 178
    },
    {
      "epoch": 0.4634304207119741,
      "grad_norm": 1.768928050994873,
      "learning_rate": 3.71811701092219e-06,
      "loss": 0.7916,
      "step": 179
    },
    {
      "epoch": 0.46601941747572817,
      "grad_norm": 1.8522348403930664,
      "learning_rate": 3.376388529782215e-06,
      "loss": 0.7913,
      "step": 180
    },
    {
      "epoch": 0.4686084142394822,
      "grad_norm": 1.9595167636871338,
      "learning_rate": 3.0505819692471792e-06,
      "loss": 0.7881,
      "step": 181
    },
    {
      "epoch": 0.47119741100323626,
      "grad_norm": 2.242990493774414,
      "learning_rate": 2.7408085919590264e-06,
      "loss": 0.9085,
      "step": 182
    },
    {
      "epoch": 0.47378640776699027,
      "grad_norm": 2.380833625793457,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.883,
      "step": 183
    },
    {
      "epoch": 0.47637540453074434,
      "grad_norm": 2.463675022125244,
      "learning_rate": 2.1697790249779636e-06,
      "loss": 0.8586,
      "step": 184
    },
    {
      "epoch": 0.47896440129449835,
      "grad_norm": 3.089585304260254,
      "learning_rate": 1.908717841359048e-06,
      "loss": 0.9745,
      "step": 185
    },
    {
      "epoch": 0.4815533980582524,
      "grad_norm": 3.7443113327026367,
      "learning_rate": 1.6640797865406288e-06,
      "loss": 1.0367,
      "step": 186
    },
    {
      "epoch": 0.4841423948220065,
      "grad_norm": 4.425037860870361,
      "learning_rate": 1.4359484041943038e-06,
      "loss": 1.0985,
      "step": 187
    },
    {
      "epoch": 0.4867313915857605,
      "grad_norm": 3.4299700260162354,
      "learning_rate": 1.2244016009781701e-06,
      "loss": 0.9096,
      "step": 188
    },
    {
      "epoch": 0.4893203883495146,
      "grad_norm": 1.3923226594924927,
      "learning_rate": 1.0295116199317057e-06,
      "loss": 0.6823,
      "step": 189
    },
    {
      "epoch": 0.4919093851132686,
      "grad_norm": 1.570144772529602,
      "learning_rate": 8.513450158049108e-07,
      "loss": 0.7374,
      "step": 190
    },
    {
      "epoch": 0.49449838187702266,
      "grad_norm": 1.7391892671585083,
      "learning_rate": 6.899626323298713e-07,
      "loss": 0.8171,
      "step": 191
    },
    {
      "epoch": 0.4970873786407767,
      "grad_norm": 1.8321576118469238,
      "learning_rate": 5.454195814427021e-07,
      "loss": 0.807,
      "step": 192
    },
    {
      "epoch": 0.49967637540453075,
      "grad_norm": 1.9245028495788574,
      "learning_rate": 4.177652244628627e-07,
      "loss": 0.774,
      "step": 193
    },
    {
      "epoch": 0.5022653721682848,
      "grad_norm": 2.206279754638672,
      "learning_rate": 3.0704315523631953e-07,
      "loss": 0.8755,
      "step": 194
    },
    {
      "epoch": 0.5048543689320388,
      "grad_norm": 2.2507548332214355,
      "learning_rate": 2.1329118524827662e-07,
      "loss": 0.8724,
      "step": 195
    },
    {
      "epoch": 0.5074433656957928,
      "grad_norm": 2.3572773933410645,
      "learning_rate": 1.3654133071059893e-07,
      "loss": 0.9072,
      "step": 196
    },
    {
      "epoch": 0.510032362459547,
      "grad_norm": 2.630845785140991,
      "learning_rate": 7.681980162830282e-08,
      "loss": 0.8965,
      "step": 197
    },
    {
      "epoch": 0.512621359223301,
      "grad_norm": 3.3030409812927246,
      "learning_rate": 3.4146992848854695e-08,
      "loss": 0.9997,
      "step": 198
    },
    {
      "epoch": 0.515210355987055,
      "grad_norm": 4.137526988983154,
      "learning_rate": 8.537477097364522e-09,
      "loss": 1.0538,
      "step": 199
    },
    {
      "epoch": 0.517799352750809,
      "grad_norm": 5.67355489730835,
      "learning_rate": 0.0,
      "loss": 1.1843,
      "step": 200
    },
    {
      "epoch": 0.517799352750809,
      "eval_loss": 0.22498248517513275,
      "eval_runtime": 64.2833,
      "eval_samples_per_second": 40.493,
      "eval_steps_per_second": 5.071,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.1605406885431214e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}