{
  "best_metric": 9.01811695098877,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 2.6084142394822005,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.025889967637540454,
      "grad_norm": 11.74299144744873,
      "learning_rate": 5.000000000000001e-07,
      "loss": 9.4965,
      "step": 1
    },
    {
      "epoch": 0.025889967637540454,
      "eval_loss": 9.478606224060059,
      "eval_runtime": 0.4121,
      "eval_samples_per_second": 157.735,
      "eval_steps_per_second": 41.254,
      "step": 1
    },
    {
      "epoch": 0.05177993527508091,
      "grad_norm": 11.671175003051758,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 9.5064,
      "step": 2
    },
    {
      "epoch": 0.07766990291262135,
      "grad_norm": 12.935548782348633,
      "learning_rate": 1.5e-06,
      "loss": 9.3157,
      "step": 3
    },
    {
      "epoch": 0.10355987055016182,
      "grad_norm": 13.709819793701172,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 9.7602,
      "step": 4
    },
    {
      "epoch": 0.12944983818770225,
      "grad_norm": 13.389347076416016,
      "learning_rate": 2.5e-06,
      "loss": 9.5181,
      "step": 5
    },
    {
      "epoch": 0.1553398058252427,
      "grad_norm": 12.952323913574219,
      "learning_rate": 3e-06,
      "loss": 9.5492,
      "step": 6
    },
    {
      "epoch": 0.18122977346278318,
      "grad_norm": 12.68387508392334,
      "learning_rate": 3.5e-06,
      "loss": 9.5063,
      "step": 7
    },
    {
      "epoch": 0.20711974110032363,
      "grad_norm": 12.480040550231934,
      "learning_rate": 4.000000000000001e-06,
      "loss": 9.5266,
      "step": 8
    },
    {
      "epoch": 0.23300970873786409,
      "grad_norm": 12.971917152404785,
      "learning_rate": 4.5e-06,
      "loss": 9.4577,
      "step": 9
    },
    {
      "epoch": 0.2588996763754045,
      "grad_norm": 11.367093086242676,
      "learning_rate": 5e-06,
      "loss": 9.6127,
      "step": 10
    },
    {
      "epoch": 0.284789644012945,
      "grad_norm": 11.295645713806152,
      "learning_rate": 4.99847706754774e-06,
      "loss": 9.3534,
      "step": 11
    },
    {
      "epoch": 0.3106796116504854,
      "grad_norm": 12.023831367492676,
      "learning_rate": 4.993910125649561e-06,
      "loss": 9.055,
      "step": 12
    },
    {
      "epoch": 0.3365695792880259,
      "grad_norm": 12.505583763122559,
      "learning_rate": 4.986304738420684e-06,
      "loss": 9.3667,
      "step": 13
    },
    {
      "epoch": 0.36245954692556637,
      "grad_norm": 12.947257041931152,
      "learning_rate": 4.975670171853926e-06,
      "loss": 9.396,
      "step": 14
    },
    {
      "epoch": 0.3883495145631068,
      "grad_norm": 12.820213317871094,
      "learning_rate": 4.962019382530521e-06,
      "loss": 9.3142,
      "step": 15
    },
    {
      "epoch": 0.41423948220064727,
      "grad_norm": 11.85090160369873,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 9.3381,
      "step": 16
    },
    {
      "epoch": 0.4401294498381877,
      "grad_norm": 14.81877613067627,
      "learning_rate": 4.925739315689991e-06,
      "loss": 9.519,
      "step": 17
    },
    {
      "epoch": 0.46601941747572817,
      "grad_norm": 14.836597442626953,
      "learning_rate": 4.903154239845798e-06,
      "loss": 9.4054,
      "step": 18
    },
    {
      "epoch": 0.4919093851132686,
      "grad_norm": 12.943735122680664,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 9.4375,
      "step": 19
    },
    {
      "epoch": 0.517799352750809,
      "grad_norm": 12.675292015075684,
      "learning_rate": 4.849231551964771e-06,
      "loss": 9.3666,
      "step": 20
    },
    {
      "epoch": 0.5436893203883495,
      "grad_norm": 11.758707046508789,
      "learning_rate": 4.817959636416969e-06,
      "loss": 9.3973,
      "step": 21
    },
    {
      "epoch": 0.56957928802589,
      "grad_norm": 12.52666187286377,
      "learning_rate": 4.783863644106502e-06,
      "loss": 9.3869,
      "step": 22
    },
    {
      "epoch": 0.5954692556634305,
      "grad_norm": 13.23238468170166,
      "learning_rate": 4.746985115747918e-06,
      "loss": 9.423,
      "step": 23
    },
    {
      "epoch": 0.6213592233009708,
      "grad_norm": 11.95095443725586,
      "learning_rate": 4.707368982147318e-06,
      "loss": 9.3316,
      "step": 24
    },
    {
      "epoch": 0.6472491909385113,
      "grad_norm": 12.786251068115234,
      "learning_rate": 4.665063509461098e-06,
      "loss": 9.2699,
      "step": 25
    },
    {
      "epoch": 0.6731391585760518,
      "grad_norm": 11.97091293334961,
      "learning_rate": 4.620120240391065e-06,
      "loss": 9.2894,
      "step": 26
    },
    {
      "epoch": 0.6990291262135923,
      "grad_norm": 13.269583702087402,
      "learning_rate": 4.572593931387604e-06,
      "loss": 9.3116,
      "step": 27
    },
    {
      "epoch": 0.7249190938511327,
      "grad_norm": 11.793617248535156,
      "learning_rate": 4.522542485937369e-06,
      "loss": 9.3654,
      "step": 28
    },
    {
      "epoch": 0.7508090614886731,
      "grad_norm": 11.819991111755371,
      "learning_rate": 4.470026884016805e-06,
      "loss": 9.2624,
      "step": 29
    },
    {
      "epoch": 0.7766990291262136,
      "grad_norm": 13.980379104614258,
      "learning_rate": 4.415111107797445e-06,
      "loss": 9.2752,
      "step": 30
    },
    {
      "epoch": 0.8025889967637541,
      "grad_norm": 15.096542358398438,
      "learning_rate": 4.357862063693486e-06,
      "loss": 9.3893,
      "step": 31
    },
    {
      "epoch": 0.8284789644012945,
      "grad_norm": 12.700614929199219,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 9.2669,
      "step": 32
    },
    {
      "epoch": 0.8543689320388349,
      "grad_norm": 13.240720748901367,
      "learning_rate": 4.236645926147493e-06,
      "loss": 9.1943,
      "step": 33
    },
    {
      "epoch": 0.8802588996763754,
      "grad_norm": 12.622847557067871,
      "learning_rate": 4.172826515897146e-06,
      "loss": 9.2701,
      "step": 34
    },
    {
      "epoch": 0.9061488673139159,
      "grad_norm": 14.08974552154541,
      "learning_rate": 4.106969024216348e-06,
      "loss": 9.4026,
      "step": 35
    },
    {
      "epoch": 0.9320388349514563,
      "grad_norm": 12.45661449432373,
      "learning_rate": 4.039153688314146e-06,
      "loss": 9.2997,
      "step": 36
    },
    {
      "epoch": 0.9579288025889967,
      "grad_norm": 12.231471061706543,
      "learning_rate": 3.969463130731183e-06,
      "loss": 9.1133,
      "step": 37
    },
    {
      "epoch": 0.9838187702265372,
      "grad_norm": 13.44810962677002,
      "learning_rate": 3.897982258676867e-06,
      "loss": 9.3883,
      "step": 38
    },
    {
      "epoch": 1.0194174757281553,
      "grad_norm": 20.87478256225586,
      "learning_rate": 3.824798160583012e-06,
      "loss": 15.709,
      "step": 39
    },
    {
      "epoch": 1.0453074433656957,
      "grad_norm": 11.6742525100708,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 9.1093,
      "step": 40
    },
    {
      "epoch": 1.0711974110032363,
      "grad_norm": 11.33270263671875,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 9.1661,
      "step": 41
    },
    {
      "epoch": 1.0970873786407767,
      "grad_norm": 14.351101875305176,
      "learning_rate": 3.595927866972694e-06,
      "loss": 9.304,
      "step": 42
    },
    {
      "epoch": 1.1229773462783172,
      "grad_norm": 14.262197494506836,
      "learning_rate": 3.516841607689501e-06,
      "loss": 9.3505,
      "step": 43
    },
    {
      "epoch": 1.1488673139158576,
      "grad_norm": 12.826080322265625,
      "learning_rate": 3.436516483539781e-06,
      "loss": 9.0723,
      "step": 44
    },
    {
      "epoch": 1.174757281553398,
      "grad_norm": 13.243085861206055,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 9.1447,
      "step": 45
    },
    {
      "epoch": 1.2006472491909386,
      "grad_norm": 12.890806198120117,
      "learning_rate": 3.272542485937369e-06,
      "loss": 9.1691,
      "step": 46
    },
    {
      "epoch": 1.226537216828479,
      "grad_norm": 14.562002182006836,
      "learning_rate": 3.189093389542498e-06,
      "loss": 9.5301,
      "step": 47
    },
    {
      "epoch": 1.2524271844660193,
      "grad_norm": 11.241775512695312,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 9.257,
      "step": 48
    },
    {
      "epoch": 1.27831715210356,
      "grad_norm": 11.758941650390625,
      "learning_rate": 3.019779227044398e-06,
      "loss": 9.2089,
      "step": 49
    },
    {
      "epoch": 1.3042071197411003,
      "grad_norm": 13.430214881896973,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 9.0806,
      "step": 50
    },
    {
      "epoch": 1.3042071197411003,
      "eval_loss": 9.157350540161133,
      "eval_runtime": 0.4117,
      "eval_samples_per_second": 157.863,
      "eval_steps_per_second": 41.287,
      "step": 50
    },
    {
      "epoch": 1.3300970873786409,
      "grad_norm": 12.582650184631348,
      "learning_rate": 2.847932752400164e-06,
      "loss": 9.1575,
      "step": 51
    },
    {
      "epoch": 1.3559870550161812,
      "grad_norm": 13.364744186401367,
      "learning_rate": 2.761321158169134e-06,
      "loss": 9.4123,
      "step": 52
    },
    {
      "epoch": 1.3818770226537218,
      "grad_norm": 11.977982521057129,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 9.1648,
      "step": 53
    },
    {
      "epoch": 1.4077669902912622,
      "grad_norm": 13.052461624145508,
      "learning_rate": 2.587248741756253e-06,
      "loss": 9.0674,
      "step": 54
    },
    {
      "epoch": 1.4336569579288025,
      "grad_norm": 13.613630294799805,
      "learning_rate": 2.5e-06,
      "loss": 9.1512,
      "step": 55
    },
    {
      "epoch": 1.4595469255663431,
      "grad_norm": 13.225386619567871,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 9.1321,
      "step": 56
    },
    {
      "epoch": 1.4854368932038835,
      "grad_norm": 12.972779273986816,
      "learning_rate": 2.325608815639687e-06,
      "loss": 8.9664,
      "step": 57
    },
    {
      "epoch": 1.5113268608414239,
      "grad_norm": 13.470908164978027,
      "learning_rate": 2.238678841830867e-06,
      "loss": 9.1312,
      "step": 58
    },
    {
      "epoch": 1.5372168284789645,
      "grad_norm": 12.796125411987305,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 8.9491,
      "step": 59
    },
    {
      "epoch": 1.5631067961165048,
      "grad_norm": 11.87038516998291,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 8.9343,
      "step": 60
    },
    {
      "epoch": 1.5889967637540452,
      "grad_norm": 14.072746276855469,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 8.988,
      "step": 61
    },
    {
      "epoch": 1.6148867313915858,
      "grad_norm": 13.03939151763916,
      "learning_rate": 1.895195261000831e-06,
      "loss": 8.9825,
      "step": 62
    },
    {
      "epoch": 1.6407766990291264,
      "grad_norm": 12.500960350036621,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 8.9965,
      "step": 63
    },
    {
      "epoch": 1.6666666666666665,
      "grad_norm": 13.652578353881836,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 9.1256,
      "step": 64
    },
    {
      "epoch": 1.692556634304207,
      "grad_norm": 13.726714134216309,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 8.875,
      "step": 65
    },
    {
      "epoch": 1.7184466019417477,
      "grad_norm": 11.982444763183594,
      "learning_rate": 1.56348351646022e-06,
      "loss": 9.0407,
      "step": 66
    },
    {
      "epoch": 1.744336569579288,
      "grad_norm": 11.878983497619629,
      "learning_rate": 1.4831583923105e-06,
      "loss": 8.9615,
      "step": 67
    },
    {
      "epoch": 1.7702265372168284,
      "grad_norm": 12.483765602111816,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 8.9653,
      "step": 68
    },
    {
      "epoch": 1.796116504854369,
      "grad_norm": 12.622501373291016,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 8.8598,
      "step": 69
    },
    {
      "epoch": 1.8220064724919094,
      "grad_norm": 13.878958702087402,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 9.0102,
      "step": 70
    },
    {
      "epoch": 1.8478964401294498,
      "grad_norm": 12.5238676071167,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 9.0324,
      "step": 71
    },
    {
      "epoch": 1.8737864077669903,
      "grad_norm": 13.529470443725586,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 8.9774,
      "step": 72
    },
    {
      "epoch": 1.8996763754045307,
      "grad_norm": 12.65139389038086,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 8.9267,
      "step": 73
    },
    {
      "epoch": 1.925566343042071,
      "grad_norm": 14.668947219848633,
      "learning_rate": 9.608463116858544e-07,
      "loss": 9.1233,
      "step": 74
    },
    {
      "epoch": 1.9514563106796117,
      "grad_norm": 12.97563362121582,
      "learning_rate": 8.930309757836517e-07,
      "loss": 9.0197,
      "step": 75
    },
    {
      "epoch": 1.9773462783171523,
      "grad_norm": 13.905110359191895,
      "learning_rate": 8.271734841028553e-07,
      "loss": 9.0242,
      "step": 76
    },
    {
      "epoch": 2.01294498381877,
      "grad_norm": 23.526731491088867,
      "learning_rate": 7.633540738525066e-07,
      "loss": 15.7391,
      "step": 77
    },
    {
      "epoch": 2.0388349514563107,
      "grad_norm": 12.419938087463379,
      "learning_rate": 7.016504991533727e-07,
      "loss": 8.9641,
      "step": 78
    },
    {
      "epoch": 2.0647249190938513,
      "grad_norm": 12.351256370544434,
      "learning_rate": 6.421379363065142e-07,
      "loss": 8.9708,
      "step": 79
    },
    {
      "epoch": 2.0906148867313914,
      "grad_norm": 13.134671211242676,
      "learning_rate": 5.848888922025553e-07,
      "loss": 8.6328,
      "step": 80
    },
    {
      "epoch": 2.116504854368932,
      "grad_norm": 13.836918830871582,
      "learning_rate": 5.299731159831953e-07,
      "loss": 9.0935,
      "step": 81
    },
    {
      "epoch": 2.1423948220064726,
      "grad_norm": 14.656155586242676,
      "learning_rate": 4.774575140626317e-07,
      "loss": 9.0881,
      "step": 82
    },
    {
      "epoch": 2.168284789644013,
      "grad_norm": 13.374807357788086,
      "learning_rate": 4.27406068612396e-07,
      "loss": 8.9988,
      "step": 83
    },
    {
      "epoch": 2.1941747572815533,
      "grad_norm": 12.77652645111084,
      "learning_rate": 3.798797596089351e-07,
      "loss": 9.1808,
      "step": 84
    },
    {
      "epoch": 2.220064724919094,
      "grad_norm": 14.176050186157227,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 8.8116,
      "step": 85
    },
    {
      "epoch": 2.2459546925566345,
      "grad_norm": 12.411694526672363,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 9.0052,
      "step": 86
    },
    {
      "epoch": 2.2718446601941746,
      "grad_norm": 11.73877239227295,
      "learning_rate": 2.53014884252083e-07,
      "loss": 8.9109,
      "step": 87
    },
    {
      "epoch": 2.2977346278317152,
      "grad_norm": 12.658856391906738,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 8.909,
      "step": 88
    },
    {
      "epoch": 2.323624595469256,
      "grad_norm": 12.234189987182617,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 8.8621,
      "step": 89
    },
    {
      "epoch": 2.349514563106796,
      "grad_norm": 13.402729988098145,
      "learning_rate": 1.507684480352292e-07,
      "loss": 9.1459,
      "step": 90
    },
    {
      "epoch": 2.3754045307443366,
      "grad_norm": 12.723470687866211,
      "learning_rate": 1.223587092621162e-07,
      "loss": 9.0186,
      "step": 91
    },
    {
      "epoch": 2.401294498381877,
      "grad_norm": 13.641341209411621,
      "learning_rate": 9.684576015420277e-08,
      "loss": 9.2626,
      "step": 92
    },
    {
      "epoch": 2.4271844660194173,
      "grad_norm": 12.847984313964844,
      "learning_rate": 7.426068431000883e-08,
      "loss": 8.6923,
      "step": 93
    },
    {
      "epoch": 2.453074433656958,
      "grad_norm": 13.19314956665039,
      "learning_rate": 5.463099816548578e-08,
      "loss": 9.08,
      "step": 94
    },
    {
      "epoch": 2.4789644012944985,
      "grad_norm": 13.388517379760742,
      "learning_rate": 3.798061746947995e-08,
      "loss": 9.3973,
      "step": 95
    },
    {
      "epoch": 2.5048543689320386,
      "grad_norm": 12.82660961151123,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 9.0044,
      "step": 96
    },
    {
      "epoch": 2.530744336569579,
      "grad_norm": 13.584592819213867,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 9.0041,
      "step": 97
    },
    {
      "epoch": 2.55663430420712,
      "grad_norm": 12.776042938232422,
      "learning_rate": 6.089874350439507e-09,
      "loss": 9.1079,
      "step": 98
    },
    {
      "epoch": 2.58252427184466,
      "grad_norm": 12.881516456604004,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 9.0031,
      "step": 99
    },
    {
      "epoch": 2.6084142394822005,
      "grad_norm": 13.592841148376465,
      "learning_rate": 0.0,
      "loss": 9.0054,
      "step": 100
    },
    {
      "epoch": 2.6084142394822005,
      "eval_loss": 9.01811695098877,
      "eval_runtime": 0.4122,
      "eval_samples_per_second": 157.701,
      "eval_steps_per_second": 41.245,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 54299040153600.0,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|