{
  "best_metric": 3.6099913120269775,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 1.362098138747885,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01353637901861252,
      "grad_norm": 0.9948562979698181,
      "learning_rate": 5.000000000000001e-07,
      "loss": 3.8518,
      "step": 1
    },
    {
      "epoch": 0.01353637901861252,
      "eval_loss": 3.73663592338562,
      "eval_runtime": 3.0532,
      "eval_samples_per_second": 40.941,
      "eval_steps_per_second": 10.481,
      "step": 1
    },
    {
      "epoch": 0.02707275803722504,
      "grad_norm": 0.8523895144462585,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 3.6192,
      "step": 2
    },
    {
      "epoch": 0.04060913705583756,
      "grad_norm": 0.87421715259552,
      "learning_rate": 1.5e-06,
      "loss": 3.4214,
      "step": 3
    },
    {
      "epoch": 0.05414551607445008,
      "grad_norm": 0.8437895774841309,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 3.4929,
      "step": 4
    },
    {
      "epoch": 0.0676818950930626,
      "grad_norm": 0.9417490363121033,
      "learning_rate": 2.5e-06,
      "loss": 3.8509,
      "step": 5
    },
    {
      "epoch": 0.08121827411167512,
      "grad_norm": 1.0578491687774658,
      "learning_rate": 3e-06,
      "loss": 3.611,
      "step": 6
    },
    {
      "epoch": 0.09475465313028765,
      "grad_norm": 0.9789345860481262,
      "learning_rate": 3.5e-06,
      "loss": 3.6955,
      "step": 7
    },
    {
      "epoch": 0.10829103214890017,
      "grad_norm": 1.0157725811004639,
      "learning_rate": 4.000000000000001e-06,
      "loss": 3.4882,
      "step": 8
    },
    {
      "epoch": 0.1218274111675127,
      "grad_norm": 0.925398051738739,
      "learning_rate": 4.5e-06,
      "loss": 3.5624,
      "step": 9
    },
    {
      "epoch": 0.1353637901861252,
      "grad_norm": 1.0446271896362305,
      "learning_rate": 5e-06,
      "loss": 3.4926,
      "step": 10
    },
    {
      "epoch": 0.14890016920473773,
      "grad_norm": 1.1525206565856934,
      "learning_rate": 4.99847706754774e-06,
      "loss": 3.8015,
      "step": 11
    },
    {
      "epoch": 0.16243654822335024,
      "grad_norm": 1.1337149143218994,
      "learning_rate": 4.993910125649561e-06,
      "loss": 3.8748,
      "step": 12
    },
    {
      "epoch": 0.17597292724196278,
      "grad_norm": 1.2873902320861816,
      "learning_rate": 4.986304738420684e-06,
      "loss": 3.6576,
      "step": 13
    },
    {
      "epoch": 0.1895093062605753,
      "grad_norm": 1.2369529008865356,
      "learning_rate": 4.975670171853926e-06,
      "loss": 3.7863,
      "step": 14
    },
    {
      "epoch": 0.20304568527918782,
      "grad_norm": 1.4233301877975464,
      "learning_rate": 4.962019382530521e-06,
      "loss": 4.0813,
      "step": 15
    },
    {
      "epoch": 0.21658206429780033,
      "grad_norm": 1.3197875022888184,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 3.6418,
      "step": 16
    },
    {
      "epoch": 0.23011844331641285,
      "grad_norm": 1.519278645515442,
      "learning_rate": 4.925739315689991e-06,
      "loss": 3.6749,
      "step": 17
    },
    {
      "epoch": 0.2436548223350254,
      "grad_norm": 1.6284841299057007,
      "learning_rate": 4.903154239845798e-06,
      "loss": 3.8066,
      "step": 18
    },
    {
      "epoch": 0.2571912013536379,
      "grad_norm": 0.8777332901954651,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 3.7021,
      "step": 19
    },
    {
      "epoch": 0.2707275803722504,
      "grad_norm": 1.0336490869522095,
      "learning_rate": 4.849231551964771e-06,
      "loss": 3.8224,
      "step": 20
    },
    {
      "epoch": 0.28426395939086296,
      "grad_norm": 0.8481849431991577,
      "learning_rate": 4.817959636416969e-06,
      "loss": 3.4932,
      "step": 21
    },
    {
      "epoch": 0.29780033840947545,
      "grad_norm": 0.8440786004066467,
      "learning_rate": 4.783863644106502e-06,
      "loss": 3.5257,
      "step": 22
    },
    {
      "epoch": 0.311336717428088,
      "grad_norm": 0.969007134437561,
      "learning_rate": 4.746985115747918e-06,
      "loss": 3.4107,
      "step": 23
    },
    {
      "epoch": 0.3248730964467005,
      "grad_norm": 1.0689656734466553,
      "learning_rate": 4.707368982147318e-06,
      "loss": 3.5495,
      "step": 24
    },
    {
      "epoch": 0.338409475465313,
      "grad_norm": 1.026020884513855,
      "learning_rate": 4.665063509461098e-06,
      "loss": 3.6805,
      "step": 25
    },
    {
      "epoch": 0.35194585448392557,
      "grad_norm": 1.0092445611953735,
      "learning_rate": 4.620120240391065e-06,
      "loss": 3.6131,
      "step": 26
    },
    {
      "epoch": 0.36548223350253806,
      "grad_norm": 1.1854077577590942,
      "learning_rate": 4.572593931387604e-06,
      "loss": 3.562,
      "step": 27
    },
    {
      "epoch": 0.3790186125211506,
      "grad_norm": 1.1046980619430542,
      "learning_rate": 4.522542485937369e-06,
      "loss": 3.4708,
      "step": 28
    },
    {
      "epoch": 0.3925549915397631,
      "grad_norm": 1.0798814296722412,
      "learning_rate": 4.470026884016805e-06,
      "loss": 3.5845,
      "step": 29
    },
    {
      "epoch": 0.40609137055837563,
      "grad_norm": 1.3098347187042236,
      "learning_rate": 4.415111107797445e-06,
      "loss": 3.7222,
      "step": 30
    },
    {
      "epoch": 0.4196277495769882,
      "grad_norm": 1.1865979433059692,
      "learning_rate": 4.357862063693486e-06,
      "loss": 3.8202,
      "step": 31
    },
    {
      "epoch": 0.43316412859560066,
      "grad_norm": 1.3540643453598022,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 3.8541,
      "step": 32
    },
    {
      "epoch": 0.4467005076142132,
      "grad_norm": 1.3513485193252563,
      "learning_rate": 4.236645926147493e-06,
      "loss": 3.7572,
      "step": 33
    },
    {
      "epoch": 0.4602368866328257,
      "grad_norm": 1.4294140338897705,
      "learning_rate": 4.172826515897146e-06,
      "loss": 3.8498,
      "step": 34
    },
    {
      "epoch": 0.47377326565143824,
      "grad_norm": 1.4952620267868042,
      "learning_rate": 4.106969024216348e-06,
      "loss": 3.8439,
      "step": 35
    },
    {
      "epoch": 0.4873096446700508,
      "grad_norm": 1.7355625629425049,
      "learning_rate": 4.039153688314146e-06,
      "loss": 3.8003,
      "step": 36
    },
    {
      "epoch": 0.5008460236886633,
      "grad_norm": 0.8707943558692932,
      "learning_rate": 3.969463130731183e-06,
      "loss": 3.6308,
      "step": 37
    },
    {
      "epoch": 0.5143824027072758,
      "grad_norm": 1.0384621620178223,
      "learning_rate": 3.897982258676867e-06,
      "loss": 3.5805,
      "step": 38
    },
    {
      "epoch": 0.5279187817258884,
      "grad_norm": 0.9388870000839233,
      "learning_rate": 3.824798160583012e-06,
      "loss": 3.7473,
      "step": 39
    },
    {
      "epoch": 0.5414551607445008,
      "grad_norm": 1.0139713287353516,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 3.6321,
      "step": 40
    },
    {
      "epoch": 0.5549915397631133,
      "grad_norm": 1.038153052330017,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 3.57,
      "step": 41
    },
    {
      "epoch": 0.5685279187817259,
      "grad_norm": 1.0182040929794312,
      "learning_rate": 3.595927866972694e-06,
      "loss": 3.4588,
      "step": 42
    },
    {
      "epoch": 0.5820642978003384,
      "grad_norm": 1.0943628549575806,
      "learning_rate": 3.516841607689501e-06,
      "loss": 3.6839,
      "step": 43
    },
    {
      "epoch": 0.5956006768189509,
      "grad_norm": 0.9982971549034119,
      "learning_rate": 3.436516483539781e-06,
      "loss": 3.4144,
      "step": 44
    },
    {
      "epoch": 0.6091370558375635,
      "grad_norm": 1.1169984340667725,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 3.745,
      "step": 45
    },
    {
      "epoch": 0.622673434856176,
      "grad_norm": 1.181325912475586,
      "learning_rate": 3.272542485937369e-06,
      "loss": 3.6186,
      "step": 46
    },
    {
      "epoch": 0.6362098138747885,
      "grad_norm": 1.1740070581436157,
      "learning_rate": 3.189093389542498e-06,
      "loss": 3.4177,
      "step": 47
    },
    {
      "epoch": 0.649746192893401,
      "grad_norm": 1.236719012260437,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 3.4231,
      "step": 48
    },
    {
      "epoch": 0.6632825719120136,
      "grad_norm": 1.3131681680679321,
      "learning_rate": 3.019779227044398e-06,
      "loss": 3.5528,
      "step": 49
    },
    {
      "epoch": 0.676818950930626,
      "grad_norm": 1.3397260904312134,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 3.7156,
      "step": 50
    },
    {
      "epoch": 0.676818950930626,
      "eval_loss": 3.658907413482666,
      "eval_runtime": 2.6656,
      "eval_samples_per_second": 46.894,
      "eval_steps_per_second": 12.005,
      "step": 50
    },
    {
      "epoch": 0.6903553299492385,
      "grad_norm": 1.3157494068145752,
      "learning_rate": 2.847932752400164e-06,
      "loss": 3.6715,
      "step": 51
    },
    {
      "epoch": 0.7038917089678511,
      "grad_norm": 1.6480339765548706,
      "learning_rate": 2.761321158169134e-06,
      "loss": 4.1417,
      "step": 52
    },
    {
      "epoch": 0.7174280879864636,
      "grad_norm": 1.518749713897705,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 3.6444,
      "step": 53
    },
    {
      "epoch": 0.7309644670050761,
      "grad_norm": 1.8039215803146362,
      "learning_rate": 2.587248741756253e-06,
      "loss": 3.9769,
      "step": 54
    },
    {
      "epoch": 0.7445008460236887,
      "grad_norm": 1.06398606300354,
      "learning_rate": 2.5e-06,
      "loss": 3.6596,
      "step": 55
    },
    {
      "epoch": 0.7580372250423012,
      "grad_norm": 1.0259342193603516,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 3.717,
      "step": 56
    },
    {
      "epoch": 0.7715736040609137,
      "grad_norm": 0.9348158836364746,
      "learning_rate": 2.325608815639687e-06,
      "loss": 3.3873,
      "step": 57
    },
    {
      "epoch": 0.7851099830795262,
      "grad_norm": 1.033394455909729,
      "learning_rate": 2.238678841830867e-06,
      "loss": 3.5139,
      "step": 58
    },
    {
      "epoch": 0.7986463620981388,
      "grad_norm": 1.0691534280776978,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 3.4741,
      "step": 59
    },
    {
      "epoch": 0.8121827411167513,
      "grad_norm": 1.0994362831115723,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 3.5142,
      "step": 60
    },
    {
      "epoch": 0.8257191201353637,
      "grad_norm": 1.0791237354278564,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 3.3452,
      "step": 61
    },
    {
      "epoch": 0.8392554991539763,
      "grad_norm": 1.16475248336792,
      "learning_rate": 1.895195261000831e-06,
      "loss": 3.7296,
      "step": 62
    },
    {
      "epoch": 0.8527918781725888,
      "grad_norm": 1.1288127899169922,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 3.3933,
      "step": 63
    },
    {
      "epoch": 0.8663282571912013,
      "grad_norm": 1.2290265560150146,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 3.5753,
      "step": 64
    },
    {
      "epoch": 0.8798646362098139,
      "grad_norm": 1.143328070640564,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 3.6006,
      "step": 65
    },
    {
      "epoch": 0.8934010152284264,
      "grad_norm": 1.249060034751892,
      "learning_rate": 1.56348351646022e-06,
      "loss": 3.4214,
      "step": 66
    },
    {
      "epoch": 0.9069373942470389,
      "grad_norm": 1.3204628229141235,
      "learning_rate": 1.4831583923105e-06,
      "loss": 3.3819,
      "step": 67
    },
    {
      "epoch": 0.9204737732656514,
      "grad_norm": 1.3643982410430908,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 3.8538,
      "step": 68
    },
    {
      "epoch": 0.934010152284264,
      "grad_norm": 1.2954354286193848,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 3.8072,
      "step": 69
    },
    {
      "epoch": 0.9475465313028765,
      "grad_norm": 1.635848045349121,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 3.6974,
      "step": 70
    },
    {
      "epoch": 0.961082910321489,
      "grad_norm": 1.5739266872406006,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 3.8517,
      "step": 71
    },
    {
      "epoch": 0.9746192893401016,
      "grad_norm": 1.9741549491882324,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 4.3486,
      "step": 72
    },
    {
      "epoch": 0.988155668358714,
      "grad_norm": 1.0924246311187744,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 3.5943,
      "step": 73
    },
    {
      "epoch": 1.0101522842639594,
      "grad_norm": 1.7769662141799927,
      "learning_rate": 9.608463116858544e-07,
      "loss": 5.9384,
      "step": 74
    },
    {
      "epoch": 1.023688663282572,
      "grad_norm": 1.0755778551101685,
      "learning_rate": 8.930309757836517e-07,
      "loss": 3.7781,
      "step": 75
    },
    {
      "epoch": 1.0372250423011844,
      "grad_norm": 0.9576964974403381,
      "learning_rate": 8.271734841028553e-07,
      "loss": 3.274,
      "step": 76
    },
    {
      "epoch": 1.0507614213197969,
      "grad_norm": 1.0078204870224,
      "learning_rate": 7.633540738525066e-07,
      "loss": 3.5456,
      "step": 77
    },
    {
      "epoch": 1.0642978003384094,
      "grad_norm": 0.9900814890861511,
      "learning_rate": 7.016504991533727e-07,
      "loss": 3.7089,
      "step": 78
    },
    {
      "epoch": 1.077834179357022,
      "grad_norm": 1.0500483512878418,
      "learning_rate": 6.421379363065142e-07,
      "loss": 3.5293,
      "step": 79
    },
    {
      "epoch": 1.0913705583756346,
      "grad_norm": 1.1538033485412598,
      "learning_rate": 5.848888922025553e-07,
      "loss": 3.4348,
      "step": 80
    },
    {
      "epoch": 1.104906937394247,
      "grad_norm": 1.0635679960250854,
      "learning_rate": 5.299731159831953e-07,
      "loss": 3.4902,
      "step": 81
    },
    {
      "epoch": 1.1184433164128595,
      "grad_norm": 1.1531682014465332,
      "learning_rate": 4.774575140626317e-07,
      "loss": 3.5306,
      "step": 82
    },
    {
      "epoch": 1.131979695431472,
      "grad_norm": 1.1337559223175049,
      "learning_rate": 4.27406068612396e-07,
      "loss": 3.6038,
      "step": 83
    },
    {
      "epoch": 1.1455160744500845,
      "grad_norm": 1.1763681173324585,
      "learning_rate": 3.798797596089351e-07,
      "loss": 3.4732,
      "step": 84
    },
    {
      "epoch": 1.1590524534686972,
      "grad_norm": 1.3725019693374634,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 3.5141,
      "step": 85
    },
    {
      "epoch": 1.1725888324873097,
      "grad_norm": 1.287148356437683,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 3.4666,
      "step": 86
    },
    {
      "epoch": 1.1861252115059222,
      "grad_norm": 1.2582811117172241,
      "learning_rate": 2.53014884252083e-07,
      "loss": 3.5794,
      "step": 87
    },
    {
      "epoch": 1.1996615905245347,
      "grad_norm": 1.3983982801437378,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 3.6621,
      "step": 88
    },
    {
      "epoch": 1.2131979695431472,
      "grad_norm": 1.4939510822296143,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 3.6124,
      "step": 89
    },
    {
      "epoch": 1.2267343485617597,
      "grad_norm": 1.648148536682129,
      "learning_rate": 1.507684480352292e-07,
      "loss": 3.7459,
      "step": 90
    },
    {
      "epoch": 1.2402707275803722,
      "grad_norm": 1.8571910858154297,
      "learning_rate": 1.223587092621162e-07,
      "loss": 4.112,
      "step": 91
    },
    {
      "epoch": 1.2538071065989849,
      "grad_norm": 1.3094947338104248,
      "learning_rate": 9.684576015420277e-08,
      "loss": 3.7015,
      "step": 92
    },
    {
      "epoch": 1.2673434856175974,
      "grad_norm": 1.080057978630066,
      "learning_rate": 7.426068431000883e-08,
      "loss": 3.5508,
      "step": 93
    },
    {
      "epoch": 1.2808798646362098,
      "grad_norm": 1.052301049232483,
      "learning_rate": 5.463099816548578e-08,
      "loss": 3.5461,
      "step": 94
    },
    {
      "epoch": 1.2944162436548223,
      "grad_norm": 1.0805667638778687,
      "learning_rate": 3.798061746947995e-08,
      "loss": 3.672,
      "step": 95
    },
    {
      "epoch": 1.3079526226734348,
      "grad_norm": 1.0864343643188477,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 3.4411,
      "step": 96
    },
    {
      "epoch": 1.3214890016920473,
      "grad_norm": 1.0621790885925293,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 3.3916,
      "step": 97
    },
    {
      "epoch": 1.3350253807106598,
      "grad_norm": 1.1159321069717407,
      "learning_rate": 6.089874350439507e-09,
      "loss": 3.4388,
      "step": 98
    },
    {
      "epoch": 1.3485617597292725,
      "grad_norm": 1.1290203332901,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 3.4397,
      "step": 99
    },
    {
      "epoch": 1.362098138747885,
      "grad_norm": 1.1767339706420898,
      "learning_rate": 0.0,
      "loss": 3.4482,
      "step": 100
    },
    {
      "epoch": 1.362098138747885,
      "eval_loss": 3.6099913120269775,
      "eval_runtime": 2.6641,
      "eval_samples_per_second": 46.92,
      "eval_steps_per_second": 12.011,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.066457809859379e+16,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}