{
  "best_metric": 0.922777533531189,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.08743169398907104,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0008743169398907104,
      "grad_norm": 13.654485702514648,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 7.2397,
      "step": 1
    },
    {
      "epoch": 0.0008743169398907104,
      "eval_loss": 2.0816423892974854,
      "eval_runtime": 234.4717,
      "eval_samples_per_second": 32.857,
      "eval_steps_per_second": 4.107,
      "step": 1
    },
    {
      "epoch": 0.0017486338797814208,
      "grad_norm": 16.08323097229004,
      "learning_rate": 6.666666666666667e-06,
      "loss": 7.3886,
      "step": 2
    },
    {
      "epoch": 0.002622950819672131,
      "grad_norm": 16.72583770751953,
      "learning_rate": 1e-05,
      "loss": 7.2155,
      "step": 3
    },
    {
      "epoch": 0.0034972677595628415,
      "grad_norm": 16.019792556762695,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 6.8998,
      "step": 4
    },
    {
      "epoch": 0.004371584699453552,
      "grad_norm": 10.719884872436523,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 6.1133,
      "step": 5
    },
    {
      "epoch": 0.005245901639344262,
      "grad_norm": 99.27400207519531,
      "learning_rate": 2e-05,
      "loss": 10.2652,
      "step": 6
    },
    {
      "epoch": 0.006120218579234973,
      "grad_norm": 85.72102355957031,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 9.529,
      "step": 7
    },
    {
      "epoch": 0.006994535519125683,
      "grad_norm": 15.085773468017578,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 5.6537,
      "step": 8
    },
    {
      "epoch": 0.007868852459016393,
      "grad_norm": 12.465982437133789,
      "learning_rate": 3e-05,
      "loss": 5.3902,
      "step": 9
    },
    {
      "epoch": 0.008743169398907104,
      "grad_norm": 9.44223403930664,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 5.2389,
      "step": 10
    },
    {
      "epoch": 0.009617486338797814,
      "grad_norm": 7.920454025268555,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 4.7169,
      "step": 11
    },
    {
      "epoch": 0.010491803278688525,
      "grad_norm": 8.882831573486328,
      "learning_rate": 4e-05,
      "loss": 4.4305,
      "step": 12
    },
    {
      "epoch": 0.011366120218579235,
      "grad_norm": 6.801806926727295,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 4.3088,
      "step": 13
    },
    {
      "epoch": 0.012240437158469945,
      "grad_norm": 7.269967555999756,
      "learning_rate": 4.666666666666667e-05,
      "loss": 5.6956,
      "step": 14
    },
    {
      "epoch": 0.013114754098360656,
      "grad_norm": 6.725127220153809,
      "learning_rate": 5e-05,
      "loss": 5.209,
      "step": 15
    },
    {
      "epoch": 0.013989071038251366,
      "grad_norm": 6.431244373321533,
      "learning_rate": 5.333333333333333e-05,
      "loss": 5.1324,
      "step": 16
    },
    {
      "epoch": 0.014863387978142076,
      "grad_norm": 5.522418975830078,
      "learning_rate": 5.666666666666667e-05,
      "loss": 4.8785,
      "step": 17
    },
    {
      "epoch": 0.015737704918032787,
      "grad_norm": 13.761688232421875,
      "learning_rate": 6e-05,
      "loss": 3.9461,
      "step": 18
    },
    {
      "epoch": 0.016612021857923497,
      "grad_norm": 23.60154914855957,
      "learning_rate": 6.333333333333333e-05,
      "loss": 1.3604,
      "step": 19
    },
    {
      "epoch": 0.017486338797814208,
      "grad_norm": 14.374363899230957,
      "learning_rate": 6.666666666666667e-05,
      "loss": 3.9006,
      "step": 20
    },
    {
      "epoch": 0.018360655737704918,
      "grad_norm": 8.315186500549316,
      "learning_rate": 7e-05,
      "loss": 4.7994,
      "step": 21
    },
    {
      "epoch": 0.01923497267759563,
      "grad_norm": 6.13319730758667,
      "learning_rate": 7.333333333333333e-05,
      "loss": 4.6416,
      "step": 22
    },
    {
      "epoch": 0.02010928961748634,
      "grad_norm": 5.479979038238525,
      "learning_rate": 7.666666666666667e-05,
      "loss": 4.4633,
      "step": 23
    },
    {
      "epoch": 0.02098360655737705,
      "grad_norm": 6.043623924255371,
      "learning_rate": 8e-05,
      "loss": 4.2832,
      "step": 24
    },
    {
      "epoch": 0.02185792349726776,
      "grad_norm": 6.044895172119141,
      "learning_rate": 8.333333333333334e-05,
      "loss": 2.8783,
      "step": 25
    },
    {
      "epoch": 0.02273224043715847,
      "grad_norm": 6.741650104522705,
      "learning_rate": 8.666666666666667e-05,
      "loss": 5.4952,
      "step": 26
    },
    {
      "epoch": 0.02360655737704918,
      "grad_norm": 6.030343532562256,
      "learning_rate": 9e-05,
      "loss": 5.1832,
      "step": 27
    },
    {
      "epoch": 0.02448087431693989,
      "grad_norm": 5.912640571594238,
      "learning_rate": 9.333333333333334e-05,
      "loss": 4.8941,
      "step": 28
    },
    {
      "epoch": 0.0253551912568306,
      "grad_norm": 4.158953666687012,
      "learning_rate": 9.666666666666667e-05,
      "loss": 4.8204,
      "step": 29
    },
    {
      "epoch": 0.02622950819672131,
      "grad_norm": 4.939655303955078,
      "learning_rate": 0.0001,
      "loss": 4.4874,
      "step": 30
    },
    {
      "epoch": 0.027103825136612022,
      "grad_norm": 6.877912521362305,
      "learning_rate": 9.999146252290264e-05,
      "loss": 3.2959,
      "step": 31
    },
    {
      "epoch": 0.027978142076502732,
      "grad_norm": 7.6934590339660645,
      "learning_rate": 9.996585300715116e-05,
      "loss": 1.1399,
      "step": 32
    },
    {
      "epoch": 0.028852459016393443,
      "grad_norm": 5.374224662780762,
      "learning_rate": 9.99231801983717e-05,
      "loss": 4.0316,
      "step": 33
    },
    {
      "epoch": 0.029726775956284153,
      "grad_norm": 4.416872978210449,
      "learning_rate": 9.986345866928941e-05,
      "loss": 4.449,
      "step": 34
    },
    {
      "epoch": 0.030601092896174863,
      "grad_norm": 4.096227645874023,
      "learning_rate": 9.978670881475172e-05,
      "loss": 4.4694,
      "step": 35
    },
    {
      "epoch": 0.031475409836065574,
      "grad_norm": 4.204520225524902,
      "learning_rate": 9.96929568447637e-05,
      "loss": 4.1511,
      "step": 36
    },
    {
      "epoch": 0.03234972677595629,
      "grad_norm": 4.906490802764893,
      "learning_rate": 9.958223477553714e-05,
      "loss": 3.9031,
      "step": 37
    },
    {
      "epoch": 0.033224043715846995,
      "grad_norm": 4.104410171508789,
      "learning_rate": 9.94545804185573e-05,
      "loss": 3.9166,
      "step": 38
    },
    {
      "epoch": 0.03409836065573771,
      "grad_norm": 4.067167282104492,
      "learning_rate": 9.931003736767013e-05,
      "loss": 5.0196,
      "step": 39
    },
    {
      "epoch": 0.034972677595628415,
      "grad_norm": 3.7131476402282715,
      "learning_rate": 9.91486549841951e-05,
      "loss": 4.8069,
      "step": 40
    },
    {
      "epoch": 0.03584699453551913,
      "grad_norm": 3.895036220550537,
      "learning_rate": 9.89704883800683e-05,
      "loss": 4.7326,
      "step": 41
    },
    {
      "epoch": 0.036721311475409836,
      "grad_norm": 3.369447946548462,
      "learning_rate": 9.877559839902184e-05,
      "loss": 4.5296,
      "step": 42
    },
    {
      "epoch": 0.03759562841530055,
      "grad_norm": 3.4959802627563477,
      "learning_rate": 9.85640515958057e-05,
      "loss": 3.6448,
      "step": 43
    },
    {
      "epoch": 0.03846994535519126,
      "grad_norm": 3.181074619293213,
      "learning_rate": 9.833592021345937e-05,
      "loss": 1.2202,
      "step": 44
    },
    {
      "epoch": 0.03934426229508197,
      "grad_norm": 6.853686332702637,
      "learning_rate": 9.809128215864097e-05,
      "loss": 3.2771,
      "step": 45
    },
    {
      "epoch": 0.04021857923497268,
      "grad_norm": 4.203513145446777,
      "learning_rate": 9.783022097502204e-05,
      "loss": 4.3558,
      "step": 46
    },
    {
      "epoch": 0.04109289617486339,
      "grad_norm": 3.710810422897339,
      "learning_rate": 9.755282581475769e-05,
      "loss": 4.2005,
      "step": 47
    },
    {
      "epoch": 0.0419672131147541,
      "grad_norm": 4.0080156326293945,
      "learning_rate": 9.725919140804099e-05,
      "loss": 4.1753,
      "step": 48
    },
    {
      "epoch": 0.04284153005464481,
      "grad_norm": 4.323927402496338,
      "learning_rate": 9.694941803075283e-05,
      "loss": 4.033,
      "step": 49
    },
    {
      "epoch": 0.04371584699453552,
      "grad_norm": 5.3773722648620605,
      "learning_rate": 9.662361147021779e-05,
      "loss": 3.0192,
      "step": 50
    },
    {
      "epoch": 0.04371584699453552,
      "eval_loss": 0.959044873714447,
      "eval_runtime": 235.9651,
      "eval_samples_per_second": 32.649,
      "eval_steps_per_second": 4.081,
      "step": 50
    },
    {
      "epoch": 0.04459016393442623,
      "grad_norm": 4.682861804962158,
      "learning_rate": 9.628188298907782e-05,
      "loss": 5.3309,
      "step": 51
    },
    {
      "epoch": 0.04546448087431694,
      "grad_norm": 3.6267170906066895,
      "learning_rate": 9.592434928729616e-05,
      "loss": 4.896,
      "step": 52
    },
    {
      "epoch": 0.046338797814207654,
      "grad_norm": 2.9397659301757812,
      "learning_rate": 9.555113246230442e-05,
      "loss": 4.594,
      "step": 53
    },
    {
      "epoch": 0.04721311475409836,
      "grad_norm": 3.7889232635498047,
      "learning_rate": 9.516235996730645e-05,
      "loss": 4.6179,
      "step": 54
    },
    {
      "epoch": 0.048087431693989074,
      "grad_norm": 3.4238991737365723,
      "learning_rate": 9.475816456775313e-05,
      "loss": 4.3046,
      "step": 55
    },
    {
      "epoch": 0.04896174863387978,
      "grad_norm": 6.835710048675537,
      "learning_rate": 9.43386842960031e-05,
      "loss": 1.9497,
      "step": 56
    },
    {
      "epoch": 0.049836065573770495,
      "grad_norm": 6.923791885375977,
      "learning_rate": 9.39040624041849e-05,
      "loss": 2.179,
      "step": 57
    },
    {
      "epoch": 0.0507103825136612,
      "grad_norm": 4.5648932456970215,
      "learning_rate": 9.345444731527642e-05,
      "loss": 4.3407,
      "step": 58
    },
    {
      "epoch": 0.051584699453551916,
      "grad_norm": 3.808917284011841,
      "learning_rate": 9.298999257241863e-05,
      "loss": 4.3087,
      "step": 59
    },
    {
      "epoch": 0.05245901639344262,
      "grad_norm": 3.44407320022583,
      "learning_rate": 9.251085678648072e-05,
      "loss": 4.0944,
      "step": 60
    },
    {
      "epoch": 0.05333333333333334,
      "grad_norm": 3.4684829711914062,
      "learning_rate": 9.201720358189464e-05,
      "loss": 3.9452,
      "step": 61
    },
    {
      "epoch": 0.054207650273224044,
      "grad_norm": 4.555397033691406,
      "learning_rate": 9.150920154077754e-05,
      "loss": 3.8704,
      "step": 62
    },
    {
      "epoch": 0.05508196721311476,
      "grad_norm": 3.6485466957092285,
      "learning_rate": 9.098702414536107e-05,
      "loss": 3.9606,
      "step": 63
    },
    {
      "epoch": 0.055956284153005464,
      "grad_norm": 3.0883123874664307,
      "learning_rate": 9.045084971874738e-05,
      "loss": 5.0169,
      "step": 64
    },
    {
      "epoch": 0.05683060109289618,
      "grad_norm": 2.731473684310913,
      "learning_rate": 8.9900861364012e-05,
      "loss": 4.6796,
      "step": 65
    },
    {
      "epoch": 0.057704918032786885,
      "grad_norm": 3.1372900009155273,
      "learning_rate": 8.933724690167417e-05,
      "loss": 4.5271,
      "step": 66
    },
    {
      "epoch": 0.0585792349726776,
      "grad_norm": 2.9355735778808594,
      "learning_rate": 8.876019880555649e-05,
      "loss": 4.2805,
      "step": 67
    },
    {
      "epoch": 0.059453551912568306,
      "grad_norm": 2.6107537746429443,
      "learning_rate": 8.816991413705516e-05,
      "loss": 3.3194,
      "step": 68
    },
    {
      "epoch": 0.06032786885245902,
      "grad_norm": 9.008191108703613,
      "learning_rate": 8.756659447784368e-05,
      "loss": 1.2466,
      "step": 69
    },
    {
      "epoch": 0.06120218579234973,
      "grad_norm": 3.1812331676483154,
      "learning_rate": 8.695044586103296e-05,
      "loss": 3.4345,
      "step": 70
    },
    {
      "epoch": 0.06207650273224044,
      "grad_norm": 3.384528636932373,
      "learning_rate": 8.632167870081121e-05,
      "loss": 4.3751,
      "step": 71
    },
    {
      "epoch": 0.06295081967213115,
      "grad_norm": 2.8234190940856934,
      "learning_rate": 8.568050772058762e-05,
      "loss": 4.2436,
      "step": 72
    },
    {
      "epoch": 0.06382513661202185,
      "grad_norm": 3.3963890075683594,
      "learning_rate": 8.502715187966455e-05,
      "loss": 4.0892,
      "step": 73
    },
    {
      "epoch": 0.06469945355191258,
      "grad_norm": 3.8028182983398438,
      "learning_rate": 8.436183429846313e-05,
      "loss": 3.8655,
      "step": 74
    },
    {
      "epoch": 0.06557377049180328,
      "grad_norm": 4.164633274078369,
      "learning_rate": 8.368478218232787e-05,
      "loss": 3.1239,
      "step": 75
    },
    {
      "epoch": 0.06644808743169399,
      "grad_norm": 2.5254907608032227,
      "learning_rate": 8.299622674393614e-05,
      "loss": 5.017,
      "step": 76
    },
    {
      "epoch": 0.0673224043715847,
      "grad_norm": 2.449016809463501,
      "learning_rate": 8.229640312433937e-05,
      "loss": 4.8344,
      "step": 77
    },
    {
      "epoch": 0.06819672131147542,
      "grad_norm": 2.4585683345794678,
      "learning_rate": 8.158555031266254e-05,
      "loss": 4.6238,
      "step": 78
    },
    {
      "epoch": 0.06907103825136612,
      "grad_norm": 2.7618460655212402,
      "learning_rate": 8.086391106448965e-05,
      "loss": 4.4138,
      "step": 79
    },
    {
      "epoch": 0.06994535519125683,
      "grad_norm": 2.7431366443634033,
      "learning_rate": 8.013173181896283e-05,
      "loss": 4.3351,
      "step": 80
    },
    {
      "epoch": 0.07081967213114754,
      "grad_norm": 2.915708541870117,
      "learning_rate": 7.938926261462366e-05,
      "loss": 2.7006,
      "step": 81
    },
    {
      "epoch": 0.07169398907103826,
      "grad_norm": 6.260775566101074,
      "learning_rate": 7.863675700402526e-05,
      "loss": 1.3999,
      "step": 82
    },
    {
      "epoch": 0.07256830601092897,
      "grad_norm": 3.342338800430298,
      "learning_rate": 7.787447196714427e-05,
      "loss": 3.8281,
      "step": 83
    },
    {
      "epoch": 0.07344262295081967,
      "grad_norm": 3.0507147312164307,
      "learning_rate": 7.710266782362247e-05,
      "loss": 4.256,
      "step": 84
    },
    {
      "epoch": 0.07431693989071038,
      "grad_norm": 3.881892681121826,
      "learning_rate": 7.63216081438678e-05,
      "loss": 4.2829,
      "step": 85
    },
    {
      "epoch": 0.0751912568306011,
      "grad_norm": 3.3180391788482666,
      "learning_rate": 7.553155965904535e-05,
      "loss": 3.8737,
      "step": 86
    },
    {
      "epoch": 0.0760655737704918,
      "grad_norm": 3.3876357078552246,
      "learning_rate": 7.473279216998895e-05,
      "loss": 3.9718,
      "step": 87
    },
    {
      "epoch": 0.07693989071038251,
      "grad_norm": 5.030557155609131,
      "learning_rate": 7.392557845506432e-05,
      "loss": 3.6383,
      "step": 88
    },
    {
      "epoch": 0.07781420765027322,
      "grad_norm": 2.6808958053588867,
      "learning_rate": 7.311019417701566e-05,
      "loss": 4.9168,
      "step": 89
    },
    {
      "epoch": 0.07868852459016394,
      "grad_norm": 2.51533842086792,
      "learning_rate": 7.228691778882693e-05,
      "loss": 4.7042,
      "step": 90
    },
    {
      "epoch": 0.07956284153005465,
      "grad_norm": 2.41265869140625,
      "learning_rate": 7.145603043863045e-05,
      "loss": 4.3564,
      "step": 91
    },
    {
      "epoch": 0.08043715846994536,
      "grad_norm": 2.5100467205047607,
      "learning_rate": 7.061781587369519e-05,
      "loss": 4.3584,
      "step": 92
    },
    {
      "epoch": 0.08131147540983606,
      "grad_norm": 2.604959487915039,
      "learning_rate": 6.977256034352712e-05,
      "loss": 4.0976,
      "step": 93
    },
    {
      "epoch": 0.08218579234972678,
      "grad_norm": 5.754001617431641,
      "learning_rate": 6.892055250211552e-05,
      "loss": 0.9558,
      "step": 94
    },
    {
      "epoch": 0.08306010928961749,
      "grad_norm": 3.077754020690918,
      "learning_rate": 6.806208330935766e-05,
      "loss": 3.4805,
      "step": 95
    },
    {
      "epoch": 0.0839344262295082,
      "grad_norm": 2.9729459285736084,
      "learning_rate": 6.719744593169641e-05,
      "loss": 4.2485,
      "step": 96
    },
    {
      "epoch": 0.0848087431693989,
      "grad_norm": 2.972970962524414,
      "learning_rate": 6.632693564200416e-05,
      "loss": 4.1844,
      "step": 97
    },
    {
      "epoch": 0.08568306010928962,
      "grad_norm": 2.8650026321411133,
      "learning_rate": 6.545084971874738e-05,
      "loss": 3.9211,
      "step": 98
    },
    {
      "epoch": 0.08655737704918033,
      "grad_norm": 3.496151924133301,
      "learning_rate": 6.456948734446624e-05,
      "loss": 3.981,
      "step": 99
    },
    {
      "epoch": 0.08743169398907104,
      "grad_norm": 4.358485698699951,
      "learning_rate": 6.368314950360415e-05,
      "loss": 3.0869,
      "step": 100
    },
    {
      "epoch": 0.08743169398907104,
      "eval_loss": 0.922777533531189,
      "eval_runtime": 236.35,
      "eval_samples_per_second": 32.596,
      "eval_steps_per_second": 4.074,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 8.940170116071424e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}