{
  "best_metric": 11.898860931396484,
  "best_model_checkpoint": "miner_id_24/checkpoint-75",
  "epoch": 0.11705814401232191,
  "eval_steps": 25,
  "global_step": 95,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0012321909896033886,
      "grad_norm": 0.060775503516197205,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 11.9363,
      "step": 1
    },
    {
      "epoch": 0.0012321909896033886,
      "eval_loss": 11.935738563537598,
      "eval_runtime": 0.4452,
      "eval_samples_per_second": 112.307,
      "eval_steps_per_second": 29.2,
      "step": 1
    },
    {
      "epoch": 0.002464381979206777,
      "grad_norm": 0.07343389838933945,
      "learning_rate": 6.666666666666667e-05,
      "loss": 11.9365,
      "step": 2
    },
    {
      "epoch": 0.0036965729688101657,
      "grad_norm": 0.06411717087030411,
      "learning_rate": 0.0001,
      "loss": 11.9362,
      "step": 3
    },
    {
      "epoch": 0.004928763958413554,
      "grad_norm": 0.05968811735510826,
      "learning_rate": 9.997376600647783e-05,
      "loss": 11.9373,
      "step": 4
    },
    {
      "epoch": 0.0061609549480169425,
      "grad_norm": 0.06928801536560059,
      "learning_rate": 9.989509461357426e-05,
      "loss": 11.9376,
      "step": 5
    },
    {
      "epoch": 0.0073931459376203315,
      "grad_norm": 0.07378507405519485,
      "learning_rate": 9.976407754861426e-05,
      "loss": 11.9355,
      "step": 6
    },
    {
      "epoch": 0.00862533692722372,
      "grad_norm": 0.07377367466688156,
      "learning_rate": 9.958086757163489e-05,
      "loss": 11.9339,
      "step": 7
    },
    {
      "epoch": 0.009857527916827109,
      "grad_norm": 0.08905545622110367,
      "learning_rate": 9.934567829727386e-05,
      "loss": 11.9327,
      "step": 8
    },
    {
      "epoch": 0.011089718906430497,
      "grad_norm": 0.08033275604248047,
      "learning_rate": 9.905878394570453e-05,
      "loss": 11.9319,
      "step": 9
    },
    {
      "epoch": 0.012321909896033885,
      "grad_norm": 0.10465037077665329,
      "learning_rate": 9.872051902290737e-05,
      "loss": 11.9335,
      "step": 10
    },
    {
      "epoch": 0.013554100885637273,
      "grad_norm": 0.15654270350933075,
      "learning_rate": 9.833127793065098e-05,
      "loss": 11.9313,
      "step": 11
    },
    {
      "epoch": 0.014786291875240663,
      "grad_norm": 0.16284310817718506,
      "learning_rate": 9.789151450663723e-05,
      "loss": 11.9295,
      "step": 12
    },
    {
      "epoch": 0.01601848286484405,
      "grad_norm": 0.09769000113010406,
      "learning_rate": 9.740174149534693e-05,
      "loss": 11.9327,
      "step": 13
    },
    {
      "epoch": 0.01725067385444744,
      "grad_norm": 0.08060012757778168,
      "learning_rate": 9.686252995020249e-05,
      "loss": 11.9325,
      "step": 14
    },
    {
      "epoch": 0.018482864844050827,
      "grad_norm": 0.0939093828201294,
      "learning_rate": 9.627450856774539e-05,
      "loss": 11.9323,
      "step": 15
    },
    {
      "epoch": 0.019715055833654217,
      "grad_norm": 0.09729772806167603,
      "learning_rate": 9.563836295460398e-05,
      "loss": 11.934,
      "step": 16
    },
    {
      "epoch": 0.020947246823257604,
      "grad_norm": 0.10164664685726166,
      "learning_rate": 9.495483482810688e-05,
      "loss": 11.9317,
      "step": 17
    },
    {
      "epoch": 0.022179437812860994,
      "grad_norm": 0.11222667992115021,
      "learning_rate": 9.422472115147382e-05,
      "loss": 11.9313,
      "step": 18
    },
    {
      "epoch": 0.023411628802464383,
      "grad_norm": 0.11252212524414062,
      "learning_rate": 9.3448873204592e-05,
      "loss": 11.931,
      "step": 19
    },
    {
      "epoch": 0.02464381979206777,
      "grad_norm": 0.13546523451805115,
      "learning_rate": 9.2628195591462e-05,
      "loss": 11.9307,
      "step": 20
    },
    {
      "epoch": 0.02587601078167116,
      "grad_norm": 0.14520035684108734,
      "learning_rate": 9.176364518546989e-05,
      "loss": 11.9278,
      "step": 21
    },
    {
      "epoch": 0.027108201771274546,
      "grad_norm": 0.1726980060338974,
      "learning_rate": 9.08562300137157e-05,
      "loss": 11.9236,
      "step": 22
    },
    {
      "epoch": 0.028340392760877936,
      "grad_norm": 0.1935577541589737,
      "learning_rate": 8.990700808169889e-05,
      "loss": 11.9236,
      "step": 23
    },
    {
      "epoch": 0.029572583750481326,
      "grad_norm": 0.208387091755867,
      "learning_rate": 8.891708613973126e-05,
      "loss": 11.9201,
      "step": 24
    },
    {
      "epoch": 0.030804774740084712,
      "grad_norm": 0.25750771164894104,
      "learning_rate": 8.788761839251559e-05,
      "loss": 11.9189,
      "step": 25
    },
    {
      "epoch": 0.030804774740084712,
      "eval_loss": 11.921143531799316,
      "eval_runtime": 0.4097,
      "eval_samples_per_second": 122.041,
      "eval_steps_per_second": 31.731,
      "step": 25
    },
    {
      "epoch": 0.0320369657296881,
      "grad_norm": 0.1562894582748413,
      "learning_rate": 8.681980515339464e-05,
      "loss": 11.9259,
      "step": 26
    },
    {
      "epoch": 0.03326915671929149,
      "grad_norm": 0.16972583532333374,
      "learning_rate": 8.571489144483944e-05,
      "loss": 11.9242,
      "step": 27
    },
    {
      "epoch": 0.03450134770889488,
      "grad_norm": 0.1763102412223816,
      "learning_rate": 8.457416554680877e-05,
      "loss": 11.9227,
      "step": 28
    },
    {
      "epoch": 0.03573353869849827,
      "grad_norm": 0.18980900943279266,
      "learning_rate": 8.339895749467238e-05,
      "loss": 11.923,
      "step": 29
    },
    {
      "epoch": 0.036965729688101655,
      "grad_norm": 0.18117795884609222,
      "learning_rate": 8.219063752844926e-05,
      "loss": 11.9203,
      "step": 30
    },
    {
      "epoch": 0.03819792067770504,
      "grad_norm": 0.15770718455314636,
      "learning_rate": 8.095061449516903e-05,
      "loss": 11.9228,
      "step": 31
    },
    {
      "epoch": 0.039430111667308435,
      "grad_norm": 0.21280968189239502,
      "learning_rate": 7.968033420621935e-05,
      "loss": 11.9144,
      "step": 32
    },
    {
      "epoch": 0.04066230265691182,
      "grad_norm": 0.16655640304088593,
      "learning_rate": 7.838127775159452e-05,
      "loss": 11.9188,
      "step": 33
    },
    {
      "epoch": 0.04189449364651521,
      "grad_norm": 0.18396218121051788,
      "learning_rate": 7.705495977301078e-05,
      "loss": 11.9168,
      "step": 34
    },
    {
      "epoch": 0.0431266846361186,
      "grad_norm": 0.19807805120944977,
      "learning_rate": 7.570292669790186e-05,
      "loss": 11.9119,
      "step": 35
    },
    {
      "epoch": 0.04435887562572199,
      "grad_norm": 0.24202106893062592,
      "learning_rate": 7.43267549363537e-05,
      "loss": 11.9091,
      "step": 36
    },
    {
      "epoch": 0.045591066615325374,
      "grad_norm": 0.22782112658023834,
      "learning_rate": 7.292804904308087e-05,
      "loss": 11.9144,
      "step": 37
    },
    {
      "epoch": 0.04682325760492877,
      "grad_norm": 0.1838875114917755,
      "learning_rate": 7.150843984658754e-05,
      "loss": 11.9138,
      "step": 38
    },
    {
      "epoch": 0.04805544859453215,
      "grad_norm": 0.1562553346157074,
      "learning_rate": 7.006958254769438e-05,
      "loss": 11.9152,
      "step": 39
    },
    {
      "epoch": 0.04928763958413554,
      "grad_norm": 0.16204679012298584,
      "learning_rate": 6.861315478964841e-05,
      "loss": 11.9147,
      "step": 40
    },
    {
      "epoch": 0.050519830573738926,
      "grad_norm": 0.17445921897888184,
      "learning_rate": 6.714085470206609e-05,
      "loss": 11.9121,
      "step": 41
    },
    {
      "epoch": 0.05175202156334232,
      "grad_norm": 0.17594383656978607,
      "learning_rate": 6.56543989209901e-05,
      "loss": 11.911,
      "step": 42
    },
    {
      "epoch": 0.052984212552945706,
      "grad_norm": 0.15127213299274445,
      "learning_rate": 6.415552058736854e-05,
      "loss": 11.9129,
      "step": 43
    },
    {
      "epoch": 0.05421640354254909,
      "grad_norm": 0.1726858913898468,
      "learning_rate": 6.264596732629e-05,
      "loss": 11.9105,
      "step": 44
    },
    {
      "epoch": 0.055448594532152486,
      "grad_norm": 0.16555853188037872,
      "learning_rate": 6.112749920933111e-05,
      "loss": 11.9067,
      "step": 45
    },
    {
      "epoch": 0.05668078552175587,
      "grad_norm": 0.15048764646053314,
      "learning_rate": 5.960188670239154e-05,
      "loss": 11.9086,
      "step": 46
    },
    {
      "epoch": 0.05791297651135926,
      "grad_norm": 0.1858365684747696,
      "learning_rate": 5.80709086014102e-05,
      "loss": 11.9065,
      "step": 47
    },
    {
      "epoch": 0.05914516750096265,
      "grad_norm": 0.1863349974155426,
      "learning_rate": 5.653634995836856e-05,
      "loss": 11.9058,
      "step": 48
    },
    {
      "epoch": 0.06037735849056604,
      "grad_norm": 0.21114401519298553,
      "learning_rate": 5.500000000000001e-05,
      "loss": 11.8964,
      "step": 49
    },
    {
      "epoch": 0.061609549480169425,
      "grad_norm": 0.26575568318367004,
      "learning_rate": 5.346365004163145e-05,
      "loss": 11.8979,
      "step": 50
    },
    {
      "epoch": 0.061609549480169425,
      "eval_loss": 11.904302597045898,
      "eval_runtime": 0.4424,
      "eval_samples_per_second": 113.019,
      "eval_steps_per_second": 29.385,
      "step": 50
    },
    {
      "epoch": 0.06284174046977281,
      "grad_norm": 0.14176851511001587,
      "learning_rate": 5.192909139858981e-05,
      "loss": 11.9077,
      "step": 51
    },
    {
      "epoch": 0.0640739314593762,
      "grad_norm": 0.14125069975852966,
      "learning_rate": 5.0398113297608465e-05,
      "loss": 11.9084,
      "step": 52
    },
    {
      "epoch": 0.0653061224489796,
      "grad_norm": 0.13022831082344055,
      "learning_rate": 4.887250079066892e-05,
      "loss": 11.9067,
      "step": 53
    },
    {
      "epoch": 0.06653831343858298,
      "grad_norm": 0.1437574028968811,
      "learning_rate": 4.7354032673710005e-05,
      "loss": 11.905,
      "step": 54
    },
    {
      "epoch": 0.06777050442818637,
      "grad_norm": 0.12500199675559998,
      "learning_rate": 4.584447941263149e-05,
      "loss": 11.9048,
      "step": 55
    },
    {
      "epoch": 0.06900269541778976,
      "grad_norm": 0.15741832554340363,
      "learning_rate": 4.43456010790099e-05,
      "loss": 11.9049,
      "step": 56
    },
    {
      "epoch": 0.07023488640739314,
      "grad_norm": 0.1377156525850296,
      "learning_rate": 4.285914529793391e-05,
      "loss": 11.9036,
      "step": 57
    },
    {
      "epoch": 0.07146707739699654,
      "grad_norm": 0.14599944651126862,
      "learning_rate": 4.13868452103516e-05,
      "loss": 11.9023,
      "step": 58
    },
    {
      "epoch": 0.07269926838659992,
      "grad_norm": 0.14348281919956207,
      "learning_rate": 3.9930417452305626e-05,
      "loss": 11.9015,
      "step": 59
    },
    {
      "epoch": 0.07393145937620331,
      "grad_norm": 0.1534987986087799,
      "learning_rate": 3.8491560153412466e-05,
      "loss": 11.9031,
      "step": 60
    },
    {
      "epoch": 0.0751636503658067,
      "grad_norm": 0.20068278908729553,
      "learning_rate": 3.707195095691913e-05,
      "loss": 11.9012,
      "step": 61
    },
    {
      "epoch": 0.07639584135541008,
      "grad_norm": 0.23697830736637115,
      "learning_rate": 3.567324506364632e-05,
      "loss": 11.8982,
      "step": 62
    },
    {
      "epoch": 0.07762803234501348,
      "grad_norm": 0.12292071431875229,
      "learning_rate": 3.4297073302098156e-05,
      "loss": 11.9056,
      "step": 63
    },
    {
      "epoch": 0.07886022333461687,
      "grad_norm": 0.10782992094755173,
      "learning_rate": 3.2945040226989244e-05,
      "loss": 11.9042,
      "step": 64
    },
    {
      "epoch": 0.08009241432422025,
      "grad_norm": 0.12751153111457825,
      "learning_rate": 3.16187222484055e-05,
      "loss": 11.9046,
      "step": 65
    },
    {
      "epoch": 0.08132460531382364,
      "grad_norm": 0.11147710680961609,
      "learning_rate": 3.0319665793780648e-05,
      "loss": 11.9034,
      "step": 66
    },
    {
      "epoch": 0.08255679630342704,
      "grad_norm": 0.10850214958190918,
      "learning_rate": 2.9049385504830985e-05,
      "loss": 11.9019,
      "step": 67
    },
    {
      "epoch": 0.08378898729303041,
      "grad_norm": 0.12796252965927124,
      "learning_rate": 2.7809362471550748e-05,
      "loss": 11.9035,
      "step": 68
    },
    {
      "epoch": 0.08502117828263381,
      "grad_norm": 0.11640307307243347,
      "learning_rate": 2.660104250532764e-05,
      "loss": 11.8993,
      "step": 69
    },
    {
      "epoch": 0.0862533692722372,
      "grad_norm": 0.14162230491638184,
      "learning_rate": 2.5425834453191232e-05,
      "loss": 11.9038,
      "step": 70
    },
    {
      "epoch": 0.08748556026184058,
      "grad_norm": 0.12289347499608994,
      "learning_rate": 2.4285108555160577e-05,
      "loss": 11.9017,
      "step": 71
    },
    {
      "epoch": 0.08871775125144397,
      "grad_norm": 0.14467501640319824,
      "learning_rate": 2.3180194846605367e-05,
      "loss": 11.9031,
      "step": 72
    },
    {
      "epoch": 0.08994994224104737,
      "grad_norm": 0.21114110946655273,
      "learning_rate": 2.2112381607484417e-05,
      "loss": 11.8997,
      "step": 73
    },
    {
      "epoch": 0.09118213323065075,
      "grad_norm": 0.20919640362262726,
      "learning_rate": 2.1082913860268765e-05,
      "loss": 11.8962,
      "step": 74
    },
    {
      "epoch": 0.09241432422025414,
      "grad_norm": 0.2882833480834961,
      "learning_rate": 2.0092991918301108e-05,
      "loss": 11.8899,
      "step": 75
    },
    {
      "epoch": 0.09241432422025414,
      "eval_loss": 11.898860931396484,
      "eval_runtime": 0.4413,
      "eval_samples_per_second": 113.302,
      "eval_steps_per_second": 29.459,
      "step": 75
    },
    {
      "epoch": 0.09364651520985753,
      "grad_norm": 0.11339965462684631,
      "learning_rate": 1.91437699862843e-05,
      "loss": 11.9061,
      "step": 76
    },
    {
      "epoch": 0.09487870619946091,
      "grad_norm": 0.0967002585530281,
      "learning_rate": 1.8236354814530112e-05,
      "loss": 11.9045,
      "step": 77
    },
    {
      "epoch": 0.0961108971890643,
      "grad_norm": 0.10335846245288849,
      "learning_rate": 1.7371804408538024e-05,
      "loss": 11.9027,
      "step": 78
    },
    {
      "epoch": 0.0973430881786677,
      "grad_norm": 0.10277073085308075,
      "learning_rate": 1.6551126795408016e-05,
      "loss": 11.9032,
      "step": 79
    },
    {
      "epoch": 0.09857527916827108,
      "grad_norm": 0.09848005324602127,
      "learning_rate": 1.577527884852619e-05,
      "loss": 11.8994,
      "step": 80
    },
    {
      "epoch": 0.09980747015787447,
      "grad_norm": 0.10161633044481277,
      "learning_rate": 1.5045165171893116e-05,
      "loss": 11.8994,
      "step": 81
    },
    {
      "epoch": 0.10103966114747785,
      "grad_norm": 0.09697967022657394,
      "learning_rate": 1.4361637045396029e-05,
      "loss": 11.9027,
      "step": 82
    },
    {
      "epoch": 0.10227185213708125,
      "grad_norm": 0.10200096666812897,
      "learning_rate": 1.3725491432254624e-05,
      "loss": 11.9003,
      "step": 83
    },
    {
      "epoch": 0.10350404312668464,
      "grad_norm": 0.13053254783153534,
      "learning_rate": 1.313747004979751e-05,
      "loss": 11.8971,
      "step": 84
    },
    {
      "epoch": 0.10473623411628802,
      "grad_norm": 0.1328170746564865,
      "learning_rate": 1.2598258504653081e-05,
      "loss": 11.8973,
      "step": 85
    },
    {
      "epoch": 0.10596842510589141,
      "grad_norm": 0.19934439659118652,
      "learning_rate": 1.2108485493362765e-05,
      "loss": 11.8961,
      "step": 86
    },
    {
      "epoch": 0.1072006160954948,
      "grad_norm": 0.26894065737724304,
      "learning_rate": 1.1668722069349041e-05,
      "loss": 11.8933,
      "step": 87
    },
    {
      "epoch": 0.10843280708509818,
      "grad_norm": 0.10815577208995819,
      "learning_rate": 1.1279480977092635e-05,
      "loss": 11.9066,
      "step": 88
    },
    {
      "epoch": 0.10966499807470158,
      "grad_norm": 0.10114790499210358,
      "learning_rate": 1.094121605429547e-05,
      "loss": 11.9022,
      "step": 89
    },
    {
      "epoch": 0.11089718906430497,
      "grad_norm": 0.0996105894446373,
      "learning_rate": 1.0654321702726141e-05,
      "loss": 11.9031,
      "step": 90
    },
    {
      "epoch": 0.11212938005390835,
      "grad_norm": 0.11002374440431595,
      "learning_rate": 1.0419132428365116e-05,
      "loss": 11.9013,
      "step": 91
    },
    {
      "epoch": 0.11336157104351174,
      "grad_norm": 0.1089978888630867,
      "learning_rate": 1.0235922451385733e-05,
      "loss": 11.8994,
      "step": 92
    },
    {
      "epoch": 0.11459376203311514,
      "grad_norm": 0.10552305728197098,
      "learning_rate": 1.0104905386425733e-05,
      "loss": 11.9014,
      "step": 93
    },
    {
      "epoch": 0.11582595302271852,
      "grad_norm": 0.11430001258850098,
      "learning_rate": 1.002623399352217e-05,
      "loss": 11.8997,
      "step": 94
    },
    {
      "epoch": 0.11705814401232191,
      "grad_norm": 0.11952391266822815,
      "learning_rate": 1e-05,
      "loss": 11.8955,
      "step": 95
    }
  ],
  "logging_steps": 1,
  "max_steps": 95,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 25,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 1,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 793696618414080.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}
|