MMed-Llama-3-8B-EnIns-PubMedQA-7bf07ae5-3245-4ab1-acb1-d42f13d74ddc/last-checkpoint/trainer_state.json
{
  "best_metric": 0.18355341255664825,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.015958826228330905,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00015958826228330906,
      "grad_norm": 43.736854553222656,
      "learning_rate": 5.000000000000001e-07,
      "loss": 12.1116,
      "step": 1
    },
    {
      "epoch": 0.00015958826228330906,
      "eval_loss": 12.323148727416992,
      "eval_runtime": 845.2945,
      "eval_samples_per_second": 12.486,
      "eval_steps_per_second": 3.122,
      "step": 1
    },
    {
      "epoch": 0.0003191765245666181,
      "grad_norm": 50.650611877441406,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 11.9574,
      "step": 2
    },
    {
      "epoch": 0.0004787647868499272,
      "grad_norm": 50.548484802246094,
      "learning_rate": 1.5e-06,
      "loss": 11.5063,
      "step": 3
    },
    {
      "epoch": 0.0006383530491332362,
      "grad_norm": 50.47951126098633,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 11.9538,
      "step": 4
    },
    {
      "epoch": 0.0007979413114165454,
      "grad_norm": 50.52406311035156,
      "learning_rate": 2.5e-06,
      "loss": 12.166,
      "step": 5
    },
    {
      "epoch": 0.0009575295736998544,
      "grad_norm": 53.469520568847656,
      "learning_rate": 3e-06,
      "loss": 12.1688,
      "step": 6
    },
    {
      "epoch": 0.0011171178359831635,
      "grad_norm": 52.03275680541992,
      "learning_rate": 3.5e-06,
      "loss": 12.0868,
      "step": 7
    },
    {
      "epoch": 0.0012767060982664725,
      "grad_norm": 45.10970687866211,
      "learning_rate": 4.000000000000001e-06,
      "loss": 11.7541,
      "step": 8
    },
    {
      "epoch": 0.0014362943605497815,
      "grad_norm": 46.315494537353516,
      "learning_rate": 4.5e-06,
      "loss": 12.0005,
      "step": 9
    },
    {
      "epoch": 0.0015958826228330907,
      "grad_norm": 53.98945236206055,
      "learning_rate": 5e-06,
      "loss": 11.6276,
      "step": 10
    },
    {
      "epoch": 0.0017554708851163997,
      "grad_norm": 48.429481506347656,
      "learning_rate": 4.99847706754774e-06,
      "loss": 11.6075,
      "step": 11
    },
    {
      "epoch": 0.0019150591473997087,
      "grad_norm": 47.879478454589844,
      "learning_rate": 4.993910125649561e-06,
      "loss": 11.6263,
      "step": 12
    },
    {
      "epoch": 0.0020746474096830178,
      "grad_norm": 51.92265701293945,
      "learning_rate": 4.986304738420684e-06,
      "loss": 11.106,
      "step": 13
    },
    {
      "epoch": 0.002234235671966327,
      "grad_norm": 51.57489776611328,
      "learning_rate": 4.975670171853926e-06,
      "loss": 10.845,
      "step": 14
    },
    {
      "epoch": 0.0023938239342496358,
      "grad_norm": 50.46339416503906,
      "learning_rate": 4.962019382530521e-06,
      "loss": 10.5991,
      "step": 15
    },
    {
      "epoch": 0.002553412196532945,
      "grad_norm": 49.37790298461914,
      "learning_rate": 4.9453690018345144e-06,
      "loss": 9.8602,
      "step": 16
    },
    {
      "epoch": 0.0027130004588162542,
      "grad_norm": 48.29505157470703,
      "learning_rate": 4.925739315689991e-06,
      "loss": 9.8981,
      "step": 17
    },
    {
      "epoch": 0.002872588721099563,
      "grad_norm": 50.35626983642578,
      "learning_rate": 4.903154239845798e-06,
      "loss": 9.4826,
      "step": 18
    },
    {
      "epoch": 0.0030321769833828722,
      "grad_norm": 49.06550979614258,
      "learning_rate": 4.8776412907378845e-06,
      "loss": 9.2918,
      "step": 19
    },
    {
      "epoch": 0.0031917652456661815,
      "grad_norm": 46.6195068359375,
      "learning_rate": 4.849231551964771e-06,
      "loss": 9.0255,
      "step": 20
    },
    {
      "epoch": 0.0033513535079494903,
      "grad_norm": 44.751346588134766,
      "learning_rate": 4.817959636416969e-06,
      "loss": 8.4435,
      "step": 21
    },
    {
      "epoch": 0.0035109417702327995,
      "grad_norm": 41.83870315551758,
      "learning_rate": 4.783863644106502e-06,
      "loss": 8.1362,
      "step": 22
    },
    {
      "epoch": 0.0036705300325161083,
      "grad_norm": 43.00385665893555,
      "learning_rate": 4.746985115747918e-06,
      "loss": 7.519,
      "step": 23
    },
    {
      "epoch": 0.0038301182947994175,
      "grad_norm": 45.05305099487305,
      "learning_rate": 4.707368982147318e-06,
      "loss": 7.0324,
      "step": 24
    },
    {
      "epoch": 0.003989706557082726,
      "grad_norm": 46.236839294433594,
      "learning_rate": 4.665063509461098e-06,
      "loss": 5.9956,
      "step": 25
    },
    {
      "epoch": 0.0041492948193660355,
      "grad_norm": 45.36016845703125,
      "learning_rate": 4.620120240391065e-06,
      "loss": 5.8249,
      "step": 26
    },
    {
      "epoch": 0.004308883081649345,
      "grad_norm": 46.748207092285156,
      "learning_rate": 4.572593931387604e-06,
      "loss": 5.1073,
      "step": 27
    },
    {
      "epoch": 0.004468471343932654,
      "grad_norm": 45.68260192871094,
      "learning_rate": 4.522542485937369e-06,
      "loss": 4.5708,
      "step": 28
    },
    {
      "epoch": 0.004628059606215963,
      "grad_norm": 46.5210075378418,
      "learning_rate": 4.470026884016805e-06,
      "loss": 4.391,
      "step": 29
    },
    {
      "epoch": 0.0047876478684992715,
      "grad_norm": 42.01366424560547,
      "learning_rate": 4.415111107797445e-06,
      "loss": 4.3002,
      "step": 30
    },
    {
      "epoch": 0.004947236130782581,
      "grad_norm": 43.48591613769531,
      "learning_rate": 4.357862063693486e-06,
      "loss": 3.8914,
      "step": 31
    },
    {
      "epoch": 0.00510682439306589,
      "grad_norm": 43.21699142456055,
      "learning_rate": 4.2983495008466285e-06,
      "loss": 3.1507,
      "step": 32
    },
    {
      "epoch": 0.005266412655349199,
      "grad_norm": 41.28952407836914,
      "learning_rate": 4.236645926147493e-06,
      "loss": 2.0522,
      "step": 33
    },
    {
      "epoch": 0.0054260009176325084,
      "grad_norm": 36.86624526977539,
      "learning_rate": 4.172826515897146e-06,
      "loss": 2.1409,
      "step": 34
    },
    {
      "epoch": 0.005585589179915817,
      "grad_norm": 30.340343475341797,
      "learning_rate": 4.106969024216348e-06,
      "loss": 1.8715,
      "step": 35
    },
    {
      "epoch": 0.005745177442199126,
      "grad_norm": 24.695117950439453,
      "learning_rate": 4.039153688314146e-06,
      "loss": 1.0543,
      "step": 36
    },
    {
      "epoch": 0.005904765704482435,
      "grad_norm": 21.477252960205078,
      "learning_rate": 3.969463130731183e-06,
      "loss": 1.7806,
      "step": 37
    },
    {
      "epoch": 0.0060643539667657445,
      "grad_norm": 14.005023956298828,
      "learning_rate": 3.897982258676867e-06,
      "loss": 0.8706,
      "step": 38
    },
    {
      "epoch": 0.006223942229049054,
      "grad_norm": 12.730143547058105,
      "learning_rate": 3.824798160583012e-06,
      "loss": 0.688,
      "step": 39
    },
    {
      "epoch": 0.006383530491332363,
      "grad_norm": 9.65303897857666,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.9158,
      "step": 40
    },
    {
      "epoch": 0.006543118753615671,
      "grad_norm": 9.569194793701172,
      "learning_rate": 3.6736789069647273e-06,
      "loss": 1.0299,
      "step": 41
    },
    {
      "epoch": 0.0067027070158989805,
      "grad_norm": 3.0408260822296143,
      "learning_rate": 3.595927866972694e-06,
      "loss": 0.3519,
      "step": 42
    },
    {
      "epoch": 0.00686229527818229,
      "grad_norm": 7.300391674041748,
      "learning_rate": 3.516841607689501e-06,
      "loss": 0.8792,
      "step": 43
    },
    {
      "epoch": 0.007021883540465599,
      "grad_norm": 8.536422729492188,
      "learning_rate": 3.436516483539781e-06,
      "loss": 0.354,
      "step": 44
    },
    {
      "epoch": 0.007181471802748908,
      "grad_norm": 7.12312126159668,
      "learning_rate": 3.3550503583141726e-06,
      "loss": 0.6326,
      "step": 45
    },
    {
      "epoch": 0.0073410600650322165,
      "grad_norm": 19.750776290893555,
      "learning_rate": 3.272542485937369e-06,
      "loss": 1.8694,
      "step": 46
    },
    {
      "epoch": 0.007500648327315526,
      "grad_norm": 15.625970840454102,
      "learning_rate": 3.189093389542498e-06,
      "loss": 0.8438,
      "step": 47
    },
    {
      "epoch": 0.007660236589598835,
      "grad_norm": 8.163710594177246,
      "learning_rate": 3.1048047389991693e-06,
      "loss": 0.1959,
      "step": 48
    },
    {
      "epoch": 0.007819824851882144,
      "grad_norm": 10.990124702453613,
      "learning_rate": 3.019779227044398e-06,
      "loss": 0.6136,
      "step": 49
    },
    {
      "epoch": 0.007979413114165453,
      "grad_norm": 12.102945327758789,
      "learning_rate": 2.9341204441673267e-06,
      "loss": 0.5843,
      "step": 50
    },
    {
      "epoch": 0.007979413114165453,
      "eval_loss": 0.5140388607978821,
      "eval_runtime": 847.4177,
      "eval_samples_per_second": 12.454,
      "eval_steps_per_second": 3.114,
      "step": 50
    },
    {
      "epoch": 0.008139001376448763,
      "grad_norm": 7.3725104331970215,
      "learning_rate": 2.847932752400164e-06,
      "loss": 1.0074,
      "step": 51
    },
    {
      "epoch": 0.008298589638732071,
      "grad_norm": 7.9290900230407715,
      "learning_rate": 2.761321158169134e-06,
      "loss": 0.4143,
      "step": 52
    },
    {
      "epoch": 0.008458177901015381,
      "grad_norm": 16.212963104248047,
      "learning_rate": 2.6743911843603134e-06,
      "loss": 0.6425,
      "step": 53
    },
    {
      "epoch": 0.00861776616329869,
      "grad_norm": 7.862034320831299,
      "learning_rate": 2.587248741756253e-06,
      "loss": 0.3512,
      "step": 54
    },
    {
      "epoch": 0.008777354425581998,
      "grad_norm": 3.113178253173828,
      "learning_rate": 2.5e-06,
      "loss": 0.1114,
      "step": 55
    },
    {
      "epoch": 0.008936942687865308,
      "grad_norm": 8.0140380859375,
      "learning_rate": 2.4127512582437486e-06,
      "loss": 0.4566,
      "step": 56
    },
    {
      "epoch": 0.009096530950148616,
      "grad_norm": 11.397700309753418,
      "learning_rate": 2.325608815639687e-06,
      "loss": 0.6353,
      "step": 57
    },
    {
      "epoch": 0.009256119212431926,
      "grad_norm": 0.06368193030357361,
      "learning_rate": 2.238678841830867e-06,
      "loss": 0.001,
      "step": 58
    },
    {
      "epoch": 0.009415707474715235,
      "grad_norm": 4.552347183227539,
      "learning_rate": 2.1520672475998374e-06,
      "loss": 0.2425,
      "step": 59
    },
    {
      "epoch": 0.009575295736998543,
      "grad_norm": 4.318637847900391,
      "learning_rate": 2.0658795558326745e-06,
      "loss": 0.2014,
      "step": 60
    },
    {
      "epoch": 0.009734883999281853,
      "grad_norm": 2.7980666160583496,
      "learning_rate": 1.9802207729556023e-06,
      "loss": 0.1012,
      "step": 61
    },
    {
      "epoch": 0.009894472261565162,
      "grad_norm": 2.5013349056243896,
      "learning_rate": 1.895195261000831e-06,
      "loss": 0.0972,
      "step": 62
    },
    {
      "epoch": 0.010054060523848472,
      "grad_norm": 2.51824688911438,
      "learning_rate": 1.8109066104575023e-06,
      "loss": 0.1209,
      "step": 63
    },
    {
      "epoch": 0.01021364878613178,
      "grad_norm": 4.979506492614746,
      "learning_rate": 1.7274575140626318e-06,
      "loss": 0.2335,
      "step": 64
    },
    {
      "epoch": 0.010373237048415088,
      "grad_norm": 0.3189781904220581,
      "learning_rate": 1.6449496416858285e-06,
      "loss": 0.0033,
      "step": 65
    },
    {
      "epoch": 0.010532825310698398,
      "grad_norm": 13.277030944824219,
      "learning_rate": 1.56348351646022e-06,
      "loss": 0.3517,
      "step": 66
    },
    {
      "epoch": 0.010692413572981707,
      "grad_norm": 5.3693084716796875,
      "learning_rate": 1.4831583923105e-06,
      "loss": 0.1901,
      "step": 67
    },
    {
      "epoch": 0.010852001835265017,
      "grad_norm": 0.6272711157798767,
      "learning_rate": 1.4040721330273063e-06,
      "loss": 0.006,
      "step": 68
    },
    {
      "epoch": 0.011011590097548325,
      "grad_norm": 9.435708045959473,
      "learning_rate": 1.3263210930352737e-06,
      "loss": 0.2644,
      "step": 69
    },
    {
      "epoch": 0.011171178359831634,
      "grad_norm": 4.341763496398926,
      "learning_rate": 1.2500000000000007e-06,
      "loss": 0.1834,
      "step": 70
    },
    {
      "epoch": 0.011330766622114944,
      "grad_norm": 9.02403450012207,
      "learning_rate": 1.1752018394169882e-06,
      "loss": 0.4484,
      "step": 71
    },
    {
      "epoch": 0.011490354884398252,
      "grad_norm": 4.708563327789307,
      "learning_rate": 1.1020177413231334e-06,
      "loss": 0.107,
      "step": 72
    },
    {
      "epoch": 0.011649943146681562,
      "grad_norm": 1.80886709690094,
      "learning_rate": 1.0305368692688175e-06,
      "loss": 0.0173,
      "step": 73
    },
    {
      "epoch": 0.01180953140896487,
      "grad_norm": 1.6764700412750244,
      "learning_rate": 9.608463116858544e-07,
      "loss": 0.0171,
      "step": 74
    },
    {
      "epoch": 0.011969119671248179,
      "grad_norm": 2.3282880783081055,
      "learning_rate": 8.930309757836517e-07,
      "loss": 0.0509,
      "step": 75
    },
    {
      "epoch": 0.012128707933531489,
      "grad_norm": 3.2590067386627197,
      "learning_rate": 8.271734841028553e-07,
      "loss": 0.1197,
      "step": 76
    },
    {
      "epoch": 0.012288296195814797,
      "grad_norm": 1.3154736757278442,
      "learning_rate": 7.633540738525066e-07,
      "loss": 0.0891,
      "step": 77
    },
    {
      "epoch": 0.012447884458098107,
      "grad_norm": 2.3927087783813477,
      "learning_rate": 7.016504991533727e-07,
      "loss": 0.0256,
      "step": 78
    },
    {
      "epoch": 0.012607472720381416,
      "grad_norm": 1.8653408288955688,
      "learning_rate": 6.421379363065142e-07,
      "loss": 0.1032,
      "step": 79
    },
    {
      "epoch": 0.012767060982664726,
      "grad_norm": 1.5105987787246704,
      "learning_rate": 5.848888922025553e-07,
      "loss": 0.0666,
      "step": 80
    },
    {
      "epoch": 0.012926649244948034,
      "grad_norm": 7.518610000610352,
      "learning_rate": 5.299731159831953e-07,
      "loss": 0.2022,
      "step": 81
    },
    {
      "epoch": 0.013086237507231343,
      "grad_norm": 2.9338836669921875,
      "learning_rate": 4.774575140626317e-07,
      "loss": 0.0967,
      "step": 82
    },
    {
      "epoch": 0.013245825769514653,
      "grad_norm": 1.571068525314331,
      "learning_rate": 4.27406068612396e-07,
      "loss": 0.0741,
      "step": 83
    },
    {
      "epoch": 0.013405414031797961,
      "grad_norm": 8.719659805297852,
      "learning_rate": 3.798797596089351e-07,
      "loss": 0.2685,
      "step": 84
    },
    {
      "epoch": 0.013565002294081271,
      "grad_norm": 2.258552312850952,
      "learning_rate": 3.3493649053890325e-07,
      "loss": 0.051,
      "step": 85
    },
    {
      "epoch": 0.01372459055636458,
      "grad_norm": 1.2262955904006958,
      "learning_rate": 2.9263101785268253e-07,
      "loss": 0.0505,
      "step": 86
    },
    {
      "epoch": 0.013884178818647888,
      "grad_norm": 3.613511323928833,
      "learning_rate": 2.53014884252083e-07,
      "loss": 0.2141,
      "step": 87
    },
    {
      "epoch": 0.014043767080931198,
      "grad_norm": 6.448750972747803,
      "learning_rate": 2.1613635589349756e-07,
      "loss": 0.3561,
      "step": 88
    },
    {
      "epoch": 0.014203355343214506,
      "grad_norm": 6.130068302154541,
      "learning_rate": 1.8204036358303173e-07,
      "loss": 0.2403,
      "step": 89
    },
    {
      "epoch": 0.014362943605497816,
      "grad_norm": 7.4299211502075195,
      "learning_rate": 1.507684480352292e-07,
      "loss": 0.3328,
      "step": 90
    },
    {
      "epoch": 0.014522531867781125,
      "grad_norm": 6.2848381996154785,
      "learning_rate": 1.223587092621162e-07,
      "loss": 0.2196,
      "step": 91
    },
    {
      "epoch": 0.014682120130064433,
      "grad_norm": 5.241060256958008,
      "learning_rate": 9.684576015420277e-08,
      "loss": 0.2432,
      "step": 92
    },
    {
      "epoch": 0.014841708392347743,
      "grad_norm": 8.220368385314941,
      "learning_rate": 7.426068431000883e-08,
      "loss": 0.2829,
      "step": 93
    },
    {
      "epoch": 0.015001296654631052,
      "grad_norm": 4.223203182220459,
      "learning_rate": 5.463099816548578e-08,
      "loss": 0.1818,
      "step": 94
    },
    {
      "epoch": 0.015160884916914362,
      "grad_norm": 3.850449562072754,
      "learning_rate": 3.798061746947995e-08,
      "loss": 0.1388,
      "step": 95
    },
    {
      "epoch": 0.01532047317919767,
      "grad_norm": 5.5809760093688965,
      "learning_rate": 2.4329828146074096e-08,
      "loss": 0.2356,
      "step": 96
    },
    {
      "epoch": 0.015480061441480978,
      "grad_norm": 4.3721795082092285,
      "learning_rate": 1.3695261579316776e-08,
      "loss": 0.0995,
      "step": 97
    },
    {
      "epoch": 0.01563964970376429,
      "grad_norm": 5.354106426239014,
      "learning_rate": 6.089874350439507e-09,
      "loss": 0.1928,
      "step": 98
    },
    {
      "epoch": 0.015799237966047597,
      "grad_norm": 4.162508010864258,
      "learning_rate": 1.5229324522605949e-09,
      "loss": 0.1439,
      "step": 99
    },
    {
      "epoch": 0.015958826228330905,
      "grad_norm": 5.800200939178467,
      "learning_rate": 0.0,
      "loss": 0.2004,
      "step": 100
    },
    {
      "epoch": 0.015958826228330905,
      "eval_loss": 0.18355341255664825,
      "eval_runtime": 847.1818,
      "eval_samples_per_second": 12.458,
      "eval_steps_per_second": 3.115,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.4874840035386982e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
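
For reference, a minimal sketch of how this state file can be inspected programmatically, assuming it sits at last-checkpoint/trainer_state.json relative to the working directory (the path is an assumption, not part of the file). Training records in log_history carry "loss" while evaluation records carry "eval_loss", so the two can be split by key:

import json

# Load the state dict the Hugging Face Trainer wrote alongside the checkpoint.
# Path is an assumption; adjust it to your checkpoint directory.
with open("last-checkpoint/trainer_state.json") as f:
    state = json.load(f)

# log_history interleaves training and evaluation records; split them by key.
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_loss" in e]

print(f"global_step={state['global_step']}  best_metric={state['best_metric']}")
print(f"train loss: {train_logs[0]['loss']} (step {train_logs[0]['step']}) "
      f"-> {train_logs[-1]['loss']} (step {train_logs[-1]['step']})")
for e in eval_logs:
    print(f"step {e['step']:>3}: eval_loss={e['eval_loss']:.4f}")

Run against this file, the script would report the drop from a training loss of 12.1116 at step 1 to 0.2004 at step 100, with eval_loss falling from 12.3231 to 0.1836 (the best_metric, matching checkpoint-100).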