{
  "best_metric": 0.6421379446983337,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.009240863096613223,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 4.620431548306612e-05,
      "grad_norm": 0.5076673030853271,
      "learning_rate": 3.3333333333333333e-06,
      "loss": 1.2688,
      "step": 1
    },
    {
      "epoch": 4.620431548306612e-05,
      "eval_loss": 2.6150074005126953,
      "eval_runtime": 957.4031,
      "eval_samples_per_second": 38.073,
      "eval_steps_per_second": 19.037,
      "step": 1
    },
    {
      "epoch": 9.240863096613223e-05,
      "grad_norm": 0.4707164168357849,
      "learning_rate": 6.666666666666667e-06,
      "loss": 1.4034,
      "step": 2
    },
    {
      "epoch": 0.00013861294644919834,
      "grad_norm": 0.49893462657928467,
      "learning_rate": 1e-05,
      "loss": 1.3002,
      "step": 3
    },
    {
      "epoch": 0.00018481726193226447,
      "grad_norm": 0.4544735550880432,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 1.3597,
      "step": 4
    },
    {
      "epoch": 0.0002310215774153306,
      "grad_norm": 0.5113587975502014,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 1.4907,
      "step": 5
    },
    {
      "epoch": 0.0002772258928983967,
      "grad_norm": 0.49932631850242615,
      "learning_rate": 2e-05,
      "loss": 1.5368,
      "step": 6
    },
    {
      "epoch": 0.00032343020838146284,
      "grad_norm": 0.49129220843315125,
      "learning_rate": 2.3333333333333336e-05,
      "loss": 1.4715,
      "step": 7
    },
    {
      "epoch": 0.00036963452386452894,
      "grad_norm": 0.5280050039291382,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 1.5247,
      "step": 8
    },
    {
      "epoch": 0.0004158388393475951,
      "grad_norm": 0.5901511311531067,
      "learning_rate": 3e-05,
      "loss": 1.6846,
      "step": 9
    },
    {
      "epoch": 0.0004620431548306612,
      "grad_norm": 0.5997692346572876,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 1.7587,
      "step": 10
    },
    {
      "epoch": 0.0005082474703137273,
      "grad_norm": 0.5347150564193726,
      "learning_rate": 3.6666666666666666e-05,
      "loss": 1.5797,
      "step": 11
    },
    {
      "epoch": 0.0005544517857967934,
      "grad_norm": 0.5770092606544495,
      "learning_rate": 4e-05,
      "loss": 1.8065,
      "step": 12
    },
    {
      "epoch": 0.0006006561012798596,
      "grad_norm": 0.640047550201416,
      "learning_rate": 4.3333333333333334e-05,
      "loss": 1.756,
      "step": 13
    },
    {
      "epoch": 0.0006468604167629257,
      "grad_norm": 0.6685127019882202,
      "learning_rate": 4.666666666666667e-05,
      "loss": 1.8463,
      "step": 14
    },
    {
      "epoch": 0.0006930647322459918,
      "grad_norm": 0.6096793413162231,
      "learning_rate": 5e-05,
      "loss": 1.7731,
      "step": 15
    },
    {
      "epoch": 0.0007392690477290579,
      "grad_norm": 0.47331157326698303,
      "learning_rate": 5.333333333333333e-05,
      "loss": 1.5453,
      "step": 16
    },
    {
      "epoch": 0.000785473363212124,
      "grad_norm": 0.6507935523986816,
      "learning_rate": 5.666666666666667e-05,
      "loss": 1.9544,
      "step": 17
    },
    {
      "epoch": 0.0008316776786951902,
      "grad_norm": 0.5391093492507935,
      "learning_rate": 6e-05,
      "loss": 1.7061,
      "step": 18
    },
    {
      "epoch": 0.0008778819941782563,
      "grad_norm": 0.5378036499023438,
      "learning_rate": 6.333333333333333e-05,
      "loss": 1.5813,
      "step": 19
    },
    {
      "epoch": 0.0009240863096613224,
      "grad_norm": 0.47087186574935913,
      "learning_rate": 6.666666666666667e-05,
      "loss": 1.4304,
      "step": 20
    },
    {
      "epoch": 0.0009702906251443885,
      "grad_norm": 0.5405943989753723,
      "learning_rate": 7e-05,
      "loss": 1.706,
      "step": 21
    },
    {
      "epoch": 0.0010164949406274546,
      "grad_norm": 0.4968361258506775,
      "learning_rate": 7.333333333333333e-05,
      "loss": 1.4416,
      "step": 22
    },
    {
      "epoch": 0.0010626992561105207,
      "grad_norm": 0.6123631596565247,
      "learning_rate": 7.666666666666667e-05,
      "loss": 1.6009,
      "step": 23
    },
    {
      "epoch": 0.0011089035715935868,
      "grad_norm": 0.651899516582489,
      "learning_rate": 8e-05,
      "loss": 1.6598,
      "step": 24
    },
    {
      "epoch": 0.0011551078870766529,
      "grad_norm": 0.6122700572013855,
      "learning_rate": 8.333333333333334e-05,
      "loss": 1.5412,
      "step": 25
    },
    {
      "epoch": 0.0012013122025597192,
      "grad_norm": 0.5154715776443481,
      "learning_rate": 8.666666666666667e-05,
      "loss": 1.3952,
      "step": 26
    },
    {
      "epoch": 0.0012475165180427853,
      "grad_norm": 0.5692221522331238,
      "learning_rate": 9e-05,
      "loss": 1.4373,
      "step": 27
    },
    {
      "epoch": 0.0012937208335258514,
      "grad_norm": 0.6106476187705994,
      "learning_rate": 9.333333333333334e-05,
      "loss": 1.4265,
      "step": 28
    },
    {
      "epoch": 0.0013399251490089175,
      "grad_norm": 0.5000233054161072,
      "learning_rate": 9.666666666666667e-05,
      "loss": 1.0467,
      "step": 29
    },
    {
      "epoch": 0.0013861294644919836,
      "grad_norm": 0.5108127593994141,
      "learning_rate": 0.0001,
      "loss": 0.9634,
      "step": 30
    },
    {
      "epoch": 0.0014323337799750497,
      "grad_norm": 0.5342124104499817,
      "learning_rate": 9.999146252290264e-05,
      "loss": 1.0977,
      "step": 31
    },
    {
      "epoch": 0.0014785380954581157,
      "grad_norm": 0.475253164768219,
      "learning_rate": 9.996585300715116e-05,
      "loss": 1.1091,
      "step": 32
    },
    {
      "epoch": 0.0015247424109411818,
      "grad_norm": 0.4190135896205902,
      "learning_rate": 9.99231801983717e-05,
      "loss": 1.0745,
      "step": 33
    },
    {
      "epoch": 0.001570946726424248,
      "grad_norm": 0.454335480928421,
      "learning_rate": 9.986345866928941e-05,
      "loss": 0.9677,
      "step": 34
    },
    {
      "epoch": 0.001617151041907314,
      "grad_norm": 0.5590494871139526,
      "learning_rate": 9.978670881475172e-05,
      "loss": 1.1762,
      "step": 35
    },
    {
      "epoch": 0.0016633553573903804,
      "grad_norm": 0.5425230860710144,
      "learning_rate": 9.96929568447637e-05,
      "loss": 0.8319,
      "step": 36
    },
    {
      "epoch": 0.0017095596728734464,
      "grad_norm": 0.5821622610092163,
      "learning_rate": 9.958223477553714e-05,
      "loss": 1.1842,
      "step": 37
    },
    {
      "epoch": 0.0017557639883565125,
      "grad_norm": 0.6131502985954285,
      "learning_rate": 9.94545804185573e-05,
      "loss": 1.0638,
      "step": 38
    },
    {
      "epoch": 0.0018019683038395786,
      "grad_norm": 0.7071466445922852,
      "learning_rate": 9.931003736767013e-05,
      "loss": 0.8615,
      "step": 39
    },
    {
      "epoch": 0.0018481726193226447,
      "grad_norm": 0.5414029359817505,
      "learning_rate": 9.91486549841951e-05,
      "loss": 0.928,
      "step": 40
    },
    {
      "epoch": 0.0018943769348057108,
      "grad_norm": 0.5337193012237549,
      "learning_rate": 9.89704883800683e-05,
      "loss": 0.8243,
      "step": 41
    },
    {
      "epoch": 0.001940581250288777,
      "grad_norm": 0.5797553658485413,
      "learning_rate": 9.877559839902184e-05,
      "loss": 1.0262,
      "step": 42
    },
    {
      "epoch": 0.001986785565771843,
      "grad_norm": 0.42586958408355713,
      "learning_rate": 9.85640515958057e-05,
      "loss": 0.7431,
      "step": 43
    },
    {
      "epoch": 0.002032989881254909,
      "grad_norm": 0.4883628189563751,
      "learning_rate": 9.833592021345937e-05,
      "loss": 1.0385,
      "step": 44
    },
    {
      "epoch": 0.0020791941967379752,
      "grad_norm": 0.49928346276283264,
      "learning_rate": 9.809128215864097e-05,
      "loss": 0.9741,
      "step": 45
    },
    {
      "epoch": 0.0021253985122210413,
      "grad_norm": 0.4869343638420105,
      "learning_rate": 9.783022097502204e-05,
      "loss": 0.9224,
      "step": 46
    },
    {
      "epoch": 0.0021716028277041074,
      "grad_norm": 0.5011721849441528,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.9387,
      "step": 47
    },
    {
      "epoch": 0.0022178071431871735,
      "grad_norm": 0.4684049189090729,
      "learning_rate": 9.725919140804099e-05,
      "loss": 1.0372,
      "step": 48
    },
    {
      "epoch": 0.0022640114586702396,
      "grad_norm": 0.4781642258167267,
      "learning_rate": 9.694941803075283e-05,
      "loss": 0.9302,
      "step": 49
    },
    {
      "epoch": 0.0023102157741533057,
      "grad_norm": 0.4801224172115326,
      "learning_rate": 9.662361147021779e-05,
      "loss": 0.8316,
      "step": 50
    },
    {
      "epoch": 0.0023102157741533057,
      "eval_loss": 0.7963826656341553,
      "eval_runtime": 949.1046,
      "eval_samples_per_second": 38.406,
      "eval_steps_per_second": 19.203,
      "step": 50
    },
    {
      "epoch": 0.0023564200896363722,
      "grad_norm": 0.3222690522670746,
      "learning_rate": 9.628188298907782e-05,
      "loss": 0.6702,
      "step": 51
    },
    {
      "epoch": 0.0024026244051194383,
      "grad_norm": 0.3845457136631012,
      "learning_rate": 9.592434928729616e-05,
      "loss": 0.3937,
      "step": 52
    },
    {
      "epoch": 0.0024488287206025044,
      "grad_norm": 0.4016472399234772,
      "learning_rate": 9.555113246230442e-05,
      "loss": 0.5014,
      "step": 53
    },
    {
      "epoch": 0.0024950330360855705,
      "grad_norm": 0.21051602065563202,
      "learning_rate": 9.516235996730645e-05,
      "loss": 0.3835,
      "step": 54
    },
    {
      "epoch": 0.0025412373515686366,
      "grad_norm": 0.32177630066871643,
      "learning_rate": 9.475816456775313e-05,
      "loss": 0.52,
      "step": 55
    },
    {
      "epoch": 0.0025874416670517027,
      "grad_norm": 0.3018689751625061,
      "learning_rate": 9.43386842960031e-05,
      "loss": 0.5665,
      "step": 56
    },
    {
      "epoch": 0.002633645982534769,
      "grad_norm": 0.24280551075935364,
      "learning_rate": 9.39040624041849e-05,
      "loss": 0.4877,
      "step": 57
    },
    {
      "epoch": 0.002679850298017835,
      "grad_norm": 0.20828717947006226,
      "learning_rate": 9.345444731527642e-05,
      "loss": 0.4176,
      "step": 58
    },
    {
      "epoch": 0.002726054613500901,
      "grad_norm": 0.18063269555568695,
      "learning_rate": 9.298999257241863e-05,
      "loss": 0.5384,
      "step": 59
    },
    {
      "epoch": 0.002772258928983967,
      "grad_norm": 0.277569442987442,
      "learning_rate": 9.251085678648072e-05,
      "loss": 0.4104,
      "step": 60
    },
    {
      "epoch": 0.002818463244467033,
      "grad_norm": 0.29315823316574097,
      "learning_rate": 9.201720358189464e-05,
      "loss": 0.5355,
      "step": 61
    },
    {
      "epoch": 0.0028646675599500993,
      "grad_norm": 0.24790503084659576,
      "learning_rate": 9.150920154077754e-05,
      "loss": 0.5383,
      "step": 62
    },
    {
      "epoch": 0.0029108718754331654,
      "grad_norm": 0.22097140550613403,
      "learning_rate": 9.098702414536107e-05,
      "loss": 0.536,
      "step": 63
    },
    {
      "epoch": 0.0029570761909162315,
      "grad_norm": 0.23576390743255615,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.4857,
      "step": 64
    },
    {
      "epoch": 0.0030032805063992976,
      "grad_norm": 0.31752127408981323,
      "learning_rate": 8.9900861364012e-05,
      "loss": 0.5439,
      "step": 65
    },
    {
      "epoch": 0.0030494848218823637,
      "grad_norm": 0.33295369148254395,
      "learning_rate": 8.933724690167417e-05,
      "loss": 0.6619,
      "step": 66
    },
    {
      "epoch": 0.00309568913736543,
      "grad_norm": 0.24967357516288757,
      "learning_rate": 8.876019880555649e-05,
      "loss": 0.4586,
      "step": 67
    },
    {
      "epoch": 0.003141893452848496,
      "grad_norm": 0.3030238449573517,
      "learning_rate": 8.816991413705516e-05,
      "loss": 0.6095,
      "step": 68
    },
    {
      "epoch": 0.003188097768331562,
      "grad_norm": 0.2835695147514343,
      "learning_rate": 8.756659447784368e-05,
      "loss": 0.6811,
      "step": 69
    },
    {
      "epoch": 0.003234302083814628,
      "grad_norm": 0.21882100403308868,
      "learning_rate": 8.695044586103296e-05,
      "loss": 0.4792,
      "step": 70
    },
    {
      "epoch": 0.0032805063992976946,
      "grad_norm": 0.31645792722702026,
      "learning_rate": 8.632167870081121e-05,
      "loss": 0.6682,
      "step": 71
    },
    {
      "epoch": 0.0033267107147807607,
      "grad_norm": 0.17226547002792358,
      "learning_rate": 8.568050772058762e-05,
      "loss": 0.3664,
      "step": 72
    },
    {
      "epoch": 0.003372915030263827,
      "grad_norm": 0.21164900064468384,
      "learning_rate": 8.502715187966455e-05,
      "loss": 0.5181,
      "step": 73
    },
    {
      "epoch": 0.003419119345746893,
      "grad_norm": 0.2374795526266098,
      "learning_rate": 8.436183429846313e-05,
      "loss": 0.5285,
      "step": 74
    },
    {
      "epoch": 0.003465323661229959,
      "grad_norm": 0.2672363817691803,
      "learning_rate": 8.368478218232787e-05,
      "loss": 0.5057,
      "step": 75
    },
    {
      "epoch": 0.003511527976713025,
      "grad_norm": 0.23033465445041656,
      "learning_rate": 8.299622674393614e-05,
      "loss": 0.5224,
      "step": 76
    },
    {
      "epoch": 0.003557732292196091,
      "grad_norm": 0.2521456778049469,
      "learning_rate": 8.229640312433937e-05,
      "loss": 0.5363,
      "step": 77
    },
    {
      "epoch": 0.0036039366076791573,
      "grad_norm": 0.3108648657798767,
      "learning_rate": 8.158555031266254e-05,
      "loss": 0.5886,
      "step": 78
    },
    {
      "epoch": 0.0036501409231622234,
      "grad_norm": 0.2801637351512909,
      "learning_rate": 8.086391106448965e-05,
      "loss": 0.5706,
      "step": 79
    },
    {
      "epoch": 0.0036963452386452895,
      "grad_norm": 0.2559647262096405,
      "learning_rate": 8.013173181896283e-05,
      "loss": 0.5647,
      "step": 80
    },
    {
      "epoch": 0.0037425495541283556,
      "grad_norm": 0.23344534635543823,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.4891,
      "step": 81
    },
    {
      "epoch": 0.0037887538696114217,
      "grad_norm": 0.2664535939693451,
      "learning_rate": 7.863675700402526e-05,
      "loss": 0.5494,
      "step": 82
    },
    {
      "epoch": 0.0038349581850944878,
      "grad_norm": 0.31824880838394165,
      "learning_rate": 7.787447196714427e-05,
      "loss": 0.6566,
      "step": 83
    },
    {
      "epoch": 0.003881162500577554,
      "grad_norm": 0.29656538367271423,
      "learning_rate": 7.710266782362247e-05,
      "loss": 0.6361,
      "step": 84
    },
    {
      "epoch": 0.00392736681606062,
      "grad_norm": 0.30476269125938416,
      "learning_rate": 7.63216081438678e-05,
      "loss": 0.6621,
      "step": 85
    },
    {
      "epoch": 0.003973571131543686,
      "grad_norm": 0.29754209518432617,
      "learning_rate": 7.553155965904535e-05,
      "loss": 0.7092,
      "step": 86
    },
    {
      "epoch": 0.004019775447026752,
      "grad_norm": 0.28509530425071716,
      "learning_rate": 7.473279216998895e-05,
      "loss": 0.6042,
      "step": 87
    },
    {
      "epoch": 0.004065979762509818,
      "grad_norm": 0.39861199259757996,
      "learning_rate": 7.392557845506432e-05,
      "loss": 0.8293,
      "step": 88
    },
    {
      "epoch": 0.004112184077992884,
      "grad_norm": 0.28901976346969604,
      "learning_rate": 7.311019417701566e-05,
      "loss": 0.5748,
      "step": 89
    },
    {
      "epoch": 0.0041583883934759504,
      "grad_norm": 0.3572286367416382,
      "learning_rate": 7.228691778882693e-05,
      "loss": 0.6117,
      "step": 90
    },
    {
      "epoch": 0.0042045927089590165,
      "grad_norm": 0.3332473039627075,
      "learning_rate": 7.145603043863045e-05,
      "loss": 0.653,
      "step": 91
    },
    {
      "epoch": 0.004250797024442083,
      "grad_norm": 0.3621080815792084,
      "learning_rate": 7.061781587369519e-05,
      "loss": 0.6299,
      "step": 92
    },
    {
      "epoch": 0.004297001339925149,
      "grad_norm": 0.4089956283569336,
      "learning_rate": 6.977256034352712e-05,
      "loss": 0.8945,
      "step": 93
    },
    {
      "epoch": 0.004343205655408215,
      "grad_norm": 0.3218964636325836,
      "learning_rate": 6.892055250211552e-05,
      "loss": 0.7201,
      "step": 94
    },
    {
      "epoch": 0.004389409970891281,
      "grad_norm": 0.3587651252746582,
      "learning_rate": 6.806208330935766e-05,
      "loss": 0.7803,
      "step": 95
    },
    {
      "epoch": 0.004435614286374347,
      "grad_norm": 0.3168658912181854,
      "learning_rate": 6.719744593169641e-05,
      "loss": 0.7799,
      "step": 96
    },
    {
      "epoch": 0.004481818601857413,
      "grad_norm": 0.4870496988296509,
      "learning_rate": 6.632693564200416e-05,
      "loss": 0.9183,
      "step": 97
    },
    {
      "epoch": 0.004528022917340479,
      "grad_norm": 0.31272202730178833,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.768,
      "step": 98
    },
    {
      "epoch": 0.004574227232823545,
      "grad_norm": 0.300275057554245,
      "learning_rate": 6.456948734446624e-05,
      "loss": 0.6447,
      "step": 99
    },
    {
      "epoch": 0.004620431548306611,
      "grad_norm": 0.377289354801178,
      "learning_rate": 6.368314950360415e-05,
      "loss": 0.7445,
      "step": 100
    },
    {
      "epoch": 0.004620431548306611,
      "eval_loss": 0.6734322309494019,
      "eval_runtime": 950.0633,
      "eval_samples_per_second": 38.367,
      "eval_steps_per_second": 19.184,
      "step": 100
    },
    {
      "epoch": 0.004666635863789678,
      "grad_norm": 0.2588423788547516,
      "learning_rate": 6.279213887972179e-05,
      "loss": 0.7065,
      "step": 101
    },
    {
      "epoch": 0.0047128401792727445,
      "grad_norm": 0.21197082102298737,
      "learning_rate": 6.189675975213094e-05,
      "loss": 0.2932,
      "step": 102
    },
    {
      "epoch": 0.004759044494755811,
      "grad_norm": 0.22588442265987396,
      "learning_rate": 6.099731789198344e-05,
      "loss": 0.3405,
      "step": 103
    },
    {
      "epoch": 0.004805248810238877,
      "grad_norm": 0.25069916248321533,
      "learning_rate": 6.009412045785051e-05,
      "loss": 0.4191,
      "step": 104
    },
    {
      "epoch": 0.004851453125721943,
      "grad_norm": 0.24258778989315033,
      "learning_rate": 5.918747589082853e-05,
      "loss": 0.355,
      "step": 105
    },
    {
      "epoch": 0.004897657441205009,
      "grad_norm": 0.20508873462677002,
      "learning_rate": 5.82776938092065e-05,
      "loss": 0.3554,
      "step": 106
    },
    {
      "epoch": 0.004943861756688075,
      "grad_norm": 0.2670445442199707,
      "learning_rate": 5.736508490273188e-05,
      "loss": 0.5443,
      "step": 107
    },
    {
      "epoch": 0.004990066072171141,
      "grad_norm": 0.24584536254405975,
      "learning_rate": 5.644996082651017e-05,
      "loss": 0.4639,
      "step": 108
    },
    {
      "epoch": 0.005036270387654207,
      "grad_norm": 0.2456938475370407,
      "learning_rate": 5.553263409457504e-05,
      "loss": 0.4195,
      "step": 109
    },
    {
      "epoch": 0.005082474703137273,
      "grad_norm": 0.25109416246414185,
      "learning_rate": 5.4613417973165106e-05,
      "loss": 0.487,
      "step": 110
    },
    {
      "epoch": 0.005128679018620339,
      "grad_norm": 0.1877359002828598,
      "learning_rate": 5.3692626373743706e-05,
      "loss": 0.4365,
      "step": 111
    },
    {
      "epoch": 0.0051748833341034054,
      "grad_norm": 0.19817650318145752,
      "learning_rate": 5.27705737457985e-05,
      "loss": 0.327,
      "step": 112
    },
    {
      "epoch": 0.0052210876495864715,
      "grad_norm": 0.198269784450531,
      "learning_rate": 5.184757496945726e-05,
      "loss": 0.4454,
      "step": 113
    },
    {
      "epoch": 0.005267291965069538,
      "grad_norm": 0.22586289048194885,
      "learning_rate": 5.092394524795649e-05,
      "loss": 0.4795,
      "step": 114
    },
    {
      "epoch": 0.005313496280552604,
      "grad_norm": 0.2628689706325531,
      "learning_rate": 5e-05,
      "loss": 0.4899,
      "step": 115
    },
    {
      "epoch": 0.00535970059603567,
      "grad_norm": 0.2354358434677124,
      "learning_rate": 4.907605475204352e-05,
      "loss": 0.4658,
      "step": 116
    },
    {
      "epoch": 0.005405904911518736,
      "grad_norm": 0.23226183652877808,
      "learning_rate": 4.8152425030542766e-05,
      "loss": 0.5188,
      "step": 117
    },
    {
      "epoch": 0.005452109227001802,
      "grad_norm": 0.25832581520080566,
      "learning_rate": 4.72294262542015e-05,
      "loss": 0.482,
      "step": 118
    },
    {
      "epoch": 0.005498313542484868,
      "grad_norm": 0.21811699867248535,
      "learning_rate": 4.6307373626256306e-05,
      "loss": 0.4764,
      "step": 119
    },
    {
      "epoch": 0.005544517857967934,
      "grad_norm": 0.19816286861896515,
      "learning_rate": 4.5386582026834906e-05,
      "loss": 0.4253,
      "step": 120
    },
    {
      "epoch": 0.005590722173451,
      "grad_norm": 0.3619321286678314,
      "learning_rate": 4.446736590542497e-05,
      "loss": 0.5297,
      "step": 121
    },
    {
      "epoch": 0.005636926488934066,
      "grad_norm": 0.31721270084381104,
      "learning_rate": 4.3550039173489845e-05,
      "loss": 0.5815,
      "step": 122
    },
    {
      "epoch": 0.0056831308044171325,
      "grad_norm": 0.2503688633441925,
      "learning_rate": 4.2634915097268115e-05,
      "loss": 0.5621,
      "step": 123
    },
    {
      "epoch": 0.005729335119900199,
      "grad_norm": 0.25126245617866516,
      "learning_rate": 4.1722306190793495e-05,
      "loss": 0.3979,
      "step": 124
    },
    {
      "epoch": 0.005775539435383265,
      "grad_norm": 0.24463218450546265,
      "learning_rate": 4.0812524109171476e-05,
      "loss": 0.5177,
      "step": 125
    },
    {
      "epoch": 0.005821743750866331,
      "grad_norm": 0.33323267102241516,
      "learning_rate": 3.99058795421495e-05,
      "loss": 0.5103,
      "step": 126
    },
    {
      "epoch": 0.005867948066349397,
      "grad_norm": 0.2861713171005249,
      "learning_rate": 3.9002682108016585e-05,
      "loss": 0.5381,
      "step": 127
    },
    {
      "epoch": 0.005914152381832463,
      "grad_norm": 0.3225221037864685,
      "learning_rate": 3.8103240247869075e-05,
      "loss": 0.6206,
      "step": 128
    },
    {
      "epoch": 0.005960356697315529,
      "grad_norm": 0.23997412621974945,
      "learning_rate": 3.720786112027822e-05,
      "loss": 0.4345,
      "step": 129
    },
    {
      "epoch": 0.006006561012798595,
      "grad_norm": 0.22723270952701569,
      "learning_rate": 3.631685049639586e-05,
      "loss": 0.5091,
      "step": 130
    },
    {
      "epoch": 0.006052765328281661,
      "grad_norm": 0.31986862421035767,
      "learning_rate": 3.543051265553377e-05,
      "loss": 0.5041,
      "step": 131
    },
    {
      "epoch": 0.006098969643764727,
      "grad_norm": 0.4289882481098175,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.6675,
      "step": 132
    },
    {
      "epoch": 0.0061451739592477935,
      "grad_norm": 0.24950823187828064,
      "learning_rate": 3.367306435799584e-05,
      "loss": 0.5478,
      "step": 133
    },
    {
      "epoch": 0.00619137827473086,
      "grad_norm": 0.292449027299881,
      "learning_rate": 3.2802554068303596e-05,
      "loss": 0.5382,
      "step": 134
    },
    {
      "epoch": 0.006237582590213926,
      "grad_norm": 0.27559009194374084,
      "learning_rate": 3.1937916690642356e-05,
      "loss": 0.5363,
      "step": 135
    },
    {
      "epoch": 0.006283786905696992,
      "grad_norm": 0.27184924483299255,
      "learning_rate": 3.107944749788449e-05,
      "loss": 0.6343,
      "step": 136
    },
    {
      "epoch": 0.006329991221180058,
      "grad_norm": 0.26460501551628113,
      "learning_rate": 3.0227439656472877e-05,
      "loss": 0.5129,
      "step": 137
    },
    {
      "epoch": 0.006376195536663124,
      "grad_norm": 0.31789299845695496,
      "learning_rate": 2.9382184126304834e-05,
      "loss": 0.5976,
      "step": 138
    },
    {
      "epoch": 0.00642239985214619,
      "grad_norm": 0.3732602000236511,
      "learning_rate": 2.8543969561369556e-05,
      "loss": 0.6542,
      "step": 139
    },
    {
      "epoch": 0.006468604167629256,
      "grad_norm": 0.4154840111732483,
      "learning_rate": 2.771308221117309e-05,
      "loss": 0.6341,
      "step": 140
    },
    {
      "epoch": 0.006514808483112323,
      "grad_norm": 0.43639206886291504,
      "learning_rate": 2.688980582298435e-05,
      "loss": 0.6507,
      "step": 141
    },
    {
      "epoch": 0.006561012798595389,
      "grad_norm": 0.2488391399383545,
      "learning_rate": 2.607442154493568e-05,
      "loss": 0.5045,
      "step": 142
    },
    {
      "epoch": 0.006607217114078455,
      "grad_norm": 0.2505655586719513,
      "learning_rate": 2.5267207830011068e-05,
      "loss": 0.539,
      "step": 143
    },
    {
      "epoch": 0.006653421429561521,
      "grad_norm": 0.3137003779411316,
      "learning_rate": 2.446844034095466e-05,
      "loss": 0.741,
      "step": 144
    },
    {
      "epoch": 0.0066996257450445875,
      "grad_norm": 0.4721606969833374,
      "learning_rate": 2.3678391856132204e-05,
      "loss": 0.8028,
      "step": 145
    },
    {
      "epoch": 0.006745830060527654,
      "grad_norm": 0.325411856174469,
      "learning_rate": 2.2897332176377528e-05,
      "loss": 0.6541,
      "step": 146
    },
    {
      "epoch": 0.00679203437601072,
      "grad_norm": 0.434574693441391,
      "learning_rate": 2.2125528032855724e-05,
      "loss": 0.8358,
      "step": 147
    },
    {
      "epoch": 0.006838238691493786,
      "grad_norm": 0.299848735332489,
      "learning_rate": 2.136324299597474e-05,
      "loss": 0.6771,
      "step": 148
    },
    {
      "epoch": 0.006884443006976852,
      "grad_norm": 0.3632323741912842,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.8139,
      "step": 149
    },
    {
      "epoch": 0.006930647322459918,
      "grad_norm": 0.3702452778816223,
      "learning_rate": 1.9868268181037185e-05,
      "loss": 0.7374,
      "step": 150
    },
    {
      "epoch": 0.006930647322459918,
      "eval_loss": 0.6421379446983337,
      "eval_runtime": 951.4779,
      "eval_samples_per_second": 38.31,
      "eval_steps_per_second": 19.155,
      "step": 150
    },
    {
      "epoch": 0.006976851637942984,
      "grad_norm": 0.2505011260509491,
      "learning_rate": 1.9136088935510362e-05,
      "loss": 0.7066,
      "step": 151
    },
    {
      "epoch": 0.00702305595342605,
      "grad_norm": 0.22771523892879486,
      "learning_rate": 1.8414449687337464e-05,
      "loss": 0.2777,
      "step": 152
    },
    {
      "epoch": 0.007069260268909116,
      "grad_norm": 0.19011437892913818,
      "learning_rate": 1.7703596875660645e-05,
      "loss": 0.3103,
      "step": 153
    },
    {
      "epoch": 0.007115464584392182,
      "grad_norm": 0.217951238155365,
      "learning_rate": 1.700377325606388e-05,
      "loss": 0.394,
      "step": 154
    },
    {
      "epoch": 0.0071616688998752485,
      "grad_norm": 0.2236698716878891,
      "learning_rate": 1.631521781767214e-05,
      "loss": 0.2915,
      "step": 155
    },
    {
      "epoch": 0.007207873215358315,
      "grad_norm": 0.18969488143920898,
      "learning_rate": 1.5638165701536868e-05,
      "loss": 0.3511,
      "step": 156
    },
    {
      "epoch": 0.007254077530841381,
      "grad_norm": 0.24837560951709747,
      "learning_rate": 1.4972848120335453e-05,
      "loss": 0.3537,
      "step": 157
    },
    {
      "epoch": 0.007300281846324447,
      "grad_norm": 0.19340603053569794,
      "learning_rate": 1.4319492279412388e-05,
      "loss": 0.3308,
      "step": 158
    },
    {
      "epoch": 0.007346486161807513,
      "grad_norm": 0.22562357783317566,
      "learning_rate": 1.3678321299188801e-05,
      "loss": 0.349,
      "step": 159
    },
    {
      "epoch": 0.007392690477290579,
      "grad_norm": 0.22165030241012573,
      "learning_rate": 1.3049554138967051e-05,
      "loss": 0.4201,
      "step": 160
    },
    {
      "epoch": 0.007438894792773645,
      "grad_norm": 0.19497385621070862,
      "learning_rate": 1.2433405522156332e-05,
      "loss": 0.3438,
      "step": 161
    },
    {
      "epoch": 0.007485099108256711,
      "grad_norm": 0.24119050800800323,
      "learning_rate": 1.183008586294485e-05,
      "loss": 0.4641,
      "step": 162
    },
    {
      "epoch": 0.007531303423739777,
      "grad_norm": 0.18582989275455475,
      "learning_rate": 1.1239801194443506e-05,
      "loss": 0.315,
      "step": 163
    },
    {
      "epoch": 0.007577507739222843,
      "grad_norm": 0.25035154819488525,
      "learning_rate": 1.066275309832584e-05,
      "loss": 0.4704,
      "step": 164
    },
    {
      "epoch": 0.0076237120547059094,
      "grad_norm": 0.24834051728248596,
      "learning_rate": 1.0099138635988026e-05,
      "loss": 0.4988,
      "step": 165
    },
    {
      "epoch": 0.0076699163701889755,
      "grad_norm": 0.24904410541057587,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.4457,
      "step": 166
    },
    {
      "epoch": 0.007716120685672042,
      "grad_norm": 0.2082366645336151,
      "learning_rate": 9.012975854638949e-06,
      "loss": 0.4532,
      "step": 167
    },
    {
      "epoch": 0.007762325001155108,
      "grad_norm": 0.2391825169324875,
      "learning_rate": 8.490798459222476e-06,
      "loss": 0.4345,
      "step": 168
    },
    {
      "epoch": 0.007808529316638174,
      "grad_norm": 0.25518763065338135,
      "learning_rate": 7.982796418105371e-06,
      "loss": 0.5175,
      "step": 169
    },
    {
      "epoch": 0.00785473363212124,
      "grad_norm": 0.18920768797397614,
      "learning_rate": 7.489143213519301e-06,
      "loss": 0.4119,
      "step": 170
    },
    {
      "epoch": 0.007900937947604306,
      "grad_norm": 0.27596113085746765,
      "learning_rate": 7.010007427581378e-06,
      "loss": 0.4418,
      "step": 171
    },
    {
      "epoch": 0.007947142263087372,
      "grad_norm": 0.18986520171165466,
      "learning_rate": 6.5455526847235825e-06,
      "loss": 0.4393,
      "step": 172
    },
    {
      "epoch": 0.007993346578570438,
      "grad_norm": 0.28557878732681274,
      "learning_rate": 6.0959375958151045e-06,
      "loss": 0.3656,
      "step": 173
    },
    {
      "epoch": 0.008039550894053504,
      "grad_norm": 0.24311548471450806,
      "learning_rate": 5.6613157039969055e-06,
      "loss": 0.5238,
      "step": 174
    },
    {
      "epoch": 0.00808575520953657,
      "grad_norm": 0.22532469034194946,
      "learning_rate": 5.241835432246889e-06,
      "loss": 0.6135,
      "step": 175
    },
    {
      "epoch": 0.008131959525019637,
      "grad_norm": 0.21544504165649414,
      "learning_rate": 4.837640032693558e-06,
      "loss": 0.4251,
      "step": 176
    },
    {
      "epoch": 0.008178163840502703,
      "grad_norm": 0.22226200997829437,
      "learning_rate": 4.448867537695578e-06,
      "loss": 0.4952,
      "step": 177
    },
    {
      "epoch": 0.008224368155985769,
      "grad_norm": 0.22834548354148865,
      "learning_rate": 4.075650712703849e-06,
      "loss": 0.5105,
      "step": 178
    },
    {
      "epoch": 0.008270572471468835,
      "grad_norm": 0.2550305724143982,
      "learning_rate": 3.71811701092219e-06,
      "loss": 0.576,
      "step": 179
    },
    {
      "epoch": 0.008316776786951901,
      "grad_norm": 0.2729456424713135,
      "learning_rate": 3.376388529782215e-06,
      "loss": 0.5614,
      "step": 180
    },
    {
      "epoch": 0.008362981102434967,
      "grad_norm": 0.3093857169151306,
      "learning_rate": 3.0505819692471792e-06,
      "loss": 0.5891,
      "step": 181
    },
    {
      "epoch": 0.008409185417918033,
      "grad_norm": 0.23913784325122833,
      "learning_rate": 2.7408085919590264e-06,
      "loss": 0.5029,
      "step": 182
    },
    {
      "epoch": 0.0084553897334011,
      "grad_norm": 0.2879793643951416,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.6654,
      "step": 183
    },
    {
      "epoch": 0.008501594048884165,
      "grad_norm": 0.28612762689590454,
      "learning_rate": 2.1697790249779636e-06,
      "loss": 0.5423,
      "step": 184
    },
    {
      "epoch": 0.008547798364367231,
      "grad_norm": 0.2573546767234802,
      "learning_rate": 1.908717841359048e-06,
      "loss": 0.5354,
      "step": 185
    },
    {
      "epoch": 0.008594002679850297,
      "grad_norm": 0.2628982365131378,
      "learning_rate": 1.6640797865406288e-06,
      "loss": 0.546,
      "step": 186
    },
    {
      "epoch": 0.008640206995333364,
      "grad_norm": 0.34436121582984924,
      "learning_rate": 1.4359484041943038e-06,
      "loss": 0.5221,
      "step": 187
    },
    {
      "epoch": 0.00868641131081643,
      "grad_norm": 0.2856992483139038,
      "learning_rate": 1.2244016009781701e-06,
      "loss": 0.5188,
      "step": 188
    },
    {
      "epoch": 0.008732615626299496,
      "grad_norm": 0.2615925669670105,
      "learning_rate": 1.0295116199317057e-06,
      "loss": 0.5459,
      "step": 189
    },
    {
      "epoch": 0.008778819941782562,
      "grad_norm": 0.30612173676490784,
      "learning_rate": 8.513450158049108e-07,
      "loss": 0.692,
      "step": 190
    },
    {
      "epoch": 0.008825024257265628,
      "grad_norm": 0.4444868564605713,
      "learning_rate": 6.899626323298713e-07,
      "loss": 0.7948,
      "step": 191
    },
    {
      "epoch": 0.008871228572748694,
      "grad_norm": 0.40465953946113586,
      "learning_rate": 5.454195814427021e-07,
      "loss": 0.6784,
      "step": 192
    },
    {
      "epoch": 0.00891743288823176,
      "grad_norm": 0.3131570518016815,
      "learning_rate": 4.177652244628627e-07,
      "loss": 0.694,
      "step": 193
    },
    {
      "epoch": 0.008963637203714826,
      "grad_norm": 0.2531670928001404,
      "learning_rate": 3.0704315523631953e-07,
      "loss": 0.502,
      "step": 194
    },
    {
      "epoch": 0.009009841519197892,
      "grad_norm": 0.266597718000412,
      "learning_rate": 2.1329118524827662e-07,
      "loss": 0.5612,
      "step": 195
    },
    {
      "epoch": 0.009056045834680958,
      "grad_norm": 0.33451199531555176,
      "learning_rate": 1.3654133071059893e-07,
      "loss": 0.6737,
      "step": 196
    },
    {
      "epoch": 0.009102250150164025,
      "grad_norm": 0.39705005288124084,
      "learning_rate": 7.681980162830282e-08,
      "loss": 0.6853,
      "step": 197
    },
    {
      "epoch": 0.00914845446564709,
      "grad_norm": 0.44060179591178894,
      "learning_rate": 3.4146992848854695e-08,
      "loss": 0.8037,
      "step": 198
    },
    {
      "epoch": 0.009194658781130157,
      "grad_norm": 0.39385107159614563,
      "learning_rate": 8.537477097364522e-09,
      "loss": 0.7855,
      "step": 199
    },
    {
      "epoch": 0.009240863096613223,
      "grad_norm": 0.42565372586250305,
      "learning_rate": 0.0,
      "loss": 0.7798,
      "step": 200
    },
    {
      "epoch": 0.009240863096613223,
      "eval_loss": 0.642300009727478,
      "eval_runtime": 953.0502,
      "eval_samples_per_second": 38.247,
      "eval_steps_per_second": 19.124,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.394382546665472e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}