{
  "best_metric": 1.45925772190094,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.37019898195279966,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0018509949097639982,
      "grad_norm": 59.89948654174805,
      "learning_rate": 1e-05,
      "loss": 10.1586,
      "step": 1
    },
    {
      "epoch": 0.0018509949097639982,
      "eval_loss": 3.6028523445129395,
      "eval_runtime": 68.5961,
      "eval_samples_per_second": 13.266,
      "eval_steps_per_second": 3.324,
      "step": 1
    },
    {
      "epoch": 0.0037019898195279964,
      "grad_norm": 100.43816375732422,
      "learning_rate": 2e-05,
      "loss": 11.8233,
      "step": 2
    },
    {
      "epoch": 0.005552984729291994,
      "grad_norm": 60.06867980957031,
      "learning_rate": 3e-05,
      "loss": 10.9429,
      "step": 3
    },
    {
      "epoch": 0.007403979639055993,
      "grad_norm": 53.39894104003906,
      "learning_rate": 4e-05,
      "loss": 9.6525,
      "step": 4
    },
    {
      "epoch": 0.00925497454881999,
      "grad_norm": 67.93931579589844,
      "learning_rate": 5e-05,
      "loss": 10.0067,
      "step": 5
    },
    {
      "epoch": 0.011105969458583989,
      "grad_norm": 47.26503372192383,
      "learning_rate": 6e-05,
      "loss": 8.465,
      "step": 6
    },
    {
      "epoch": 0.012956964368347987,
      "grad_norm": 38.43549346923828,
      "learning_rate": 7e-05,
      "loss": 8.9691,
      "step": 7
    },
    {
      "epoch": 0.014807959278111986,
      "grad_norm": 40.666404724121094,
      "learning_rate": 8e-05,
      "loss": 7.4453,
      "step": 8
    },
    {
      "epoch": 0.016658954187875982,
      "grad_norm": 29.77450942993164,
      "learning_rate": 9e-05,
      "loss": 8.4803,
      "step": 9
    },
    {
      "epoch": 0.01850994909763998,
      "grad_norm": 29.103349685668945,
      "learning_rate": 0.0001,
      "loss": 7.4746,
      "step": 10
    },
    {
      "epoch": 0.02036094400740398,
      "grad_norm": 30.80104637145996,
      "learning_rate": 9.999316524962345e-05,
      "loss": 8.5573,
      "step": 11
    },
    {
      "epoch": 0.022211938917167977,
      "grad_norm": 30.97075653076172,
      "learning_rate": 9.997266286704631e-05,
      "loss": 8.1872,
      "step": 12
    },
    {
      "epoch": 0.024062933826931976,
      "grad_norm": 29.791688919067383,
      "learning_rate": 9.993849845741524e-05,
      "loss": 7.0701,
      "step": 13
    },
    {
      "epoch": 0.025913928736695974,
      "grad_norm": 46.57557678222656,
      "learning_rate": 9.989068136093873e-05,
      "loss": 8.5949,
      "step": 14
    },
    {
      "epoch": 0.027764923646459973,
      "grad_norm": 31.162527084350586,
      "learning_rate": 9.98292246503335e-05,
      "loss": 8.3614,
      "step": 15
    },
    {
      "epoch": 0.02961591855622397,
      "grad_norm": 28.787185668945312,
      "learning_rate": 9.975414512725057e-05,
      "loss": 7.7322,
      "step": 16
    },
    {
      "epoch": 0.03146691346598797,
      "grad_norm": 26.941804885864258,
      "learning_rate": 9.966546331768191e-05,
      "loss": 6.6127,
      "step": 17
    },
    {
      "epoch": 0.033317908375751965,
      "grad_norm": 26.8864688873291,
      "learning_rate": 9.956320346634876e-05,
      "loss": 7.7146,
      "step": 18
    },
    {
      "epoch": 0.035168903285515966,
      "grad_norm": 25.0333194732666,
      "learning_rate": 9.944739353007344e-05,
      "loss": 6.7545,
      "step": 19
    },
    {
      "epoch": 0.03701989819527996,
      "grad_norm": 22.383333206176758,
      "learning_rate": 9.931806517013612e-05,
      "loss": 7.2769,
      "step": 20
    },
    {
      "epoch": 0.03887089310504396,
      "grad_norm": 20.333555221557617,
      "learning_rate": 9.917525374361912e-05,
      "loss": 5.9189,
      "step": 21
    },
    {
      "epoch": 0.04072188801480796,
      "grad_norm": 22.781673431396484,
      "learning_rate": 9.901899829374047e-05,
      "loss": 6.9341,
      "step": 22
    },
    {
      "epoch": 0.04257288292457196,
      "grad_norm": 24.01888084411621,
      "learning_rate": 9.884934153917997e-05,
      "loss": 7.1848,
      "step": 23
    },
    {
      "epoch": 0.044423877834335955,
      "grad_norm": 22.655099868774414,
      "learning_rate": 9.86663298624003e-05,
      "loss": 6.002,
      "step": 24
    },
    {
      "epoch": 0.04627487274409996,
      "grad_norm": 24.61270523071289,
      "learning_rate": 9.847001329696653e-05,
      "loss": 6.2469,
      "step": 25
    },
    {
      "epoch": 0.04812586765386395,
      "grad_norm": 25.12575912475586,
      "learning_rate": 9.826044551386744e-05,
      "loss": 6.8516,
      "step": 26
    },
    {
      "epoch": 0.04997686256362795,
      "grad_norm": 22.560014724731445,
      "learning_rate": 9.803768380684242e-05,
      "loss": 5.8165,
      "step": 27
    },
    {
      "epoch": 0.05182785747339195,
      "grad_norm": 22.28982162475586,
      "learning_rate": 9.780178907671789e-05,
      "loss": 6.3579,
      "step": 28
    },
    {
      "epoch": 0.053678852383155944,
      "grad_norm": 24.577585220336914,
      "learning_rate": 9.755282581475769e-05,
      "loss": 5.9195,
      "step": 29
    },
    {
      "epoch": 0.055529847292919945,
      "grad_norm": 30.741830825805664,
      "learning_rate": 9.729086208503174e-05,
      "loss": 7.2511,
      "step": 30
    },
    {
      "epoch": 0.05738084220268394,
      "grad_norm": 26.450328826904297,
      "learning_rate": 9.701596950580806e-05,
      "loss": 7.2489,
      "step": 31
    },
    {
      "epoch": 0.05923183711244794,
      "grad_norm": 24.98292350769043,
      "learning_rate": 9.672822322997305e-05,
      "loss": 6.9673,
      "step": 32
    },
    {
      "epoch": 0.06108283202221194,
      "grad_norm": 21.95580291748047,
      "learning_rate": 9.642770192448536e-05,
      "loss": 5.2675,
      "step": 33
    },
    {
      "epoch": 0.06293382693197594,
      "grad_norm": 23.09197235107422,
      "learning_rate": 9.611448774886924e-05,
      "loss": 4.9874,
      "step": 34
    },
    {
      "epoch": 0.06478482184173993,
      "grad_norm": 24.217933654785156,
      "learning_rate": 9.578866633275288e-05,
      "loss": 6.4793,
      "step": 35
    },
    {
      "epoch": 0.06663581675150393,
      "grad_norm": 25.319543838500977,
      "learning_rate": 9.545032675245813e-05,
      "loss": 5.6718,
      "step": 36
    },
    {
      "epoch": 0.06848681166126794,
      "grad_norm": 20.073984146118164,
      "learning_rate": 9.509956150664796e-05,
      "loss": 4.0343,
      "step": 37
    },
    {
      "epoch": 0.07033780657103193,
      "grad_norm": 25.865989685058594,
      "learning_rate": 9.473646649103818e-05,
      "loss": 4.9742,
      "step": 38
    },
    {
      "epoch": 0.07218880148079593,
      "grad_norm": 26.664836883544922,
      "learning_rate": 9.43611409721806e-05,
      "loss": 6.4406,
      "step": 39
    },
    {
      "epoch": 0.07403979639055992,
      "grad_norm": 21.804039001464844,
      "learning_rate": 9.397368756032445e-05,
      "loss": 4.4743,
      "step": 40
    },
    {
      "epoch": 0.07589079130032392,
      "grad_norm": 25.674467086791992,
      "learning_rate": 9.357421218136386e-05,
      "loss": 6.674,
      "step": 41
    },
    {
      "epoch": 0.07774178621008793,
      "grad_norm": 22.556793212890625,
      "learning_rate": 9.316282404787871e-05,
      "loss": 5.0478,
      "step": 42
    },
    {
      "epoch": 0.07959278111985192,
      "grad_norm": 22.43199920654297,
      "learning_rate": 9.273963562927695e-05,
      "loss": 4.8246,
      "step": 43
    },
    {
      "epoch": 0.08144377602961592,
      "grad_norm": 22.04460906982422,
      "learning_rate": 9.230476262104677e-05,
      "loss": 6.7395,
      "step": 44
    },
    {
      "epoch": 0.08329477093937991,
      "grad_norm": 27.928298950195312,
      "learning_rate": 9.185832391312644e-05,
      "loss": 5.0662,
      "step": 45
    },
    {
      "epoch": 0.08514576584914392,
      "grad_norm": 21.5211181640625,
      "learning_rate": 9.140044155740101e-05,
      "loss": 4.8276,
      "step": 46
    },
    {
      "epoch": 0.08699676075890792,
      "grad_norm": 30.689226150512695,
      "learning_rate": 9.093124073433463e-05,
      "loss": 5.0762,
      "step": 47
    },
    {
      "epoch": 0.08884775566867191,
      "grad_norm": 24.589027404785156,
      "learning_rate": 9.045084971874738e-05,
      "loss": 5.8773,
      "step": 48
    },
    {
      "epoch": 0.0906987505784359,
      "grad_norm": 25.96907615661621,
      "learning_rate": 8.995939984474624e-05,
      "loss": 4.7589,
      "step": 49
    },
    {
      "epoch": 0.09254974548819991,
      "grad_norm": 31.189998626708984,
      "learning_rate": 8.945702546981969e-05,
      "loss": 4.126,
      "step": 50
    },
    {
      "epoch": 0.09254974548819991,
      "eval_loss": 2.353039264678955,
      "eval_runtime": 69.882,
      "eval_samples_per_second": 13.022,
      "eval_steps_per_second": 3.263,
      "step": 50
    },
    {
      "epoch": 0.09440074039796391,
      "grad_norm": 110.58844757080078,
      "learning_rate": 8.894386393810563e-05,
      "loss": 15.2761,
      "step": 51
    },
    {
      "epoch": 0.0962517353077279,
      "grad_norm": 58.721832275390625,
      "learning_rate": 8.842005554284296e-05,
      "loss": 11.9424,
      "step": 52
    },
    {
      "epoch": 0.0981027302174919,
      "grad_norm": 30.038488388061523,
      "learning_rate": 8.788574348801675e-05,
      "loss": 9.9225,
      "step": 53
    },
    {
      "epoch": 0.0999537251272559,
      "grad_norm": 20.106815338134766,
      "learning_rate": 8.73410738492077e-05,
      "loss": 8.2082,
      "step": 54
    },
    {
      "epoch": 0.1018047200370199,
      "grad_norm": 23.7856388092041,
      "learning_rate": 8.678619553365659e-05,
      "loss": 8.4048,
      "step": 55
    },
    {
      "epoch": 0.1036557149467839,
      "grad_norm": 25.19913101196289,
      "learning_rate": 8.622126023955446e-05,
      "loss": 8.0735,
      "step": 56
    },
    {
      "epoch": 0.10550670985654789,
      "grad_norm": 25.34491539001465,
      "learning_rate": 8.564642241456986e-05,
      "loss": 7.4242,
      "step": 57
    },
    {
      "epoch": 0.10735770476631189,
      "grad_norm": 23.657678604125977,
      "learning_rate": 8.506183921362443e-05,
      "loss": 8.2696,
      "step": 58
    },
    {
      "epoch": 0.1092086996760759,
      "grad_norm": 23.854257583618164,
      "learning_rate": 8.44676704559283e-05,
      "loss": 8.2411,
      "step": 59
    },
    {
      "epoch": 0.11105969458583989,
      "grad_norm": 25.120820999145508,
      "learning_rate": 8.386407858128706e-05,
      "loss": 7.9448,
      "step": 60
    },
    {
      "epoch": 0.11291068949560389,
      "grad_norm": 22.30246925354004,
      "learning_rate": 8.32512286056924e-05,
      "loss": 7.962,
      "step": 61
    },
    {
      "epoch": 0.11476168440536788,
      "grad_norm": 22.673364639282227,
      "learning_rate": 8.262928807620843e-05,
      "loss": 7.8381,
      "step": 62
    },
    {
      "epoch": 0.11661267931513189,
      "grad_norm": 26.36172103881836,
      "learning_rate": 8.199842702516583e-05,
      "loss": 8.6518,
      "step": 63
    },
    {
      "epoch": 0.11846367422489588,
      "grad_norm": 22.343236923217773,
      "learning_rate": 8.135881792367686e-05,
      "loss": 6.7058,
      "step": 64
    },
    {
      "epoch": 0.12031466913465988,
      "grad_norm": 29.06987190246582,
      "learning_rate": 8.07106356344834e-05,
      "loss": 7.2401,
      "step": 65
    },
    {
      "epoch": 0.12216566404442387,
      "grad_norm": 25.895959854125977,
      "learning_rate": 8.005405736415126e-05,
      "loss": 8.3782,
      "step": 66
    },
    {
      "epoch": 0.12401665895418787,
      "grad_norm": 24.11650848388672,
      "learning_rate": 7.938926261462366e-05,
      "loss": 7.4369,
      "step": 67
    },
    {
      "epoch": 0.12586765386395188,
      "grad_norm": 22.802165985107422,
      "learning_rate": 7.871643313414718e-05,
      "loss": 5.9717,
      "step": 68
    },
    {
      "epoch": 0.12771864877371586,
      "grad_norm": 21.314449310302734,
      "learning_rate": 7.803575286758364e-05,
      "loss": 7.2408,
      "step": 69
    },
    {
      "epoch": 0.12956964368347987,
      "grad_norm": 18.106534957885742,
      "learning_rate": 7.734740790612136e-05,
      "loss": 6.1463,
      "step": 70
    },
    {
      "epoch": 0.13142063859324388,
      "grad_norm": 21.958393096923828,
      "learning_rate": 7.66515864363997e-05,
      "loss": 6.8193,
      "step": 71
    },
    {
      "epoch": 0.13327163350300786,
      "grad_norm": 19.48069953918457,
      "learning_rate": 7.594847868906076e-05,
      "loss": 6.5375,
      "step": 72
    },
    {
      "epoch": 0.13512262841277187,
      "grad_norm": 23.702938079833984,
      "learning_rate": 7.52382768867422e-05,
      "loss": 7.0712,
      "step": 73
    },
    {
      "epoch": 0.13697362332253588,
      "grad_norm": 22.461809158325195,
      "learning_rate": 7.452117519152542e-05,
      "loss": 5.9379,
      "step": 74
    },
    {
      "epoch": 0.13882461823229986,
      "grad_norm": 24.132768630981445,
      "learning_rate": 7.379736965185368e-05,
      "loss": 6.974,
      "step": 75
    },
    {
      "epoch": 0.14067561314206387,
      "grad_norm": 25.628990173339844,
      "learning_rate": 7.30670581489344e-05,
      "loss": 7.1283,
      "step": 76
    },
    {
      "epoch": 0.14252660805182785,
      "grad_norm": 24.302547454833984,
      "learning_rate": 7.233044034264034e-05,
      "loss": 7.334,
      "step": 77
    },
    {
      "epoch": 0.14437760296159186,
      "grad_norm": 18.943586349487305,
      "learning_rate": 7.158771761692464e-05,
      "loss": 5.2304,
      "step": 78
    },
    {
      "epoch": 0.14622859787135586,
      "grad_norm": 18.917469024658203,
      "learning_rate": 7.083909302476453e-05,
      "loss": 5.6487,
      "step": 79
    },
    {
      "epoch": 0.14807959278111985,
      "grad_norm": 21.635984420776367,
      "learning_rate": 7.008477123264848e-05,
      "loss": 6.0559,
      "step": 80
    },
    {
      "epoch": 0.14993058769088385,
      "grad_norm": 21.822101593017578,
      "learning_rate": 6.932495846462261e-05,
      "loss": 6.1391,
      "step": 81
    },
    {
      "epoch": 0.15178158260064784,
      "grad_norm": 19.244054794311523,
      "learning_rate": 6.855986244591104e-05,
      "loss": 5.7351,
      "step": 82
    },
    {
      "epoch": 0.15363257751041184,
      "grad_norm": 22.16774559020996,
      "learning_rate": 6.778969234612584e-05,
      "loss": 5.5628,
      "step": 83
    },
    {
      "epoch": 0.15548357242017585,
      "grad_norm": 19.686809539794922,
      "learning_rate": 6.701465872208216e-05,
      "loss": 5.6571,
      "step": 84
    },
    {
      "epoch": 0.15733456732993983,
      "grad_norm": 18.753782272338867,
      "learning_rate": 6.623497346023418e-05,
      "loss": 5.5124,
      "step": 85
    },
    {
      "epoch": 0.15918556223970384,
      "grad_norm": 22.904891967773438,
      "learning_rate": 6.545084971874738e-05,
      "loss": 6.0632,
      "step": 86
    },
    {
      "epoch": 0.16103655714946785,
      "grad_norm": 20.548492431640625,
      "learning_rate": 6.466250186922325e-05,
      "loss": 6.0648,
      "step": 87
    },
    {
      "epoch": 0.16288755205923183,
      "grad_norm": 21.983842849731445,
      "learning_rate": 6.387014543809223e-05,
      "loss": 5.0114,
      "step": 88
    },
    {
      "epoch": 0.16473854696899584,
      "grad_norm": 18.991622924804688,
      "learning_rate": 6.307399704769099e-05,
      "loss": 5.3445,
      "step": 89
    },
    {
      "epoch": 0.16658954187875982,
      "grad_norm": 20.161306381225586,
      "learning_rate": 6.227427435703997e-05,
      "loss": 5.8939,
      "step": 90
    },
    {
      "epoch": 0.16844053678852383,
      "grad_norm": 16.23657989501953,
      "learning_rate": 6.147119600233758e-05,
      "loss": 4.8956,
      "step": 91
    },
    {
      "epoch": 0.17029153169828784,
      "grad_norm": 15.523630142211914,
      "learning_rate": 6.066498153718735e-05,
      "loss": 3.6043,
      "step": 92
    },
    {
      "epoch": 0.17214252660805182,
      "grad_norm": 17.354217529296875,
      "learning_rate": 5.985585137257401e-05,
      "loss": 4.0083,
      "step": 93
    },
    {
      "epoch": 0.17399352151781583,
      "grad_norm": 18.608896255493164,
      "learning_rate": 5.90440267166055e-05,
      "loss": 5.0076,
      "step": 94
    },
    {
      "epoch": 0.1758445164275798,
      "grad_norm": 18.4306583404541,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 4.9611,
      "step": 95
    },
    {
      "epoch": 0.17769551133734382,
      "grad_norm": 17.808902740478516,
      "learning_rate": 5.74131823855921e-05,
      "loss": 4.66,
      "step": 96
    },
    {
      "epoch": 0.17954650624710783,
      "grad_norm": 20.695646286010742,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 5.8069,
      "step": 97
    },
    {
      "epoch": 0.1813975011568718,
      "grad_norm": 20.554363250732422,
      "learning_rate": 5.577423184847932e-05,
      "loss": 4.5419,
      "step": 98
    },
    {
      "epoch": 0.18324849606663582,
      "grad_norm": 20.178359985351562,
      "learning_rate": 5.495227651252315e-05,
      "loss": 4.041,
      "step": 99
    },
    {
      "epoch": 0.18509949097639983,
      "grad_norm": 18.658552169799805,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 1.9184,
      "step": 100
    },
    {
      "epoch": 0.18509949097639983,
      "eval_loss": 1.8544436693191528,
      "eval_runtime": 69.8668,
      "eval_samples_per_second": 13.025,
      "eval_steps_per_second": 3.263,
      "step": 100
    },
    {
      "epoch": 0.1869504858861638,
      "grad_norm": 41.62975311279297,
      "learning_rate": 5.330452921628497e-05,
      "loss": 10.6419,
      "step": 101
    },
    {
      "epoch": 0.18880148079592782,
      "grad_norm": 29.18929672241211,
      "learning_rate": 5.247918773366112e-05,
      "loss": 10.8545,
      "step": 102
    },
    {
      "epoch": 0.1906524757056918,
      "grad_norm": 20.762163162231445,
      "learning_rate": 5.165316846586541e-05,
      "loss": 9.604,
      "step": 103
    },
    {
      "epoch": 0.1925034706154558,
      "grad_norm": 17.0522403717041,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 7.3362,
      "step": 104
    },
    {
      "epoch": 0.19435446552521982,
      "grad_norm": 15.715831756591797,
      "learning_rate": 5e-05,
      "loss": 8.2619,
      "step": 105
    },
    {
      "epoch": 0.1962054604349838,
      "grad_norm": 17.21978187561035,
      "learning_rate": 4.917330276168208e-05,
      "loss": 7.198,
      "step": 106
    },
    {
      "epoch": 0.1980564553447478,
      "grad_norm": 17.804431915283203,
      "learning_rate": 4.834683153413459e-05,
      "loss": 7.937,
      "step": 107
    },
    {
      "epoch": 0.1999074502545118,
      "grad_norm": 17.663671493530273,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 7.3717,
      "step": 108
    },
    {
      "epoch": 0.2017584451642758,
      "grad_norm": 19.943225860595703,
      "learning_rate": 4.669547078371504e-05,
      "loss": 7.8254,
      "step": 109
    },
    {
      "epoch": 0.2036094400740398,
      "grad_norm": 19.38444709777832,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 8.0488,
      "step": 110
    },
    {
      "epoch": 0.20546043498380379,
      "grad_norm": 20.862642288208008,
      "learning_rate": 4.504772348747687e-05,
      "loss": 7.8827,
      "step": 111
    },
    {
      "epoch": 0.2073114298935678,
      "grad_norm": 21.540983200073242,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 7.5021,
      "step": 112
    },
    {
      "epoch": 0.2091624248033318,
      "grad_norm": 24.776857376098633,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 8.8133,
      "step": 113
    },
    {
      "epoch": 0.21101341971309578,
      "grad_norm": 18.81482696533203,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 6.2371,
      "step": 114
    },
    {
      "epoch": 0.2128644146228598,
      "grad_norm": 19.74003791809082,
      "learning_rate": 4.17702704859633e-05,
      "loss": 7.1802,
      "step": 115
    },
    {
      "epoch": 0.21471540953262377,
      "grad_norm": 23.482675552368164,
      "learning_rate": 4.095597328339452e-05,
      "loss": 7.1844,
      "step": 116
    },
    {
      "epoch": 0.21656640444238778,
      "grad_norm": 21.858055114746094,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 6.6821,
      "step": 117
    },
    {
      "epoch": 0.2184173993521518,
      "grad_norm": 23.546022415161133,
      "learning_rate": 3.933501846281267e-05,
      "loss": 6.0886,
      "step": 118
    },
    {
      "epoch": 0.22026839426191577,
      "grad_norm": 20.7333927154541,
      "learning_rate": 3.852880399766243e-05,
      "loss": 7.5445,
      "step": 119
    },
    {
      "epoch": 0.22211938917167978,
      "grad_norm": 22.107837677001953,
      "learning_rate": 3.772572564296005e-05,
      "loss": 5.8253,
      "step": 120
    },
    {
      "epoch": 0.22397038408144376,
      "grad_norm": 20.686031341552734,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 5.9198,
      "step": 121
    },
    {
      "epoch": 0.22582137899120777,
      "grad_norm": 22.019136428833008,
      "learning_rate": 3.612985456190778e-05,
      "loss": 6.5592,
      "step": 122
    },
    {
      "epoch": 0.22767237390097178,
      "grad_norm": 18.745075225830078,
      "learning_rate": 3.533749813077677e-05,
      "loss": 5.8053,
      "step": 123
    },
    {
      "epoch": 0.22952336881073576,
      "grad_norm": 19.928760528564453,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 5.6467,
      "step": 124
    },
    {
      "epoch": 0.23137436372049977,
      "grad_norm": 23.67180824279785,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 6.1394,
      "step": 125
    },
    {
      "epoch": 0.23322535863026378,
      "grad_norm": 21.75117301940918,
      "learning_rate": 3.298534127791785e-05,
      "loss": 5.6863,
      "step": 126
    },
    {
      "epoch": 0.23507635354002776,
      "grad_norm": 19.03370475769043,
      "learning_rate": 3.221030765387417e-05,
      "loss": 5.2691,
      "step": 127
    },
    {
      "epoch": 0.23692734844979177,
      "grad_norm": 22.11434555053711,
      "learning_rate": 3.144013755408895e-05,
      "loss": 7.5865,
      "step": 128
    },
    {
      "epoch": 0.23877834335955575,
      "grad_norm": 20.700796127319336,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 4.9502,
      "step": 129
    },
    {
      "epoch": 0.24062933826931976,
      "grad_norm": 22.236141204833984,
      "learning_rate": 2.991522876735154e-05,
      "loss": 6.357,
      "step": 130
    },
    {
      "epoch": 0.24248033317908377,
      "grad_norm": 19.655351638793945,
      "learning_rate": 2.916090697523549e-05,
      "loss": 4.5109,
      "step": 131
    },
    {
      "epoch": 0.24433132808884775,
      "grad_norm": 20.140209197998047,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 5.5753,
      "step": 132
    },
    {
      "epoch": 0.24618232299861176,
      "grad_norm": 19.17459487915039,
      "learning_rate": 2.766955965735968e-05,
      "loss": 5.437,
      "step": 133
    },
    {
      "epoch": 0.24803331790837574,
      "grad_norm": 19.464014053344727,
      "learning_rate": 2.693294185106562e-05,
      "loss": 5.243,
      "step": 134
    },
    {
      "epoch": 0.24988431281813975,
      "grad_norm": 18.852859497070312,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 6.4638,
      "step": 135
    },
    {
      "epoch": 0.25173530772790376,
      "grad_norm": 14.234766006469727,
      "learning_rate": 2.547882480847461e-05,
      "loss": 3.8942,
      "step": 136
    },
    {
      "epoch": 0.25358630263766774,
      "grad_norm": 15.460630416870117,
      "learning_rate": 2.476172311325783e-05,
      "loss": 3.9669,
      "step": 137
    },
    {
      "epoch": 0.2554372975474317,
      "grad_norm": 19.800920486450195,
      "learning_rate": 2.405152131093926e-05,
      "loss": 5.8222,
      "step": 138
    },
    {
      "epoch": 0.25728829245719576,
      "grad_norm": 17.38718605041504,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 6.0962,
      "step": 139
    },
    {
      "epoch": 0.25913928736695974,
      "grad_norm": 15.048430442810059,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 3.9541,
      "step": 140
    },
    {
      "epoch": 0.2609902822767237,
      "grad_norm": 16.55597496032715,
      "learning_rate": 2.196424713241637e-05,
      "loss": 4.1839,
      "step": 141
    },
    {
      "epoch": 0.26284127718648775,
      "grad_norm": 16.657272338867188,
      "learning_rate": 2.128356686585282e-05,
      "loss": 4.9786,
      "step": 142
    },
    {
      "epoch": 0.26469227209625174,
      "grad_norm": 21.036558151245117,
      "learning_rate": 2.061073738537635e-05,
      "loss": 5.0662,
      "step": 143
    },
    {
      "epoch": 0.2665432670060157,
      "grad_norm": 17.173782348632812,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 4.0465,
      "step": 144
    },
    {
      "epoch": 0.26839426191577975,
      "grad_norm": 18.073938369750977,
      "learning_rate": 1.928936436551661e-05,
      "loss": 4.8604,
      "step": 145
    },
    {
      "epoch": 0.27024525682554373,
      "grad_norm": 14.32249641418457,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 3.3291,
      "step": 146
    },
    {
      "epoch": 0.2720962517353077,
      "grad_norm": 20.76031494140625,
      "learning_rate": 1.800157297483417e-05,
      "loss": 5.7931,
      "step": 147
    },
    {
      "epoch": 0.27394724664507175,
      "grad_norm": 15.284453392028809,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 3.1407,
      "step": 148
    },
    {
      "epoch": 0.27579824155483573,
      "grad_norm": 23.898174285888672,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 5.8988,
      "step": 149
    },
    {
      "epoch": 0.2776492364645997,
      "grad_norm": 20.32386589050293,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 3.6415,
      "step": 150
    },
    {
      "epoch": 0.2776492364645997,
      "eval_loss": 1.531545639038086,
      "eval_runtime": 69.842,
      "eval_samples_per_second": 13.029,
      "eval_steps_per_second": 3.265,
      "step": 150
    },
    {
      "epoch": 0.2795002313743637,
      "grad_norm": 21.202672958374023,
      "learning_rate": 1.553232954407171e-05,
      "loss": 8.2199,
      "step": 151
    },
    {
      "epoch": 0.28135122628412773,
      "grad_norm": 22.77549934387207,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 8.2585,
      "step": 152
    },
    {
      "epoch": 0.2832022211938917,
      "grad_norm": 21.384912490844727,
      "learning_rate": 1.435357758543015e-05,
      "loss": 7.9309,
      "step": 153
    },
    {
      "epoch": 0.2850532161036557,
      "grad_norm": 18.97314453125,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 8.0805,
      "step": 154
    },
    {
      "epoch": 0.28690421101341973,
      "grad_norm": 18.570676803588867,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 7.4273,
      "step": 155
    },
    {
      "epoch": 0.2887552059231837,
      "grad_norm": 20.62229347229004,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 7.6848,
      "step": 156
    },
    {
      "epoch": 0.2906062008329477,
      "grad_norm": 16.83143424987793,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 6.4836,
      "step": 157
    },
    {
      "epoch": 0.29245719574271173,
      "grad_norm": 21.52330207824707,
      "learning_rate": 1.157994445715706e-05,
      "loss": 7.286,
      "step": 158
    },
    {
      "epoch": 0.2943081906524757,
      "grad_norm": 21.631942749023438,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 7.57,
      "step": 159
    },
    {
      "epoch": 0.2961591855622397,
      "grad_norm": 20.015647888183594,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 7.5558,
      "step": 160
    },
    {
      "epoch": 0.2980101804720037,
      "grad_norm": 20.935726165771484,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 9.1985,
      "step": 161
    },
    {
      "epoch": 0.2998611753817677,
      "grad_norm": 20.2431583404541,
      "learning_rate": 9.549150281252633e-06,
      "loss": 6.8121,
      "step": 162
    },
    {
      "epoch": 0.3017121702915317,
      "grad_norm": 20.497529983520508,
      "learning_rate": 9.068759265665384e-06,
      "loss": 6.5842,
      "step": 163
    },
    {
      "epoch": 0.30356316520129567,
      "grad_norm": 19.90401840209961,
      "learning_rate": 8.599558442598998e-06,
      "loss": 6.6889,
      "step": 164
    },
    {
      "epoch": 0.3054141601110597,
      "grad_norm": 20.3807430267334,
      "learning_rate": 8.141676086873572e-06,
      "loss": 5.8842,
      "step": 165
    },
    {
      "epoch": 0.3072651550208237,
      "grad_norm": 19.59682846069336,
      "learning_rate": 7.695237378953223e-06,
      "loss": 6.7799,
      "step": 166
    },
    {
      "epoch": 0.30911614993058767,
      "grad_norm": 16.78765296936035,
      "learning_rate": 7.260364370723044e-06,
      "loss": 5.4901,
      "step": 167
    },
    {
      "epoch": 0.3109671448403517,
      "grad_norm": 16.29376983642578,
      "learning_rate": 6.837175952121306e-06,
      "loss": 5.9158,
      "step": 168
    },
    {
      "epoch": 0.3128181397501157,
      "grad_norm": 18.27606201171875,
      "learning_rate": 6.425787818636131e-06,
      "loss": 6.7209,
      "step": 169
    },
    {
      "epoch": 0.31466913465987967,
      "grad_norm": 17.894317626953125,
      "learning_rate": 6.026312439675552e-06,
      "loss": 6.3001,
      "step": 170
    },
    {
      "epoch": 0.3165201295696437,
      "grad_norm": 21.63971710205078,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 7.1544,
      "step": 171
    },
    {
      "epoch": 0.3183711244794077,
      "grad_norm": 21.629539489746094,
      "learning_rate": 5.263533508961827e-06,
      "loss": 6.2639,
      "step": 172
    },
    {
      "epoch": 0.32022211938917167,
      "grad_norm": 19.84263801574707,
      "learning_rate": 4.900438493352055e-06,
      "loss": 6.8295,
      "step": 173
    },
    {
      "epoch": 0.3220731142989357,
      "grad_norm": 20.238880157470703,
      "learning_rate": 4.549673247541875e-06,
      "loss": 5.8894,
      "step": 174
    },
    {
      "epoch": 0.3239241092086997,
      "grad_norm": 17.637775421142578,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 4.9432,
      "step": 175
    },
    {
      "epoch": 0.32577510411846367,
      "grad_norm": 20.498844146728516,
      "learning_rate": 3.885512251130763e-06,
      "loss": 5.9794,
      "step": 176
    },
    {
      "epoch": 0.32762609902822765,
      "grad_norm": 17.650970458984375,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 4.5091,
      "step": 177
    },
    {
      "epoch": 0.3294770939379917,
      "grad_norm": 21.161008834838867,
      "learning_rate": 3.271776770026963e-06,
      "loss": 6.4282,
      "step": 178
    },
    {
      "epoch": 0.33132808884775566,
      "grad_norm": 19.688852310180664,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 5.8306,
      "step": 179
    },
    {
      "epoch": 0.33317908375751965,
      "grad_norm": 17.909574508666992,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 6.0146,
      "step": 180
    },
    {
      "epoch": 0.3350300786672837,
      "grad_norm": 19.697717666625977,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 5.26,
      "step": 181
    },
    {
      "epoch": 0.33688107357704766,
      "grad_norm": 22.01299476623535,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 6.6505,
      "step": 182
    },
    {
      "epoch": 0.33873206848681164,
      "grad_norm": 16.821945190429688,
      "learning_rate": 1.962316193157593e-06,
      "loss": 5.1342,
      "step": 183
    },
    {
      "epoch": 0.3405830633965757,
      "grad_norm": 17.76853370666504,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 4.8088,
      "step": 184
    },
    {
      "epoch": 0.34243405830633966,
      "grad_norm": 20.280498504638672,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 5.978,
      "step": 185
    },
    {
      "epoch": 0.34428505321610364,
      "grad_norm": 20.38216209411621,
      "learning_rate": 1.333670137599713e-06,
      "loss": 5.6034,
      "step": 186
    },
    {
      "epoch": 0.3461360481258677,
      "grad_norm": 17.165733337402344,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 4.2564,
      "step": 187
    },
    {
      "epoch": 0.34798704303563166,
      "grad_norm": 17.380369186401367,
      "learning_rate": 9.810017062595322e-07,
      "loss": 4.6483,
      "step": 188
    },
    {
      "epoch": 0.34983803794539564,
      "grad_norm": 20.93096351623535,
      "learning_rate": 8.247462563808817e-07,
      "loss": 5.1795,
      "step": 189
    },
    {
      "epoch": 0.3516890328551596,
      "grad_norm": 17.947641372680664,
      "learning_rate": 6.819348298638839e-07,
      "loss": 5.088,
      "step": 190
    },
    {
      "epoch": 0.35354002776492366,
      "grad_norm": 17.104299545288086,
      "learning_rate": 5.526064699265753e-07,
      "loss": 3.7657,
      "step": 191
    },
    {
      "epoch": 0.35539102267468764,
      "grad_norm": 19.836400985717773,
      "learning_rate": 4.367965336512403e-07,
      "loss": 4.1091,
      "step": 192
    },
    {
      "epoch": 0.3572420175844516,
      "grad_norm": 16.77385902404785,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 5.1161,
      "step": 193
    },
    {
      "epoch": 0.35909301249421566,
      "grad_norm": 18.861099243164062,
      "learning_rate": 2.458548727494292e-07,
      "loss": 4.392,
      "step": 194
    },
    {
      "epoch": 0.36094400740397964,
      "grad_norm": 15.38021469116211,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 3.6737,
      "step": 195
    },
    {
      "epoch": 0.3627950023137436,
      "grad_norm": 21.193696975708008,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 6.081,
      "step": 196
    },
    {
      "epoch": 0.36464599722350766,
      "grad_norm": 20.111827850341797,
      "learning_rate": 6.150154258476315e-08,
      "loss": 5.7416,
      "step": 197
    },
    {
      "epoch": 0.36649699213327164,
      "grad_norm": 18.77097511291504,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 4.9755,
      "step": 198
    },
    {
      "epoch": 0.3683479870430356,
      "grad_norm": 26.088537216186523,
      "learning_rate": 6.834750376549792e-09,
      "loss": 5.5487,
      "step": 199
    },
    {
      "epoch": 0.37019898195279966,
      "grad_norm": 25.18060302734375,
      "learning_rate": 0.0,
      "loss": 4.2396,
      "step": 200
    },
    {
      "epoch": 0.37019898195279966,
      "eval_loss": 1.45925772190094,
      "eval_runtime": 69.8425,
      "eval_samples_per_second": 13.029,
      "eval_steps_per_second": 3.264,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.862000849813504e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}