{
  "best_metric": 1.0585381984710693,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.046086488977648055,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00030724325985098704,
      "grad_norm": 23.011367797851562,
      "learning_rate": 5e-06,
      "loss": 6.3985,
      "step": 1
    },
    {
      "epoch": 0.00030724325985098704,
      "eval_loss": 2.409876585006714,
      "eval_runtime": 441.0598,
      "eval_samples_per_second": 12.429,
      "eval_steps_per_second": 6.215,
      "step": 1
    },
    {
      "epoch": 0.0006144865197019741,
      "grad_norm": 32.28715515136719,
      "learning_rate": 1e-05,
      "loss": 7.734,
      "step": 2
    },
    {
      "epoch": 0.0009217297795529611,
      "grad_norm": 28.548389434814453,
      "learning_rate": 1.5e-05,
      "loss": 7.4653,
      "step": 3
    },
    {
      "epoch": 0.0012289730394039482,
      "grad_norm": 24.919015884399414,
      "learning_rate": 2e-05,
      "loss": 7.3706,
      "step": 4
    },
    {
      "epoch": 0.001536216299254935,
      "grad_norm": 22.995384216308594,
      "learning_rate": 2.5e-05,
      "loss": 6.938,
      "step": 5
    },
    {
      "epoch": 0.0018434595591059221,
      "grad_norm": 19.593135833740234,
      "learning_rate": 3e-05,
      "loss": 7.122,
      "step": 6
    },
    {
      "epoch": 0.0021507028189569092,
      "grad_norm": 18.867374420166016,
      "learning_rate": 3.5e-05,
      "loss": 5.8915,
      "step": 7
    },
    {
      "epoch": 0.0024579460788078963,
      "grad_norm": 15.144004821777344,
      "learning_rate": 4e-05,
      "loss": 5.7937,
      "step": 8
    },
    {
      "epoch": 0.002765189338658883,
      "grad_norm": 13.516443252563477,
      "learning_rate": 4.5e-05,
      "loss": 5.6253,
      "step": 9
    },
    {
      "epoch": 0.00307243259850987,
      "grad_norm": 14.180933952331543,
      "learning_rate": 5e-05,
      "loss": 5.3894,
      "step": 10
    },
    {
      "epoch": 0.003379675858360857,
      "grad_norm": 16.558490753173828,
      "learning_rate": 5.500000000000001e-05,
      "loss": 4.628,
      "step": 11
    },
    {
      "epoch": 0.0036869191182118443,
      "grad_norm": 14.397795677185059,
      "learning_rate": 6e-05,
      "loss": 5.3816,
      "step": 12
    },
    {
      "epoch": 0.003994162378062831,
      "grad_norm": 15.309121131896973,
      "learning_rate": 6.500000000000001e-05,
      "loss": 4.9166,
      "step": 13
    },
    {
      "epoch": 0.0043014056379138185,
      "grad_norm": 15.53266716003418,
      "learning_rate": 7e-05,
      "loss": 4.5761,
      "step": 14
    },
    {
      "epoch": 0.004608648897764805,
      "grad_norm": 13.632649421691895,
      "learning_rate": 7.500000000000001e-05,
      "loss": 3.6609,
      "step": 15
    },
    {
      "epoch": 0.004915892157615793,
      "grad_norm": 13.333073616027832,
      "learning_rate": 8e-05,
      "loss": 4.0839,
      "step": 16
    },
    {
      "epoch": 0.005223135417466779,
      "grad_norm": 14.622278213500977,
      "learning_rate": 8.5e-05,
      "loss": 4.4781,
      "step": 17
    },
    {
      "epoch": 0.005530378677317766,
      "grad_norm": 12.793045997619629,
      "learning_rate": 9e-05,
      "loss": 4.9027,
      "step": 18
    },
    {
      "epoch": 0.0058376219371687535,
      "grad_norm": 11.85013484954834,
      "learning_rate": 9.5e-05,
      "loss": 3.8893,
      "step": 19
    },
    {
      "epoch": 0.00614486519701974,
      "grad_norm": 13.697391510009766,
      "learning_rate": 0.0001,
      "loss": 4.6765,
      "step": 20
    },
    {
      "epoch": 0.006452108456870728,
      "grad_norm": 10.900583267211914,
      "learning_rate": 9.999238475781957e-05,
      "loss": 3.559,
      "step": 21
    },
    {
      "epoch": 0.006759351716721714,
      "grad_norm": 15.428689002990723,
      "learning_rate": 9.99695413509548e-05,
      "loss": 4.3751,
      "step": 22
    },
    {
      "epoch": 0.007066594976572701,
      "grad_norm": 11.113511085510254,
      "learning_rate": 9.99314767377287e-05,
      "loss": 4.1964,
      "step": 23
    },
    {
      "epoch": 0.0073738382364236885,
      "grad_norm": 12.102518081665039,
      "learning_rate": 9.987820251299122e-05,
      "loss": 4.4795,
      "step": 24
    },
    {
      "epoch": 0.007681081496274675,
      "grad_norm": 11.722882270812988,
      "learning_rate": 9.980973490458728e-05,
      "loss": 4.2897,
      "step": 25
    },
    {
      "epoch": 0.007988324756125662,
      "grad_norm": 11.406682968139648,
      "learning_rate": 9.972609476841367e-05,
      "loss": 5.0885,
      "step": 26
    },
    {
      "epoch": 0.00829556801597665,
      "grad_norm": 11.243815422058105,
      "learning_rate": 9.962730758206611e-05,
      "loss": 4.4903,
      "step": 27
    },
    {
      "epoch": 0.008602811275827637,
      "grad_norm": 11.628358840942383,
      "learning_rate": 9.951340343707852e-05,
      "loss": 4.4378,
      "step": 28
    },
    {
      "epoch": 0.008910054535678624,
      "grad_norm": 10.681234359741211,
      "learning_rate": 9.938441702975689e-05,
      "loss": 4.691,
      "step": 29
    },
    {
      "epoch": 0.00921729779552961,
      "grad_norm": 10.783159255981445,
      "learning_rate": 9.924038765061042e-05,
      "loss": 4.9312,
      "step": 30
    },
    {
      "epoch": 0.009524541055380597,
      "grad_norm": 10.892800331115723,
      "learning_rate": 9.908135917238321e-05,
      "loss": 5.2761,
      "step": 31
    },
    {
      "epoch": 0.009831784315231585,
      "grad_norm": 11.34912109375,
      "learning_rate": 9.890738003669029e-05,
      "loss": 5.4007,
      "step": 32
    },
    {
      "epoch": 0.010139027575082572,
      "grad_norm": 13.050414085388184,
      "learning_rate": 9.871850323926177e-05,
      "loss": 5.5961,
      "step": 33
    },
    {
      "epoch": 0.010446270834933559,
      "grad_norm": 11.140335083007812,
      "learning_rate": 9.851478631379982e-05,
      "loss": 4.5804,
      "step": 34
    },
    {
      "epoch": 0.010753514094784545,
      "grad_norm": 11.838504791259766,
      "learning_rate": 9.829629131445342e-05,
      "loss": 5.553,
      "step": 35
    },
    {
      "epoch": 0.011060757354635532,
      "grad_norm": 12.498956680297852,
      "learning_rate": 9.806308479691595e-05,
      "loss": 4.8211,
      "step": 36
    },
    {
      "epoch": 0.01136800061448652,
      "grad_norm": 12.233826637268066,
      "learning_rate": 9.781523779815179e-05,
      "loss": 5.3758,
      "step": 37
    },
    {
      "epoch": 0.011675243874337507,
      "grad_norm": 12.875245094299316,
      "learning_rate": 9.755282581475769e-05,
      "loss": 5.2887,
      "step": 38
    },
    {
      "epoch": 0.011982487134188494,
      "grad_norm": 14.482268333435059,
      "learning_rate": 9.727592877996585e-05,
      "loss": 5.3555,
      "step": 39
    },
    {
      "epoch": 0.01228973039403948,
      "grad_norm": 12.469817161560059,
      "learning_rate": 9.698463103929542e-05,
      "loss": 5.1884,
      "step": 40
    },
    {
      "epoch": 0.012596973653890467,
      "grad_norm": 14.905447959899902,
      "learning_rate": 9.667902132486009e-05,
      "loss": 5.7557,
      "step": 41
    },
    {
      "epoch": 0.012904216913741455,
      "grad_norm": 12.736101150512695,
      "learning_rate": 9.635919272833938e-05,
      "loss": 4.3636,
      "step": 42
    },
    {
      "epoch": 0.013211460173592442,
      "grad_norm": 13.541585922241211,
      "learning_rate": 9.602524267262203e-05,
      "loss": 5.2159,
      "step": 43
    },
    {
      "epoch": 0.013518703433443429,
      "grad_norm": 14.56790542602539,
      "learning_rate": 9.567727288213005e-05,
      "loss": 6.1784,
      "step": 44
    },
    {
      "epoch": 0.013825946693294415,
      "grad_norm": 14.048850059509277,
      "learning_rate": 9.53153893518325e-05,
      "loss": 5.7569,
      "step": 45
    },
    {
      "epoch": 0.014133189953145402,
      "grad_norm": 14.115535736083984,
      "learning_rate": 9.493970231495835e-05,
      "loss": 5.7633,
      "step": 46
    },
    {
      "epoch": 0.01444043321299639,
      "grad_norm": 13.446468353271484,
      "learning_rate": 9.45503262094184e-05,
      "loss": 5.7742,
      "step": 47
    },
    {
      "epoch": 0.014747676472847377,
      "grad_norm": 15.913680076599121,
      "learning_rate": 9.414737964294636e-05,
      "loss": 5.6512,
      "step": 48
    },
    {
      "epoch": 0.015054919732698364,
      "grad_norm": 13.415315628051758,
      "learning_rate": 9.373098535696979e-05,
      "loss": 4.9549,
      "step": 49
    },
    {
      "epoch": 0.01536216299254935,
      "grad_norm": 22.350265502929688,
      "learning_rate": 9.330127018922194e-05,
      "loss": 6.6309,
      "step": 50
    },
    {
      "epoch": 0.01536216299254935,
      "eval_loss": 1.475726842880249,
      "eval_runtime": 443.646,
      "eval_samples_per_second": 12.357,
      "eval_steps_per_second": 6.178,
      "step": 50
    },
    {
      "epoch": 0.015669406252400337,
      "grad_norm": 17.104642868041992,
      "learning_rate": 9.285836503510562e-05,
      "loss": 6.866,
      "step": 51
    },
    {
      "epoch": 0.015976649512251324,
      "grad_norm": 12.813888549804688,
      "learning_rate": 9.24024048078213e-05,
      "loss": 6.4915,
      "step": 52
    },
    {
      "epoch": 0.01628389277210231,
      "grad_norm": 8.647307395935059,
      "learning_rate": 9.193352839727121e-05,
      "loss": 4.6191,
      "step": 53
    },
    {
      "epoch": 0.0165911360319533,
      "grad_norm": 8.637353897094727,
      "learning_rate": 9.145187862775209e-05,
      "loss": 5.2535,
      "step": 54
    },
    {
      "epoch": 0.016898379291804287,
      "grad_norm": 8.05450439453125,
      "learning_rate": 9.09576022144496e-05,
      "loss": 4.8999,
      "step": 55
    },
    {
      "epoch": 0.017205622551655274,
      "grad_norm": 8.113150596618652,
      "learning_rate": 9.045084971874738e-05,
      "loss": 4.5401,
      "step": 56
    },
    {
      "epoch": 0.01751286581150626,
      "grad_norm": 9.034808158874512,
      "learning_rate": 8.993177550236464e-05,
      "loss": 5.2058,
      "step": 57
    },
    {
      "epoch": 0.017820109071357247,
      "grad_norm": 9.577351570129395,
      "learning_rate": 8.940053768033609e-05,
      "loss": 4.8838,
      "step": 58
    },
    {
      "epoch": 0.018127352331208234,
      "grad_norm": 8.906129837036133,
      "learning_rate": 8.885729807284856e-05,
      "loss": 4.4069,
      "step": 59
    },
    {
      "epoch": 0.01843459559105922,
      "grad_norm": 8.663689613342285,
      "learning_rate": 8.83022221559489e-05,
      "loss": 4.3744,
      "step": 60
    },
    {
      "epoch": 0.018741838850910207,
      "grad_norm": 8.294697761535645,
      "learning_rate": 8.773547901113862e-05,
      "loss": 4.1245,
      "step": 61
    },
    {
      "epoch": 0.019049082110761194,
      "grad_norm": 8.371333122253418,
      "learning_rate": 8.715724127386972e-05,
      "loss": 4.5234,
      "step": 62
    },
    {
      "epoch": 0.019356325370612184,
      "grad_norm": 8.438100814819336,
      "learning_rate": 8.656768508095853e-05,
      "loss": 4.2958,
      "step": 63
    },
    {
      "epoch": 0.01966356863046317,
      "grad_norm": 8.364363670349121,
      "learning_rate": 8.596699001693255e-05,
      "loss": 3.934,
      "step": 64
    },
    {
      "epoch": 0.019970811890314157,
      "grad_norm": 7.366670608520508,
      "learning_rate": 8.535533905932738e-05,
      "loss": 3.5163,
      "step": 65
    },
    {
      "epoch": 0.020278055150165144,
      "grad_norm": 7.6314921379089355,
      "learning_rate": 8.473291852294987e-05,
      "loss": 3.7853,
      "step": 66
    },
    {
      "epoch": 0.02058529841001613,
      "grad_norm": 7.76265811920166,
      "learning_rate": 8.409991800312493e-05,
      "loss": 3.8884,
      "step": 67
    },
    {
      "epoch": 0.020892541669867117,
      "grad_norm": 8.086345672607422,
      "learning_rate": 8.345653031794292e-05,
      "loss": 3.5251,
      "step": 68
    },
    {
      "epoch": 0.021199784929718104,
      "grad_norm": 6.946031093597412,
      "learning_rate": 8.280295144952536e-05,
      "loss": 3.593,
      "step": 69
    },
    {
      "epoch": 0.02150702818956909,
      "grad_norm": 7.788767337799072,
      "learning_rate": 8.213938048432697e-05,
      "loss": 4.1621,
      "step": 70
    },
    {
      "epoch": 0.021814271449420077,
      "grad_norm": 8.629899978637695,
      "learning_rate": 8.146601955249188e-05,
      "loss": 3.7884,
      "step": 71
    },
    {
      "epoch": 0.022121514709271064,
      "grad_norm": 6.985069274902344,
      "learning_rate": 8.07830737662829e-05,
      "loss": 3.9133,
      "step": 72
    },
    {
      "epoch": 0.022428757969122054,
      "grad_norm": 6.972623825073242,
      "learning_rate": 8.009075115760243e-05,
      "loss": 3.205,
      "step": 73
    },
    {
      "epoch": 0.02273600122897304,
      "grad_norm": 7.396304130554199,
      "learning_rate": 7.938926261462366e-05,
      "loss": 3.4196,
      "step": 74
    },
    {
      "epoch": 0.023043244488824027,
      "grad_norm": 7.159494876861572,
      "learning_rate": 7.86788218175523e-05,
      "loss": 3.5241,
      "step": 75
    },
    {
      "epoch": 0.023350487748675014,
      "grad_norm": 7.689444541931152,
      "learning_rate": 7.795964517353735e-05,
      "loss": 4.3798,
      "step": 76
    },
    {
      "epoch": 0.023657731008526,
      "grad_norm": 8.844693183898926,
      "learning_rate": 7.723195175075136e-05,
      "loss": 4.4668,
      "step": 77
    },
    {
      "epoch": 0.023964974268376987,
      "grad_norm": 8.752437591552734,
      "learning_rate": 7.649596321166024e-05,
      "loss": 4.4342,
      "step": 78
    },
    {
      "epoch": 0.024272217528227974,
      "grad_norm": 8.479850769042969,
      "learning_rate": 7.575190374550272e-05,
      "loss": 4.2627,
      "step": 79
    },
    {
      "epoch": 0.02457946078807896,
      "grad_norm": 8.894695281982422,
      "learning_rate": 7.500000000000001e-05,
      "loss": 4.7375,
      "step": 80
    },
    {
      "epoch": 0.024886704047929947,
      "grad_norm": 9.680545806884766,
      "learning_rate": 7.424048101231686e-05,
      "loss": 5.2633,
      "step": 81
    },
    {
      "epoch": 0.025193947307780934,
      "grad_norm": 9.162818908691406,
      "learning_rate": 7.347357813929454e-05,
      "loss": 4.553,
      "step": 82
    },
    {
      "epoch": 0.025501190567631924,
      "grad_norm": 9.10145378112793,
      "learning_rate": 7.269952498697734e-05,
      "loss": 4.7129,
      "step": 83
    },
    {
      "epoch": 0.02580843382748291,
      "grad_norm": 9.074843406677246,
      "learning_rate": 7.191855733945387e-05,
      "loss": 4.0724,
      "step": 84
    },
    {
      "epoch": 0.026115677087333897,
      "grad_norm": 9.07311725616455,
      "learning_rate": 7.113091308703498e-05,
      "loss": 4.7301,
      "step": 85
    },
    {
      "epoch": 0.026422920347184884,
      "grad_norm": 11.810867309570312,
      "learning_rate": 7.033683215379002e-05,
      "loss": 5.2496,
      "step": 86
    },
    {
      "epoch": 0.02673016360703587,
      "grad_norm": 10.740492820739746,
      "learning_rate": 6.953655642446368e-05,
      "loss": 5.3248,
      "step": 87
    },
    {
      "epoch": 0.027037406866886857,
      "grad_norm": 10.580676078796387,
      "learning_rate": 6.873032967079561e-05,
      "loss": 4.8344,
      "step": 88
    },
    {
      "epoch": 0.027344650126737844,
      "grad_norm": 11.064846992492676,
      "learning_rate": 6.7918397477265e-05,
      "loss": 4.8108,
      "step": 89
    },
    {
      "epoch": 0.02765189338658883,
      "grad_norm": 11.720850944519043,
      "learning_rate": 6.710100716628344e-05,
      "loss": 4.8933,
      "step": 90
    },
    {
      "epoch": 0.027959136646439817,
      "grad_norm": 9.551570892333984,
      "learning_rate": 6.627840772285784e-05,
      "loss": 4.714,
      "step": 91
    },
    {
      "epoch": 0.028266379906290804,
      "grad_norm": 11.187040328979492,
      "learning_rate": 6.545084971874738e-05,
      "loss": 4.073,
      "step": 92
    },
    {
      "epoch": 0.028573623166141794,
      "grad_norm": 9.788082122802734,
      "learning_rate": 6.461858523613684e-05,
      "loss": 4.7062,
      "step": 93
    },
    {
      "epoch": 0.02888086642599278,
      "grad_norm": 10.513997077941895,
      "learning_rate": 6.378186779084995e-05,
      "loss": 4.612,
      "step": 94
    },
    {
      "epoch": 0.029188109685843767,
      "grad_norm": 12.733663558959961,
      "learning_rate": 6.294095225512603e-05,
      "loss": 5.4784,
      "step": 95
    },
    {
      "epoch": 0.029495352945694754,
      "grad_norm": 13.620742797851562,
      "learning_rate": 6.209609477998338e-05,
      "loss": 5.3613,
      "step": 96
    },
    {
      "epoch": 0.02980259620554574,
      "grad_norm": 13.19336986541748,
      "learning_rate": 6.124755271719325e-05,
      "loss": 5.6018,
      "step": 97
    },
    {
      "epoch": 0.030109839465396727,
      "grad_norm": 13.040425300598145,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 5.054,
      "step": 98
    },
    {
      "epoch": 0.030417082725247714,
      "grad_norm": 14.068294525146484,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 5.6559,
      "step": 99
    },
    {
      "epoch": 0.0307243259850987,
      "grad_norm": 14.678174018859863,
      "learning_rate": 5.868240888334653e-05,
      "loss": 5.2266,
      "step": 100
    },
    {
      "epoch": 0.0307243259850987,
      "eval_loss": 1.2480071783065796,
      "eval_runtime": 443.7094,
      "eval_samples_per_second": 12.355,
      "eval_steps_per_second": 6.177,
      "step": 100
    },
    {
      "epoch": 0.031031569244949687,
      "grad_norm": 10.580477714538574,
      "learning_rate": 5.782172325201155e-05,
      "loss": 5.5392,
      "step": 101
    },
    {
      "epoch": 0.031338812504800674,
      "grad_norm": 9.869646072387695,
      "learning_rate": 5.695865504800327e-05,
      "loss": 5.5595,
      "step": 102
    },
    {
      "epoch": 0.03164605576465166,
      "grad_norm": 9.99475383758545,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 4.8398,
      "step": 103
    },
    {
      "epoch": 0.03195329902450265,
      "grad_norm": 6.947811126708984,
      "learning_rate": 5.522642316338268e-05,
      "loss": 4.2438,
      "step": 104
    },
    {
      "epoch": 0.032260542284353634,
      "grad_norm": 6.170292377471924,
      "learning_rate": 5.435778713738292e-05,
      "loss": 4.6814,
      "step": 105
    },
    {
      "epoch": 0.03256778554420462,
      "grad_norm": 6.3110246658325195,
      "learning_rate": 5.348782368720626e-05,
      "loss": 4.4389,
      "step": 106
    },
    {
      "epoch": 0.032875028804055614,
      "grad_norm": 6.087858200073242,
      "learning_rate": 5.26167978121472e-05,
      "loss": 4.1039,
      "step": 107
    },
    {
      "epoch": 0.0331822720639066,
      "grad_norm": 6.2835211753845215,
      "learning_rate": 5.174497483512506e-05,
      "loss": 4.0166,
      "step": 108
    },
    {
      "epoch": 0.03348951532375759,
      "grad_norm": 6.776898384094238,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 4.2686,
      "step": 109
    },
    {
      "epoch": 0.033796758583608574,
      "grad_norm": 6.588527202606201,
      "learning_rate": 5e-05,
      "loss": 3.7269,
      "step": 110
    },
    {
      "epoch": 0.03410400184345956,
      "grad_norm": 7.68696403503418,
      "learning_rate": 4.912737967813583e-05,
      "loss": 3.9392,
      "step": 111
    },
    {
      "epoch": 0.03441124510331055,
      "grad_norm": 5.877866744995117,
      "learning_rate": 4.825502516487497e-05,
      "loss": 3.5263,
      "step": 112
    },
    {
      "epoch": 0.034718488363161534,
      "grad_norm": 7.236352920532227,
      "learning_rate": 4.738320218785281e-05,
      "loss": 3.7052,
      "step": 113
    },
    {
      "epoch": 0.03502573162301252,
      "grad_norm": 6.501185894012451,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 2.9949,
      "step": 114
    },
    {
      "epoch": 0.03533297488286351,
      "grad_norm": 6.9282941818237305,
      "learning_rate": 4.564221286261709e-05,
      "loss": 3.5694,
      "step": 115
    },
    {
      "epoch": 0.035640218142714494,
      "grad_norm": 7.355767250061035,
      "learning_rate": 4.477357683661734e-05,
      "loss": 3.9165,
      "step": 116
    },
    {
      "epoch": 0.03594746140256548,
      "grad_norm": 7.192873954772949,
      "learning_rate": 4.390653282974264e-05,
      "loss": 3.1828,
      "step": 117
    },
    {
      "epoch": 0.03625470466241647,
      "grad_norm": 6.482280254364014,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 3.2275,
      "step": 118
    },
    {
      "epoch": 0.036561947922267454,
      "grad_norm": 6.704769611358643,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 3.1593,
      "step": 119
    },
    {
      "epoch": 0.03686919118211844,
      "grad_norm": 6.919817924499512,
      "learning_rate": 4.131759111665349e-05,
      "loss": 3.0843,
      "step": 120
    },
    {
      "epoch": 0.03717643444196943,
      "grad_norm": 7.4176506996154785,
      "learning_rate": 4.045955023117276e-05,
      "loss": 3.4191,
      "step": 121
    },
    {
      "epoch": 0.037483677701820414,
      "grad_norm": 7.300436496734619,
      "learning_rate": 3.960441545911204e-05,
      "loss": 3.7824,
      "step": 122
    },
    {
      "epoch": 0.0377909209616714,
      "grad_norm": 6.638614177703857,
      "learning_rate": 3.875244728280676e-05,
      "loss": 3.3756,
      "step": 123
    },
    {
      "epoch": 0.03809816422152239,
      "grad_norm": 6.030972957611084,
      "learning_rate": 3.790390522001662e-05,
      "loss": 3.3504,
      "step": 124
    },
    {
      "epoch": 0.038405407481373374,
      "grad_norm": 7.475324630737305,
      "learning_rate": 3.705904774487396e-05,
      "loss": 3.5805,
      "step": 125
    },
    {
      "epoch": 0.03871265074122437,
      "grad_norm": 7.8704609870910645,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 3.6184,
      "step": 126
    },
    {
      "epoch": 0.039019894001075354,
      "grad_norm": 7.465869426727295,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 4.1467,
      "step": 127
    },
    {
      "epoch": 0.03932713726092634,
      "grad_norm": 7.257153511047363,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 4.2342,
      "step": 128
    },
    {
      "epoch": 0.03963438052077733,
      "grad_norm": 7.452193260192871,
      "learning_rate": 3.372159227714218e-05,
      "loss": 4.3429,
      "step": 129
    },
    {
      "epoch": 0.039941623780628314,
      "grad_norm": 7.880541801452637,
      "learning_rate": 3.289899283371657e-05,
      "loss": 4.3863,
      "step": 130
    },
    {
      "epoch": 0.0402488670404793,
      "grad_norm": 8.45700740814209,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 4.8662,
      "step": 131
    },
    {
      "epoch": 0.04055611030033029,
      "grad_norm": 8.435225486755371,
      "learning_rate": 3.12696703292044e-05,
      "loss": 4.2266,
      "step": 132
    },
    {
      "epoch": 0.040863353560181274,
      "grad_norm": 9.154268264770508,
      "learning_rate": 3.046344357553632e-05,
      "loss": 4.2599,
      "step": 133
    },
    {
      "epoch": 0.04117059682003226,
      "grad_norm": 7.695627212524414,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 4.0229,
      "step": 134
    },
    {
      "epoch": 0.04147784007988325,
      "grad_norm": 9.314230918884277,
      "learning_rate": 2.886908691296504e-05,
      "loss": 4.8999,
      "step": 135
    },
    {
      "epoch": 0.041785083339734234,
      "grad_norm": 9.278669357299805,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 4.6078,
      "step": 136
    },
    {
      "epoch": 0.04209232659958522,
      "grad_norm": 10.638617515563965,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 4.6089,
      "step": 137
    },
    {
      "epoch": 0.04239956985943621,
      "grad_norm": 8.870823860168457,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 4.3292,
      "step": 138
    },
    {
      "epoch": 0.042706813119287194,
      "grad_norm": 9.005095481872559,
      "learning_rate": 2.575951898768315e-05,
      "loss": 4.5548,
      "step": 139
    },
    {
      "epoch": 0.04301405637913818,
      "grad_norm": 10.735925674438477,
      "learning_rate": 2.500000000000001e-05,
      "loss": 4.8442,
      "step": 140
    },
    {
      "epoch": 0.04332129963898917,
      "grad_norm": 9.419788360595703,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 4.926,
      "step": 141
    },
    {
      "epoch": 0.043628542898840154,
      "grad_norm": 9.60610580444336,
      "learning_rate": 2.350403678833976e-05,
      "loss": 4.2713,
      "step": 142
    },
    {
      "epoch": 0.04393578615869114,
      "grad_norm": 8.531740188598633,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 4.0917,
      "step": 143
    },
    {
      "epoch": 0.04424302941854213,
      "grad_norm": 9.785737037658691,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 4.6779,
      "step": 144
    },
    {
      "epoch": 0.044550272678393114,
      "grad_norm": 10.093374252319336,
      "learning_rate": 2.132117818244771e-05,
      "loss": 4.713,
      "step": 145
    },
    {
      "epoch": 0.04485751593824411,
      "grad_norm": 11.576554298400879,
      "learning_rate": 2.061073738537635e-05,
      "loss": 5.0362,
      "step": 146
    },
    {
      "epoch": 0.045164759198095095,
      "grad_norm": 12.363187789916992,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 5.3881,
      "step": 147
    },
    {
      "epoch": 0.04547200245794608,
      "grad_norm": 11.872509956359863,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 4.8039,
      "step": 148
    },
    {
      "epoch": 0.04577924571779707,
      "grad_norm": 13.223235130310059,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 5.4591,
      "step": 149
    },
    {
      "epoch": 0.046086488977648055,
      "grad_norm": 13.729710578918457,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 5.6227,
      "step": 150
    },
    {
      "epoch": 0.046086488977648055,
      "eval_loss": 1.0585381984710693,
      "eval_runtime": 443.8526,
      "eval_samples_per_second": 12.351,
      "eval_steps_per_second": 6.175,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.150076928449577e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}