|
{
  "best_metric": 0.856124222278595,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 1.2738853503184713,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006369426751592357,
      "grad_norm": 36.13005447387695,
      "learning_rate": 5e-06,
      "loss": 9.517,
      "step": 1
    },
    {
      "epoch": 0.006369426751592357,
      "eval_loss": 4.299320697784424,
      "eval_runtime": 22.4063,
      "eval_samples_per_second": 11.827,
      "eval_steps_per_second": 5.936,
      "step": 1
    },
    {
      "epoch": 0.012738853503184714,
      "grad_norm": 44.257057189941406,
      "learning_rate": 1e-05,
      "loss": 8.7487,
      "step": 2
    },
    {
      "epoch": 0.01910828025477707,
      "grad_norm": 42.081783294677734,
      "learning_rate": 1.5e-05,
      "loss": 9.2334,
      "step": 3
    },
    {
      "epoch": 0.025477707006369428,
      "grad_norm": 41.70063018798828,
      "learning_rate": 2e-05,
      "loss": 9.2985,
      "step": 4
    },
    {
      "epoch": 0.03184713375796178,
      "grad_norm": 24.960290908813477,
      "learning_rate": 2.5e-05,
      "loss": 8.3713,
      "step": 5
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 21.20467758178711,
      "learning_rate": 3e-05,
      "loss": 7.8663,
      "step": 6
    },
    {
      "epoch": 0.044585987261146494,
      "grad_norm": 20.491474151611328,
      "learning_rate": 3.5e-05,
      "loss": 7.8621,
      "step": 7
    },
    {
      "epoch": 0.050955414012738856,
      "grad_norm": 19.02033042907715,
      "learning_rate": 4e-05,
      "loss": 7.1343,
      "step": 8
    },
    {
      "epoch": 0.05732484076433121,
      "grad_norm": 18.769779205322266,
      "learning_rate": 4.5e-05,
      "loss": 7.121,
      "step": 9
    },
    {
      "epoch": 0.06369426751592357,
      "grad_norm": 16.574338912963867,
      "learning_rate": 5e-05,
      "loss": 6.1092,
      "step": 10
    },
    {
      "epoch": 0.07006369426751592,
      "grad_norm": 16.516620635986328,
      "learning_rate": 5.500000000000001e-05,
      "loss": 6.1318,
      "step": 11
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 17.191225051879883,
      "learning_rate": 6e-05,
      "loss": 5.93,
      "step": 12
    },
    {
      "epoch": 0.08280254777070063,
      "grad_norm": 17.699655532836914,
      "learning_rate": 6.500000000000001e-05,
      "loss": 5.2682,
      "step": 13
    },
    {
      "epoch": 0.08917197452229299,
      "grad_norm": 18.904544830322266,
      "learning_rate": 7e-05,
      "loss": 6.294,
      "step": 14
    },
    {
      "epoch": 0.09554140127388536,
      "grad_norm": 16.874366760253906,
      "learning_rate": 7.500000000000001e-05,
      "loss": 6.1507,
      "step": 15
    },
    {
      "epoch": 0.10191082802547771,
      "grad_norm": 18.243234634399414,
      "learning_rate": 8e-05,
      "loss": 5.9768,
      "step": 16
    },
    {
      "epoch": 0.10828025477707007,
      "grad_norm": 22.026344299316406,
      "learning_rate": 8.5e-05,
      "loss": 7.0462,
      "step": 17
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 18.958724975585938,
      "learning_rate": 9e-05,
      "loss": 5.301,
      "step": 18
    },
    {
      "epoch": 0.12101910828025478,
      "grad_norm": 18.320955276489258,
      "learning_rate": 9.5e-05,
      "loss": 5.9237,
      "step": 19
    },
    {
      "epoch": 0.12738853503184713,
      "grad_norm": 24.6405029296875,
      "learning_rate": 0.0001,
      "loss": 5.3109,
      "step": 20
    },
    {
      "epoch": 0.1337579617834395,
      "grad_norm": 39.88923645019531,
      "learning_rate": 9.999238475781957e-05,
      "loss": 5.7486,
      "step": 21
    },
    {
      "epoch": 0.14012738853503184,
      "grad_norm": 20.46506690979004,
      "learning_rate": 9.99695413509548e-05,
      "loss": 6.0248,
      "step": 22
    },
    {
      "epoch": 0.1464968152866242,
      "grad_norm": 20.671714782714844,
      "learning_rate": 9.99314767377287e-05,
      "loss": 6.1074,
      "step": 23
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 24.3786563873291,
      "learning_rate": 9.987820251299122e-05,
      "loss": 6.1998,
      "step": 24
    },
    {
      "epoch": 0.1592356687898089,
      "grad_norm": 24.025131225585938,
      "learning_rate": 9.980973490458728e-05,
      "loss": 5.2428,
      "step": 25
    },
    {
      "epoch": 0.16560509554140126,
      "grad_norm": 27.44234848022461,
      "learning_rate": 9.972609476841367e-05,
      "loss": 5.3396,
      "step": 26
    },
    {
      "epoch": 0.17197452229299362,
      "grad_norm": 27.521507263183594,
      "learning_rate": 9.962730758206611e-05,
      "loss": 5.9622,
      "step": 27
    },
    {
      "epoch": 0.17834394904458598,
      "grad_norm": 26.471494674682617,
      "learning_rate": 9.951340343707852e-05,
      "loss": 5.5839,
      "step": 28
    },
    {
      "epoch": 0.18471337579617833,
      "grad_norm": 26.029315948486328,
      "learning_rate": 9.938441702975689e-05,
      "loss": 5.4105,
      "step": 29
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 28.722759246826172,
      "learning_rate": 9.924038765061042e-05,
      "loss": 5.0631,
      "step": 30
    },
    {
      "epoch": 0.19745222929936307,
      "grad_norm": 30.768484115600586,
      "learning_rate": 9.908135917238321e-05,
      "loss": 5.6846,
      "step": 31
    },
    {
      "epoch": 0.20382165605095542,
      "grad_norm": 33.13035583496094,
      "learning_rate": 9.890738003669029e-05,
      "loss": 5.9204,
      "step": 32
    },
    {
      "epoch": 0.21019108280254778,
      "grad_norm": 28.801864624023438,
      "learning_rate": 9.871850323926177e-05,
      "loss": 4.7152,
      "step": 33
    },
    {
      "epoch": 0.21656050955414013,
      "grad_norm": 29.887720108032227,
      "learning_rate": 9.851478631379982e-05,
      "loss": 4.2513,
      "step": 34
    },
    {
      "epoch": 0.2229299363057325,
      "grad_norm": 29.54290771484375,
      "learning_rate": 9.829629131445342e-05,
      "loss": 5.0721,
      "step": 35
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 37.83737564086914,
      "learning_rate": 9.806308479691595e-05,
      "loss": 4.5899,
      "step": 36
    },
    {
      "epoch": 0.2356687898089172,
      "grad_norm": 43.482608795166016,
      "learning_rate": 9.781523779815179e-05,
      "loss": 5.0616,
      "step": 37
    },
    {
      "epoch": 0.24203821656050956,
      "grad_norm": 50.641151428222656,
      "learning_rate": 9.755282581475769e-05,
      "loss": 6.4259,
      "step": 38
    },
    {
      "epoch": 0.2484076433121019,
      "grad_norm": 177.5625,
      "learning_rate": 9.727592877996585e-05,
      "loss": 13.0021,
      "step": 39
    },
    {
      "epoch": 0.25477707006369427,
      "grad_norm": 136.0759735107422,
      "learning_rate": 9.698463103929542e-05,
      "loss": 10.0178,
      "step": 40
    },
    {
      "epoch": 0.2611464968152866,
      "grad_norm": 55.65427017211914,
      "learning_rate": 9.667902132486009e-05,
      "loss": 5.7104,
      "step": 41
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 19.339778900146484,
      "learning_rate": 9.635919272833938e-05,
      "loss": 4.7599,
      "step": 42
    },
    {
      "epoch": 0.27388535031847133,
      "grad_norm": 12.372485160827637,
      "learning_rate": 9.602524267262203e-05,
      "loss": 4.6529,
      "step": 43
    },
    {
      "epoch": 0.2802547770700637,
      "grad_norm": 9.565322875976562,
      "learning_rate": 9.567727288213005e-05,
      "loss": 4.4598,
      "step": 44
    },
    {
      "epoch": 0.28662420382165604,
      "grad_norm": 8.054713249206543,
      "learning_rate": 9.53153893518325e-05,
      "loss": 4.507,
      "step": 45
    },
    {
      "epoch": 0.2929936305732484,
      "grad_norm": 8.118359565734863,
      "learning_rate": 9.493970231495835e-05,
      "loss": 3.9682,
      "step": 46
    },
    {
      "epoch": 0.29936305732484075,
      "grad_norm": 7.510158061981201,
      "learning_rate": 9.45503262094184e-05,
      "loss": 3.9359,
      "step": 47
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 9.019046783447266,
      "learning_rate": 9.414737964294636e-05,
      "loss": 4.3822,
      "step": 48
    },
    {
      "epoch": 0.31210191082802546,
      "grad_norm": 8.206843376159668,
      "learning_rate": 9.373098535696979e-05,
      "loss": 3.7949,
      "step": 49
    },
    {
      "epoch": 0.3184713375796178,
      "grad_norm": 8.42149829864502,
      "learning_rate": 9.330127018922194e-05,
      "loss": 4.563,
      "step": 50
    },
    {
      "epoch": 0.3184713375796178,
      "eval_loss": 1.222476840019226,
      "eval_runtime": 22.9758,
      "eval_samples_per_second": 11.534,
      "eval_steps_per_second": 5.789,
      "step": 50
    },
    {
      "epoch": 0.3248407643312102,
      "grad_norm": 9.273707389831543,
      "learning_rate": 9.285836503510562e-05,
      "loss": 4.2956,
      "step": 51
    },
    {
      "epoch": 0.33121019108280253,
      "grad_norm": 8.437464714050293,
      "learning_rate": 9.24024048078213e-05,
      "loss": 4.1599,
      "step": 52
    },
    {
      "epoch": 0.3375796178343949,
      "grad_norm": 8.612960815429688,
      "learning_rate": 9.193352839727121e-05,
      "loss": 4.1081,
      "step": 53
    },
    {
      "epoch": 0.34394904458598724,
      "grad_norm": 11.117528915405273,
      "learning_rate": 9.145187862775209e-05,
      "loss": 4.3433,
      "step": 54
    },
    {
      "epoch": 0.3503184713375796,
      "grad_norm": 10.288155555725098,
      "learning_rate": 9.09576022144496e-05,
      "loss": 4.5473,
      "step": 55
    },
    {
      "epoch": 0.35668789808917195,
      "grad_norm": 10.283195495605469,
      "learning_rate": 9.045084971874738e-05,
      "loss": 4.3766,
      "step": 56
    },
    {
      "epoch": 0.3630573248407643,
      "grad_norm": 11.619425773620605,
      "learning_rate": 8.993177550236464e-05,
      "loss": 4.5981,
      "step": 57
    },
    {
      "epoch": 0.36942675159235666,
      "grad_norm": 12.74874210357666,
      "learning_rate": 8.940053768033609e-05,
      "loss": 4.98,
      "step": 58
    },
    {
      "epoch": 0.37579617834394907,
      "grad_norm": 11.369183540344238,
      "learning_rate": 8.885729807284856e-05,
      "loss": 3.341,
      "step": 59
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 13.796540260314941,
      "learning_rate": 8.83022221559489e-05,
      "loss": 4.6845,
      "step": 60
    },
    {
      "epoch": 0.3885350318471338,
      "grad_norm": 13.996506690979004,
      "learning_rate": 8.773547901113862e-05,
      "loss": 4.5114,
      "step": 61
    },
    {
      "epoch": 0.39490445859872614,
      "grad_norm": 16.951208114624023,
      "learning_rate": 8.715724127386972e-05,
      "loss": 5.0782,
      "step": 62
    },
    {
      "epoch": 0.4012738853503185,
      "grad_norm": 14.65240478515625,
      "learning_rate": 8.656768508095853e-05,
      "loss": 4.6793,
      "step": 63
    },
    {
      "epoch": 0.40764331210191085,
      "grad_norm": 18.088653564453125,
      "learning_rate": 8.596699001693255e-05,
      "loss": 4.7098,
      "step": 64
    },
    {
      "epoch": 0.4140127388535032,
      "grad_norm": 17.526235580444336,
      "learning_rate": 8.535533905932738e-05,
      "loss": 4.4782,
      "step": 65
    },
    {
      "epoch": 0.42038216560509556,
      "grad_norm": 18.589906692504883,
      "learning_rate": 8.473291852294987e-05,
      "loss": 4.4513,
      "step": 66
    },
    {
      "epoch": 0.4267515923566879,
      "grad_norm": 22.394956588745117,
      "learning_rate": 8.409991800312493e-05,
      "loss": 4.823,
      "step": 67
    },
    {
      "epoch": 0.43312101910828027,
      "grad_norm": 19.558523178100586,
      "learning_rate": 8.345653031794292e-05,
      "loss": 4.3261,
      "step": 68
    },
    {
      "epoch": 0.4394904458598726,
      "grad_norm": 19.380760192871094,
      "learning_rate": 8.280295144952536e-05,
      "loss": 4.6877,
      "step": 69
    },
    {
      "epoch": 0.445859872611465,
      "grad_norm": 23.139272689819336,
      "learning_rate": 8.213938048432697e-05,
      "loss": 4.5216,
      "step": 70
    },
    {
      "epoch": 0.45222929936305734,
      "grad_norm": 20.861736297607422,
      "learning_rate": 8.146601955249188e-05,
      "loss": 4.1113,
      "step": 71
    },
    {
      "epoch": 0.4585987261146497,
      "grad_norm": 21.064624786376953,
      "learning_rate": 8.07830737662829e-05,
      "loss": 3.894,
      "step": 72
    },
    {
      "epoch": 0.46496815286624205,
      "grad_norm": 23.0689697265625,
      "learning_rate": 8.009075115760243e-05,
      "loss": 4.061,
      "step": 73
    },
    {
      "epoch": 0.4713375796178344,
      "grad_norm": 25.696874618530273,
      "learning_rate": 7.938926261462366e-05,
      "loss": 4.2541,
      "step": 74
    },
    {
      "epoch": 0.47770700636942676,
      "grad_norm": 23.2997989654541,
      "learning_rate": 7.86788218175523e-05,
      "loss": 4.3783,
      "step": 75
    },
    {
      "epoch": 0.4840764331210191,
      "grad_norm": 33.244049072265625,
      "learning_rate": 7.795964517353735e-05,
      "loss": 3.9646,
      "step": 76
    },
    {
      "epoch": 0.49044585987261147,
      "grad_norm": 34.73582077026367,
      "learning_rate": 7.723195175075136e-05,
      "loss": 4.3941,
      "step": 77
    },
    {
      "epoch": 0.4968152866242038,
      "grad_norm": 68.82785034179688,
      "learning_rate": 7.649596321166024e-05,
      "loss": 3.9351,
      "step": 78
    },
    {
      "epoch": 0.5031847133757962,
      "grad_norm": 48.5151252746582,
      "learning_rate": 7.575190374550272e-05,
      "loss": 6.4497,
      "step": 79
    },
    {
      "epoch": 0.5095541401273885,
      "grad_norm": 20.338134765625,
      "learning_rate": 7.500000000000001e-05,
      "loss": 4.1436,
      "step": 80
    },
    {
      "epoch": 0.5159235668789809,
      "grad_norm": 8.380877494812012,
      "learning_rate": 7.424048101231686e-05,
      "loss": 4.0942,
      "step": 81
    },
    {
      "epoch": 0.5222929936305732,
      "grad_norm": 6.018649101257324,
      "learning_rate": 7.347357813929454e-05,
      "loss": 3.5251,
      "step": 82
    },
    {
      "epoch": 0.5286624203821656,
      "grad_norm": 7.0183796882629395,
      "learning_rate": 7.269952498697734e-05,
      "loss": 3.6932,
      "step": 83
    },
    {
      "epoch": 0.535031847133758,
      "grad_norm": 6.874508380889893,
      "learning_rate": 7.191855733945387e-05,
      "loss": 3.8955,
      "step": 84
    },
    {
      "epoch": 0.5414012738853503,
      "grad_norm": 7.094663143157959,
      "learning_rate": 7.113091308703498e-05,
      "loss": 4.4996,
      "step": 85
    },
    {
      "epoch": 0.5477707006369427,
      "grad_norm": 6.800449848175049,
      "learning_rate": 7.033683215379002e-05,
      "loss": 4.3487,
      "step": 86
    },
    {
      "epoch": 0.554140127388535,
      "grad_norm": 6.512099742889404,
      "learning_rate": 6.953655642446368e-05,
      "loss": 3.7667,
      "step": 87
    },
    {
      "epoch": 0.5605095541401274,
      "grad_norm": 7.194104194641113,
      "learning_rate": 6.873032967079561e-05,
      "loss": 3.907,
      "step": 88
    },
    {
      "epoch": 0.5668789808917197,
      "grad_norm": 6.929054260253906,
      "learning_rate": 6.7918397477265e-05,
      "loss": 3.7032,
      "step": 89
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 6.628689289093018,
      "learning_rate": 6.710100716628344e-05,
      "loss": 3.3508,
      "step": 90
    },
    {
      "epoch": 0.5796178343949044,
      "grad_norm": 7.876663684844971,
      "learning_rate": 6.627840772285784e-05,
      "loss": 4.032,
      "step": 91
    },
    {
      "epoch": 0.5859872611464968,
      "grad_norm": 7.960112571716309,
      "learning_rate": 6.545084971874738e-05,
      "loss": 3.8419,
      "step": 92
    },
    {
      "epoch": 0.5923566878980892,
      "grad_norm": 8.09794807434082,
      "learning_rate": 6.461858523613684e-05,
      "loss": 3.9603,
      "step": 93
    },
    {
      "epoch": 0.5987261146496815,
      "grad_norm": 9.845130920410156,
      "learning_rate": 6.378186779084995e-05,
      "loss": 4.0276,
      "step": 94
    },
    {
      "epoch": 0.6050955414012739,
      "grad_norm": 8.297134399414062,
      "learning_rate": 6.294095225512603e-05,
      "loss": 3.4893,
      "step": 95
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 8.784958839416504,
      "learning_rate": 6.209609477998338e-05,
      "loss": 3.8555,
      "step": 96
    },
    {
      "epoch": 0.6178343949044586,
      "grad_norm": 10.812994003295898,
      "learning_rate": 6.124755271719325e-05,
      "loss": 3.7086,
      "step": 97
    },
    {
      "epoch": 0.6242038216560509,
      "grad_norm": 9.371301651000977,
      "learning_rate": 6.0395584540887963e-05,
      "loss": 3.957,
      "step": 98
    },
    {
      "epoch": 0.6305732484076433,
      "grad_norm": 11.402350425720215,
      "learning_rate": 5.9540449768827246e-05,
      "loss": 3.8245,
      "step": 99
    },
    {
      "epoch": 0.6369426751592356,
      "grad_norm": 10.444782257080078,
      "learning_rate": 5.868240888334653e-05,
      "loss": 3.4607,
      "step": 100
    },
    {
      "epoch": 0.6369426751592356,
      "eval_loss": 0.9518508911132812,
      "eval_runtime": 22.9599,
      "eval_samples_per_second": 11.542,
      "eval_steps_per_second": 5.793,
      "step": 100
    },
    {
      "epoch": 0.643312101910828,
      "grad_norm": 11.772903442382812,
      "learning_rate": 5.782172325201155e-05,
      "loss": 3.5184,
      "step": 101
    },
    {
      "epoch": 0.6496815286624203,
      "grad_norm": 15.014814376831055,
      "learning_rate": 5.695865504800327e-05,
      "loss": 4.02,
      "step": 102
    },
    {
      "epoch": 0.6560509554140127,
      "grad_norm": 13.781637191772461,
      "learning_rate": 5.6093467170257374e-05,
      "loss": 3.6895,
      "step": 103
    },
    {
      "epoch": 0.6624203821656051,
      "grad_norm": 14.441632270812988,
      "learning_rate": 5.522642316338268e-05,
      "loss": 3.3828,
      "step": 104
    },
    {
      "epoch": 0.6687898089171974,
      "grad_norm": 16.388935089111328,
      "learning_rate": 5.435778713738292e-05,
      "loss": 3.8216,
      "step": 105
    },
    {
      "epoch": 0.6751592356687898,
      "grad_norm": 16.722646713256836,
      "learning_rate": 5.348782368720626e-05,
      "loss": 3.8913,
      "step": 106
    },
    {
      "epoch": 0.6815286624203821,
      "grad_norm": 17.876073837280273,
      "learning_rate": 5.26167978121472e-05,
      "loss": 3.6247,
      "step": 107
    },
    {
      "epoch": 0.6878980891719745,
      "grad_norm": 18.381223678588867,
      "learning_rate": 5.174497483512506e-05,
      "loss": 3.9273,
      "step": 108
    },
    {
      "epoch": 0.6942675159235668,
      "grad_norm": 17.400468826293945,
      "learning_rate": 5.0872620321864185e-05,
      "loss": 3.4908,
      "step": 109
    },
    {
      "epoch": 0.7006369426751592,
      "grad_norm": 21.45941925048828,
      "learning_rate": 5e-05,
      "loss": 4.4969,
      "step": 110
    },
    {
      "epoch": 0.7070063694267515,
      "grad_norm": 21.00533103942871,
      "learning_rate": 4.912737967813583e-05,
      "loss": 4.1499,
      "step": 111
    },
    {
      "epoch": 0.7133757961783439,
      "grad_norm": 20.375274658203125,
      "learning_rate": 4.825502516487497e-05,
      "loss": 3.4835,
      "step": 112
    },
    {
      "epoch": 0.7197452229299363,
      "grad_norm": 21.039329528808594,
      "learning_rate": 4.738320218785281e-05,
      "loss": 2.9916,
      "step": 113
    },
    {
      "epoch": 0.7261146496815286,
      "grad_norm": 25.608665466308594,
      "learning_rate": 4.6512176312793736e-05,
      "loss": 3.5051,
      "step": 114
    },
    {
      "epoch": 0.732484076433121,
      "grad_norm": 23.432601928710938,
      "learning_rate": 4.564221286261709e-05,
      "loss": 2.8175,
      "step": 115
    },
    {
      "epoch": 0.7388535031847133,
      "grad_norm": 31.607587814331055,
      "learning_rate": 4.477357683661734e-05,
      "loss": 4.0832,
      "step": 116
    },
    {
      "epoch": 0.7452229299363057,
      "grad_norm": 47.30448913574219,
      "learning_rate": 4.390653282974264e-05,
      "loss": 3.5419,
      "step": 117
    },
    {
      "epoch": 0.7515923566878981,
      "grad_norm": 4.965266227722168,
      "learning_rate": 4.3041344951996746e-05,
      "loss": 3.0148,
      "step": 118
    },
    {
      "epoch": 0.7579617834394905,
      "grad_norm": 4.364875793457031,
      "learning_rate": 4.2178276747988446e-05,
      "loss": 2.9562,
      "step": 119
    },
    {
      "epoch": 0.7643312101910829,
      "grad_norm": 4.950608253479004,
      "learning_rate": 4.131759111665349e-05,
      "loss": 3.453,
      "step": 120
    },
    {
      "epoch": 0.7707006369426752,
      "grad_norm": 4.8531951904296875,
      "learning_rate": 4.045955023117276e-05,
      "loss": 2.7735,
      "step": 121
    },
    {
      "epoch": 0.7770700636942676,
      "grad_norm": 5.037564754486084,
      "learning_rate": 3.960441545911204e-05,
      "loss": 2.94,
      "step": 122
    },
    {
      "epoch": 0.7834394904458599,
      "grad_norm": 5.288941383361816,
      "learning_rate": 3.875244728280676e-05,
      "loss": 3.0112,
      "step": 123
    },
    {
      "epoch": 0.7898089171974523,
      "grad_norm": 5.76696252822876,
      "learning_rate": 3.790390522001662e-05,
      "loss": 3.1363,
      "step": 124
    },
    {
      "epoch": 0.7961783439490446,
      "grad_norm": 5.585211753845215,
      "learning_rate": 3.705904774487396e-05,
      "loss": 3.3864,
      "step": 125
    },
    {
      "epoch": 0.802547770700637,
      "grad_norm": 6.322639465332031,
      "learning_rate": 3.6218132209150045e-05,
      "loss": 3.5615,
      "step": 126
    },
    {
      "epoch": 0.8089171974522293,
      "grad_norm": 6.185570240020752,
      "learning_rate": 3.5381414763863166e-05,
      "loss": 3.1405,
      "step": 127
    },
    {
      "epoch": 0.8152866242038217,
      "grad_norm": 6.54400634765625,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 3.6258,
      "step": 128
    },
    {
      "epoch": 0.821656050955414,
      "grad_norm": 6.367386341094971,
      "learning_rate": 3.372159227714218e-05,
      "loss": 3.153,
      "step": 129
    },
    {
      "epoch": 0.8280254777070064,
      "grad_norm": 6.623294830322266,
      "learning_rate": 3.289899283371657e-05,
      "loss": 3.0124,
      "step": 130
    },
    {
      "epoch": 0.8343949044585988,
      "grad_norm": 6.886430263519287,
      "learning_rate": 3.2081602522734986e-05,
      "loss": 3.1905,
      "step": 131
    },
    {
      "epoch": 0.8407643312101911,
      "grad_norm": 7.273221969604492,
      "learning_rate": 3.12696703292044e-05,
      "loss": 2.9941,
      "step": 132
    },
    {
      "epoch": 0.8471337579617835,
      "grad_norm": 8.015644073486328,
      "learning_rate": 3.046344357553632e-05,
      "loss": 3.4856,
      "step": 133
    },
    {
      "epoch": 0.8535031847133758,
      "grad_norm": 8.67126750946045,
      "learning_rate": 2.9663167846209998e-05,
      "loss": 3.5122,
      "step": 134
    },
    {
      "epoch": 0.8598726114649682,
      "grad_norm": 9.060256958007812,
      "learning_rate": 2.886908691296504e-05,
      "loss": 3.7418,
      "step": 135
    },
    {
      "epoch": 0.8662420382165605,
      "grad_norm": 8.686426162719727,
      "learning_rate": 2.8081442660546125e-05,
      "loss": 3.003,
      "step": 136
    },
    {
      "epoch": 0.8726114649681529,
      "grad_norm": 8.711852073669434,
      "learning_rate": 2.7300475013022663e-05,
      "loss": 3.1511,
      "step": 137
    },
    {
      "epoch": 0.8789808917197452,
      "grad_norm": 11.177677154541016,
      "learning_rate": 2.6526421860705473e-05,
      "loss": 3.7168,
      "step": 138
    },
    {
      "epoch": 0.8853503184713376,
      "grad_norm": 10.784727096557617,
      "learning_rate": 2.575951898768315e-05,
      "loss": 3.0916,
      "step": 139
    },
    {
      "epoch": 0.89171974522293,
      "grad_norm": 11.460144996643066,
      "learning_rate": 2.500000000000001e-05,
      "loss": 3.2102,
      "step": 140
    },
    {
      "epoch": 0.8980891719745223,
      "grad_norm": 12.439859390258789,
      "learning_rate": 2.4248096254497288e-05,
      "loss": 3.6256,
      "step": 141
    },
    {
      "epoch": 0.9044585987261147,
      "grad_norm": 13.51606273651123,
      "learning_rate": 2.350403678833976e-05,
      "loss": 4.201,
      "step": 142
    },
    {
      "epoch": 0.910828025477707,
      "grad_norm": 17.250572204589844,
      "learning_rate": 2.2768048249248648e-05,
      "loss": 4.7407,
      "step": 143
    },
    {
      "epoch": 0.9171974522292994,
      "grad_norm": 16.19012451171875,
      "learning_rate": 2.2040354826462668e-05,
      "loss": 4.0064,
      "step": 144
    },
    {
      "epoch": 0.9235668789808917,
      "grad_norm": 16.520713806152344,
      "learning_rate": 2.132117818244771e-05,
      "loss": 3.7402,
      "step": 145
    },
    {
      "epoch": 0.9299363057324841,
      "grad_norm": 17.06348991394043,
      "learning_rate": 2.061073738537635e-05,
      "loss": 4.0871,
      "step": 146
    },
    {
      "epoch": 0.9363057324840764,
      "grad_norm": 16.918140411376953,
      "learning_rate": 1.9909248842397584e-05,
      "loss": 4.0366,
      "step": 147
    },
    {
      "epoch": 0.9426751592356688,
      "grad_norm": 17.086406707763672,
      "learning_rate": 1.9216926233717085e-05,
      "loss": 3.6202,
      "step": 148
    },
    {
      "epoch": 0.9490445859872612,
      "grad_norm": 16.144107818603516,
      "learning_rate": 1.8533980447508137e-05,
      "loss": 3.0974,
      "step": 149
    },
    {
      "epoch": 0.9554140127388535,
      "grad_norm": 18.168882369995117,
      "learning_rate": 1.7860619515673033e-05,
      "loss": 3.2189,
      "step": 150
    },
    {
      "epoch": 0.9554140127388535,
      "eval_loss": 0.856124222278595,
      "eval_runtime": 22.9621,
      "eval_samples_per_second": 11.541,
      "eval_steps_per_second": 5.792,
      "step": 150
    },
    {
      "epoch": 0.9617834394904459,
      "grad_norm": 23.63081932067871,
      "learning_rate": 1.7197048550474643e-05,
      "loss": 4.073,
      "step": 151
    },
    {
      "epoch": 0.9681528662420382,
      "grad_norm": 19.112306594848633,
      "learning_rate": 1.6543469682057106e-05,
      "loss": 3.6236,
      "step": 152
    },
    {
      "epoch": 0.9745222929936306,
      "grad_norm": 31.241661071777344,
      "learning_rate": 1.5900081996875083e-05,
      "loss": 3.0811,
      "step": 153
    },
    {
      "epoch": 0.9808917197452229,
      "grad_norm": 22.64638328552246,
      "learning_rate": 1.526708147705013e-05,
      "loss": 2.9199,
      "step": 154
    },
    {
      "epoch": 0.9872611464968153,
      "grad_norm": 27.255001068115234,
      "learning_rate": 1.4644660940672627e-05,
      "loss": 2.6116,
      "step": 155
    },
    {
      "epoch": 0.9936305732484076,
      "grad_norm": 66.21800231933594,
      "learning_rate": 1.4033009983067452e-05,
      "loss": 4.2625,
      "step": 156
    },
    {
      "epoch": 1.0,
      "grad_norm": 24.99510383605957,
      "learning_rate": 1.3432314919041478e-05,
      "loss": 4.1692,
      "step": 157
    },
    {
      "epoch": 1.0063694267515924,
      "grad_norm": 3.7077722549438477,
      "learning_rate": 1.2842758726130283e-05,
      "loss": 2.7945,
      "step": 158
    },
    {
      "epoch": 1.0127388535031847,
      "grad_norm": 3.5477285385131836,
      "learning_rate": 1.22645209888614e-05,
      "loss": 2.4028,
      "step": 159
    },
    {
      "epoch": 1.019108280254777,
      "grad_norm": 4.283918857574463,
      "learning_rate": 1.1697777844051105e-05,
      "loss": 3.1117,
      "step": 160
    },
    {
      "epoch": 1.0254777070063694,
      "grad_norm": 3.8144969940185547,
      "learning_rate": 1.1142701927151456e-05,
      "loss": 2.6418,
      "step": 161
    },
    {
      "epoch": 1.0318471337579618,
      "grad_norm": 4.030559539794922,
      "learning_rate": 1.0599462319663905e-05,
      "loss": 2.377,
      "step": 162
    },
    {
      "epoch": 1.0382165605095541,
      "grad_norm": 4.541848659515381,
      "learning_rate": 1.006822449763537e-05,
      "loss": 2.5533,
      "step": 163
    },
    {
      "epoch": 1.0445859872611465,
      "grad_norm": 4.711845874786377,
      "learning_rate": 9.549150281252633e-06,
      "loss": 2.4903,
      "step": 164
    },
    {
      "epoch": 1.0509554140127388,
      "grad_norm": 4.770325183868408,
      "learning_rate": 9.042397785550405e-06,
      "loss": 2.6872,
      "step": 165
    },
    {
      "epoch": 1.0573248407643312,
      "grad_norm": 4.293312072753906,
      "learning_rate": 8.548121372247918e-06,
      "loss": 1.9581,
      "step": 166
    },
    {
      "epoch": 1.0636942675159236,
      "grad_norm": 5.336209297180176,
      "learning_rate": 8.066471602728803e-06,
      "loss": 2.0512,
      "step": 167
    },
    {
      "epoch": 1.070063694267516,
      "grad_norm": 5.297292232513428,
      "learning_rate": 7.597595192178702e-06,
      "loss": 2.6065,
      "step": 168
    },
    {
      "epoch": 1.0764331210191083,
      "grad_norm": 4.493692874908447,
      "learning_rate": 7.1416349648943894e-06,
      "loss": 1.8749,
      "step": 169
    },
    {
      "epoch": 1.0828025477707006,
      "grad_norm": 5.324576377868652,
      "learning_rate": 6.698729810778065e-06,
      "loss": 2.1537,
      "step": 170
    },
    {
      "epoch": 1.089171974522293,
      "grad_norm": 5.945623874664307,
      "learning_rate": 6.269014643030213e-06,
      "loss": 2.3464,
      "step": 171
    },
    {
      "epoch": 1.0955414012738853,
      "grad_norm": 5.990110397338867,
      "learning_rate": 5.852620357053651e-06,
      "loss": 2.1652,
      "step": 172
    },
    {
      "epoch": 1.1019108280254777,
      "grad_norm": 5.564594745635986,
      "learning_rate": 5.449673790581611e-06,
      "loss": 1.9364,
      "step": 173
    },
    {
      "epoch": 1.10828025477707,
      "grad_norm": 6.395089149475098,
      "learning_rate": 5.060297685041659e-06,
      "loss": 2.3456,
      "step": 174
    },
    {
      "epoch": 1.1146496815286624,
      "grad_norm": 6.796019554138184,
      "learning_rate": 4.684610648167503e-06,
      "loss": 2.09,
      "step": 175
    },
    {
      "epoch": 1.1210191082802548,
      "grad_norm": 7.000392913818359,
      "learning_rate": 4.322727117869951e-06,
      "loss": 2.333,
      "step": 176
    },
    {
      "epoch": 1.127388535031847,
      "grad_norm": 6.889556407928467,
      "learning_rate": 3.974757327377981e-06,
      "loss": 1.5809,
      "step": 177
    },
    {
      "epoch": 1.1337579617834395,
      "grad_norm": 7.113092422485352,
      "learning_rate": 3.6408072716606346e-06,
      "loss": 2.0368,
      "step": 178
    },
    {
      "epoch": 1.1401273885350318,
      "grad_norm": 6.891910552978516,
      "learning_rate": 3.3209786751399187e-06,
      "loss": 1.5373,
      "step": 179
    },
    {
      "epoch": 1.1464968152866242,
      "grad_norm": 10.173569679260254,
      "learning_rate": 3.0153689607045845e-06,
      "loss": 1.9954,
      "step": 180
    },
    {
      "epoch": 1.1528662420382165,
      "grad_norm": 7.4136061668396,
      "learning_rate": 2.724071220034158e-06,
      "loss": 1.5246,
      "step": 181
    },
    {
      "epoch": 1.1592356687898089,
      "grad_norm": 8.997750282287598,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 2.0869,
      "step": 182
    },
    {
      "epoch": 1.1656050955414012,
      "grad_norm": 9.356908798217773,
      "learning_rate": 2.1847622018482283e-06,
      "loss": 2.0526,
      "step": 183
    },
    {
      "epoch": 1.1719745222929936,
      "grad_norm": 10.576266288757324,
      "learning_rate": 1.9369152030840556e-06,
      "loss": 2.0726,
      "step": 184
    },
    {
      "epoch": 1.178343949044586,
      "grad_norm": 10.605173110961914,
      "learning_rate": 1.70370868554659e-06,
      "loss": 1.7944,
      "step": 185
    },
    {
      "epoch": 1.1847133757961783,
      "grad_norm": 9.885431289672852,
      "learning_rate": 1.4852136862001764e-06,
      "loss": 1.732,
      "step": 186
    },
    {
      "epoch": 1.1910828025477707,
      "grad_norm": 10.850882530212402,
      "learning_rate": 1.2814967607382432e-06,
      "loss": 2.0103,
      "step": 187
    },
    {
      "epoch": 1.197452229299363,
      "grad_norm": 10.830645561218262,
      "learning_rate": 1.0926199633097157e-06,
      "loss": 1.9147,
      "step": 188
    },
    {
      "epoch": 1.2038216560509554,
      "grad_norm": 14.65571117401123,
      "learning_rate": 9.186408276168013e-07,
      "loss": 2.0657,
      "step": 189
    },
    {
      "epoch": 1.2101910828025477,
      "grad_norm": 10.46584701538086,
      "learning_rate": 7.596123493895991e-07,
      "loss": 1.7092,
      "step": 190
    },
    {
      "epoch": 1.21656050955414,
      "grad_norm": 11.583234786987305,
      "learning_rate": 6.15582970243117e-07,
      "loss": 1.5889,
      "step": 191
    },
    {
      "epoch": 1.2229299363057324,
      "grad_norm": 19.2625732421875,
      "learning_rate": 4.865965629214819e-07,
      "loss": 1.8016,
      "step": 192
    },
    {
      "epoch": 1.2292993630573248,
      "grad_norm": 13.321444511413574,
      "learning_rate": 3.7269241793390085e-07,
      "loss": 1.3804,
      "step": 193
    },
    {
      "epoch": 1.2356687898089171,
      "grad_norm": 13.012649536132812,
      "learning_rate": 2.7390523158633554e-07,
      "loss": 1.2135,
      "step": 194
    },
    {
      "epoch": 1.2420382165605095,
      "grad_norm": 17.863269805908203,
      "learning_rate": 1.9026509541272275e-07,
      "loss": 1.7409,
      "step": 195
    },
    {
      "epoch": 1.2484076433121019,
      "grad_norm": 30.971969604492188,
      "learning_rate": 1.2179748700879012e-07,
      "loss": 1.8244,
      "step": 196
    },
    {
      "epoch": 1.2547770700636942,
      "grad_norm": 4.519101619720459,
      "learning_rate": 6.852326227130834e-08,
      "loss": 2.9923,
      "step": 197
    },
    {
      "epoch": 1.2611464968152866,
      "grad_norm": 4.492830753326416,
      "learning_rate": 3.04586490452119e-08,
      "loss": 2.6286,
      "step": 198
    },
    {
      "epoch": 1.267515923566879,
      "grad_norm": 4.864869117736816,
      "learning_rate": 7.615242180436522e-09,
      "loss": 2.9949,
      "step": 199
    },
    {
      "epoch": 1.2738853503184713,
      "grad_norm": 4.6130547523498535,
      "learning_rate": 0.0,
      "loss": 2.3603,
      "step": 200
    },
    {
      "epoch": 1.2738853503184713,
      "eval_loss": 0.8630052804946899,
      "eval_runtime": 22.948,
      "eval_samples_per_second": 11.548,
      "eval_steps_per_second": 5.796,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3.04485205706539e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}