{ |
|
"best_metric": 3.230548620223999, |
|
"best_model_checkpoint": "miner_id_24/checkpoint-200", |
|
"epoch": 0.08714596949891068, |
|
"eval_steps": 50, |
|
"global_step": 200, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.00043572984749455336, |
|
"grad_norm": 6.714541912078857, |
|
"learning_rate": 5e-06, |
|
"loss": 4.1538, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.00043572984749455336, |
|
"eval_loss": 8.217473030090332, |
|
"eval_runtime": 340.3935, |
|
"eval_samples_per_second": 11.357, |
|
"eval_steps_per_second": 5.679, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0008714596949891067, |
|
"grad_norm": 10.93850040435791, |
|
"learning_rate": 1e-05, |
|
"loss": 6.0324, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.00130718954248366, |
|
"grad_norm": 10.759867668151855, |
|
"learning_rate": 1.5e-05, |
|
"loss": 6.2806, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0017429193899782135, |
|
"grad_norm": 8.95274543762207, |
|
"learning_rate": 2e-05, |
|
"loss": 6.2892, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.002178649237472767, |
|
"grad_norm": 8.26896858215332, |
|
"learning_rate": 2.5e-05, |
|
"loss": 6.3382, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.00261437908496732, |
|
"grad_norm": 7.07852840423584, |
|
"learning_rate": 3e-05, |
|
"loss": 6.1125, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0030501089324618735, |
|
"grad_norm": 5.926280975341797, |
|
"learning_rate": 3.5e-05, |
|
"loss": 6.0423, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.003485838779956427, |
|
"grad_norm": 5.2815117835998535, |
|
"learning_rate": 4e-05, |
|
"loss": 5.8223, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.00392156862745098, |
|
"grad_norm": 5.319513320922852, |
|
"learning_rate": 4.5e-05, |
|
"loss": 5.7601, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.004357298474945534, |
|
"grad_norm": 4.76923131942749, |
|
"learning_rate": 5e-05, |
|
"loss": 5.5735, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.004793028322440087, |
|
"grad_norm": 5.165170192718506, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 5.3123, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.00522875816993464, |
|
"grad_norm": 5.180248260498047, |
|
"learning_rate": 6e-05, |
|
"loss": 4.7714, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.005664488017429194, |
|
"grad_norm": 5.849486351013184, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 4.8962, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.006100217864923747, |
|
"grad_norm": 6.7091064453125, |
|
"learning_rate": 7e-05, |
|
"loss": 4.6161, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.006535947712418301, |
|
"grad_norm": 6.763986110687256, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 4.5908, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.006971677559912854, |
|
"grad_norm": 6.989901065826416, |
|
"learning_rate": 8e-05, |
|
"loss": 4.6449, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.007407407407407408, |
|
"grad_norm": 10.466228485107422, |
|
"learning_rate": 8.5e-05, |
|
"loss": 4.9358, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.00784313725490196, |
|
"grad_norm": 8.58240032196045, |
|
"learning_rate": 9e-05, |
|
"loss": 4.2366, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.008278867102396514, |
|
"grad_norm": 6.406922340393066, |
|
"learning_rate": 9.5e-05, |
|
"loss": 3.9878, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.008714596949891068, |
|
"grad_norm": 8.407312393188477, |
|
"learning_rate": 0.0001, |
|
"loss": 3.6146, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.009150326797385621, |
|
"grad_norm": 8.44864559173584, |
|
"learning_rate": 9.999238475781957e-05, |
|
"loss": 4.4908, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.009586056644880174, |
|
"grad_norm": 10.223307609558105, |
|
"learning_rate": 9.99695413509548e-05, |
|
"loss": 4.4297, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.010021786492374727, |
|
"grad_norm": 10.1259765625, |
|
"learning_rate": 9.99314767377287e-05, |
|
"loss": 4.2845, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.01045751633986928, |
|
"grad_norm": 9.56737995147705, |
|
"learning_rate": 9.987820251299122e-05, |
|
"loss": 4.4324, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.010893246187363835, |
|
"grad_norm": 10.609879493713379, |
|
"learning_rate": 9.980973490458728e-05, |
|
"loss": 4.434, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.011328976034858388, |
|
"grad_norm": 10.974640846252441, |
|
"learning_rate": 9.972609476841367e-05, |
|
"loss": 4.7324, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.011764705882352941, |
|
"grad_norm": 10.421369552612305, |
|
"learning_rate": 9.962730758206611e-05, |
|
"loss": 4.1938, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.012200435729847494, |
|
"grad_norm": 9.512422561645508, |
|
"learning_rate": 9.951340343707852e-05, |
|
"loss": 4.1048, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.012636165577342049, |
|
"grad_norm": 9.695697784423828, |
|
"learning_rate": 9.938441702975689e-05, |
|
"loss": 4.1034, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.013071895424836602, |
|
"grad_norm": 9.301131248474121, |
|
"learning_rate": 9.924038765061042e-05, |
|
"loss": 4.0044, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.013507625272331155, |
|
"grad_norm": 10.068182945251465, |
|
"learning_rate": 9.908135917238321e-05, |
|
"loss": 3.793, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.013943355119825708, |
|
"grad_norm": 11.07810115814209, |
|
"learning_rate": 9.890738003669029e-05, |
|
"loss": 3.7727, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.01437908496732026, |
|
"grad_norm": 9.415453910827637, |
|
"learning_rate": 9.871850323926177e-05, |
|
"loss": 3.7048, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.014814814814814815, |
|
"grad_norm": 7.458181381225586, |
|
"learning_rate": 9.851478631379982e-05, |
|
"loss": 3.383, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.015250544662309368, |
|
"grad_norm": 8.064324378967285, |
|
"learning_rate": 9.829629131445342e-05, |
|
"loss": 3.2161, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.01568627450980392, |
|
"grad_norm": 8.875123977661133, |
|
"learning_rate": 9.806308479691595e-05, |
|
"loss": 3.7818, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.016122004357298474, |
|
"grad_norm": 11.600370407104492, |
|
"learning_rate": 9.781523779815179e-05, |
|
"loss": 3.7597, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.016557734204793027, |
|
"grad_norm": 11.134902000427246, |
|
"learning_rate": 9.755282581475769e-05, |
|
"loss": 3.5337, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.01699346405228758, |
|
"grad_norm": 10.188698768615723, |
|
"learning_rate": 9.727592877996585e-05, |
|
"loss": 3.7508, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.017429193899782137, |
|
"grad_norm": 7.492838382720947, |
|
"learning_rate": 9.698463103929542e-05, |
|
"loss": 3.5272, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.01786492374727669, |
|
"grad_norm": 15.345321655273438, |
|
"learning_rate": 9.667902132486009e-05, |
|
"loss": 3.5264, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.018300653594771243, |
|
"grad_norm": 10.750115394592285, |
|
"learning_rate": 9.635919272833938e-05, |
|
"loss": 3.7775, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.018736383442265796, |
|
"grad_norm": 12.272331237792969, |
|
"learning_rate": 9.602524267262203e-05, |
|
"loss": 3.581, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.01917211328976035, |
|
"grad_norm": 10.027256965637207, |
|
"learning_rate": 9.567727288213005e-05, |
|
"loss": 3.4422, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.0196078431372549, |
|
"grad_norm": 12.036170959472656, |
|
"learning_rate": 9.53153893518325e-05, |
|
"loss": 3.3977, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.020043572984749455, |
|
"grad_norm": 13.455428123474121, |
|
"learning_rate": 9.493970231495835e-05, |
|
"loss": 3.8076, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.020479302832244008, |
|
"grad_norm": 21.514856338500977, |
|
"learning_rate": 9.45503262094184e-05, |
|
"loss": 3.4415, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.02091503267973856, |
|
"grad_norm": 16.04561424255371, |
|
"learning_rate": 9.414737964294636e-05, |
|
"loss": 3.4292, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.021350762527233117, |
|
"grad_norm": 26.326793670654297, |
|
"learning_rate": 9.373098535696979e-05, |
|
"loss": 4.6314, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.02178649237472767, |
|
"grad_norm": 35.12826156616211, |
|
"learning_rate": 9.330127018922194e-05, |
|
"loss": 5.3739, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.02178649237472767, |
|
"eval_loss": 6.510286808013916, |
|
"eval_runtime": 342.1833, |
|
"eval_samples_per_second": 11.298, |
|
"eval_steps_per_second": 5.649, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.022222222222222223, |
|
"grad_norm": 35.10013961791992, |
|
"learning_rate": 9.285836503510562e-05, |
|
"loss": 5.0736, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.022657952069716776, |
|
"grad_norm": 9.756641387939453, |
|
"learning_rate": 9.24024048078213e-05, |
|
"loss": 5.1805, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.02309368191721133, |
|
"grad_norm": 5.467214584350586, |
|
"learning_rate": 9.193352839727121e-05, |
|
"loss": 4.7327, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.023529411764705882, |
|
"grad_norm": 5.6564483642578125, |
|
"learning_rate": 9.145187862775209e-05, |
|
"loss": 4.4293, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.023965141612200435, |
|
"grad_norm": 3.8603763580322266, |
|
"learning_rate": 9.09576022144496e-05, |
|
"loss": 4.2583, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.024400871459694988, |
|
"grad_norm": 3.4143786430358887, |
|
"learning_rate": 9.045084971874738e-05, |
|
"loss": 4.0873, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.02483660130718954, |
|
"grad_norm": 3.591214179992676, |
|
"learning_rate": 8.993177550236464e-05, |
|
"loss": 4.1065, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.025272331154684097, |
|
"grad_norm": 3.990115165710449, |
|
"learning_rate": 8.940053768033609e-05, |
|
"loss": 3.9787, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.02570806100217865, |
|
"grad_norm": 3.732379674911499, |
|
"learning_rate": 8.885729807284856e-05, |
|
"loss": 3.9912, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.026143790849673203, |
|
"grad_norm": 4.2087016105651855, |
|
"learning_rate": 8.83022221559489e-05, |
|
"loss": 3.9105, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.026579520697167756, |
|
"grad_norm": 3.774808645248413, |
|
"learning_rate": 8.773547901113862e-05, |
|
"loss": 3.7629, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.02701525054466231, |
|
"grad_norm": 3.8952903747558594, |
|
"learning_rate": 8.715724127386972e-05, |
|
"loss": 3.9626, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.027450980392156862, |
|
"grad_norm": 4.1821160316467285, |
|
"learning_rate": 8.656768508095853e-05, |
|
"loss": 3.7911, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.027886710239651415, |
|
"grad_norm": 4.013959884643555, |
|
"learning_rate": 8.596699001693255e-05, |
|
"loss": 4.0033, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.02832244008714597, |
|
"grad_norm": 4.394720554351807, |
|
"learning_rate": 8.535533905932738e-05, |
|
"loss": 3.7035, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.02875816993464052, |
|
"grad_norm": 4.393320083618164, |
|
"learning_rate": 8.473291852294987e-05, |
|
"loss": 3.6583, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.029193899782135078, |
|
"grad_norm": 5.116196632385254, |
|
"learning_rate": 8.409991800312493e-05, |
|
"loss": 3.4642, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.02962962962962963, |
|
"grad_norm": 5.922344207763672, |
|
"learning_rate": 8.345653031794292e-05, |
|
"loss": 2.4069, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.030065359477124184, |
|
"grad_norm": 5.2199482917785645, |
|
"learning_rate": 8.280295144952536e-05, |
|
"loss": 2.8334, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.030501089324618737, |
|
"grad_norm": 5.466800212860107, |
|
"learning_rate": 8.213938048432697e-05, |
|
"loss": 2.7829, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.03093681917211329, |
|
"grad_norm": 5.415471076965332, |
|
"learning_rate": 8.146601955249188e-05, |
|
"loss": 2.926, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.03137254901960784, |
|
"grad_norm": 5.683098316192627, |
|
"learning_rate": 8.07830737662829e-05, |
|
"loss": 3.3565, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.0318082788671024, |
|
"grad_norm": 7.391642093658447, |
|
"learning_rate": 8.009075115760243e-05, |
|
"loss": 4.1612, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.03224400871459695, |
|
"grad_norm": 7.878474712371826, |
|
"learning_rate": 7.938926261462366e-05, |
|
"loss": 4.0754, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.032679738562091505, |
|
"grad_norm": 7.959073066711426, |
|
"learning_rate": 7.86788218175523e-05, |
|
"loss": 3.7957, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.033115468409586055, |
|
"grad_norm": 9.686983108520508, |
|
"learning_rate": 7.795964517353735e-05, |
|
"loss": 3.7651, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.03355119825708061, |
|
"grad_norm": 8.671168327331543, |
|
"learning_rate": 7.723195175075136e-05, |
|
"loss": 3.4634, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.03398692810457516, |
|
"grad_norm": 7.247128486633301, |
|
"learning_rate": 7.649596321166024e-05, |
|
"loss": 3.139, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.03442265795206972, |
|
"grad_norm": 8.371869087219238, |
|
"learning_rate": 7.575190374550272e-05, |
|
"loss": 3.1679, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.034858387799564274, |
|
"grad_norm": 7.804739952087402, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 3.082, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.03529411764705882, |
|
"grad_norm": 7.40488862991333, |
|
"learning_rate": 7.424048101231686e-05, |
|
"loss": 3.0061, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.03572984749455338, |
|
"grad_norm": 9.724720001220703, |
|
"learning_rate": 7.347357813929454e-05, |
|
"loss": 3.3592, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.03616557734204793, |
|
"grad_norm": 8.739630699157715, |
|
"learning_rate": 7.269952498697734e-05, |
|
"loss": 3.1958, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.036601307189542485, |
|
"grad_norm": 13.883004188537598, |
|
"learning_rate": 7.191855733945387e-05, |
|
"loss": 3.6667, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.037037037037037035, |
|
"grad_norm": 8.60599136352539, |
|
"learning_rate": 7.113091308703498e-05, |
|
"loss": 2.9883, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.03747276688453159, |
|
"grad_norm": 9.525105476379395, |
|
"learning_rate": 7.033683215379002e-05, |
|
"loss": 3.3996, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.03790849673202614, |
|
"grad_norm": 8.75131607055664, |
|
"learning_rate": 6.953655642446368e-05, |
|
"loss": 3.193, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.0383442265795207, |
|
"grad_norm": 7.756675720214844, |
|
"learning_rate": 6.873032967079561e-05, |
|
"loss": 3.2385, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.038779956427015254, |
|
"grad_norm": 8.15986442565918, |
|
"learning_rate": 6.7918397477265e-05, |
|
"loss": 3.0469, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.0392156862745098, |
|
"grad_norm": 9.440335273742676, |
|
"learning_rate": 6.710100716628344e-05, |
|
"loss": 3.2547, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.03965141612200436, |
|
"grad_norm": 7.82032585144043, |
|
"learning_rate": 6.627840772285784e-05, |
|
"loss": 3.0536, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.04008714596949891, |
|
"grad_norm": 8.337102890014648, |
|
"learning_rate": 6.545084971874738e-05, |
|
"loss": 3.3388, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.040522875816993466, |
|
"grad_norm": 9.669166564941406, |
|
"learning_rate": 6.461858523613684e-05, |
|
"loss": 3.6829, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.040958605664488015, |
|
"grad_norm": 8.827813148498535, |
|
"learning_rate": 6.378186779084995e-05, |
|
"loss": 3.2636, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.04139433551198257, |
|
"grad_norm": 7.568974018096924, |
|
"learning_rate": 6.294095225512603e-05, |
|
"loss": 3.064, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.04183006535947712, |
|
"grad_norm": 10.151366233825684, |
|
"learning_rate": 6.209609477998338e-05, |
|
"loss": 3.0729, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.04226579520697168, |
|
"grad_norm": 15.141189575195312, |
|
"learning_rate": 6.124755271719325e-05, |
|
"loss": 3.407, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.042701525054466234, |
|
"grad_norm": 12.019047737121582, |
|
"learning_rate": 6.0395584540887963e-05, |
|
"loss": 3.1992, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.043137254901960784, |
|
"grad_norm": 18.79649543762207, |
|
"learning_rate": 5.9540449768827246e-05, |
|
"loss": 3.5933, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.04357298474945534, |
|
"grad_norm": 13.088361740112305, |
|
"learning_rate": 5.868240888334653e-05, |
|
"loss": 4.619, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04357298474945534, |
|
"eval_loss": 6.02884578704834, |
|
"eval_runtime": 342.2855, |
|
"eval_samples_per_second": 11.295, |
|
"eval_steps_per_second": 5.647, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.04400871459694989, |
|
"grad_norm": 42.128944396972656, |
|
"learning_rate": 5.782172325201155e-05, |
|
"loss": 5.0307, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.044444444444444446, |
|
"grad_norm": 19.087020874023438, |
|
"learning_rate": 5.695865504800327e-05, |
|
"loss": 4.9041, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.044880174291938996, |
|
"grad_norm": 8.862335205078125, |
|
"learning_rate": 5.6093467170257374e-05, |
|
"loss": 4.3268, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.04531590413943355, |
|
"grad_norm": 5.625029563903809, |
|
"learning_rate": 5.522642316338268e-05, |
|
"loss": 4.1794, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.0457516339869281, |
|
"grad_norm": 4.465734958648682, |
|
"learning_rate": 5.435778713738292e-05, |
|
"loss": 3.8786, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.04618736383442266, |
|
"grad_norm": 4.2806243896484375, |
|
"learning_rate": 5.348782368720626e-05, |
|
"loss": 3.5648, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.046623093681917215, |
|
"grad_norm": 4.626951217651367, |
|
"learning_rate": 5.26167978121472e-05, |
|
"loss": 3.8994, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.047058823529411764, |
|
"grad_norm": 4.95627498626709, |
|
"learning_rate": 5.174497483512506e-05, |
|
"loss": 3.6665, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.04749455337690632, |
|
"grad_norm": 3.974017858505249, |
|
"learning_rate": 5.0872620321864185e-05, |
|
"loss": 3.6386, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.04793028322440087, |
|
"grad_norm": 4.023645877838135, |
|
"learning_rate": 5e-05, |
|
"loss": 3.4637, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.048366013071895426, |
|
"grad_norm": 3.7657175064086914, |
|
"learning_rate": 4.912737967813583e-05, |
|
"loss": 3.6163, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.048801742919389976, |
|
"grad_norm": 4.199164867401123, |
|
"learning_rate": 4.825502516487497e-05, |
|
"loss": 3.4856, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.04923747276688453, |
|
"grad_norm": 4.223781108856201, |
|
"learning_rate": 4.738320218785281e-05, |
|
"loss": 3.4757, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.04967320261437908, |
|
"grad_norm": 4.435927867889404, |
|
"learning_rate": 4.6512176312793736e-05, |
|
"loss": 3.3169, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.05010893246187364, |
|
"grad_norm": 4.904012680053711, |
|
"learning_rate": 4.564221286261709e-05, |
|
"loss": 3.5495, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.050544662309368195, |
|
"grad_norm": 4.705389022827148, |
|
"learning_rate": 4.477357683661734e-05, |
|
"loss": 3.0275, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.050980392156862744, |
|
"grad_norm": 5.674432277679443, |
|
"learning_rate": 4.390653282974264e-05, |
|
"loss": 2.4125, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.0514161220043573, |
|
"grad_norm": 5.154523849487305, |
|
"learning_rate": 4.3041344951996746e-05, |
|
"loss": 2.4193, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.05185185185185185, |
|
"grad_norm": 5.284936904907227, |
|
"learning_rate": 4.2178276747988446e-05, |
|
"loss": 3.0715, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.05228758169934641, |
|
"grad_norm": 5.14494514465332, |
|
"learning_rate": 4.131759111665349e-05, |
|
"loss": 2.6556, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.052723311546840956, |
|
"grad_norm": 5.538849353790283, |
|
"learning_rate": 4.045955023117276e-05, |
|
"loss": 3.0354, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.05315904139433551, |
|
"grad_norm": 7.487570285797119, |
|
"learning_rate": 3.960441545911204e-05, |
|
"loss": 3.9713, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.05359477124183006, |
|
"grad_norm": 8.709990501403809, |
|
"learning_rate": 3.875244728280676e-05, |
|
"loss": 4.0759, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.05403050108932462, |
|
"grad_norm": 8.605923652648926, |
|
"learning_rate": 3.790390522001662e-05, |
|
"loss": 3.5372, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.054466230936819175, |
|
"grad_norm": 8.194428443908691, |
|
"learning_rate": 3.705904774487396e-05, |
|
"loss": 3.3761, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.054901960784313725, |
|
"grad_norm": 8.833706855773926, |
|
"learning_rate": 3.6218132209150045e-05, |
|
"loss": 3.4749, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.05533769063180828, |
|
"grad_norm": 8.356715202331543, |
|
"learning_rate": 3.5381414763863166e-05, |
|
"loss": 3.1606, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.05577342047930283, |
|
"grad_norm": 10.272514343261719, |
|
"learning_rate": 3.4549150281252636e-05, |
|
"loss": 2.5981, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.05620915032679739, |
|
"grad_norm": 8.101070404052734, |
|
"learning_rate": 3.372159227714218e-05, |
|
"loss": 3.4551, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.05664488017429194, |
|
"grad_norm": 8.390080451965332, |
|
"learning_rate": 3.289899283371657e-05, |
|
"loss": 3.329, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.05708061002178649, |
|
"grad_norm": 7.999818325042725, |
|
"learning_rate": 3.2081602522734986e-05, |
|
"loss": 3.1951, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.05751633986928104, |
|
"grad_norm": 8.268589973449707, |
|
"learning_rate": 3.12696703292044e-05, |
|
"loss": 3.0559, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.0579520697167756, |
|
"grad_norm": 7.182888984680176, |
|
"learning_rate": 3.046344357553632e-05, |
|
"loss": 2.7636, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.058387799564270156, |
|
"grad_norm": 7.375523567199707, |
|
"learning_rate": 2.9663167846209998e-05, |
|
"loss": 2.9165, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.058823529411764705, |
|
"grad_norm": 7.143980026245117, |
|
"learning_rate": 2.886908691296504e-05, |
|
"loss": 2.835, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.05925925925925926, |
|
"grad_norm": 7.666155815124512, |
|
"learning_rate": 2.8081442660546125e-05, |
|
"loss": 2.9644, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.05969498910675381, |
|
"grad_norm": 8.490532875061035, |
|
"learning_rate": 2.7300475013022663e-05, |
|
"loss": 3.2043, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.06013071895424837, |
|
"grad_norm": 9.01056957244873, |
|
"learning_rate": 2.6526421860705473e-05, |
|
"loss": 3.2254, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.06056644880174292, |
|
"grad_norm": 9.418787956237793, |
|
"learning_rate": 2.575951898768315e-05, |
|
"loss": 3.1616, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.06100217864923747, |
|
"grad_norm": 10.980050086975098, |
|
"learning_rate": 2.500000000000001e-05, |
|
"loss": 2.756, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.06143790849673202, |
|
"grad_norm": 9.275108337402344, |
|
"learning_rate": 2.4248096254497288e-05, |
|
"loss": 3.0247, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.06187363834422658, |
|
"grad_norm": 9.312789916992188, |
|
"learning_rate": 2.350403678833976e-05, |
|
"loss": 3.2638, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.062309368191721136, |
|
"grad_norm": 8.673617362976074, |
|
"learning_rate": 2.2768048249248648e-05, |
|
"loss": 3.0619, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.06274509803921569, |
|
"grad_norm": 11.994880676269531, |
|
"learning_rate": 2.2040354826462668e-05, |
|
"loss": 3.6095, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.06318082788671024, |
|
"grad_norm": 12.651790618896484, |
|
"learning_rate": 2.132117818244771e-05, |
|
"loss": 2.9046, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.0636165577342048, |
|
"grad_norm": 14.072041511535645, |
|
"learning_rate": 2.061073738537635e-05, |
|
"loss": 3.1311, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.06405228758169934, |
|
"grad_norm": 11.299721717834473, |
|
"learning_rate": 1.9909248842397584e-05, |
|
"loss": 3.4079, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.0644880174291939, |
|
"grad_norm": 13.439027786254883, |
|
"learning_rate": 1.9216926233717085e-05, |
|
"loss": 3.3159, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.06492374727668845, |
|
"grad_norm": 21.56414222717285, |
|
"learning_rate": 1.8533980447508137e-05, |
|
"loss": 4.244, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.06535947712418301, |
|
"grad_norm": 19.723522186279297, |
|
"learning_rate": 1.7860619515673033e-05, |
|
"loss": 4.4341, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06535947712418301, |
|
"eval_loss": 4.00246524810791, |
|
"eval_runtime": 342.244, |
|
"eval_samples_per_second": 11.296, |
|
"eval_steps_per_second": 5.648, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.06579520697167757, |
|
"grad_norm": 4.723827838897705, |
|
"learning_rate": 1.7197048550474643e-05, |
|
"loss": 3.2273, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.06623093681917211, |
|
"grad_norm": 4.662656784057617, |
|
"learning_rate": 1.6543469682057106e-05, |
|
"loss": 3.6659, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.06666666666666667, |
|
"grad_norm": 4.43446159362793, |
|
"learning_rate": 1.5900081996875083e-05, |
|
"loss": 3.5692, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.06710239651416122, |
|
"grad_norm": 4.775651931762695, |
|
"learning_rate": 1.526708147705013e-05, |
|
"loss": 3.5715, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.06753812636165578, |
|
"grad_norm": 3.828355073928833, |
|
"learning_rate": 1.4644660940672627e-05, |
|
"loss": 3.5012, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.06797385620915032, |
|
"grad_norm": 3.4921183586120605, |
|
"learning_rate": 1.4033009983067452e-05, |
|
"loss": 3.2915, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.06840958605664488, |
|
"grad_norm": 3.7830638885498047, |
|
"learning_rate": 1.3432314919041478e-05, |
|
"loss": 3.0783, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.06884531590413943, |
|
"grad_norm": 3.9744088649749756, |
|
"learning_rate": 1.2842758726130283e-05, |
|
"loss": 3.4568, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.06928104575163399, |
|
"grad_norm": 4.1922407150268555, |
|
"learning_rate": 1.22645209888614e-05, |
|
"loss": 3.2042, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.06971677559912855, |
|
"grad_norm": 4.034817695617676, |
|
"learning_rate": 1.1697777844051105e-05, |
|
"loss": 3.2243, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.07015250544662309, |
|
"grad_norm": 4.016630172729492, |
|
"learning_rate": 1.1142701927151456e-05, |
|
"loss": 3.2557, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.07058823529411765, |
|
"grad_norm": 4.005847454071045, |
|
"learning_rate": 1.0599462319663905e-05, |
|
"loss": 3.3865, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.0710239651416122, |
|
"grad_norm": 3.8765485286712646, |
|
"learning_rate": 1.006822449763537e-05, |
|
"loss": 3.2679, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.07145969498910676, |
|
"grad_norm": 3.795775890350342, |
|
"learning_rate": 9.549150281252633e-06, |
|
"loss": 3.1198, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.0718954248366013, |
|
"grad_norm": 4.019495010375977, |
|
"learning_rate": 9.042397785550405e-06, |
|
"loss": 3.0646, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.07233115468409586, |
|
"grad_norm": 4.386630058288574, |
|
"learning_rate": 8.548121372247918e-06, |
|
"loss": 3.1476, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.07276688453159041, |
|
"grad_norm": 5.1531243324279785, |
|
"learning_rate": 8.066471602728803e-06, |
|
"loss": 3.281, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.07320261437908497, |
|
"grad_norm": 4.335279941558838, |
|
"learning_rate": 7.597595192178702e-06, |
|
"loss": 2.8631, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.07363834422657953, |
|
"grad_norm": 5.068178176879883, |
|
"learning_rate": 7.1416349648943894e-06, |
|
"loss": 2.6016, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.07407407407407407, |
|
"grad_norm": 5.061690807342529, |
|
"learning_rate": 6.698729810778065e-06, |
|
"loss": 2.7191, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.07450980392156863, |
|
"grad_norm": 5.518496990203857, |
|
"learning_rate": 6.269014643030213e-06, |
|
"loss": 3.0086, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.07494553376906318, |
|
"grad_norm": 6.2861762046813965, |
|
"learning_rate": 5.852620357053651e-06, |
|
"loss": 3.466, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.07538126361655774, |
|
"grad_norm": 11.34403133392334, |
|
"learning_rate": 5.449673790581611e-06, |
|
"loss": 3.7306, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.07581699346405228, |
|
"grad_norm": 12.873065948486328, |
|
"learning_rate": 5.060297685041659e-06, |
|
"loss": 3.6371, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.07625272331154684, |
|
"grad_norm": 11.45556926727295, |
|
"learning_rate": 4.684610648167503e-06, |
|
"loss": 3.4708, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.0766884531590414, |
|
"grad_norm": 13.728697776794434, |
|
"learning_rate": 4.322727117869951e-06, |
|
"loss": 3.6843, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.07712418300653595, |
|
"grad_norm": 12.337279319763184, |
|
"learning_rate": 3.974757327377981e-06, |
|
"loss": 3.1694, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.07755991285403051, |
|
"grad_norm": 11.851096153259277, |
|
"learning_rate": 3.6408072716606346e-06, |
|
"loss": 3.201, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.07799564270152505, |
|
"grad_norm": 13.997617721557617, |
|
"learning_rate": 3.3209786751399187e-06, |
|
"loss": 2.7749, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.0784313725490196, |
|
"grad_norm": 13.09975814819336, |
|
"learning_rate": 3.0153689607045845e-06, |
|
"loss": 3.0505, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.07886710239651416, |
|
"grad_norm": 10.723923683166504, |
|
"learning_rate": 2.724071220034158e-06, |
|
"loss": 2.8997, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.07930283224400872, |
|
"grad_norm": 10.451512336730957, |
|
"learning_rate": 2.4471741852423237e-06, |
|
"loss": 2.8682, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.07973856209150326, |
|
"grad_norm": 9.712420463562012, |
|
"learning_rate": 2.1847622018482283e-06, |
|
"loss": 3.1159, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.08017429193899782, |
|
"grad_norm": 9.685064315795898, |
|
"learning_rate": 1.9369152030840556e-06, |
|
"loss": 2.9747, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.08061002178649238, |
|
"grad_norm": 9.208358764648438, |
|
"learning_rate": 1.70370868554659e-06, |
|
"loss": 3.1348, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.08104575163398693, |
|
"grad_norm": 10.657655715942383, |
|
"learning_rate": 1.4852136862001764e-06, |
|
"loss": 3.0165, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.08148148148148149, |
|
"grad_norm": 11.530529022216797, |
|
"learning_rate": 1.2814967607382432e-06, |
|
"loss": 3.1041, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.08191721132897603, |
|
"grad_norm": 10.34518814086914, |
|
"learning_rate": 1.0926199633097157e-06, |
|
"loss": 2.8182, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.08235294117647059, |
|
"grad_norm": 10.75024127960205, |
|
"learning_rate": 9.186408276168013e-07, |
|
"loss": 2.9237, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.08278867102396514, |
|
"grad_norm": 10.584077835083008, |
|
"learning_rate": 7.596123493895991e-07, |
|
"loss": 2.5154, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.0832244008714597, |
|
"grad_norm": 14.239422798156738, |
|
"learning_rate": 6.15582970243117e-07, |
|
"loss": 2.8776, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.08366013071895424, |
|
"grad_norm": 12.499034881591797, |
|
"learning_rate": 4.865965629214819e-07, |
|
"loss": 2.9535, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.0840958605664488, |
|
"grad_norm": 13.673118591308594, |
|
"learning_rate": 3.7269241793390085e-07, |
|
"loss": 3.1236, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.08453159041394336, |
|
"grad_norm": 13.16242790222168, |
|
"learning_rate": 2.7390523158633554e-07, |
|
"loss": 3.1829, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.08496732026143791, |
|
"grad_norm": 13.244391441345215, |
|
"learning_rate": 1.9026509541272275e-07, |
|
"loss": 3.0557, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.08540305010893247, |
|
"grad_norm": 15.455413818359375, |
|
"learning_rate": 1.2179748700879012e-07, |
|
"loss": 3.2272, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.08583877995642701, |
|
"grad_norm": 18.628986358642578, |
|
"learning_rate": 6.852326227130834e-08, |
|
"loss": 2.6964, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.08627450980392157, |
|
"grad_norm": 17.168880462646484, |
|
"learning_rate": 3.04586490452119e-08, |
|
"loss": 3.4646, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.08671023965141612, |
|
"grad_norm": 31.757919311523438, |
|
"learning_rate": 7.615242180436522e-09, |
|
"loss": 3.9619, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.08714596949891068, |
|
"grad_norm": 30.930551528930664, |
|
"learning_rate": 0.0, |
|
"loss": 4.0868, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.08714596949891068, |
|
"eval_loss": 3.230548620223999, |
|
"eval_runtime": 342.1999, |
|
"eval_samples_per_second": 11.297, |
|
"eval_steps_per_second": 5.649, |
|
"step": 200 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 200, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 50, |
|
"stateful_callbacks": { |
|
"EarlyStoppingCallback": { |
|
"args": { |
|
"early_stopping_patience": 5, |
|
"early_stopping_threshold": 0.0 |
|
}, |
|
"attributes": { |
|
"early_stopping_patience_counter": 0 |
|
} |
|
}, |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.9451989028962304e+17, |
|
"train_batch_size": 8, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |