| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 5.0, | |
| "eval_steps": 500, | |
| "global_step": 470, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.010638297872340425, | |
| "grad_norm": 5.62164644869217, | |
| "learning_rate": 1.7021276595744682e-06, | |
| "loss": 0.8559, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.02127659574468085, | |
| "grad_norm": 5.541790955372414, | |
| "learning_rate": 3.4042553191489363e-06, | |
| "loss": 0.8537, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.031914893617021274, | |
| "grad_norm": 5.407901882681235, | |
| "learning_rate": 5.106382978723404e-06, | |
| "loss": 0.8414, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.0425531914893617, | |
| "grad_norm": 4.03578755597478, | |
| "learning_rate": 6.808510638297873e-06, | |
| "loss": 0.8026, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.05319148936170213, | |
| "grad_norm": 2.235595604545287, | |
| "learning_rate": 8.510638297872341e-06, | |
| "loss": 0.7586, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.06382978723404255, | |
| "grad_norm": 3.6713198410625205, | |
| "learning_rate": 1.0212765957446808e-05, | |
| "loss": 0.7542, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.07446808510638298, | |
| "grad_norm": 3.8053717898299015, | |
| "learning_rate": 1.1914893617021277e-05, | |
| "loss": 0.7525, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.0851063829787234, | |
| "grad_norm": 3.735929746217291, | |
| "learning_rate": 1.3617021276595745e-05, | |
| "loss": 0.7146, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.09574468085106383, | |
| "grad_norm": 3.4388722472257522, | |
| "learning_rate": 1.5319148936170214e-05, | |
| "loss": 0.7076, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.10638297872340426, | |
| "grad_norm": 2.0576402741081745, | |
| "learning_rate": 1.7021276595744682e-05, | |
| "loss": 0.6804, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.11702127659574468, | |
| "grad_norm": 2.194245295451263, | |
| "learning_rate": 1.872340425531915e-05, | |
| "loss": 0.6574, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.1276595744680851, | |
| "grad_norm": 1.970180537002589, | |
| "learning_rate": 2.0425531914893616e-05, | |
| "loss": 0.6351, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.13829787234042554, | |
| "grad_norm": 1.1822804981997306, | |
| "learning_rate": 2.2127659574468088e-05, | |
| "loss": 0.6205, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.14893617021276595, | |
| "grad_norm": 1.1602034908038812, | |
| "learning_rate": 2.3829787234042553e-05, | |
| "loss": 0.6102, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.1595744680851064, | |
| "grad_norm": 1.6168637308896043, | |
| "learning_rate": 2.5531914893617025e-05, | |
| "loss": 0.6058, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.1702127659574468, | |
| "grad_norm": 0.9268293511359277, | |
| "learning_rate": 2.723404255319149e-05, | |
| "loss": 0.5865, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.18085106382978725, | |
| "grad_norm": 1.3995612819702326, | |
| "learning_rate": 2.8936170212765963e-05, | |
| "loss": 0.5828, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.19148936170212766, | |
| "grad_norm": 0.9491984900355026, | |
| "learning_rate": 3.063829787234043e-05, | |
| "loss": 0.5814, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.20212765957446807, | |
| "grad_norm": 1.2461341580332967, | |
| "learning_rate": 3.234042553191489e-05, | |
| "loss": 0.5732, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.2127659574468085, | |
| "grad_norm": 0.9013655593666975, | |
| "learning_rate": 3.4042553191489365e-05, | |
| "loss": 0.572, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.22340425531914893, | |
| "grad_norm": 1.4385376985061347, | |
| "learning_rate": 3.574468085106383e-05, | |
| "loss": 0.5775, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.23404255319148937, | |
| "grad_norm": 1.1316548066081353, | |
| "learning_rate": 3.74468085106383e-05, | |
| "loss": 0.5589, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.24468085106382978, | |
| "grad_norm": 1.1348968485093565, | |
| "learning_rate": 3.914893617021277e-05, | |
| "loss": 0.5555, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.2553191489361702, | |
| "grad_norm": 0.9593256794712123, | |
| "learning_rate": 4.085106382978723e-05, | |
| "loss": 0.5577, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.26595744680851063, | |
| "grad_norm": 1.5810352206230227, | |
| "learning_rate": 4.2553191489361704e-05, | |
| "loss": 0.5506, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.2765957446808511, | |
| "grad_norm": 1.0552617330606424, | |
| "learning_rate": 4.4255319148936176e-05, | |
| "loss": 0.5373, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.2872340425531915, | |
| "grad_norm": 1.651827285414071, | |
| "learning_rate": 4.595744680851065e-05, | |
| "loss": 0.5465, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.2978723404255319, | |
| "grad_norm": 1.8485214296335475, | |
| "learning_rate": 4.765957446808511e-05, | |
| "loss": 0.5413, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.30851063829787234, | |
| "grad_norm": 0.8693252455438564, | |
| "learning_rate": 4.936170212765958e-05, | |
| "loss": 0.5416, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.3191489361702128, | |
| "grad_norm": 2.3515640310759522, | |
| "learning_rate": 5.106382978723405e-05, | |
| "loss": 0.547, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.32978723404255317, | |
| "grad_norm": 1.4756724644968964, | |
| "learning_rate": 5.276595744680851e-05, | |
| "loss": 0.5442, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.3404255319148936, | |
| "grad_norm": 1.9541127476117117, | |
| "learning_rate": 5.446808510638298e-05, | |
| "loss": 0.5376, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.35106382978723405, | |
| "grad_norm": 1.4385140916597212, | |
| "learning_rate": 5.617021276595745e-05, | |
| "loss": 0.5347, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.3617021276595745, | |
| "grad_norm": 1.6177174100025706, | |
| "learning_rate": 5.7872340425531925e-05, | |
| "loss": 0.5283, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.3723404255319149, | |
| "grad_norm": 1.5517064428797538, | |
| "learning_rate": 5.9574468085106384e-05, | |
| "loss": 0.5304, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.3829787234042553, | |
| "grad_norm": 1.3062103414138244, | |
| "learning_rate": 6.127659574468086e-05, | |
| "loss": 0.5277, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.39361702127659576, | |
| "grad_norm": 1.1717376687944345, | |
| "learning_rate": 6.297872340425533e-05, | |
| "loss": 0.5246, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.40425531914893614, | |
| "grad_norm": 1.5784372654316825, | |
| "learning_rate": 6.468085106382979e-05, | |
| "loss": 0.5211, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.4148936170212766, | |
| "grad_norm": 1.2570855650815358, | |
| "learning_rate": 6.638297872340426e-05, | |
| "loss": 0.531, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.425531914893617, | |
| "grad_norm": 1.1994392890244334, | |
| "learning_rate": 6.808510638297873e-05, | |
| "loss": 0.5098, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.43617021276595747, | |
| "grad_norm": 1.608445496858305, | |
| "learning_rate": 6.97872340425532e-05, | |
| "loss": 0.5176, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.44680851063829785, | |
| "grad_norm": 1.6126071175066283, | |
| "learning_rate": 7.148936170212766e-05, | |
| "loss": 0.5181, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.4574468085106383, | |
| "grad_norm": 1.3431412401241174, | |
| "learning_rate": 7.319148936170213e-05, | |
| "loss": 0.5135, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.46808510638297873, | |
| "grad_norm": 1.5807072225527692, | |
| "learning_rate": 7.48936170212766e-05, | |
| "loss": 0.5185, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.4787234042553192, | |
| "grad_norm": 2.0147344702502554, | |
| "learning_rate": 7.659574468085108e-05, | |
| "loss": 0.5281, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.48936170212765956, | |
| "grad_norm": 1.4169773369597476, | |
| "learning_rate": 7.829787234042553e-05, | |
| "loss": 0.5104, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "grad_norm": 1.327014601864401, | |
| "learning_rate": 8e-05, | |
| "loss": 0.507, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.5106382978723404, | |
| "grad_norm": 1.463329858386686, | |
| "learning_rate": 7.999889681839899e-05, | |
| "loss": 0.513, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.5212765957446809, | |
| "grad_norm": 1.7226533644834205, | |
| "learning_rate": 7.999558733444641e-05, | |
| "loss": 0.5241, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.5319148936170213, | |
| "grad_norm": 1.4625043342314172, | |
| "learning_rate": 7.999007173069037e-05, | |
| "loss": 0.4992, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.5425531914893617, | |
| "grad_norm": 1.74638144425408, | |
| "learning_rate": 7.998235031136648e-05, | |
| "loss": 0.5129, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.5531914893617021, | |
| "grad_norm": 1.156051684687154, | |
| "learning_rate": 7.997242350238117e-05, | |
| "loss": 0.5017, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.5638297872340425, | |
| "grad_norm": 1.1484647512074981, | |
| "learning_rate": 7.996029185128804e-05, | |
| "loss": 0.5021, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.574468085106383, | |
| "grad_norm": 1.7889194838438403, | |
| "learning_rate": 7.994595602725781e-05, | |
| "loss": 0.5137, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.5851063829787234, | |
| "grad_norm": 1.2512024906203, | |
| "learning_rate": 7.992941682104139e-05, | |
| "loss": 0.4935, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.5957446808510638, | |
| "grad_norm": 1.9138223030633696, | |
| "learning_rate": 7.991067514492614e-05, | |
| "loss": 0.5084, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.6063829787234043, | |
| "grad_norm": 1.2776429355884351, | |
| "learning_rate": 7.988973203268567e-05, | |
| "loss": 0.5045, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.6170212765957447, | |
| "grad_norm": 2.141401973332067, | |
| "learning_rate": 7.986658863952281e-05, | |
| "loss": 0.4992, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.6276595744680851, | |
| "grad_norm": 1.454535688794727, | |
| "learning_rate": 7.984124624200583e-05, | |
| "loss": 0.5058, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.6382978723404256, | |
| "grad_norm": 1.7846076730422646, | |
| "learning_rate": 7.981370623799803e-05, | |
| "loss": 0.5013, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.648936170212766, | |
| "grad_norm": 1.4310227485726414, | |
| "learning_rate": 7.978397014658075e-05, | |
| "loss": 0.5079, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.6595744680851063, | |
| "grad_norm": 1.4094274678104701, | |
| "learning_rate": 7.97520396079694e-05, | |
| "loss": 0.4882, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.6702127659574468, | |
| "grad_norm": 1.6288107207366431, | |
| "learning_rate": 7.971791638342313e-05, | |
| "loss": 0.4977, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.6808510638297872, | |
| "grad_norm": 1.0461330955205717, | |
| "learning_rate": 7.96816023551476e-05, | |
| "loss": 0.4932, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.6914893617021277, | |
| "grad_norm": 1.8472118603297956, | |
| "learning_rate": 7.964309952619121e-05, | |
| "loss": 0.4956, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.7021276595744681, | |
| "grad_norm": 1.3785935023957434, | |
| "learning_rate": 7.96024100203346e-05, | |
| "loss": 0.5072, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.7127659574468085, | |
| "grad_norm": 1.321199071263235, | |
| "learning_rate": 7.955953608197345e-05, | |
| "loss": 0.4864, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.723404255319149, | |
| "grad_norm": 1.2126321080139488, | |
| "learning_rate": 7.951448007599478e-05, | |
| "loss": 0.5016, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.7340425531914894, | |
| "grad_norm": 1.1858808993782288, | |
| "learning_rate": 7.946724448764644e-05, | |
| "loss": 0.4943, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.7446808510638298, | |
| "grad_norm": 1.0062356288958618, | |
| "learning_rate": 7.94178319224e-05, | |
| "loss": 0.491, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.7553191489361702, | |
| "grad_norm": 1.9547579891672486, | |
| "learning_rate": 7.936624510580712e-05, | |
| "loss": 0.4879, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.7659574468085106, | |
| "grad_norm": 1.0537143803568765, | |
| "learning_rate": 7.931248688334915e-05, | |
| "loss": 0.4916, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.776595744680851, | |
| "grad_norm": 1.6913458685278957, | |
| "learning_rate": 7.925656022028017e-05, | |
| "loss": 0.4983, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.7872340425531915, | |
| "grad_norm": 1.016587494594598, | |
| "learning_rate": 7.919846820146348e-05, | |
| "loss": 0.4964, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.7978723404255319, | |
| "grad_norm": 1.6376648762774801, | |
| "learning_rate": 7.913821403120139e-05, | |
| "loss": 0.4916, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.8085106382978723, | |
| "grad_norm": 1.2262934120029891, | |
| "learning_rate": 7.90758010330585e-05, | |
| "loss": 0.4851, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.8191489361702128, | |
| "grad_norm": 1.381474776104582, | |
| "learning_rate": 7.901123264967836e-05, | |
| "loss": 0.4929, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.8297872340425532, | |
| "grad_norm": 1.3905933735533444, | |
| "learning_rate": 7.894451244259363e-05, | |
| "loss": 0.4924, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.8404255319148937, | |
| "grad_norm": 1.038270640845919, | |
| "learning_rate": 7.887564409202953e-05, | |
| "loss": 0.4893, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.851063829787234, | |
| "grad_norm": 1.4032705044700307, | |
| "learning_rate": 7.880463139670091e-05, | |
| "loss": 0.485, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.8617021276595744, | |
| "grad_norm": 1.1420897464578796, | |
| "learning_rate": 7.873147827360273e-05, | |
| "loss": 0.4867, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.8723404255319149, | |
| "grad_norm": 1.667140313423623, | |
| "learning_rate": 7.865618875779398e-05, | |
| "loss": 0.4775, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.8829787234042553, | |
| "grad_norm": 1.276151508655107, | |
| "learning_rate": 7.857876700217508e-05, | |
| "loss": 0.4786, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.8936170212765957, | |
| "grad_norm": 1.1090496719024276, | |
| "learning_rate": 7.849921727725882e-05, | |
| "loss": 0.4722, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.9042553191489362, | |
| "grad_norm": 0.991963411082399, | |
| "learning_rate": 7.841754397093487e-05, | |
| "loss": 0.478, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.9148936170212766, | |
| "grad_norm": 1.731621852488109, | |
| "learning_rate": 7.833375158822766e-05, | |
| "loss": 0.4773, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.925531914893617, | |
| "grad_norm": 1.3804671267005582, | |
| "learning_rate": 7.824784475104795e-05, | |
| "loss": 0.4787, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.9361702127659575, | |
| "grad_norm": 1.1135118775414654, | |
| "learning_rate": 7.815982819793784e-05, | |
| "loss": 0.4772, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.9468085106382979, | |
| "grad_norm": 1.9348194075183587, | |
| "learning_rate": 7.806970678380943e-05, | |
| "loss": 0.4768, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.9574468085106383, | |
| "grad_norm": 0.9278131511467589, | |
| "learning_rate": 7.797748547967701e-05, | |
| "loss": 0.4768, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.9680851063829787, | |
| "grad_norm": 1.5790755873743727, | |
| "learning_rate": 7.788316937238287e-05, | |
| "loss": 0.4777, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.9787234042553191, | |
| "grad_norm": 1.1589932356056467, | |
| "learning_rate": 7.778676366431676e-05, | |
| "loss": 0.4793, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.9893617021276596, | |
| "grad_norm": 0.9998066780828351, | |
| "learning_rate": 7.768827367312882e-05, | |
| "loss": 0.4736, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "grad_norm": 0.9964032491428947, | |
| "learning_rate": 7.758770483143634e-05, | |
| "loss": 0.4841, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 1.0106382978723405, | |
| "grad_norm": 1.1948858983085593, | |
| "learning_rate": 7.748506268652415e-05, | |
| "loss": 0.461, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 1.0212765957446808, | |
| "grad_norm": 0.7073134184929624, | |
| "learning_rate": 7.73803529000385e-05, | |
| "loss": 0.4536, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 1.0319148936170213, | |
| "grad_norm": 1.1611831111477335, | |
| "learning_rate": 7.727358124767491e-05, | |
| "loss": 0.4612, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 1.0425531914893618, | |
| "grad_norm": 1.0669064153598073, | |
| "learning_rate": 7.71647536188595e-05, | |
| "loss": 0.4653, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 1.053191489361702, | |
| "grad_norm": 1.024613831141331, | |
| "learning_rate": 7.705387601642416e-05, | |
| "loss": 0.4584, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 1.0638297872340425, | |
| "grad_norm": 1.4884061518621656, | |
| "learning_rate": 7.694095455627542e-05, | |
| "loss": 0.4608, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 1.074468085106383, | |
| "grad_norm": 1.0414967862002833, | |
| "learning_rate": 7.682599546705716e-05, | |
| "loss": 0.4577, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 1.0851063829787233, | |
| "grad_norm": 1.3598647542639628, | |
| "learning_rate": 7.670900508980697e-05, | |
| "loss": 0.4587, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 1.0957446808510638, | |
| "grad_norm": 1.5748967124421995, | |
| "learning_rate": 7.658998987760645e-05, | |
| "loss": 0.4628, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 1.1063829787234043, | |
| "grad_norm": 1.142076727768023, | |
| "learning_rate": 7.646895639522518e-05, | |
| "loss": 0.4551, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 1.1170212765957448, | |
| "grad_norm": 1.3811816181823466, | |
| "learning_rate": 7.634591131875875e-05, | |
| "loss": 0.4594, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 1.127659574468085, | |
| "grad_norm": 0.9160727711980033, | |
| "learning_rate": 7.622086143526036e-05, | |
| "loss": 0.4518, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 1.1382978723404256, | |
| "grad_norm": 1.0917703855774061, | |
| "learning_rate": 7.609381364236655e-05, | |
| "loss": 0.4551, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 1.148936170212766, | |
| "grad_norm": 1.43226233951233, | |
| "learning_rate": 7.59647749479167e-05, | |
| "loss": 0.4603, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 1.1595744680851063, | |
| "grad_norm": 1.4134305371766467, | |
| "learning_rate": 7.583375246956648e-05, | |
| "loss": 0.4641, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 1.1702127659574468, | |
| "grad_norm": 0.7629510146350382, | |
| "learning_rate": 7.570075343439526e-05, | |
| "loss": 0.4559, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 1.1808510638297873, | |
| "grad_norm": 1.3551761060537464, | |
| "learning_rate": 7.556578517850747e-05, | |
| "loss": 0.4562, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 1.1914893617021276, | |
| "grad_norm": 1.1143038754484502, | |
| "learning_rate": 7.542885514662794e-05, | |
| "loss": 0.454, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 1.202127659574468, | |
| "grad_norm": 1.3365732739074656, | |
| "learning_rate": 7.528997089169128e-05, | |
| "loss": 0.4534, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 1.2127659574468086, | |
| "grad_norm": 0.9865960113031025, | |
| "learning_rate": 7.51491400744252e-05, | |
| "loss": 0.4522, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 1.2234042553191489, | |
| "grad_norm": 1.7137045691014816, | |
| "learning_rate": 7.500637046292803e-05, | |
| "loss": 0.4609, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 1.2340425531914894, | |
| "grad_norm": 1.0423122076919606, | |
| "learning_rate": 7.48616699322402e-05, | |
| "loss": 0.4582, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 1.2446808510638299, | |
| "grad_norm": 1.1897509411491205, | |
| "learning_rate": 7.471504646390987e-05, | |
| "loss": 0.4552, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 1.2553191489361701, | |
| "grad_norm": 1.707159683909229, | |
| "learning_rate": 7.456650814555267e-05, | |
| "loss": 0.4668, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 1.2659574468085106, | |
| "grad_norm": 0.7299856606632058, | |
| "learning_rate": 7.441606317040558e-05, | |
| "loss": 0.4523, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 1.2765957446808511, | |
| "grad_norm": 1.7335201629368997, | |
| "learning_rate": 7.426371983687503e-05, | |
| "loss": 0.4645, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 1.2872340425531914, | |
| "grad_norm": 1.0376836462353736, | |
| "learning_rate": 7.410948654807916e-05, | |
| "loss": 0.4567, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 1.297872340425532, | |
| "grad_norm": 1.1899631811065923, | |
| "learning_rate": 7.39533718113843e-05, | |
| "loss": 0.4622, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 1.3085106382978724, | |
| "grad_norm": 1.2350801182171, | |
| "learning_rate": 7.379538423793568e-05, | |
| "loss": 0.4557, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 1.3191489361702127, | |
| "grad_norm": 0.8340292591613142, | |
| "learning_rate": 7.363553254218253e-05, | |
| "loss": 0.4492, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 1.3297872340425532, | |
| "grad_norm": 1.2214430144107857, | |
| "learning_rate": 7.347382554139733e-05, | |
| "loss": 0.4563, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 1.3404255319148937, | |
| "grad_norm": 0.7515274378448177, | |
| "learning_rate": 7.331027215518949e-05, | |
| "loss": 0.4574, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 1.351063829787234, | |
| "grad_norm": 1.0593819816413108, | |
| "learning_rate": 7.31448814050133e-05, | |
| "loss": 0.4586, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 1.3617021276595744, | |
| "grad_norm": 0.8257535268103262, | |
| "learning_rate": 7.297766241367041e-05, | |
| "loss": 0.4525, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 1.372340425531915, | |
| "grad_norm": 0.8489466970658132, | |
| "learning_rate": 7.280862440480658e-05, | |
| "loss": 0.4486, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 1.3829787234042552, | |
| "grad_norm": 1.0407070508163005, | |
| "learning_rate": 7.263777670240282e-05, | |
| "loss": 0.4518, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 1.3936170212765957, | |
| "grad_norm": 1.0870662063487706, | |
| "learning_rate": 7.246512873026125e-05, | |
| "loss": 0.4436, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 1.4042553191489362, | |
| "grad_norm": 1.040969884122318, | |
| "learning_rate": 7.229069001148518e-05, | |
| "loss": 0.4376, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 1.4148936170212765, | |
| "grad_norm": 0.8210287133544034, | |
| "learning_rate": 7.211447016795388e-05, | |
| "loss": 0.4459, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 1.425531914893617, | |
| "grad_norm": 0.6924416024292802, | |
| "learning_rate": 7.193647891979177e-05, | |
| "loss": 0.4492, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 1.4361702127659575, | |
| "grad_norm": 0.691504635298063, | |
| "learning_rate": 7.17567260848324e-05, | |
| "loss": 0.4382, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 1.4468085106382977, | |
| "grad_norm": 0.557066728314828, | |
| "learning_rate": 7.157522157807675e-05, | |
| "loss": 0.4475, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 1.4574468085106382, | |
| "grad_norm": 0.578188119392695, | |
| "learning_rate": 7.139197541114645e-05, | |
| "loss": 0.4416, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 1.4680851063829787, | |
| "grad_norm": 0.8694472070990833, | |
| "learning_rate": 7.120699769173149e-05, | |
| "loss": 0.4492, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 1.4787234042553192, | |
| "grad_norm": 1.0414676752540923, | |
| "learning_rate": 7.10202986230327e-05, | |
| "loss": 0.4371, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 1.4893617021276595, | |
| "grad_norm": 0.9048472155799596, | |
| "learning_rate": 7.083188850319895e-05, | |
| "loss": 0.4411, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 1.5, | |
| "grad_norm": 0.9161726375221393, | |
| "learning_rate": 7.064177772475912e-05, | |
| "loss": 0.4529, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 1.5106382978723403, | |
| "grad_norm": 1.1657596780724042, | |
| "learning_rate": 7.044997677404888e-05, | |
| "loss": 0.4433, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 1.521276595744681, | |
| "grad_norm": 0.8278836619484179, | |
| "learning_rate": 7.025649623063223e-05, | |
| "loss": 0.4432, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 1.5319148936170213, | |
| "grad_norm": 0.6982292221675673, | |
| "learning_rate": 7.006134676671791e-05, | |
| "loss": 0.4498, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 1.5425531914893615, | |
| "grad_norm": 1.1530549987417396, | |
| "learning_rate": 6.986453914657083e-05, | |
| "loss": 0.4484, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 1.5531914893617023, | |
| "grad_norm": 1.2290110194371844, | |
| "learning_rate": 6.96660842259183e-05, | |
| "loss": 0.4541, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 1.5638297872340425, | |
| "grad_norm": 0.6040374185478055, | |
| "learning_rate": 6.946599295135116e-05, | |
| "loss": 0.4356, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 1.574468085106383, | |
| "grad_norm": 0.5974414549578787, | |
| "learning_rate": 6.926427635972003e-05, | |
| "loss": 0.4374, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 1.5851063829787235, | |
| "grad_norm": 0.9321005443290319, | |
| "learning_rate": 6.906094557752654e-05, | |
| "loss": 0.4432, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 1.5957446808510638, | |
| "grad_norm": 1.0909193280528477, | |
| "learning_rate": 6.885601182030958e-05, | |
| "loss": 0.4474, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 1.6063829787234043, | |
| "grad_norm": 0.7679852263478857, | |
| "learning_rate": 6.864948639202667e-05, | |
| "loss": 0.4442, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 1.6170212765957448, | |
| "grad_norm": 0.7550553221749178, | |
| "learning_rate": 6.844138068443043e-05, | |
| "loss": 0.4452, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 1.627659574468085, | |
| "grad_norm": 1.1202478929189756, | |
| "learning_rate": 6.823170617644029e-05, | |
| "loss": 0.4391, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 1.6382978723404256, | |
| "grad_norm": 0.8957986675565583, | |
| "learning_rate": 6.802047443350915e-05, | |
| "loss": 0.44, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 1.648936170212766, | |
| "grad_norm": 0.7670887365074628, | |
| "learning_rate": 6.78076971069857e-05, | |
| "loss": 0.4427, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 1.6595744680851063, | |
| "grad_norm": 0.7780853531387013, | |
| "learning_rate": 6.759338593347148e-05, | |
| "loss": 0.4398, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 1.6702127659574468, | |
| "grad_norm": 0.7615691990716689, | |
| "learning_rate": 6.737755273417367e-05, | |
| "loss": 0.4456, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 1.6808510638297873, | |
| "grad_norm": 0.9908956798698196, | |
| "learning_rate": 6.716020941425302e-05, | |
| "loss": 0.4511, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 1.6914893617021276, | |
| "grad_norm": 1.1748417677944305, | |
| "learning_rate": 6.694136796216706e-05, | |
| "loss": 0.4442, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 1.702127659574468, | |
| "grad_norm": 0.7089766235204993, | |
| "learning_rate": 6.672104044900901e-05, | |
| "loss": 0.4437, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 1.7127659574468086, | |
| "grad_norm": 0.6183339909308402, | |
| "learning_rate": 6.649923902784178e-05, | |
| "loss": 0.4391, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 1.7234042553191489, | |
| "grad_norm": 0.6684140166542728, | |
| "learning_rate": 6.627597593302772e-05, | |
| "loss": 0.4543, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 1.7340425531914894, | |
| "grad_norm": 0.8241026926395437, | |
| "learning_rate": 6.605126347955376e-05, | |
| "loss": 0.446, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 1.7446808510638299, | |
| "grad_norm": 0.8675844769376447, | |
| "learning_rate": 6.58251140623521e-05, | |
| "loss": 0.4394, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 1.7553191489361701, | |
| "grad_norm": 0.7195091122160452, | |
| "learning_rate": 6.559754015561655e-05, | |
| "loss": 0.4361, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 1.7659574468085106, | |
| "grad_norm": 0.7586387307080512, | |
| "learning_rate": 6.536855431211445e-05, | |
| "loss": 0.4452, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 1.7765957446808511, | |
| "grad_norm": 0.8241109857261576, | |
| "learning_rate": 6.513816916249427e-05, | |
| "loss": 0.4462, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 1.7872340425531914, | |
| "grad_norm": 0.7828717831576161, | |
| "learning_rate": 6.490639741458891e-05, | |
| "loss": 0.4387, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 1.797872340425532, | |
| "grad_norm": 0.9841281333161127, | |
| "learning_rate": 6.46732518527148e-05, | |
| "loss": 0.4433, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 1.8085106382978724, | |
| "grad_norm": 0.8948930714592511, | |
| "learning_rate": 6.443874533696662e-05, | |
| "loss": 0.4329, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 1.8191489361702127, | |
| "grad_norm": 0.6167915147089398, | |
| "learning_rate": 6.420289080250804e-05, | |
| "loss": 0.4423, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 1.8297872340425532, | |
| "grad_norm": 0.7688689128970628, | |
| "learning_rate": 6.396570125885823e-05, | |
| "loss": 0.448, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 1.8404255319148937, | |
| "grad_norm": 0.7366333884582301, | |
| "learning_rate": 6.372718978917421e-05, | |
| "loss": 0.4421, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 1.851063829787234, | |
| "grad_norm": 0.6292983833226043, | |
| "learning_rate": 6.348736954952923e-05, | |
| "loss": 0.4345, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 1.8617021276595744, | |
| "grad_norm": 0.6094161942129656, | |
| "learning_rate": 6.324625376818707e-05, | |
| "loss": 0.4289, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 1.872340425531915, | |
| "grad_norm": 0.5223920021177019, | |
| "learning_rate": 6.300385574487242e-05, | |
| "loss": 0.4281, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 1.8829787234042552, | |
| "grad_norm": 0.43105768007225675, | |
| "learning_rate": 6.276018885003727e-05, | |
| "loss": 0.43, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 1.8936170212765957, | |
| "grad_norm": 0.4154261253991378, | |
| "learning_rate": 6.251526652412335e-05, | |
| "loss": 0.4272, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 1.9042553191489362, | |
| "grad_norm": 0.5129738950204691, | |
| "learning_rate": 6.226910227682087e-05, | |
| "loss": 0.4404, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 1.9148936170212765, | |
| "grad_norm": 0.4069384888530334, | |
| "learning_rate": 6.202170968632324e-05, | |
| "loss": 0.4379, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 1.925531914893617, | |
| "grad_norm": 0.4346407097225191, | |
| "learning_rate": 6.177310239857815e-05, | |
| "loss": 0.4393, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 1.9361702127659575, | |
| "grad_norm": 0.7030752370713033, | |
| "learning_rate": 6.152329412653491e-05, | |
| "loss": 0.4359, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 1.9468085106382977, | |
| "grad_norm": 0.7931293701516278, | |
| "learning_rate": 6.127229864938798e-05, | |
| "loss": 0.4324, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 1.9574468085106385, | |
| "grad_norm": 0.8569387442215538, | |
| "learning_rate": 6.1020129811816985e-05, | |
| "loss": 0.439, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 1.9680851063829787, | |
| "grad_norm": 0.9366916666402957, | |
| "learning_rate": 6.076680152322302e-05, | |
| "loss": 0.4365, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 1.978723404255319, | |
| "grad_norm": 0.9953725617355069, | |
| "learning_rate": 6.051232775696143e-05, | |
| "loss": 0.436, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 1.9893617021276597, | |
| "grad_norm": 1.0794213458303916, | |
| "learning_rate": 6.025672254957106e-05, | |
| "loss": 0.4423, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "grad_norm": 0.724853495699258, | |
| "learning_rate": 6.000000000000001e-05, | |
| "loss": 0.4317, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 2.0106382978723403, | |
| "grad_norm": 0.5489002192429712, | |
| "learning_rate": 5.9742174268827936e-05, | |
| "loss": 0.4137, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 2.021276595744681, | |
| "grad_norm": 0.5979267122970597, | |
| "learning_rate": 5.948325957748498e-05, | |
| "loss": 0.4107, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 2.0319148936170213, | |
| "grad_norm": 0.6769157942235892, | |
| "learning_rate": 5.9223270207467355e-05, | |
| "loss": 0.415, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 2.0425531914893615, | |
| "grad_norm": 0.694157788349472, | |
| "learning_rate": 5.896222049954951e-05, | |
| "loss": 0.4138, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 2.0531914893617023, | |
| "grad_norm": 0.7900658328855281, | |
| "learning_rate": 5.870012485299318e-05, | |
| "loss": 0.415, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 2.0638297872340425, | |
| "grad_norm": 0.6937916417075501, | |
| "learning_rate": 5.843699772475312e-05, | |
| "loss": 0.4132, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 2.074468085106383, | |
| "grad_norm": 0.6698631990082241, | |
| "learning_rate": 5.8172853628679676e-05, | |
| "loss": 0.4121, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 2.0851063829787235, | |
| "grad_norm": 0.6656108376135776, | |
| "learning_rate": 5.790770713471816e-05, | |
| "loss": 0.4127, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 2.095744680851064, | |
| "grad_norm": 0.5836102935781896, | |
| "learning_rate": 5.764157286810527e-05, | |
| "loss": 0.4068, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 2.106382978723404, | |
| "grad_norm": 0.6583841935134351, | |
| "learning_rate": 5.7374465508562324e-05, | |
| "loss": 0.4063, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 2.117021276595745, | |
| "grad_norm": 0.5654983891994843, | |
| "learning_rate": 5.710639978948555e-05, | |
| "loss": 0.412, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 2.127659574468085, | |
| "grad_norm": 0.423035149723838, | |
| "learning_rate": 5.6837390497133406e-05, | |
| "loss": 0.4038, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 2.1382978723404253, | |
| "grad_norm": 0.6023613024743625, | |
| "learning_rate": 5.6567452469810984e-05, | |
| "loss": 0.4072, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 2.148936170212766, | |
| "grad_norm": 0.5872005390949395, | |
| "learning_rate": 5.629660059705153e-05, | |
| "loss": 0.4064, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 2.1595744680851063, | |
| "grad_norm": 0.38488472443140903, | |
| "learning_rate": 5.602484981879519e-05, | |
| "loss": 0.4088, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 2.1702127659574466, | |
| "grad_norm": 0.34867056077161274, | |
| "learning_rate": 5.5752215124564895e-05, | |
| "loss": 0.4165, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 2.1808510638297873, | |
| "grad_norm": 0.5189695063587118, | |
| "learning_rate": 5.547871155263955e-05, | |
| "loss": 0.403, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 2.1914893617021276, | |
| "grad_norm": 0.5349240957528729, | |
| "learning_rate": 5.5204354189224596e-05, | |
| "loss": 0.4076, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 2.202127659574468, | |
| "grad_norm": 0.4097332185439771, | |
| "learning_rate": 5.492915816761979e-05, | |
| "loss": 0.4055, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 2.2127659574468086, | |
| "grad_norm": 0.29969289622943385, | |
| "learning_rate": 5.465313866738454e-05, | |
| "loss": 0.4102, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 2.223404255319149, | |
| "grad_norm": 0.3210843691588522, | |
| "learning_rate": 5.4376310913500514e-05, | |
| "loss": 0.4082, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 2.2340425531914896, | |
| "grad_norm": 0.3342881154938907, | |
| "learning_rate": 5.409869017553199e-05, | |
| "loss": 0.4043, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 2.24468085106383, | |
| "grad_norm": 0.3152754379746835, | |
| "learning_rate": 5.382029176678345e-05, | |
| "loss": 0.4091, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 2.25531914893617, | |
| "grad_norm": 0.27611840786578706, | |
| "learning_rate": 5.354113104345503e-05, | |
| "loss": 0.4039, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 2.2659574468085104, | |
| "grad_norm": 0.34803624809617745, | |
| "learning_rate": 5.326122340379539e-05, | |
| "loss": 0.4063, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 2.276595744680851, | |
| "grad_norm": 0.36164081157802974, | |
| "learning_rate": 5.2980584287252456e-05, | |
| "loss": 0.4054, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 2.2872340425531914, | |
| "grad_norm": 0.28080091164784143, | |
| "learning_rate": 5.269922917362171e-05, | |
| "loss": 0.4137, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 2.297872340425532, | |
| "grad_norm": 0.3123559500548175, | |
| "learning_rate": 5.241717358219239e-05, | |
| "loss": 0.4136, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 2.3085106382978724, | |
| "grad_norm": 0.33068468215663827, | |
| "learning_rate": 5.213443307089144e-05, | |
| "loss": 0.4067, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 2.3191489361702127, | |
| "grad_norm": 0.2984606793541823, | |
| "learning_rate": 5.1851023235425366e-05, | |
| "loss": 0.3982, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 2.329787234042553, | |
| "grad_norm": 0.3545208178999491, | |
| "learning_rate": 5.156695970841997e-05, | |
| "loss": 0.41, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 2.3404255319148937, | |
| "grad_norm": 0.3590473616023012, | |
| "learning_rate": 5.128225815855805e-05, | |
| "loss": 0.4119, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 2.351063829787234, | |
| "grad_norm": 0.3815124462451615, | |
| "learning_rate": 5.099693428971522e-05, | |
| "loss": 0.4117, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 2.3617021276595747, | |
| "grad_norm": 0.3777633888640618, | |
| "learning_rate": 5.0711003840093583e-05, | |
| "loss": 0.4064, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 2.372340425531915, | |
| "grad_norm": 0.42841655218775976, | |
| "learning_rate": 5.042448258135371e-05, | |
| "loss": 0.4059, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 2.382978723404255, | |
| "grad_norm": 0.3957041429267785, | |
| "learning_rate": 5.013738631774463e-05, | |
| "loss": 0.4082, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 2.393617021276596, | |
| "grad_norm": 0.23548416366582237, | |
| "learning_rate": 4.984973088523216e-05, | |
| "loss": 0.4069, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 2.404255319148936, | |
| "grad_norm": 0.3532835233128077, | |
| "learning_rate": 4.9561532150625305e-05, | |
| "loss": 0.4123, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 2.4148936170212765, | |
| "grad_norm": 0.41182185945267635, | |
| "learning_rate": 4.927280601070114e-05, | |
| "loss": 0.4043, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 2.425531914893617, | |
| "grad_norm": 0.3444115639955698, | |
| "learning_rate": 4.898356839132793e-05, | |
| "loss": 0.4085, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 2.4361702127659575, | |
| "grad_norm": 0.3641718763669911, | |
| "learning_rate": 4.869383524658668e-05, | |
| "loss": 0.4044, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 2.4468085106382977, | |
| "grad_norm": 0.31325645363197496, | |
| "learning_rate": 4.840362255789112e-05, | |
| "loss": 0.4088, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 2.4574468085106385, | |
| "grad_norm": 0.22787584781999606, | |
| "learning_rate": 4.811294633310617e-05, | |
| "loss": 0.4072, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 2.4680851063829787, | |
| "grad_norm": 0.34212691771738396, | |
| "learning_rate": 4.782182260566498e-05, | |
| "loss": 0.4029, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 2.478723404255319, | |
| "grad_norm": 0.4204486154126815, | |
| "learning_rate": 4.7530267433684546e-05, | |
| "loss": 0.4082, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 2.4893617021276597, | |
| "grad_norm": 0.4183586609670886, | |
| "learning_rate": 4.723829689907993e-05, | |
| "loss": 0.4069, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.361420219603079, | |
| "learning_rate": 4.694592710667723e-05, | |
| "loss": 0.4051, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 2.5106382978723403, | |
| "grad_norm": 0.3230255284520481, | |
| "learning_rate": 4.665317418332521e-05, | |
| "loss": 0.4032, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 2.521276595744681, | |
| "grad_norm": 0.370284885999409, | |
| "learning_rate": 4.6360054277005826e-05, | |
| "loss": 0.406, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 2.5319148936170213, | |
| "grad_norm": 0.30097725112497414, | |
| "learning_rate": 4.606658355594344e-05, | |
| "loss": 0.3993, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 2.5425531914893615, | |
| "grad_norm": 0.32435130844112303, | |
| "learning_rate": 4.577277820771307e-05, | |
| "loss": 0.4102, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 2.5531914893617023, | |
| "grad_norm": 0.3080820117631687, | |
| "learning_rate": 4.5478654438347414e-05, | |
| "loss": 0.4026, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 2.5638297872340425, | |
| "grad_norm": 0.39816170983067595, | |
| "learning_rate": 4.518422847144304e-05, | |
| "loss": 0.406, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 2.574468085106383, | |
| "grad_norm": 0.3800310483317905, | |
| "learning_rate": 4.488951654726539e-05, | |
| "loss": 0.4073, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 2.5851063829787235, | |
| "grad_norm": 0.2671985596247953, | |
| "learning_rate": 4.4594534921853096e-05, | |
| "loss": 0.4038, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 2.595744680851064, | |
| "grad_norm": 0.3417581375350411, | |
| "learning_rate": 4.429929986612125e-05, | |
| "loss": 0.4047, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 2.6063829787234045, | |
| "grad_norm": 0.3200819491909366, | |
| "learning_rate": 4.400382766496394e-05, | |
| "loss": 0.4051, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 2.617021276595745, | |
| "grad_norm": 0.2966057678537604, | |
| "learning_rate": 4.3708134616355934e-05, | |
| "loss": 0.4106, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 2.627659574468085, | |
| "grad_norm": 0.31666189726009564, | |
| "learning_rate": 4.341223703045379e-05, | |
| "loss": 0.4044, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 2.6382978723404253, | |
| "grad_norm": 0.2379399207090575, | |
| "learning_rate": 4.311615122869613e-05, | |
| "loss": 0.4049, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 2.648936170212766, | |
| "grad_norm": 0.33070194627221655, | |
| "learning_rate": 4.281989354290341e-05, | |
| "loss": 0.4104, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 2.6595744680851063, | |
| "grad_norm": 0.29152996582110796, | |
| "learning_rate": 4.2523480314376996e-05, | |
| "loss": 0.4083, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 2.670212765957447, | |
| "grad_norm": 0.2768236406010446, | |
| "learning_rate": 4.222692789299794e-05, | |
| "loss": 0.4005, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 2.6808510638297873, | |
| "grad_norm": 0.32822073352952436, | |
| "learning_rate": 4.193025263632495e-05, | |
| "loss": 0.4098, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 2.6914893617021276, | |
| "grad_norm": 0.20759537070759246, | |
| "learning_rate": 4.163347090869227e-05, | |
| "loss": 0.4015, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 2.702127659574468, | |
| "grad_norm": 0.2450986247008019, | |
| "learning_rate": 4.133659908030699e-05, | |
| "loss": 0.4011, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 2.7127659574468086, | |
| "grad_norm": 0.3094748307247204, | |
| "learning_rate": 4.103965352634604e-05, | |
| "loss": 0.4128, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 2.723404255319149, | |
| "grad_norm": 0.2811649837424367, | |
| "learning_rate": 4.0742650626053004e-05, | |
| "loss": 0.409, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 2.7340425531914896, | |
| "grad_norm": 0.2666126452683894, | |
| "learning_rate": 4.044560676183462e-05, | |
| "loss": 0.4052, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 2.74468085106383, | |
| "grad_norm": 0.3043401200996446, | |
| "learning_rate": 4.014853831835721e-05, | |
| "loss": 0.4075, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 2.75531914893617, | |
| "grad_norm": 0.26121736766165093, | |
| "learning_rate": 3.985146168164281e-05, | |
| "loss": 0.4114, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 2.7659574468085104, | |
| "grad_norm": 0.25762450388003205, | |
| "learning_rate": 3.9554393238165386e-05, | |
| "loss": 0.4066, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 2.776595744680851, | |
| "grad_norm": 0.2541891436719679, | |
| "learning_rate": 3.9257349373947016e-05, | |
| "loss": 0.4059, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 2.7872340425531914, | |
| "grad_norm": 0.2722839322667676, | |
| "learning_rate": 3.896034647365398e-05, | |
| "loss": 0.4058, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 2.797872340425532, | |
| "grad_norm": 0.28452361022067413, | |
| "learning_rate": 3.866340091969303e-05, | |
| "loss": 0.4077, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 2.8085106382978724, | |
| "grad_norm": 0.2530140333177483, | |
| "learning_rate": 3.836652909130774e-05, | |
| "loss": 0.4093, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 2.8191489361702127, | |
| "grad_norm": 0.45406695750367465, | |
| "learning_rate": 3.806974736367507e-05, | |
| "loss": 0.4117, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 2.829787234042553, | |
| "grad_norm": 0.2988144349921366, | |
| "learning_rate": 3.7773072107002084e-05, | |
| "loss": 0.4023, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 2.8404255319148937, | |
| "grad_norm": 0.22739583662187962, | |
| "learning_rate": 3.747651968562302e-05, | |
| "loss": 0.4054, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 2.851063829787234, | |
| "grad_norm": 0.2636532613327371, | |
| "learning_rate": 3.718010645709661e-05, | |
| "loss": 0.4106, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 2.8617021276595747, | |
| "grad_norm": 0.24726890660848583, | |
| "learning_rate": 3.688384877130388e-05, | |
| "loss": 0.4059, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 2.872340425531915, | |
| "grad_norm": 0.2944014064456227, | |
| "learning_rate": 3.658776296954622e-05, | |
| "loss": 0.4051, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 2.882978723404255, | |
| "grad_norm": 0.22859295611193128, | |
| "learning_rate": 3.629186538364408e-05, | |
| "loss": 0.4108, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 2.8936170212765955, | |
| "grad_norm": 0.2939557054489036, | |
| "learning_rate": 3.5996172335036065e-05, | |
| "loss": 0.4004, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 2.904255319148936, | |
| "grad_norm": 0.26529631331687625, | |
| "learning_rate": 3.570070013387876e-05, | |
| "loss": 0.4076, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 2.9148936170212765, | |
| "grad_norm": 0.25501324633080297, | |
| "learning_rate": 3.540546507814692e-05, | |
| "loss": 0.3984, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 2.925531914893617, | |
| "grad_norm": 0.21459730505126537, | |
| "learning_rate": 3.5110483452734633e-05, | |
| "loss": 0.4025, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 2.9361702127659575, | |
| "grad_norm": 0.2046281102038189, | |
| "learning_rate": 3.4815771528556976e-05, | |
| "loss": 0.4066, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 2.9468085106382977, | |
| "grad_norm": 0.25983079535813713, | |
| "learning_rate": 3.452134556165259e-05, | |
| "loss": 0.4062, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 2.9574468085106385, | |
| "grad_norm": 0.18624346254163252, | |
| "learning_rate": 3.4227221792286945e-05, | |
| "loss": 0.4101, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 2.9680851063829787, | |
| "grad_norm": 0.25858622274413134, | |
| "learning_rate": 3.393341644405657e-05, | |
| "loss": 0.3966, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 2.978723404255319, | |
| "grad_norm": 0.21292012777230374, | |
| "learning_rate": 3.363994572299418e-05, | |
| "loss": 0.4061, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 2.9893617021276597, | |
| "grad_norm": 0.2509387412075419, | |
| "learning_rate": 3.3346825816674804e-05, | |
| "loss": 0.4108, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "grad_norm": 0.23091413343534087, | |
| "learning_rate": 3.305407289332279e-05, | |
| "loss": 0.3937, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 3.0106382978723403, | |
| "grad_norm": 0.20905420882080966, | |
| "learning_rate": 3.276170310092008e-05, | |
| "loss": 0.3778, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 3.021276595744681, | |
| "grad_norm": 0.23991076346040097, | |
| "learning_rate": 3.246973256631546e-05, | |
| "loss": 0.3798, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 3.0319148936170213, | |
| "grad_norm": 0.24449335128970107, | |
| "learning_rate": 3.217817739433502e-05, | |
| "loss": 0.3802, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 3.0425531914893615, | |
| "grad_norm": 0.245315113180501, | |
| "learning_rate": 3.1887053666893834e-05, | |
| "loss": 0.3875, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 3.0531914893617023, | |
| "grad_norm": 0.24057913704130401, | |
| "learning_rate": 3.159637744210888e-05, | |
| "loss": 0.3738, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 3.0638297872340425, | |
| "grad_norm": 0.2549241752014073, | |
| "learning_rate": 3.130616475341332e-05, | |
| "loss": 0.3814, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 3.074468085106383, | |
| "grad_norm": 0.21160294231186252, | |
| "learning_rate": 3.101643160867208e-05, | |
| "loss": 0.3815, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 3.0851063829787235, | |
| "grad_norm": 0.23805410062655133, | |
| "learning_rate": 3.072719398929887e-05, | |
| "loss": 0.3801, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 3.095744680851064, | |
| "grad_norm": 0.20195712835328383, | |
| "learning_rate": 3.0438467849374702e-05, | |
| "loss": 0.3797, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 3.106382978723404, | |
| "grad_norm": 0.23312007432367898, | |
| "learning_rate": 3.0150269114767862e-05, | |
| "loss": 0.3764, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 3.117021276595745, | |
| "grad_norm": 0.18274301420799274, | |
| "learning_rate": 2.9862613682255383e-05, | |
| "loss": 0.3754, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 3.127659574468085, | |
| "grad_norm": 0.2478292014777071, | |
| "learning_rate": 2.957551741864631e-05, | |
| "loss": 0.3767, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 3.1382978723404253, | |
| "grad_norm": 0.1927262119265681, | |
| "learning_rate": 2.928899615990643e-05, | |
| "loss": 0.375, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 3.148936170212766, | |
| "grad_norm": 0.2868229283783766, | |
| "learning_rate": 2.90030657102848e-05, | |
| "loss": 0.3808, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 3.1595744680851063, | |
| "grad_norm": 0.18547034376158922, | |
| "learning_rate": 2.8717741841441964e-05, | |
| "loss": 0.3767, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 3.1702127659574466, | |
| "grad_norm": 0.278737988337013, | |
| "learning_rate": 2.8433040291580053e-05, | |
| "loss": 0.3809, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 3.1808510638297873, | |
| "grad_norm": 0.1707915913816605, | |
| "learning_rate": 2.8148976764574648e-05, | |
| "loss": 0.377, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 3.1914893617021276, | |
| "grad_norm": 0.2487011269688432, | |
| "learning_rate": 2.7865566929108573e-05, | |
| "loss": 0.3838, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 3.202127659574468, | |
| "grad_norm": 0.17967682539404906, | |
| "learning_rate": 2.758282641780762e-05, | |
| "loss": 0.382, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 3.2127659574468086, | |
| "grad_norm": 0.21709328897403127, | |
| "learning_rate": 2.7300770826378302e-05, | |
| "loss": 0.3816, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 3.223404255319149, | |
| "grad_norm": 0.178520628301021, | |
| "learning_rate": 2.7019415712747558e-05, | |
| "loss": 0.3834, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 3.2340425531914896, | |
| "grad_norm": 0.21223809263671378, | |
| "learning_rate": 2.6738776596204624e-05, | |
| "loss": 0.3832, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 3.24468085106383, | |
| "grad_norm": 0.15649445787021687, | |
| "learning_rate": 2.6458868956544984e-05, | |
| "loss": 0.3726, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 3.25531914893617, | |
| "grad_norm": 0.1764738407571, | |
| "learning_rate": 2.6179708233216557e-05, | |
| "loss": 0.3831, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 3.2659574468085104, | |
| "grad_norm": 0.15542729747310566, | |
| "learning_rate": 2.590130982446802e-05, | |
| "loss": 0.3736, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 3.276595744680851, | |
| "grad_norm": 0.1747611677030782, | |
| "learning_rate": 2.5623689086499496e-05, | |
| "loss": 0.3793, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 3.2872340425531914, | |
| "grad_norm": 0.13212244563255485, | |
| "learning_rate": 2.5346861332615476e-05, | |
| "loss": 0.3767, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 3.297872340425532, | |
| "grad_norm": 0.17023514875738952, | |
| "learning_rate": 2.5070841832380212e-05, | |
| "loss": 0.3879, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 3.3085106382978724, | |
| "grad_norm": 0.15961770967934297, | |
| "learning_rate": 2.4795645810775414e-05, | |
| "loss": 0.3769, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 3.3191489361702127, | |
| "grad_norm": 0.18863144944962443, | |
| "learning_rate": 2.4521288447360457e-05, | |
| "loss": 0.3814, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 3.329787234042553, | |
| "grad_norm": 0.17296200115217117, | |
| "learning_rate": 2.424778487543512e-05, | |
| "loss": 0.38, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 3.3404255319148937, | |
| "grad_norm": 0.18291540188393604, | |
| "learning_rate": 2.3975150181204817e-05, | |
| "loss": 0.3736, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 3.351063829787234, | |
| "grad_norm": 0.1539347041742154, | |
| "learning_rate": 2.370339940294848e-05, | |
| "loss": 0.374, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 3.3617021276595747, | |
| "grad_norm": 0.17067354232177032, | |
| "learning_rate": 2.3432547530189033e-05, | |
| "loss": 0.375, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 3.372340425531915, | |
| "grad_norm": 0.16125243954444898, | |
| "learning_rate": 2.316260950286661e-05, | |
| "loss": 0.386, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 3.382978723404255, | |
| "grad_norm": 0.17706581816471947, | |
| "learning_rate": 2.2893600210514464e-05, | |
| "loss": 0.3859, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 3.393617021276596, | |
| "grad_norm": 0.19387187739363282, | |
| "learning_rate": 2.2625534491437672e-05, | |
| "loss": 0.3818, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 3.404255319148936, | |
| "grad_norm": 0.16113410459885513, | |
| "learning_rate": 2.2358427131894732e-05, | |
| "loss": 0.3762, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 3.4148936170212765, | |
| "grad_norm": 0.20116908168809555, | |
| "learning_rate": 2.2092292865281845e-05, | |
| "loss": 0.3803, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 3.425531914893617, | |
| "grad_norm": 0.16099148978300246, | |
| "learning_rate": 2.1827146371320334e-05, | |
| "loss": 0.3822, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 3.4361702127659575, | |
| "grad_norm": 0.18990059248934593, | |
| "learning_rate": 2.156300227524688e-05, | |
| "loss": 0.3825, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 3.4468085106382977, | |
| "grad_norm": 0.16540658424680887, | |
| "learning_rate": 2.1299875147006838e-05, | |
| "loss": 0.3781, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 3.4574468085106385, | |
| "grad_norm": 0.16752244552028583, | |
| "learning_rate": 2.10377795004505e-05, | |
| "loss": 0.3809, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 3.4680851063829787, | |
| "grad_norm": 0.1810597095974272, | |
| "learning_rate": 2.0776729792532652e-05, | |
| "loss": 0.3836, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 3.478723404255319, | |
| "grad_norm": 0.1417871274980451, | |
| "learning_rate": 2.0516740422515022e-05, | |
| "loss": 0.3878, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 3.4893617021276597, | |
| "grad_norm": 0.15754394939129449, | |
| "learning_rate": 2.0257825731172077e-05, | |
| "loss": 0.3833, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 3.5, | |
| "grad_norm": 0.12745856435079908, | |
| "learning_rate": 2.0000000000000012e-05, | |
| "loss": 0.3736, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 3.5106382978723403, | |
| "grad_norm": 0.13505830495843163, | |
| "learning_rate": 1.9743277450428962e-05, | |
| "loss": 0.3807, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 3.521276595744681, | |
| "grad_norm": 0.12857257976193204, | |
| "learning_rate": 1.9487672243038594e-05, | |
| "loss": 0.381, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 3.5319148936170213, | |
| "grad_norm": 0.13541243433649724, | |
| "learning_rate": 1.9233198476777003e-05, | |
| "loss": 0.385, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 3.5425531914893615, | |
| "grad_norm": 0.13836684138720814, | |
| "learning_rate": 1.897987018818302e-05, | |
| "loss": 0.3808, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 3.5531914893617023, | |
| "grad_norm": 0.12028403797052283, | |
| "learning_rate": 1.8727701350612026e-05, | |
| "loss": 0.3787, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 3.5638297872340425, | |
| "grad_norm": 0.13853549831902906, | |
| "learning_rate": 1.8476705873465097e-05, | |
| "loss": 0.3845, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 3.574468085106383, | |
| "grad_norm": 0.12529122214780986, | |
| "learning_rate": 1.8226897601421858e-05, | |
| "loss": 0.3773, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 3.5851063829787235, | |
| "grad_norm": 0.13718627686941767, | |
| "learning_rate": 1.7978290313676774e-05, | |
| "loss": 0.3771, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 3.595744680851064, | |
| "grad_norm": 0.13452451362504975, | |
| "learning_rate": 1.7730897723179144e-05, | |
| "loss": 0.3799, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 3.6063829787234045, | |
| "grad_norm": 0.13390152462264926, | |
| "learning_rate": 1.748473347587666e-05, | |
| "loss": 0.3784, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 3.617021276595745, | |
| "grad_norm": 0.1381149397634833, | |
| "learning_rate": 1.7239811149962756e-05, | |
| "loss": 0.3759, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 3.627659574468085, | |
| "grad_norm": 0.12941518047233325, | |
| "learning_rate": 1.6996144255127586e-05, | |
| "loss": 0.377, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 3.6382978723404253, | |
| "grad_norm": 0.14340217919626994, | |
| "learning_rate": 1.675374623181294e-05, | |
| "loss": 0.3799, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 3.648936170212766, | |
| "grad_norm": 0.11836717659820724, | |
| "learning_rate": 1.6512630450470784e-05, | |
| "loss": 0.3768, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 3.6595744680851063, | |
| "grad_norm": 0.15778520770347512, | |
| "learning_rate": 1.6272810210825794e-05, | |
| "loss": 0.3832, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 3.670212765957447, | |
| "grad_norm": 0.1204529845934934, | |
| "learning_rate": 1.6034298741141768e-05, | |
| "loss": 0.382, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 3.6808510638297873, | |
| "grad_norm": 0.15750579133093734, | |
| "learning_rate": 1.579710919749196e-05, | |
| "loss": 0.3798, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 3.6914893617021276, | |
| "grad_norm": 0.1386144145229538, | |
| "learning_rate": 1.5561254663033393e-05, | |
| "loss": 0.3764, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 3.702127659574468, | |
| "grad_norm": 0.1239231423498722, | |
| "learning_rate": 1.53267481472852e-05, | |
| "loss": 0.3827, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 3.7127659574468086, | |
| "grad_norm": 0.14157764395949052, | |
| "learning_rate": 1.5093602585411078e-05, | |
| "loss": 0.3773, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 3.723404255319149, | |
| "grad_norm": 0.140686345314232, | |
| "learning_rate": 1.4861830837505733e-05, | |
| "loss": 0.3784, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 3.7340425531914896, | |
| "grad_norm": 0.12320059023506465, | |
| "learning_rate": 1.4631445687885553e-05, | |
| "loss": 0.3844, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 3.74468085106383, | |
| "grad_norm": 0.14431820751164123, | |
| "learning_rate": 1.4402459844383451e-05, | |
| "loss": 0.3803, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 3.75531914893617, | |
| "grad_norm": 0.12143326106626687, | |
| "learning_rate": 1.4174885937647905e-05, | |
| "loss": 0.383, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 3.7659574468085104, | |
| "grad_norm": 0.11735894401221722, | |
| "learning_rate": 1.3948736520446246e-05, | |
| "loss": 0.3751, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 3.776595744680851, | |
| "grad_norm": 0.13185033729281154, | |
| "learning_rate": 1.372402406697229e-05, | |
| "loss": 0.3737, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 3.7872340425531914, | |
| "grad_norm": 0.1154428148825697, | |
| "learning_rate": 1.3500760972158223e-05, | |
| "loss": 0.378, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 3.797872340425532, | |
| "grad_norm": 0.12842073282310573, | |
| "learning_rate": 1.3278959550991011e-05, | |
| "loss": 0.3775, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 3.8085106382978724, | |
| "grad_norm": 0.12670592325854088, | |
| "learning_rate": 1.3058632037832957e-05, | |
| "loss": 0.3828, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 3.8191489361702127, | |
| "grad_norm": 0.11334725671446295, | |
| "learning_rate": 1.2839790585747008e-05, | |
| "loss": 0.3784, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 3.829787234042553, | |
| "grad_norm": 0.11759887567137367, | |
| "learning_rate": 1.2622447265826345e-05, | |
| "loss": 0.3823, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 3.8404255319148937, | |
| "grad_norm": 0.11200560690429617, | |
| "learning_rate": 1.2406614066528543e-05, | |
| "loss": 0.3839, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 3.851063829787234, | |
| "grad_norm": 0.10929402999457798, | |
| "learning_rate": 1.219230289301431e-05, | |
| "loss": 0.38, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 3.8617021276595747, | |
| "grad_norm": 0.12132649925473797, | |
| "learning_rate": 1.1979525566490845e-05, | |
| "loss": 0.3752, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 3.872340425531915, | |
| "grad_norm": 0.11204838908548817, | |
| "learning_rate": 1.176829382355973e-05, | |
| "loss": 0.3752, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 3.882978723404255, | |
| "grad_norm": 0.11317597743783404, | |
| "learning_rate": 1.1558619315569572e-05, | |
| "loss": 0.3815, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 3.8936170212765955, | |
| "grad_norm": 0.13136090336497436, | |
| "learning_rate": 1.1350513607973351e-05, | |
| "loss": 0.3839, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 3.904255319148936, | |
| "grad_norm": 0.11332184292598117, | |
| "learning_rate": 1.1143988179690441e-05, | |
| "loss": 0.3784, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 3.9148936170212765, | |
| "grad_norm": 0.12049799122378656, | |
| "learning_rate": 1.093905442247348e-05, | |
| "loss": 0.3737, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 3.925531914893617, | |
| "grad_norm": 0.12112505146396743, | |
| "learning_rate": 1.073572364027999e-05, | |
| "loss": 0.3784, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 3.9361702127659575, | |
| "grad_norm": 0.11152808514251403, | |
| "learning_rate": 1.0534007048648846e-05, | |
| "loss": 0.3836, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 3.9468085106382977, | |
| "grad_norm": 0.11026956462916394, | |
| "learning_rate": 1.0333915774081698e-05, | |
| "loss": 0.382, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 3.9574468085106385, | |
| "grad_norm": 0.10840535830420614, | |
| "learning_rate": 1.0135460853429166e-05, | |
| "loss": 0.3734, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 3.9680851063829787, | |
| "grad_norm": 0.1236335149703349, | |
| "learning_rate": 9.938653233282105e-06, | |
| "loss": 0.3763, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 3.978723404255319, | |
| "grad_norm": 0.12252596041380151, | |
| "learning_rate": 9.74350376936779e-06, | |
| "loss": 0.3716, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 3.9893617021276597, | |
| "grad_norm": 0.10540461633402395, | |
| "learning_rate": 9.550023225951124e-06, | |
| "loss": 0.3725, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.12520513195330457, | |
| "learning_rate": 9.358222275240884e-06, | |
| "loss": 0.3755, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 4.01063829787234, | |
| "grad_norm": 0.14847057489702098, | |
| "learning_rate": 9.168111496801071e-06, | |
| "loss": 0.3652, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 4.0212765957446805, | |
| "grad_norm": 0.11332943867336272, | |
| "learning_rate": 8.979701376967313e-06, | |
| "loss": 0.364, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 4.031914893617022, | |
| "grad_norm": 0.1232583568124227, | |
| "learning_rate": 8.793002308268521e-06, | |
| "loss": 0.3577, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 4.042553191489362, | |
| "grad_norm": 0.12780375441362188, | |
| "learning_rate": 8.608024588853561e-06, | |
| "loss": 0.3635, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 4.053191489361702, | |
| "grad_norm": 0.12259673347799972, | |
| "learning_rate": 8.424778421923258e-06, | |
| "loss": 0.3622, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 4.0638297872340425, | |
| "grad_norm": 0.11562199484437485, | |
| "learning_rate": 8.24327391516761e-06, | |
| "loss": 0.3619, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 4.074468085106383, | |
| "grad_norm": 0.11649371701938849, | |
| "learning_rate": 8.06352108020823e-06, | |
| "loss": 0.367, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 4.085106382978723, | |
| "grad_norm": 0.10570361375232744, | |
| "learning_rate": 7.885529832046134e-06, | |
| "loss": 0.3582, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 4.095744680851064, | |
| "grad_norm": 0.10461915141244181, | |
| "learning_rate": 7.709309988514824e-06, | |
| "loss": 0.3608, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 4.1063829787234045, | |
| "grad_norm": 0.11511232586029703, | |
| "learning_rate": 7.534871269738753e-06, | |
| "loss": 0.3647, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 4.117021276595745, | |
| "grad_norm": 0.10867329200636058, | |
| "learning_rate": 7.362223297597184e-06, | |
| "loss": 0.3629, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 4.127659574468085, | |
| "grad_norm": 0.10982240516813774, | |
| "learning_rate": 7.191375595193433e-06, | |
| "loss": 0.3691, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 4.138297872340425, | |
| "grad_norm": 0.11776263009120118, | |
| "learning_rate": 7.022337586329597e-06, | |
| "loss": 0.3617, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 4.148936170212766, | |
| "grad_norm": 0.10569398652654016, | |
| "learning_rate": 6.855118594986718e-06, | |
| "loss": 0.3567, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 4.159574468085107, | |
| "grad_norm": 0.10505309486877212, | |
| "learning_rate": 6.6897278448105405e-06, | |
| "loss": 0.3639, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 4.170212765957447, | |
| "grad_norm": 0.1079113458903354, | |
| "learning_rate": 6.526174458602681e-06, | |
| "loss": 0.368, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 4.180851063829787, | |
| "grad_norm": 0.11197666687109296, | |
| "learning_rate": 6.364467457817482e-06, | |
| "loss": 0.3587, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 4.191489361702128, | |
| "grad_norm": 0.11278688179148387, | |
| "learning_rate": 6.20461576206433e-06, | |
| "loss": 0.3611, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 4.202127659574468, | |
| "grad_norm": 0.11202771282958791, | |
| "learning_rate": 6.046628188615718e-06, | |
| "loss": 0.3632, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 4.212765957446808, | |
| "grad_norm": 0.098500856006095, | |
| "learning_rate": 5.890513451920843e-06, | |
| "loss": 0.3538, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 4.223404255319149, | |
| "grad_norm": 0.09784156477819876, | |
| "learning_rate": 5.736280163124974e-06, | |
| "loss": 0.365, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 4.23404255319149, | |
| "grad_norm": 0.10953538933768776, | |
| "learning_rate": 5.583936829594434e-06, | |
| "loss": 0.3603, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 4.24468085106383, | |
| "grad_norm": 0.10639360562326262, | |
| "learning_rate": 5.4334918544473436e-06, | |
| "loss": 0.3659, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 4.25531914893617, | |
| "grad_norm": 0.10211474883073758, | |
| "learning_rate": 5.284953536090131e-06, | |
| "loss": 0.3641, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 4.26595744680851, | |
| "grad_norm": 0.10574255358672346, | |
| "learning_rate": 5.1383300677598024e-06, | |
| "loss": 0.3643, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 4.276595744680851, | |
| "grad_norm": 0.11115010018265371, | |
| "learning_rate": 4.993629537071978e-06, | |
| "loss": 0.3701, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 4.287234042553192, | |
| "grad_norm": 0.09127212979081487, | |
| "learning_rate": 4.850859925574809e-06, | |
| "loss": 0.3662, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 4.297872340425532, | |
| "grad_norm": 0.08625180913725822, | |
| "learning_rate": 4.710029108308733e-06, | |
| "loss": 0.3548, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 4.308510638297872, | |
| "grad_norm": 0.10510607272882863, | |
| "learning_rate": 4.571144853372063e-06, | |
| "loss": 0.363, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 4.319148936170213, | |
| "grad_norm": 0.09294640053501395, | |
| "learning_rate": 4.434214821492542e-06, | |
| "loss": 0.3659, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 4.329787234042553, | |
| "grad_norm": 0.10322126590894673, | |
| "learning_rate": 4.299246565604756e-06, | |
| "loss": 0.3636, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 4.340425531914893, | |
| "grad_norm": 0.08513844389462451, | |
| "learning_rate": 4.166247530433531e-06, | |
| "loss": 0.361, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 4.351063829787234, | |
| "grad_norm": 0.09456400637267966, | |
| "learning_rate": 4.035225052083309e-06, | |
| "loss": 0.3631, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 4.361702127659575, | |
| "grad_norm": 0.09912377148014549, | |
| "learning_rate": 3.906186357633455e-06, | |
| "loss": 0.3627, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 4.372340425531915, | |
| "grad_norm": 0.1007261033413922, | |
| "learning_rate": 3.779138564739646e-06, | |
| "loss": 0.3625, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 4.382978723404255, | |
| "grad_norm": 0.08934724234816895, | |
| "learning_rate": 3.6540886812412547e-06, | |
| "loss": 0.3648, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 4.3936170212765955, | |
| "grad_norm": 0.0925468177451363, | |
| "learning_rate": 3.5310436047748263e-06, | |
| "loss": 0.3581, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 4.404255319148936, | |
| "grad_norm": 0.08919561633872664, | |
| "learning_rate": 3.4100101223935743e-06, | |
| "loss": 0.3571, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 4.414893617021277, | |
| "grad_norm": 0.09499084946810057, | |
| "learning_rate": 3.290994910193037e-06, | |
| "loss": 0.3579, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 4.425531914893617, | |
| "grad_norm": 0.09014647001183278, | |
| "learning_rate": 3.174004532942845e-06, | |
| "loss": 0.3689, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 4.4361702127659575, | |
| "grad_norm": 0.08581817647958025, | |
| "learning_rate": 3.059045443724582e-06, | |
| "loss": 0.3623, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 4.446808510638298, | |
| "grad_norm": 0.09478322839033952, | |
| "learning_rate": 2.946123983575846e-06, | |
| "loss": 0.3621, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 4.457446808510638, | |
| "grad_norm": 0.09127292341411823, | |
| "learning_rate": 2.8352463811404952e-06, | |
| "loss": 0.352, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 4.468085106382979, | |
| "grad_norm": 0.09314064563107814, | |
| "learning_rate": 2.726418752325084e-06, | |
| "loss": 0.3604, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 4.4787234042553195, | |
| "grad_norm": 0.08258953047242434, | |
| "learning_rate": 2.619647099961502e-06, | |
| "loss": 0.3613, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 4.48936170212766, | |
| "grad_norm": 0.08671649480194962, | |
| "learning_rate": 2.514937313475865e-06, | |
| "loss": 0.3619, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 4.5, | |
| "grad_norm": 0.08348749730392281, | |
| "learning_rate": 2.4122951685636674e-06, | |
| "loss": 0.362, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 4.51063829787234, | |
| "grad_norm": 0.08847421831587975, | |
| "learning_rate": 2.3117263268712e-06, | |
| "loss": 0.3643, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 4.5212765957446805, | |
| "grad_norm": 0.08541200353819858, | |
| "learning_rate": 2.2132363356832532e-06, | |
| "loss": 0.3617, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 4.531914893617021, | |
| "grad_norm": 0.09072202451972457, | |
| "learning_rate": 2.11683062761713e-06, | |
| "loss": 0.3637, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 4.542553191489362, | |
| "grad_norm": 0.08771913713301659, | |
| "learning_rate": 2.0225145203230044e-06, | |
| "loss": 0.3635, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 4.553191489361702, | |
| "grad_norm": 0.08227630624973846, | |
| "learning_rate": 1.930293216190586e-06, | |
| "loss": 0.3677, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 4.5638297872340425, | |
| "grad_norm": 0.08142816323777968, | |
| "learning_rate": 1.8401718020621694e-06, | |
| "loss": 0.3653, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 4.574468085106383, | |
| "grad_norm": 0.08626743211827775, | |
| "learning_rate": 1.7521552489520566e-06, | |
| "loss": 0.3685, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 4.585106382978723, | |
| "grad_norm": 0.08468387346146096, | |
| "learning_rate": 1.666248411772342e-06, | |
| "loss": 0.3651, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 4.595744680851064, | |
| "grad_norm": 0.08278070326775537, | |
| "learning_rate": 1.5824560290651404e-06, | |
| "loss": 0.3638, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 4.6063829787234045, | |
| "grad_norm": 0.08542627322845557, | |
| "learning_rate": 1.5007827227411942e-06, | |
| "loss": 0.3608, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 4.617021276595745, | |
| "grad_norm": 0.0850283799369069, | |
| "learning_rate": 1.4212329978249417e-06, | |
| "loss": 0.3694, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 4.627659574468085, | |
| "grad_norm": 0.08375767787904646, | |
| "learning_rate": 1.3438112422060256e-06, | |
| "loss": 0.3572, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 4.638297872340425, | |
| "grad_norm": 0.08181511297244645, | |
| "learning_rate": 1.2685217263972693e-06, | |
| "loss": 0.3625, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 4.648936170212766, | |
| "grad_norm": 0.08280046685526832, | |
| "learning_rate": 1.1953686032990964e-06, | |
| "loss": 0.3604, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 4.659574468085106, | |
| "grad_norm": 0.07768111306576052, | |
| "learning_rate": 1.124355907970487e-06, | |
| "loss": 0.3642, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 4.670212765957447, | |
| "grad_norm": 0.08142474629210497, | |
| "learning_rate": 1.0554875574063784e-06, | |
| "loss": 0.3645, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 4.680851063829787, | |
| "grad_norm": 0.0795668987754362, | |
| "learning_rate": 9.887673503216422e-07, | |
| "loss": 0.3663, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 4.691489361702128, | |
| "grad_norm": 0.07508730919255964, | |
| "learning_rate": 9.241989669415097e-07, | |
| "loss": 0.3661, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 4.702127659574468, | |
| "grad_norm": 0.08240799525472663, | |
| "learning_rate": 8.61785968798623e-07, | |
| "loss": 0.3608, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 4.712765957446808, | |
| "grad_norm": 0.07849402490031887, | |
| "learning_rate": 8.015317985365301e-07, | |
| "loss": 0.3616, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 4.723404255319149, | |
| "grad_norm": 0.07995230536661296, | |
| "learning_rate": 7.434397797198367e-07, | |
| "loss": 0.3599, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 4.73404255319149, | |
| "grad_norm": 0.08161632016821302, | |
| "learning_rate": 6.875131166508553e-07, | |
| "loss": 0.36, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 4.74468085106383, | |
| "grad_norm": 0.07946253233083479, | |
| "learning_rate": 6.337548941928839e-07, | |
| "loss": 0.3626, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 4.75531914893617, | |
| "grad_norm": 0.07786315052046529, | |
| "learning_rate": 5.821680776000049e-07, | |
| "loss": 0.3619, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 4.76595744680851, | |
| "grad_norm": 0.07728112424595031, | |
| "learning_rate": 5.32755512353571e-07, | |
| "loss": 0.3607, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 4.776595744680851, | |
| "grad_norm": 0.07848088350738662, | |
| "learning_rate": 4.8551992400522e-07, | |
| "loss": 0.361, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 4.787234042553192, | |
| "grad_norm": 0.07839104640568627, | |
| "learning_rate": 4.4046391802655463e-07, | |
| "loss": 0.3597, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 4.797872340425532, | |
| "grad_norm": 0.0757611098723252, | |
| "learning_rate": 3.975899796654137e-07, | |
| "loss": 0.3671, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 4.808510638297872, | |
| "grad_norm": 0.07660526674986573, | |
| "learning_rate": 3.569004738087989e-07, | |
| "loss": 0.3589, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 4.819148936170213, | |
| "grad_norm": 0.07303253050833616, | |
| "learning_rate": 3.183976448524106e-07, | |
| "loss": 0.363, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 4.829787234042553, | |
| "grad_norm": 0.0754318214355068, | |
| "learning_rate": 2.8208361657688474e-07, | |
| "loss": 0.3643, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 4.840425531914894, | |
| "grad_norm": 0.07876372271022951, | |
| "learning_rate": 2.479603920306106e-07, | |
| "loss": 0.3631, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 4.851063829787234, | |
| "grad_norm": 0.07633688369964613, | |
| "learning_rate": 2.1602985341925953e-07, | |
| "loss": 0.3667, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 4.861702127659575, | |
| "grad_norm": 0.07789291049580865, | |
| "learning_rate": 1.8629376200197004e-07, | |
| "loss": 0.3626, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 4.872340425531915, | |
| "grad_norm": 0.07875357482220077, | |
| "learning_rate": 1.5875375799419e-07, | |
| "loss": 0.3634, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 4.882978723404255, | |
| "grad_norm": 0.0786747469968429, | |
| "learning_rate": 1.3341136047719805e-07, | |
| "loss": 0.3643, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 4.8936170212765955, | |
| "grad_norm": 0.07411973494338685, | |
| "learning_rate": 1.1026796731433475e-07, | |
| "loss": 0.3619, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 4.904255319148936, | |
| "grad_norm": 0.07325052452508983, | |
| "learning_rate": 8.932485507387345e-08, | |
| "loss": 0.3538, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 4.914893617021277, | |
| "grad_norm": 0.07867411963780692, | |
| "learning_rate": 7.058317895861866e-08, | |
| "loss": 0.3627, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 4.925531914893617, | |
| "grad_norm": 0.07601171107784903, | |
| "learning_rate": 5.40439727421882e-08, | |
| "loss": 0.3645, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 4.9361702127659575, | |
| "grad_norm": 0.07741148103950671, | |
| "learning_rate": 3.970814871197437e-08, | |
| "loss": 0.3611, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 4.946808510638298, | |
| "grad_norm": 0.07590962518579049, | |
| "learning_rate": 2.75764976188464e-08, | |
| "loss": 0.3594, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 4.957446808510638, | |
| "grad_norm": 0.07705705973848143, | |
| "learning_rate": 1.764968863351424e-08, | |
| "loss": 0.3619, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 4.968085106382979, | |
| "grad_norm": 0.0738876803957471, | |
| "learning_rate": 9.928269309638083e-09, | |
| "loss": 0.3604, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 4.9787234042553195, | |
| "grad_norm": 0.07315503801724814, | |
| "learning_rate": 4.412665553594764e-09, | |
| "loss": 0.3638, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 4.98936170212766, | |
| "grad_norm": 0.07954351258156074, | |
| "learning_rate": 1.103181601020964e-09, | |
| "loss": 0.3628, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "grad_norm": 0.07533000239349877, | |
| "learning_rate": 0.0, | |
| "loss": 0.3558, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 5.0, | |
| "step": 470, | |
| "total_flos": 7884786861342720.0, | |
| "train_loss": 0.4284587557011462, | |
| "train_runtime": 6272.7597, | |
| "train_samples_per_second": 38.218, | |
| "train_steps_per_second": 0.075 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 470, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 7884786861342720.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
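
For readers reconstructing the run from this log: the logged "learning_rate" values are consistent with the standard linear-warmup-then-cosine-decay schedule. The short sketch below reproduces them; the peak learning rate (8e-5) and warmup length (47 steps, i.e. 10% of the 470 total steps) are inferred from the logged values and are assumptions, not fields stored in the file.

```python
# Illustrative sketch only (not part of the trainer_state.json above): reproduce the
# logged "learning_rate" column with linear warmup followed by cosine decay to 0.
import math

MAX_STEPS = 470      # from "max_steps" in the state above
WARMUP_STEPS = 47    # assumed: 10% warmup, consistent with step 1 lr = 8e-5 / 47
PEAK_LR = 8e-5       # assumed peak, consistent with the logged values

def lr_at(step: int) -> float:
    """Linear warmup to PEAK_LR, then cosine decay to 0 at MAX_STEPS."""
    if step < WARMUP_STEPS:
        return PEAK_LR * step / WARMUP_STEPS
    progress = (step - WARMUP_STEPS) / (MAX_STEPS - WARMUP_STEPS)
    return PEAK_LR * 0.5 * (1.0 + math.cos(math.pi * progress))

# Spot-checks against the log:
print(lr_at(329))   # ~2.0e-05, matching the value logged at epoch 3.5
print(lr_at(470))   # 0.0, matching the final logged learning_rate
```

The spot-checked steps (329 and 470) match the logged values to floating-point precision, which is why the warmup/peak parameters above are stated as inferences rather than read from the file.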