{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1435,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 4.5454545454545457e-07, |
|
"loss": 2.0365, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 9.090909090909091e-07, |
|
"loss": 2.0545, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.3636363636363636e-06, |
|
"loss": 2.0566, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 1.8181818181818183e-06, |
|
"loss": 2.0856, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.2727272727272728e-06, |
|
"loss": 2.0672, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 2.7272727272727272e-06, |
|
"loss": 2.0084, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.0, |
|
"learning_rate": 3.181818181818182e-06, |
|
"loss": 2.0725, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 3.6363636363636366e-06, |
|
"loss": 1.9971, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.0909090909090915e-06, |
|
"loss": 1.9729, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 4.5454545454545455e-06, |
|
"loss": 1.9516, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5e-06, |
|
"loss": 1.8939, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.4545454545454545e-06, |
|
"loss": 1.9471, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 5.90909090909091e-06, |
|
"loss": 1.8715, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.363636363636364e-06, |
|
"loss": 1.829, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 6.818181818181818e-06, |
|
"loss": 1.8492, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.272727272727273e-06, |
|
"loss": 1.8362, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 7.727272727272727e-06, |
|
"loss": 1.8159, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.181818181818183e-06, |
|
"loss": 1.8037, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 8.636363636363637e-06, |
|
"loss": 1.8073, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.090909090909091e-06, |
|
"loss": 1.8474, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.01, |
|
"learning_rate": 9.545454545454547e-06, |
|
"loss": 1.8153, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1e-05, |
|
"loss": 1.7907, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0454545454545455e-05, |
|
"loss": 1.8268, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.0909090909090909e-05, |
|
"loss": 1.7755, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.1363636363636366e-05, |
|
"loss": 1.7727, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.181818181818182e-05, |
|
"loss": 1.7677, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2272727272727274e-05, |
|
"loss": 1.7951, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.2727272727272728e-05, |
|
"loss": 1.7916, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.3181818181818183e-05, |
|
"loss": 1.7861, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.3636363636363637e-05, |
|
"loss": 1.7344, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4090909090909092e-05, |
|
"loss": 1.7898, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.4545454545454546e-05, |
|
"loss": 1.7743, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5000000000000002e-05, |
|
"loss": 1.7049, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.5454545454545454e-05, |
|
"loss": 1.7398, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 1.590909090909091e-05, |
|
"loss": 1.733, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.6363636363636366e-05, |
|
"loss": 1.7491, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.681818181818182e-05, |
|
"loss": 1.7568, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.7272727272727274e-05, |
|
"loss": 1.7299, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.772727272727273e-05, |
|
"loss": 1.701, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8181818181818182e-05, |
|
"loss": 1.7008, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.8636363636363638e-05, |
|
"loss": 1.7157, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9090909090909094e-05, |
|
"loss": 1.7319, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9545454545454546e-05, |
|
"loss": 1.7477, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 2e-05, |
|
"loss": 1.6897, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9999974495588607e-05, |
|
"loss": 1.6833, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.999989798248451e-05, |
|
"loss": 1.7231, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9999770461077993e-05, |
|
"loss": 1.7013, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9999591932019537e-05, |
|
"loss": 1.6652, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.9999362396219793e-05, |
|
"loss": 1.756, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.03, |
|
"learning_rate": 1.99990818548496e-05, |
|
"loss": 1.6705, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9998750309339958e-05, |
|
"loss": 1.7105, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9998367761382048e-05, |
|
"loss": 1.7006, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9997934212927197e-05, |
|
"loss": 1.689, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.999744966618689e-05, |
|
"loss": 1.6968, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9996914123632743e-05, |
|
"loss": 1.6889, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.999632758799649e-05, |
|
"loss": 1.7045, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9995690062269985e-05, |
|
"loss": 1.7144, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9995001549705172e-05, |
|
"loss": 1.6868, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.999426205381407e-05, |
|
"loss": 1.6944, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.999347157836876e-05, |
|
"loss": 1.6587, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.9992630127401366e-05, |
|
"loss": 1.6764, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.999173770520403e-05, |
|
"loss": 1.7335, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.999079431632889e-05, |
|
"loss": 1.6862, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.04, |
|
"learning_rate": 1.998979996558807e-05, |
|
"loss": 1.6952, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9988754658053624e-05, |
|
"loss": 1.6376, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9987658399057554e-05, |
|
"loss": 1.7031, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.998651119419174e-05, |
|
"loss": 1.6588, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.998531304930794e-05, |
|
"loss": 1.711, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9984063970517757e-05, |
|
"loss": 1.6889, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9982763964192586e-05, |
|
"loss": 1.6664, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.998141303696361e-05, |
|
"loss": 1.6618, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.998001119572175e-05, |
|
"loss": 1.6767, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9978558447617635e-05, |
|
"loss": 1.6762, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.9977054800061558e-05, |
|
"loss": 1.7072, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.997550026072345e-05, |
|
"loss": 1.6879, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.997389483753283e-05, |
|
"loss": 1.68, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.997223853867878e-05, |
|
"loss": 1.6904, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 1.997053137260988e-05, |
|
"loss": 1.6716, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9968773348034183e-05, |
|
"loss": 1.6718, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.996696447391917e-05, |
|
"loss": 1.6846, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9965104759491693e-05, |
|
"loss": 1.7067, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9963194214237935e-05, |
|
"loss": 1.6361, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.996123284790336e-05, |
|
"loss": 1.7163, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.995922067049267e-05, |
|
"loss": 1.6533, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9957157692269746e-05, |
|
"loss": 1.6335, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9955043923757597e-05, |
|
"loss": 1.6288, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9952879375738302e-05, |
|
"loss": 1.6376, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.995066405925297e-05, |
|
"loss": 1.6908, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.994839798560167e-05, |
|
"loss": 1.6697, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.994608116634338e-05, |
|
"loss": 1.6703, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9943713613295916e-05, |
|
"loss": 1.644, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9941295338535885e-05, |
|
"loss": 1.651, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.06, |
|
"learning_rate": 1.9938826354398633e-05, |
|
"loss": 1.6688, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9936306673478147e-05, |
|
"loss": 1.6847, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.993373630862703e-05, |
|
"loss": 1.6274, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9931115272956405e-05, |
|
"loss": 1.7461, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.992844357983587e-05, |
|
"loss": 1.6705, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9925721242893415e-05, |
|
"loss": 1.6894, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9922948276015362e-05, |
|
"loss": 1.6949, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.992012469334629e-05, |
|
"loss": 1.6221, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9917250509288957e-05, |
|
"loss": 1.6939, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.991432573850424e-05, |
|
"loss": 1.6604, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.991135039591105e-05, |
|
"loss": 1.6738, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9908324496686262e-05, |
|
"loss": 1.655, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9905248056264626e-05, |
|
"loss": 1.6756, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.990212109033871e-05, |
|
"loss": 1.6534, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 1.9898943614858795e-05, |
|
"loss": 1.6928, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9895715646032806e-05, |
|
"loss": 1.649, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.989243720032624e-05, |
|
"loss": 1.6695, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9889108294462054e-05, |
|
"loss": 1.6752, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.988572894542061e-05, |
|
"loss": 1.6838, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.988229917043957e-05, |
|
"loss": 1.6674, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.987881898701381e-05, |
|
"loss": 1.6954, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.987528841289534e-05, |
|
"loss": 1.6262, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.98717074660932e-05, |
|
"loss": 1.6642, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9868076164873372e-05, |
|
"loss": 1.66, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9864394527758707e-05, |
|
"loss": 1.6778, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9860662573528798e-05, |
|
"loss": 1.6475, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9856880321219903e-05, |
|
"loss": 1.6805, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9853047790124845e-05, |
|
"loss": 1.6558, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.08, |
|
"learning_rate": 1.9849164999792915e-05, |
|
"loss": 1.6769, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9845231970029774e-05, |
|
"loss": 1.7058, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9841248720897338e-05, |
|
"loss": 1.6534, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9837215272713694e-05, |
|
"loss": 1.6823, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9833131646052984e-05, |
|
"loss": 1.6857, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.982899786174531e-05, |
|
"loss": 1.7035, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9824813940876618e-05, |
|
"loss": 1.6148, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9820579904788597e-05, |
|
"loss": 1.6817, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9816295775078564e-05, |
|
"loss": 1.6904, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9811961573599364e-05, |
|
"loss": 1.6614, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9807577322459246e-05, |
|
"loss": 1.6663, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9803143044021757e-05, |
|
"loss": 1.6871, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9798658760905633e-05, |
|
"loss": 1.6642, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9794124495984675e-05, |
|
"loss": 1.6952, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.978954027238763e-05, |
|
"loss": 1.6006, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 1.9784906113498086e-05, |
|
"loss": 1.6386, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9780222042954343e-05, |
|
"loss": 1.6474, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.977548808464929e-05, |
|
"loss": 1.6298, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9770704262730292e-05, |
|
"loss": 1.6823, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9765870601599064e-05, |
|
"loss": 1.6591, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9760987125911536e-05, |
|
"loss": 1.6399, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9756053860577756e-05, |
|
"loss": 1.6259, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9751070830761715e-05, |
|
"loss": 1.634, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9746038061881267e-05, |
|
"loss": 1.6329, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.974095557960798e-05, |
|
"loss": 1.6721, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9735823409866986e-05, |
|
"loss": 1.6369, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9730641578836887e-05, |
|
"loss": 1.6647, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.972541011294959e-05, |
|
"loss": 1.6762, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9720129038890187e-05, |
|
"loss": 1.6009, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.1, |
|
"learning_rate": 1.9714798383596815e-05, |
|
"loss": 1.6468, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9709418174260523e-05, |
|
"loss": 1.6682, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.970398843832512e-05, |
|
"loss": 1.63, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.969850920348705e-05, |
|
"loss": 1.6754, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.969298049769525e-05, |
|
"loss": 1.641, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9687402349150992e-05, |
|
"loss": 1.6584, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9681774786307753e-05, |
|
"loss": 1.6875, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9676097837871078e-05, |
|
"loss": 1.6284, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9670371532798406e-05, |
|
"loss": 1.6342, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9664595900298947e-05, |
|
"loss": 1.6719, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.965877096983352e-05, |
|
"loss": 1.6473, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9652896771114416e-05, |
|
"loss": 1.6012, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.964697333410522e-05, |
|
"loss": 1.6521, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.96410006890207e-05, |
|
"loss": 1.6474, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.9634978866326607e-05, |
|
"loss": 1.6501, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 1.962890789673955e-05, |
|
"loss": 1.6197, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9622787811226834e-05, |
|
"loss": 1.625, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.961661864100629e-05, |
|
"loss": 1.6784, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9610400417546133e-05, |
|
"loss": 1.6368, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.960413317256479e-05, |
|
"loss": 1.6683, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9597816938030735e-05, |
|
"loss": 1.6466, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.959145174616234e-05, |
|
"loss": 1.6057, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9585037629427697e-05, |
|
"loss": 1.6649, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.957857462054446e-05, |
|
"loss": 1.6408, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9572062752479684e-05, |
|
"loss": 1.66, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9565502058449634e-05, |
|
"loss": 1.6668, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.955889257191964e-05, |
|
"loss": 1.6344, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.9552234326603912e-05, |
|
"loss": 1.6716, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.954552735646538e-05, |
|
"loss": 1.6554, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.12, |
|
"learning_rate": 1.953877169571551e-05, |
|
"loss": 1.6514, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9531967378814122e-05, |
|
"loss": 1.7037, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.952511444046925e-05, |
|
"loss": 1.6398, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9518212915636916e-05, |
|
"loss": 1.6702, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9511262839520988e-05, |
|
"loss": 1.6538, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.950426424757299e-05, |
|
"loss": 1.6556, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9497217175491908e-05, |
|
"loss": 1.6311, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9490121659224034e-05, |
|
"loss": 1.6735, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9482977734962753e-05, |
|
"loss": 1.6992, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9475785439148392e-05, |
|
"loss": 1.6556, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9468544808468e-05, |
|
"loss": 1.6402, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9461255879855182e-05, |
|
"loss": 1.6778, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9453918690489904e-05, |
|
"loss": 1.5854, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9446533277798307e-05, |
|
"loss": 1.6348, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.13, |
|
"learning_rate": 1.9439099679452512e-05, |
|
"loss": 1.6367, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9431617933370425e-05, |
|
"loss": 1.6657, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9424088077715557e-05, |
|
"loss": 1.6424, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9416510150896816e-05, |
|
"loss": 1.6965, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9408884191568305e-05, |
|
"loss": 1.6734, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9401210238629155e-05, |
|
"loss": 1.6907, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9393488331223293e-05, |
|
"loss": 1.6398, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9385718508739263e-05, |
|
"loss": 1.65, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.937790081081001e-05, |
|
"loss": 1.6113, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.937003527731269e-05, |
|
"loss": 1.6624, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9362121948368465e-05, |
|
"loss": 1.6128, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.93541608643423e-05, |
|
"loss": 1.6114, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.934615206584274e-05, |
|
"loss": 1.6207, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9338095593721727e-05, |
|
"loss": 1.6407, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.9329991489074376e-05, |
|
"loss": 1.6067, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.932183979323877e-05, |
|
"loss": 1.6654, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.931364054779575e-05, |
|
"loss": 1.6859, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9305393794568706e-05, |
|
"loss": 1.6345, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.929709957562335e-05, |
|
"loss": 1.6608, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9288757933267518e-05, |
|
"loss": 1.6543, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9280368910050943e-05, |
|
"loss": 1.6737, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9271932548765052e-05, |
|
"loss": 1.6466, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9263448892442723e-05, |
|
"loss": 1.6113, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9254917984358093e-05, |
|
"loss": 1.6581, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9246339868026322e-05, |
|
"loss": 1.602, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9237714587203363e-05, |
|
"loss": 1.652, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9229042185885768e-05, |
|
"loss": 1.6357, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9220322708310428e-05, |
|
"loss": 1.691, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.9211556198954375e-05, |
|
"loss": 1.6393, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"learning_rate": 1.920274270253454e-05, |
|
"loss": 1.6426, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9193882264007532e-05, |
|
"loss": 1.6583, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.91849749285694e-05, |
|
"loss": 1.6112, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9176020741655424e-05, |
|
"loss": 1.6335, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9167019748939847e-05, |
|
"loss": 1.6614, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9157971996335676e-05, |
|
"loss": 1.6685, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9148877529994437e-05, |
|
"loss": 1.6277, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9139736396305927e-05, |
|
"loss": 1.6381, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.913054864189799e-05, |
|
"loss": 1.6556, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9121314313636288e-05, |
|
"loss": 1.6668, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9112033458624037e-05, |
|
"loss": 1.6614, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9102706124201785e-05, |
|
"loss": 1.6153, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9093332357947172e-05, |
|
"loss": 1.6453, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.9083912207674672e-05, |
|
"loss": 1.6536, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 1.907444572143536e-05, |
|
"loss": 1.6439, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9064932947516678e-05, |
|
"loss": 1.6314, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9055373934442153e-05, |
|
"loss": 1.6311, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9045768730971198e-05, |
|
"loss": 1.6024, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9036117386098816e-05, |
|
"loss": 1.6519, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9026419949055382e-05, |
|
"loss": 1.6191, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.901667646930639e-05, |
|
"loss": 1.6572, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.9006886996552166e-05, |
|
"loss": 1.6553, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8997051580727673e-05, |
|
"loss": 1.6317, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8987170272002206e-05, |
|
"loss": 1.6181, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8977243120779152e-05, |
|
"loss": 1.7047, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.896727017769575e-05, |
|
"loss": 1.6291, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.89572514936228e-05, |
|
"loss": 1.6362, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8947187119664436e-05, |
|
"loss": 1.6569, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.893707710715784e-05, |
|
"loss": 1.6264, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"learning_rate": 1.8926921507672996e-05, |
|
"loss": 1.6361, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8916720373012425e-05, |
|
"loss": 1.6407, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8906473755210915e-05, |
|
"loss": 1.6415, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8896181706535252e-05, |
|
"loss": 1.6542, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8885844279483967e-05, |
|
"loss": 1.6066, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8875461526787058e-05, |
|
"loss": 1.6572, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8865033501405727e-05, |
|
"loss": 1.6423, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8854560256532098e-05, |
|
"loss": 1.6675, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.884404184558897e-05, |
|
"loss": 1.6618, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.883347832222951e-05, |
|
"loss": 1.6533, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.882286974033701e-05, |
|
"loss": 1.6644, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8812216154024596e-05, |
|
"loss": 1.609, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.880151761763496e-05, |
|
"loss": 1.6464, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8790774185740077e-05, |
|
"loss": 1.5785, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.8779985913140927e-05, |
|
"loss": 1.603, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8769152854867218e-05, |
|
"loss": 1.6247, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8758275066177103e-05, |
|
"loss": 1.6052, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.874735260255691e-05, |
|
"loss": 1.6542, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.873638551972083e-05, |
|
"loss": 1.6673, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8725373873610667e-05, |
|
"loss": 1.6332, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8714317720395526e-05, |
|
"loss": 1.6241, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8703217116471554e-05, |
|
"loss": 1.6645, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8692072118461617e-05, |
|
"loss": 1.6449, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.868088278321504e-05, |
|
"loss": 1.65, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.86696491678073e-05, |
|
"loss": 1.6187, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8658371329539752e-05, |
|
"loss": 1.6403, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8647049325939322e-05, |
|
"loss": 1.6159, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8635683214758213e-05, |
|
"loss": 1.6592, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"learning_rate": 1.8624273053973624e-05, |
|
"loss": 1.6522, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8612818901787442e-05, |
|
"loss": 1.614, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8601320816625947e-05, |
|
"loss": 1.6527, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8589778857139517e-05, |
|
"loss": 1.6656, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.857819308220233e-05, |
|
"loss": 1.6697, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.856656355091206e-05, |
|
"loss": 1.6302, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8554890322589578e-05, |
|
"loss": 1.6221, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.854317345677865e-05, |
|
"loss": 1.6282, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8531413013245622e-05, |
|
"loss": 1.6277, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.851960905197914e-05, |
|
"loss": 1.6381, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.850776163318981e-05, |
|
"loss": 1.5964, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8495870817309932e-05, |
|
"loss": 1.6171, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8483936664993152e-05, |
|
"loss": 1.6441, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8471959237114178e-05, |
|
"loss": 1.6039, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.845993859476846e-05, |
|
"loss": 1.6281, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 1.8447874799271875e-05, |
|
"loss": 1.6623, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8435767912160427e-05, |
|
"loss": 1.6619, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8423617995189924e-05, |
|
"loss": 1.6681, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8411425110335663e-05, |
|
"loss": 1.6419, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8399189319792107e-05, |
|
"loss": 1.6201, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8386910685972593e-05, |
|
"loss": 1.6115, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.837458927150898e-05, |
|
"loss": 1.5913, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.836222513925135e-05, |
|
"loss": 1.6386, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8349818352267695e-05, |
|
"loss": 1.6896, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.833736897384357e-05, |
|
"loss": 1.6354, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8324877067481782e-05, |
|
"loss": 1.657, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.831234269690209e-05, |
|
"loss": 1.634, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.829976592604083e-05, |
|
"loss": 1.6393, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8287146819050633e-05, |
|
"loss": 1.6509, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"learning_rate": 1.8274485440300084e-05, |
|
"loss": 1.6965, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.826178185437338e-05, |
|
"loss": 1.6445, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8249036126070015e-05, |
|
"loss": 1.6339, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8236248320404454e-05, |
|
"loss": 1.6185, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8223418502605788e-05, |
|
"loss": 1.606, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.82105467381174e-05, |
|
"loss": 1.6309, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8197633092596656e-05, |
|
"loss": 1.6566, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8184677631914533e-05, |
|
"loss": 1.6121, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8171680422155318e-05, |
|
"loss": 1.6365, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.815864152961624e-05, |
|
"loss": 1.6262, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8145561020807162e-05, |
|
"loss": 1.6201, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8132438962450212e-05, |
|
"loss": 1.6105, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8119275421479474e-05, |
|
"loss": 1.6428, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8106070465040618e-05, |
|
"loss": 1.6528, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"learning_rate": 1.8092824160490568e-05, |
|
"loss": 1.6227, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8079536575397166e-05, |
|
"loss": 1.6215, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.806620777753882e-05, |
|
"loss": 1.6043, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8052837834904163e-05, |
|
"loss": 1.6257, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8039426815691688e-05, |
|
"loss": 1.6438, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.802597478830944e-05, |
|
"loss": 1.6552, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.8012481821374614e-05, |
|
"loss": 1.6236, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.799894798371325e-05, |
|
"loss": 1.6688, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.798537334435986e-05, |
|
"loss": 1.6246, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.797175797255709e-05, |
|
"loss": 1.6341, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.7958101937755335e-05, |
|
"loss": 1.5884, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.794440530961243e-05, |
|
"loss": 1.6229, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.793066815799326e-05, |
|
"loss": 1.6315, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.7916890552969417e-05, |
|
"loss": 1.6443, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.7903072564818847e-05, |
|
"loss": 1.5758, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 1.7889214264025475e-05, |
|
"loss": 1.6148, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7875315721278864e-05, |
|
"loss": 1.649, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7861377007473848e-05, |
|
"loss": 1.6562, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7847398193710163e-05, |
|
"loss": 1.6357, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.783337935129209e-05, |
|
"loss": 1.6316, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7819320551728095e-05, |
|
"loss": 1.6306, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.780522186673046e-05, |
|
"loss": 1.6314, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.779108336821492e-05, |
|
"loss": 1.628, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7776905128300287e-05, |
|
"loss": 1.6286, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7762687219308095e-05, |
|
"loss": 1.5872, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7748429713762227e-05, |
|
"loss": 1.6216, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.773413268438854e-05, |
|
"loss": 1.6538, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7719796204114495e-05, |
|
"loss": 1.6254, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7705420346068788e-05, |
|
"loss": 1.6248, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"learning_rate": 1.7691005183580985e-05, |
|
"loss": 1.6266, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7676550790181132e-05, |
|
"loss": 1.6344, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.766205723959938e-05, |
|
"loss": 1.666, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7647524605765633e-05, |
|
"loss": 1.6263, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7632952962809145e-05, |
|
"loss": 1.609, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7618342385058147e-05, |
|
"loss": 1.598, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7603692947039478e-05, |
|
"loss": 1.6, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7589004723478194e-05, |
|
"loss": 1.6481, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7574277789297202e-05, |
|
"loss": 1.6047, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7559512219616852e-05, |
|
"loss": 1.5625, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7544708089754583e-05, |
|
"loss": 1.6227, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.752986547522451e-05, |
|
"loss": 1.6455, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.751498445173707e-05, |
|
"loss": 1.6003, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7500065095198612e-05, |
|
"loss": 1.6164, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 1.7485107481711014e-05, |
|
"loss": 1.625, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.74701116875713e-05, |
|
"loss": 1.6485, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7455077789271255e-05, |
|
"loss": 1.6465, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7440005863497024e-05, |
|
"loss": 1.6157, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7424895987128723e-05, |
|
"loss": 1.6212, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7409748237240052e-05, |
|
"loss": 1.6148, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7394562691097906e-05, |
|
"loss": 1.6037, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7379339426161962e-05, |
|
"loss": 1.6029, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.736407852008431e-05, |
|
"loss": 1.6422, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7348780050709028e-05, |
|
"loss": 1.6615, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7333444096071804e-05, |
|
"loss": 1.6211, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.731807073439955e-05, |
|
"loss": 1.6152, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7302660044109963e-05, |
|
"loss": 1.6545, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.7287212103811164e-05, |
|
"loss": 1.612, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.727172699230128e-05, |
|
"loss": 1.638, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"learning_rate": 1.725620478856804e-05, |
|
"loss": 1.6226, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7240645571788377e-05, |
|
"loss": 1.5929, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7225049421328024e-05, |
|
"loss": 1.6376, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.720941641674111e-05, |
|
"loss": 1.6419, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.719374663776975e-05, |
|
"loss": 1.6608, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7178040164343643e-05, |
|
"loss": 1.5991, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.716229707657966e-05, |
|
"loss": 1.6207, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.714651745478144e-05, |
|
"loss": 1.6393, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7130701379438974e-05, |
|
"loss": 1.5872, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7114848931228204e-05, |
|
"loss": 1.6652, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7098960191010596e-05, |
|
"loss": 1.6588, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.708303523983275e-05, |
|
"loss": 1.6158, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7067074158925963e-05, |
|
"loss": 1.6055, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.705107702970583e-05, |
|
"loss": 1.677, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 1.7035043933771824e-05, |
|
"loss": 1.6061, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7018974952906885e-05, |
|
"loss": 1.6463, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.7002870169076983e-05, |
|
"loss": 1.6627, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6986729664430734e-05, |
|
"loss": 1.5812, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6970553521298948e-05, |
|
"loss": 1.6268, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6954341822194228e-05, |
|
"loss": 1.6296, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.693809464981054e-05, |
|
"loss": 1.6383, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.69218120870228e-05, |
|
"loss": 1.5909, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6905494216886445e-05, |
|
"loss": 1.6374, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6889141122637007e-05, |
|
"loss": 1.6219, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6872752887689693e-05, |
|
"loss": 1.6365, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.685632959563897e-05, |
|
"loss": 1.624, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.68398713302581e-05, |
|
"loss": 1.6106, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.6823378175498778e-05, |
|
"loss": 1.6458, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"learning_rate": 1.680685021549063e-05, |
|
"loss": 1.6136, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6790287534540838e-05, |
|
"loss": 1.615, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.677369021713369e-05, |
|
"loss": 1.6109, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6757058347930148e-05, |
|
"loss": 1.6439, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6740392011767413e-05, |
|
"loss": 1.6372, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6723691293658512e-05, |
|
"loss": 1.63, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6706956278791836e-05, |
|
"loss": 1.6108, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6690187052530733e-05, |
|
"loss": 1.6265, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6673383700413044e-05, |
|
"loss": 1.6142, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6656546308150693e-05, |
|
"loss": 1.5988, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.663967496162924e-05, |
|
"loss": 1.6474, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.662276974690743e-05, |
|
"loss": 1.61, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6605830750216778e-05, |
|
"loss": 1.5761, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.658885805796111e-05, |
|
"loss": 1.6264, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.6571851756716134e-05, |
|
"loss": 1.6052, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 1.655481193322899e-05, |
|
"loss": 1.6216, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.653773867441781e-05, |
|
"loss": 1.629, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6520632067371276e-05, |
|
"loss": 1.6385, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6503492199348178e-05, |
|
"loss": 1.6537, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.648631915777697e-05, |
|
"loss": 1.6039, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.646911303025531e-05, |
|
"loss": 1.5989, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6451873904549625e-05, |
|
"loss": 1.6123, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6434601868594672e-05, |
|
"loss": 1.6469, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.641729701049308e-05, |
|
"loss": 1.6151, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6399959418514876e-05, |
|
"loss": 1.6179, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.638258918109709e-05, |
|
"loss": 1.6242, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.636518638684325e-05, |
|
"loss": 1.6202, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6347751124522966e-05, |
|
"loss": 1.6075, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.6330283483071454e-05, |
|
"loss": 1.6368, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"learning_rate": 1.63127835515891e-05, |
|
"loss": 1.6156, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6295251419340997e-05, |
|
"loss": 1.6085, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.627768717575648e-05, |
|
"loss": 1.6293, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.626009091042869e-05, |
|
"loss": 1.633, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6242462713114116e-05, |
|
"loss": 1.6293, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6224802673732103e-05, |
|
"loss": 1.5762, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6207110882364436e-05, |
|
"loss": 1.6262, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6189387429254867e-05, |
|
"loss": 1.5836, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6171632404808637e-05, |
|
"loss": 1.6299, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6153845899592037e-05, |
|
"loss": 1.6011, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.613602800433194e-05, |
|
"loss": 1.641, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6118178809915327e-05, |
|
"loss": 1.5751, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.610029840738884e-05, |
|
"loss": 1.636, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.608238688795831e-05, |
|
"loss": 1.6353, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6064444342988284e-05, |
|
"loss": 1.5984, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"learning_rate": 1.6046470864001576e-05, |
|
"loss": 1.6517, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.602846654267878e-05, |
|
"loss": 1.5916, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.601043147085783e-05, |
|
"loss": 1.602, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5992365740533493e-05, |
|
"loss": 1.6234, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5974269443856942e-05, |
|
"loss": 1.611, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.595614267313525e-05, |
|
"loss": 1.6057, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.593798552083094e-05, |
|
"loss": 1.6586, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5919798079561518e-05, |
|
"loss": 1.6399, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.590158044209897e-05, |
|
"loss": 1.6063, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5883332701369323e-05, |
|
"loss": 1.6133, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5865054950452156e-05, |
|
"loss": 1.5935, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5846747282580126e-05, |
|
"loss": 1.6286, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5828409791138487e-05, |
|
"loss": 1.6286, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.581004256966463e-05, |
|
"loss": 1.6271, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 1.5791645711847583e-05, |
|
"loss": 1.6332, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.577321931152756e-05, |
|
"loss": 1.6596, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.575476346269545e-05, |
|
"loss": 1.5815, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5736278259492372e-05, |
|
"loss": 1.6319, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5717763796209174e-05, |
|
"loss": 1.6099, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5699220167285946e-05, |
|
"loss": 1.5987, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.568064746731156e-05, |
|
"loss": 1.6211, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.566204579102317e-05, |
|
"loss": 1.626, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5643415233305743e-05, |
|
"loss": 1.6113, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5624755889191556e-05, |
|
"loss": 1.6402, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5606067853859727e-05, |
|
"loss": 1.6454, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5587351222635727e-05, |
|
"loss": 1.6143, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5568606090990883e-05, |
|
"loss": 1.6173, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.554983255454191e-05, |
|
"loss": 1.6168, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"learning_rate": 1.5531030709050402e-05, |
|
"loss": 1.6215, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.551220065042237e-05, |
|
"loss": 1.6142, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.549334247470771e-05, |
|
"loss": 1.6304, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5474456278099774e-05, |
|
"loss": 1.6522, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5455542156934815e-05, |
|
"loss": 1.6352, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5436600207691544e-05, |
|
"loss": 1.6026, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5417630526990613e-05, |
|
"loss": 1.5938, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.539863321159413e-05, |
|
"loss": 1.6332, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5379608358405166e-05, |
|
"loss": 1.6283, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5360556064467257e-05, |
|
"loss": 1.6143, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5341476426963908e-05, |
|
"loss": 1.606, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5322369543218105e-05, |
|
"loss": 1.6036, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5303235510691816e-05, |
|
"loss": 1.6193, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5284074426985487e-05, |
|
"loss": 1.5858, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.526488638983755e-05, |
|
"loss": 1.6249, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 1.5245671497123924e-05, |
|
"loss": 1.5899, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5226429846857516e-05, |
|
"loss": 1.5818, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5207161537187713e-05, |
|
"loss": 1.6526, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.51878666663999e-05, |
|
"loss": 1.5838, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5168545332914942e-05, |
|
"loss": 1.6348, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5149197635288685e-05, |
|
"loss": 1.6178, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5129823672211454e-05, |
|
"loss": 1.586, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.511042354250756e-05, |
|
"loss": 1.6216, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5090997345134777e-05, |
|
"loss": 1.624, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5071545179183846e-05, |
|
"loss": 1.617, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5052067143877986e-05, |
|
"loss": 1.6117, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5032563338572357e-05, |
|
"loss": 1.6136, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.5013033862753572e-05, |
|
"loss": 1.6113, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.4993478816039191e-05, |
|
"loss": 1.5709, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"learning_rate": 1.4973898298177206e-05, |
|
"loss": 1.6311, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.495429240904553e-05, |
|
"loss": 1.6197, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4934661248651497e-05, |
|
"loss": 1.6151, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4915004917131345e-05, |
|
"loss": 1.5786, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.489532351474971e-05, |
|
"loss": 1.567, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4875617141899103e-05, |
|
"loss": 1.6459, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.485588589909942e-05, |
|
"loss": 1.5866, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4836129886997401e-05, |
|
"loss": 1.5997, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.481634920636614e-05, |
|
"loss": 1.5837, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4796543958104561e-05, |
|
"loss": 1.5982, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4776714243236902e-05, |
|
"loss": 1.6198, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4756860162912208e-05, |
|
"loss": 1.5563, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4736981818403799e-05, |
|
"loss": 1.605, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.471707931110878e-05, |
|
"loss": 1.6325, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 1.4697152742547489e-05, |
|
"loss": 1.6153, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4677202214363008e-05, |
|
"loss": 1.6322, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4657227828320637e-05, |
|
"loss": 1.6161, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4637229686307367e-05, |
|
"loss": 1.612, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.461720789033136e-05, |
|
"loss": 1.6085, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4597162542521446e-05, |
|
"loss": 1.5963, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4577093745126583e-05, |
|
"loss": 1.6052, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4557001600515342e-05, |
|
"loss": 1.5949, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4536886211175393e-05, |
|
"loss": 1.5852, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4516747679712962e-05, |
|
"loss": 1.6111, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4496586108852336e-05, |
|
"loss": 1.6083, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4476401601435304e-05, |
|
"loss": 1.5824, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4456194260420668e-05, |
|
"loss": 1.6042, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4435964188883697e-05, |
|
"loss": 1.6241, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4415711490015602e-05, |
|
"loss": 1.6322, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.37, |
|
"learning_rate": 1.4395436267123017e-05, |
|
"loss": 1.6693, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4375138623627467e-05, |
|
"loss": 1.6304, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4354818663064838e-05, |
|
"loss": 1.596, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4334476489084863e-05, |
|
"loss": 1.5772, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4314112205450576e-05, |
|
"loss": 1.6021, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4293725916037784e-05, |
|
"loss": 1.6392, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4273317724834556e-05, |
|
"loss": 1.6302, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4252887735940673e-05, |
|
"loss": 1.6346, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4232436053567098e-05, |
|
"loss": 1.6206, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4211962782035457e-05, |
|
"loss": 1.6435, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4191468025777503e-05, |
|
"loss": 1.6076, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4170951889334572e-05, |
|
"loss": 1.6193, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4150414477357057e-05, |
|
"loss": 1.5853, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4129855894603885e-05, |
|
"loss": 1.5889, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 1.4109276245941962e-05, |
|
"loss": 1.6055, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4088675636345654e-05, |
|
"loss": 1.6493, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4068054170896249e-05, |
|
"loss": 1.6312, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.4047411954781409e-05, |
|
"loss": 1.6165, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.402674909329465e-05, |
|
"loss": 1.6203, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.40060656918348e-05, |
|
"loss": 1.6235, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3985361855905453e-05, |
|
"loss": 1.6065, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3964637691114434e-05, |
|
"loss": 1.645, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3943893303173273e-05, |
|
"loss": 1.6448, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.392312879789665e-05, |
|
"loss": 1.6174, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3902344281201863e-05, |
|
"loss": 1.6154, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3881539859108282e-05, |
|
"loss": 1.6285, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3860715637736817e-05, |
|
"loss": 1.6275, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.383987172330937e-05, |
|
"loss": 1.6086, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.39, |
|
"learning_rate": 1.3819008222148293e-05, |
|
"loss": 1.5968, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3798125240675853e-05, |
|
"loss": 1.5765, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3777222885413676e-05, |
|
"loss": 1.6159, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3756301262982217e-05, |
|
"loss": 1.6293, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.373536048010021e-05, |
|
"loss": 1.6052, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3714400643584123e-05, |
|
"loss": 1.6152, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3693421860347611e-05, |
|
"loss": 1.6067, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3672424237400986e-05, |
|
"loss": 1.63, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3651407881850642e-05, |
|
"loss": 1.613, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3630372900898538e-05, |
|
"loss": 1.616, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3609319401841638e-05, |
|
"loss": 1.6341, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.358824749207136e-05, |
|
"loss": 1.6375, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3567157279073036e-05, |
|
"loss": 1.5918, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3546048870425356e-05, |
|
"loss": 1.6121, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.3524922373799836e-05, |
|
"loss": 1.5584, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.4, |
|
"learning_rate": 1.350377789696024e-05, |
|
"loss": 1.6168, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3482615547762056e-05, |
|
"loss": 1.6142, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3461435434151942e-05, |
|
"loss": 1.6082, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.344023766416716e-05, |
|
"loss": 1.5946, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3419022345935035e-05, |
|
"loss": 1.6143, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3397789587672417e-05, |
|
"loss": 1.5935, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3376539497685099e-05, |
|
"loss": 1.5902, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3355272184367294e-05, |
|
"loss": 1.6076, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3333987756201058e-05, |
|
"loss": 1.593, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.331268632175576e-05, |
|
"loss": 1.6103, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3291367989687505e-05, |
|
"loss": 1.5757, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3270032868738594e-05, |
|
"loss": 1.6087, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3248681067736973e-05, |
|
"loss": 1.6108, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.322731269559566e-05, |
|
"loss": 1.5858, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 1.3205927861312207e-05, |
|
"loss": 1.6325, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.318452667396814e-05, |
|
"loss": 1.6255, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3163109242728391e-05, |
|
"loss": 1.6091, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3141675676840757e-05, |
|
"loss": 1.6319, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3120226085635337e-05, |
|
"loss": 1.592, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3098760578523965e-05, |
|
"loss": 1.6066, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3077279264999674e-05, |
|
"loss": 1.6218, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3055782254636109e-05, |
|
"loss": 1.6062, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3034269657086993e-05, |
|
"loss": 1.5726, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.3012741582085551e-05, |
|
"loss": 1.5991, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2991198139443959e-05, |
|
"loss": 1.6004, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2969639439052786e-05, |
|
"loss": 1.5796, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2948065590880421e-05, |
|
"loss": 1.5965, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2926476704972523e-05, |
|
"loss": 1.5953, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.42, |
|
"learning_rate": 1.2904872891451461e-05, |
|
"loss": 1.6045, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2883254260515745e-05, |
|
"loss": 1.638, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.286162092243946e-05, |
|
"loss": 1.5976, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2839972987571723e-05, |
|
"loss": 1.6045, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2818310566336098e-05, |
|
"loss": 1.6391, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.279663376923005e-05, |
|
"loss": 1.5872, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2774942706824365e-05, |
|
"loss": 1.6322, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.27532374897626e-05, |
|
"loss": 1.6326, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2731518228760513e-05, |
|
"loss": 1.6069, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2709785034605496e-05, |
|
"loss": 1.6225, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2688038018156014e-05, |
|
"loss": 1.6025, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2666277290341042e-05, |
|
"loss": 1.6447, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2644502962159485e-05, |
|
"loss": 1.6224, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.262271514467963e-05, |
|
"loss": 1.5553, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2600913949038571e-05, |
|
"loss": 1.5943, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 1.2579099486441637e-05, |
|
"loss": 1.5905, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2557271868161838e-05, |
|
"loss": 1.649, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2535431205539279e-05, |
|
"loss": 1.6286, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2513577609980616e-05, |
|
"loss": 1.635, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2491711192958467e-05, |
|
"loss": 1.6439, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2469832066010843e-05, |
|
"loss": 1.6028, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2447940340740603e-05, |
|
"loss": 1.6472, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2426036128814856e-05, |
|
"loss": 1.594, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.240411954196441e-05, |
|
"loss": 1.6277, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2382190691983198e-05, |
|
"loss": 1.5972, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2360249690727694e-05, |
|
"loss": 1.6022, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.233829665011637e-05, |
|
"loss": 1.602, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2316331682129101e-05, |
|
"loss": 1.5942, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2294354898806599e-05, |
|
"loss": 1.613, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.44, |
|
"learning_rate": 1.2272366412249853e-05, |
|
"loss": 1.5964, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2250366334619538e-05, |
|
"loss": 1.5893, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2228354778135469e-05, |
|
"loss": 1.6593, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2206331855076e-05, |
|
"loss": 1.5685, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2184297677777463e-05, |
|
"loss": 1.6174, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2162252358633615e-05, |
|
"loss": 1.6137, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2140196010095022e-05, |
|
"loss": 1.586, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.211812874466853e-05, |
|
"loss": 1.6257, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.209605067491666e-05, |
|
"loss": 1.5819, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2073961913457039e-05, |
|
"loss": 1.6215, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2051862572961852e-05, |
|
"loss": 1.6121, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.2029752766157223e-05, |
|
"loss": 1.6269, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.200763260582268e-05, |
|
"loss": 1.6252, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1985502204790552e-05, |
|
"loss": 1.6129, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 1.1963361675945414e-05, |
|
"loss": 1.5826, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1941211132223492e-05, |
|
"loss": 1.5781, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1919050686612106e-05, |
|
"loss": 1.5664, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1896880452149077e-05, |
|
"loss": 1.6199, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.187470054192217e-05, |
|
"loss": 1.5663, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1852511069068482e-05, |
|
"loss": 1.5852, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1830312146773915e-05, |
|
"loss": 1.6238, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.180810388827255e-05, |
|
"loss": 1.6067, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1785886406846101e-05, |
|
"loss": 1.6041, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1763659815823327e-05, |
|
"loss": 1.6254, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1741424228579454e-05, |
|
"loss": 1.5914, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1719179758535587e-05, |
|
"loss": 1.5642, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.169692651915816e-05, |
|
"loss": 1.581, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.167466462395832e-05, |
|
"loss": 1.6328, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1652394186491373e-05, |
|
"loss": 1.6644, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.46, |
|
"learning_rate": 1.1630115320356206e-05, |
|
"loss": 1.5741, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1607828139194683e-05, |
|
"loss": 1.581, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.15855327566911e-05, |
|
"loss": 1.6009, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1563229286571574e-05, |
|
"loss": 1.6076, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1540917842603481e-05, |
|
"loss": 1.5943, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.151859853859487e-05, |
|
"loss": 1.627, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1496271488393884e-05, |
|
"loss": 1.5848, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.147393680588818e-05, |
|
"loss": 1.6078, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1451594605004339e-05, |
|
"loss": 1.6424, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1429244999707301e-05, |
|
"loss": 1.5847, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1406888103999772e-05, |
|
"loss": 1.5789, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1384524031921643e-05, |
|
"loss": 1.6188, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1362152897549413e-05, |
|
"loss": 1.5957, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1339774814995608e-05, |
|
"loss": 1.569, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 1.1317389898408188e-05, |
|
"loss": 1.5998, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1294998261969983e-05, |
|
"loss": 1.5698, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1272600019898093e-05, |
|
"loss": 1.5794, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1250195286443308e-05, |
|
"loss": 1.6487, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1227784175889545e-05, |
|
"loss": 1.5884, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1205366802553231e-05, |
|
"loss": 1.6084, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1182943280782757e-05, |
|
"loss": 1.6278, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1160513724957864e-05, |
|
"loss": 1.6457, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1138078249489076e-05, |
|
"loss": 1.5942, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1115636968817114e-05, |
|
"loss": 1.6002, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1093189997412308e-05, |
|
"loss": 1.5841, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1070737449774012e-05, |
|
"loss": 1.6074, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1048279440430036e-05, |
|
"loss": 1.5961, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1025816083936036e-05, |
|
"loss": 1.6039, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.48, |
|
"learning_rate": 1.1003347494874954e-05, |
|
"loss": 1.5762, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0980873787856415e-05, |
|
"loss": 1.5991, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0958395077516149e-05, |
|
"loss": 1.6149, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.093591147851542e-05, |
|
"loss": 1.5825, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0913423105540413e-05, |
|
"loss": 1.5742, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0890930073301674e-05, |
|
"loss": 1.5974, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0868432496533512e-05, |
|
"loss": 1.6136, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0845930489993414e-05, |
|
"loss": 1.6572, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0823424168461476e-05, |
|
"loss": 1.6344, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0800913646739785e-05, |
|
"loss": 1.6255, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0778399039651868e-05, |
|
"loss": 1.6221, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0755880462042082e-05, |
|
"loss": 1.6519, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.073335802877504e-05, |
|
"loss": 1.6173, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0710831854735027e-05, |
|
"loss": 1.6243, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.0688302054825403e-05, |
|
"loss": 1.5761, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.49, |
|
"learning_rate": 1.066576874396802e-05, |
|
"loss": 1.6018, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0643232037102656e-05, |
|
"loss": 1.5663, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0620692049186385e-05, |
|
"loss": 1.6266, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0598148895193044e-05, |
|
"loss": 1.6305, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0575602690112602e-05, |
|
"loss": 1.6497, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0553053548950598e-05, |
|
"loss": 1.6082, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0530501586727547e-05, |
|
"loss": 1.6143, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0507946918478354e-05, |
|
"loss": 1.613, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.048538965925172e-05, |
|
"loss": 1.6063, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0462829924109581e-05, |
|
"loss": 1.58, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0440267828126478e-05, |
|
"loss": 1.6067, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0417703486389017e-05, |
|
"loss": 1.6714, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0395137013995245e-05, |
|
"loss": 1.6027, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0372568526054075e-05, |
|
"loss": 1.6135, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 1.0349998137684716e-05, |
|
"loss": 1.5962, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.032742596401606e-05, |
|
"loss": 1.5889, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0304852120186102e-05, |
|
"loss": 1.6154, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0282276721341372e-05, |
|
"loss": 1.5981, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0259699882636309e-05, |
|
"loss": 1.588, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0237121719232723e-05, |
|
"loss": 1.5531, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.021454234629916e-05, |
|
"loss": 1.5465, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0191961879010342e-05, |
|
"loss": 1.6065, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0169380432546578e-05, |
|
"loss": 1.6257, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0146798122093167e-05, |
|
"loss": 1.5613, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0124215062839817e-05, |
|
"loss": 1.599, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0101631369980056e-05, |
|
"loss": 1.5813, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0079047158710638e-05, |
|
"loss": 1.6179, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.005646254423097e-05, |
|
"loss": 1.595, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0033877641742511e-05, |
|
"loss": 1.5425, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.51, |
|
"learning_rate": 1.0011292566448188e-05, |
|
"loss": 1.6263, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.988707433551816e-06, |
|
"loss": 1.5788, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.966122358257494e-06, |
|
"loss": 1.6317, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.943537455769034e-06, |
|
"loss": 1.5667, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.920952841289365e-06, |
|
"loss": 1.5987, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.898368630019948e-06, |
|
"loss": 1.5913, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.875784937160183e-06, |
|
"loss": 1.5909, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.853201877906836e-06, |
|
"loss": 1.6105, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.830619567453427e-06, |
|
"loss": 1.5993, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.808038120989662e-06, |
|
"loss": 1.6032, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.785457653700844e-06, |
|
"loss": 1.5996, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.76287828076728e-06, |
|
"loss": 1.5564, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.740300117363691e-06, |
|
"loss": 1.5832, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.717723278658633e-06, |
|
"loss": 1.5684, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.6951478798139e-06, |
|
"loss": 1.6344, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.672574035983945e-06, |
|
"loss": 1.5963, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.650001862315285e-06, |
|
"loss": 1.6111, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.627431473945927e-06, |
|
"loss": 1.6121, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.60486298600476e-06, |
|
"loss": 1.6066, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.582296513610983e-06, |
|
"loss": 1.6015, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.559732171873524e-06, |
|
"loss": 1.59, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.537170075890426e-06, |
|
"loss": 1.5938, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.514610340748281e-06, |
|
"loss": 1.5664, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.49205308152165e-06, |
|
"loss": 1.5595, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.469498413272456e-06, |
|
"loss": 1.6179, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.446946451049403e-06, |
|
"loss": 1.5752, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.4243973098874e-06, |
|
"loss": 1.6166, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.401851104806958e-06, |
|
"loss": 1.5828, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.53, |
|
"learning_rate": 9.379307950813617e-06, |
|
"loss": 1.5961, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.356767962897349e-06, |
|
"loss": 1.6062, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.33423125603198e-06, |
|
"loss": 1.6106, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.3116979451746e-06, |
|
"loss": 1.6055, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.289168145264975e-06, |
|
"loss": 1.6008, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.266641971224963e-06, |
|
"loss": 1.6411, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.244119537957923e-06, |
|
"loss": 1.5554, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.221600960348136e-06, |
|
"loss": 1.6471, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.199086353260217e-06, |
|
"loss": 1.5807, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.176575831538527e-06, |
|
"loss": 1.603, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.154069510006584e-06, |
|
"loss": 1.589, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.131567503466491e-06, |
|
"loss": 1.614, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.109069926698331e-06, |
|
"loss": 1.5766, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.08657689445959e-06, |
|
"loss": 1.6009, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.064088521484583e-06, |
|
"loss": 1.6193, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.041604922483853e-06, |
|
"loss": 1.6094, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 9.019126212143589e-06, |
|
"loss": 1.5921, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.996652505125047e-06, |
|
"loss": 1.6001, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.974183916063967e-06, |
|
"loss": 1.6032, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.95172055956997e-06, |
|
"loss": 1.6085, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.929262550225991e-06, |
|
"loss": 1.6167, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.906810002587696e-06, |
|
"loss": 1.595, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.884363031182889e-06, |
|
"loss": 1.5695, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.861921750510925e-06, |
|
"loss": 1.6225, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.839486275042137e-06, |
|
"loss": 1.6099, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.817056719217248e-06, |
|
"loss": 1.5838, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.79463319744677e-06, |
|
"loss": 1.5928, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.77221582411046e-06, |
|
"loss": 1.5619, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.749804713556696e-06, |
|
"loss": 1.6192, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.55, |
|
"learning_rate": 8.72739998010191e-06, |
|
"loss": 1.5752, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.705001738030017e-06, |
|
"loss": 1.645, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.682610101591813e-06, |
|
"loss": 1.6152, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.660225185004397e-06, |
|
"loss": 1.5816, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.637847102450589e-06, |
|
"loss": 1.6264, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.615475968078359e-06, |
|
"loss": 1.6285, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.593111896000233e-06, |
|
"loss": 1.5785, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.5707550002927e-06, |
|
"loss": 1.5976, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.548405394995664e-06, |
|
"loss": 1.6293, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.526063194111827e-06, |
|
"loss": 1.5788, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.50372851160612e-06, |
|
"loss": 1.541, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.481401461405132e-06, |
|
"loss": 1.6058, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.459082157396524e-06, |
|
"loss": 1.6058, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.436770713428428e-06, |
|
"loss": 1.5731, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 8.414467243308901e-06, |
|
"loss": 1.6116, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.39217186080532e-06, |
|
"loss": 1.6121, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.369884679643801e-06, |
|
"loss": 1.5998, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.34760581350863e-06, |
|
"loss": 1.6248, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.325335376041683e-06, |
|
"loss": 1.6211, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.303073480841844e-06, |
|
"loss": 1.622, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.280820241464413e-06, |
|
"loss": 1.5774, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.25857577142055e-06, |
|
"loss": 1.6093, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.236340184176678e-06, |
|
"loss": 1.6255, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.214113593153904e-06, |
|
"loss": 1.5867, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.191896111727453e-06, |
|
"loss": 1.5993, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.16968785322609e-06, |
|
"loss": 1.5907, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.147488930931518e-06, |
|
"loss": 1.5474, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.125299458077832e-06, |
|
"loss": 1.5874, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.103119547850924e-06, |
|
"loss": 1.6297, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.57, |
|
"learning_rate": 8.0809493133879e-06, |
|
"loss": 1.5933, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.058788867776512e-06, |
|
"loss": 1.5765, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.03663832405459e-06, |
|
"loss": 1.5898, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 8.01449779520945e-06, |
|
"loss": 1.5886, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.992367394177322e-06, |
|
"loss": 1.6402, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.970247233842778e-06, |
|
"loss": 1.5753, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.948137427038154e-06, |
|
"loss": 1.5689, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.926038086542963e-06, |
|
"loss": 1.6016, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.903949325083344e-06, |
|
"loss": 1.5765, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.881871255331472e-06, |
|
"loss": 1.598, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.859803989904978e-06, |
|
"loss": 1.6239, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.837747641366386e-06, |
|
"loss": 1.6134, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.815702322222539e-06, |
|
"loss": 1.6087, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.793668144924005e-06, |
|
"loss": 1.557, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.58, |
|
"learning_rate": 7.771645221864534e-06, |
|
"loss": 1.5814, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.749633665380463e-06, |
|
"loss": 1.5814, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.72763358775015e-06, |
|
"loss": 1.6045, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.705645101193403e-06, |
|
"loss": 1.5935, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.683668317870902e-06, |
|
"loss": 1.6357, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.661703349883633e-06, |
|
"loss": 1.6037, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.639750309272308e-06, |
|
"loss": 1.6101, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.617809308016806e-06, |
|
"loss": 1.5826, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.595880458035592e-06, |
|
"loss": 1.592, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.573963871185146e-06, |
|
"loss": 1.5701, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.552059659259398e-06, |
|
"loss": 1.6024, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.530167933989161e-06, |
|
"loss": 1.5676, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.50828880704154e-06, |
|
"loss": 1.6243, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.486422390019385e-06, |
|
"loss": 1.5521, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 7.464568794460723e-06, |
|
"loss": 1.6012, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.442728131838165e-06, |
|
"loss": 1.606, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.420900513558363e-06, |
|
"loss": 1.5572, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.399086050961434e-06, |
|
"loss": 1.6185, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.377284855320374e-06, |
|
"loss": 1.6133, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.355497037840519e-06, |
|
"loss": 1.5818, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.333722709658961e-06, |
|
"loss": 1.6089, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.311961981843988e-06, |
|
"loss": 1.5725, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.2902149653945065e-06, |
|
"loss": 1.6455, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.268481771239488e-06, |
|
"loss": 1.5952, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.246762510237404e-06, |
|
"loss": 1.61, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.225057293175639e-06, |
|
"loss": 1.588, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.203366230769952e-06, |
|
"loss": 1.5891, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.181689433663904e-06, |
|
"loss": 1.5863, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.160027012428279e-06, |
|
"loss": 1.5924, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.6, |
|
"learning_rate": 7.138379077560542e-06, |
|
"loss": 1.6264, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.116745739484261e-06, |
|
"loss": 1.5497, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.095127108548543e-06, |
|
"loss": 1.5803, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.07352329502748e-06, |
|
"loss": 1.5848, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.0519344091195815e-06, |
|
"loss": 1.5864, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.030360560947217e-06, |
|
"loss": 1.6043, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 7.008801860556042e-06, |
|
"loss": 1.5758, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.9872584179144485e-06, |
|
"loss": 1.519, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.965730342913011e-06, |
|
"loss": 1.5913, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.9442177453638945e-06, |
|
"loss": 1.5448, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.922720735000329e-06, |
|
"loss": 1.6209, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.901239421476037e-06, |
|
"loss": 1.5934, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.879773914364667e-06, |
|
"loss": 1.5571, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.858324323159244e-06, |
|
"loss": 1.5769, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 6.836890757271615e-06, |
|
"loss": 1.5702, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.815473326031865e-06, |
|
"loss": 1.5834, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.794072138687795e-06, |
|
"loss": 1.5734, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.772687304404341e-06, |
|
"loss": 1.5834, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.751318932263029e-06, |
|
"loss": 1.5761, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.729967131261406e-06, |
|
"loss": 1.6181, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.7086320103124945e-06, |
|
"loss": 1.6043, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.687313678244243e-06, |
|
"loss": 1.5647, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.6660122437989425e-06, |
|
"loss": 1.6174, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.644727815632708e-06, |
|
"loss": 1.6209, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.6234605023149026e-06, |
|
"loss": 1.6036, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.602210412327585e-06, |
|
"loss": 1.6157, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.580977654064965e-06, |
|
"loss": 1.6018, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.5597623358328465e-06, |
|
"loss": 1.5748, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.62, |
|
"learning_rate": 6.538564565848064e-06, |
|
"loss": 1.6209, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.517384452237947e-06, |
|
"loss": 1.5568, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.496222103039764e-06, |
|
"loss": 1.5731, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.475077626200168e-06, |
|
"loss": 1.6075, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.453951129574644e-06, |
|
"loss": 1.6093, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.432842720926966e-06, |
|
"loss": 1.566, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.411752507928643e-06, |
|
"loss": 1.6165, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.390680598158365e-06, |
|
"loss": 1.6035, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.369627099101463e-06, |
|
"loss": 1.5883, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.348592118149362e-06, |
|
"loss": 1.6122, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.327575762599018e-06, |
|
"loss": 1.6047, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.3065781396523885e-06, |
|
"loss": 1.6192, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.2855993564158815e-06, |
|
"loss": 1.6037, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.2646395198997955e-06, |
|
"loss": 1.5744, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.243698737017786e-06, |
|
"loss": 1.5972, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.222777114586327e-06, |
|
"loss": 1.5967, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.2018747593241505e-06, |
|
"loss": 1.5867, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.180991777851707e-06, |
|
"loss": 1.6322, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.160128276690631e-06, |
|
"loss": 1.6352, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.139284362263185e-06, |
|
"loss": 1.5932, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.118460140891722e-06, |
|
"loss": 1.5555, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.09765571879814e-06, |
|
"loss": 1.6202, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.076871202103354e-06, |
|
"loss": 1.6198, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.056106696826729e-06, |
|
"loss": 1.5473, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.035362308885569e-06, |
|
"loss": 1.6149, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 6.014638144094554e-06, |
|
"loss": 1.5967, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.993934308165206e-06, |
|
"loss": 1.5704, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.973250906705353e-06, |
|
"loss": 1.5264, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.952588045218594e-06, |
|
"loss": 1.6055, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.64, |
|
"learning_rate": 5.931945829103756e-06, |
|
"loss": 1.5836, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.911324363654347e-06, |
|
"loss": 1.5851, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.89072375405804e-06, |
|
"loss": 1.595, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.8701441053961185e-06, |
|
"loss": 1.5949, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.849585522642944e-06, |
|
"loss": 1.6168, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.8290481106654315e-06, |
|
"loss": 1.6349, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.808531974222496e-06, |
|
"loss": 1.5903, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.788037217964542e-06, |
|
"loss": 1.5821, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.767563946432903e-06, |
|
"loss": 1.601, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.747112264059331e-06, |
|
"loss": 1.5998, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.7266822751654475e-06, |
|
"loss": 1.5731, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.70627408396222e-06, |
|
"loss": 1.5896, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.6858877945494275e-06, |
|
"loss": 1.5941, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.665523510915139e-06, |
|
"loss": 1.5345, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 5.645181336935164e-06, |
|
"loss": 1.5967, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.624861376372534e-06, |
|
"loss": 1.6056, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.604563732876989e-06, |
|
"loss": 1.6051, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.5842885099843994e-06, |
|
"loss": 1.5751, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.564035811116305e-06, |
|
"loss": 1.5835, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.543805739579334e-06, |
|
"loss": 1.6005, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.523598398564696e-06, |
|
"loss": 1.6028, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.503413891147667e-06, |
|
"loss": 1.6049, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.483252320287043e-06, |
|
"loss": 1.595, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.46311378882461e-06, |
|
"loss": 1.6009, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.4429983994846604e-06, |
|
"loss": 1.582, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.422906254873422e-06, |
|
"loss": 1.6157, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.402837457478556e-06, |
|
"loss": 1.6068, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.382792109668642e-06, |
|
"loss": 1.5676, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.362770313692638e-06, |
|
"loss": 1.5958, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.66, |
|
"learning_rate": 5.342772171679364e-06, |
|
"loss": 1.6232, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.322797785636995e-06, |
|
"loss": 1.6241, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.3028472574525156e-06, |
|
"loss": 1.5988, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.282920688891222e-06, |
|
"loss": 1.6259, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.2630181815962e-06, |
|
"loss": 1.6143, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.243139837087795e-06, |
|
"loss": 1.5765, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.223285756763101e-06, |
|
"loss": 1.6094, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.203456041895443e-06, |
|
"loss": 1.618, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.183650793633865e-06, |
|
"loss": 1.5945, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.1638701130026005e-06, |
|
"loss": 1.6124, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.144114100900583e-06, |
|
"loss": 1.5983, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.1243828581008984e-06, |
|
"loss": 1.6273, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.104676485250294e-06, |
|
"loss": 1.6211, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.084995082868658e-06, |
|
"loss": 1.6095, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.67, |
|
"learning_rate": 5.065338751348505e-06, |
|
"loss": 1.6219, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.045707590954474e-06, |
|
"loss": 1.5269, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.026101701822799e-06, |
|
"loss": 1.5721, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 5.006521183960813e-06, |
|
"loss": 1.604, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.986966137246428e-06, |
|
"loss": 1.4867, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.967436661427649e-06, |
|
"loss": 1.5843, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.947932856122016e-06, |
|
"loss": 1.5576, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.9284548208161556e-06, |
|
"loss": 1.605, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.909002654865229e-06, |
|
"loss": 1.6193, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.889576457492441e-06, |
|
"loss": 1.6165, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.870176327788546e-06, |
|
"loss": 1.5918, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.850802364711321e-06, |
|
"loss": 1.5826, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.831454667085059e-06, |
|
"loss": 1.5986, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.812133333600102e-06, |
|
"loss": 1.572, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 4.79283846281229e-06, |
|
"loss": 1.5432, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.773570153142486e-06, |
|
"loss": 1.605, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.7543285028760775e-06, |
|
"loss": 1.5659, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.735113610162451e-06, |
|
"loss": 1.5894, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.715925573014515e-06, |
|
"loss": 1.5702, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.696764489308187e-06, |
|
"loss": 1.5856, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.677630456781899e-06, |
|
"loss": 1.5426, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.658523573036096e-06, |
|
"loss": 1.5842, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.639443935532747e-06, |
|
"loss": 1.5914, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.620391641594837e-06, |
|
"loss": 1.5885, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.601366788405872e-06, |
|
"loss": 1.598, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.58236947300939e-06, |
|
"loss": 1.559, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.563399792308456e-06, |
|
"loss": 1.6086, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.544457843065187e-06, |
|
"loss": 1.5469, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.525543721900228e-06, |
|
"loss": 1.6074, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.69, |
|
"learning_rate": 4.50665752529229e-06, |
|
"loss": 1.599, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.487799349577633e-06, |
|
"loss": 1.5955, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.468969290949601e-06, |
|
"loss": 1.5805, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.450167445458092e-06, |
|
"loss": 1.6176, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.4313939090091194e-06, |
|
"loss": 1.5745, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.412648777364278e-06, |
|
"loss": 1.5866, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.393932146140273e-06, |
|
"loss": 1.6004, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.375244110808444e-06, |
|
"loss": 1.5695, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.35658476669426e-06, |
|
"loss": 1.5792, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.33795420897683e-06, |
|
"loss": 1.5809, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.319352532688444e-06, |
|
"loss": 1.6275, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.300779832714059e-06, |
|
"loss": 1.5547, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.282236203790829e-06, |
|
"loss": 1.6343, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.263721740507629e-06, |
|
"loss": 1.5566, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 4.245236537304553e-06, |
|
"loss": 1.5637, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.226780688472446e-06, |
|
"loss": 1.5816, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.208354288152421e-06, |
|
"loss": 1.5788, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.189957430335376e-06, |
|
"loss": 1.5621, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.171590208861515e-06, |
|
"loss": 1.5461, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.153252717419877e-06, |
|
"loss": 1.6421, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.134945049547846e-06, |
|
"loss": 1.575, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.11666729863068e-06, |
|
"loss": 1.6033, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.098419557901036e-06, |
|
"loss": 1.5678, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.080201920438485e-06, |
|
"loss": 1.5998, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.062014479169058e-06, |
|
"loss": 1.5732, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.043857326864753e-06, |
|
"loss": 1.6064, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.025730556143062e-06, |
|
"loss": 1.589, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 4.007634259466506e-06, |
|
"loss": 1.5945, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.989568529142176e-06, |
|
"loss": 1.6276, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 0.71, |
|
"learning_rate": 3.971533457321221e-06, |
|
"loss": 1.6103, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.953529135998427e-06, |
|
"loss": 1.6183, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.935555657011719e-06, |
|
"loss": 1.5439, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.917613112041691e-06, |
|
"loss": 1.5646, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.899701592611159e-06, |
|
"loss": 1.5469, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.881821190084677e-06, |
|
"loss": 1.6245, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8639719956680624e-06, |
|
"loss": 1.6034, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.846154100407964e-06, |
|
"loss": 1.5488, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.828367595191367e-06, |
|
"loss": 1.6453, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.8106125707451337e-06, |
|
"loss": 1.5813, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7928891176355643e-06, |
|
"loss": 1.5981, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7751973262678997e-06, |
|
"loss": 1.5728, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7575372868858884e-06, |
|
"loss": 1.6234, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7399090895713095e-06, |
|
"loss": 1.643, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 3.7223128242435235e-06, |
|
"loss": 1.5692, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.7047485806590055e-06, |
|
"loss": 1.5879, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6872164484109007e-06, |
|
"loss": 1.6376, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6697165169285476e-06, |
|
"loss": 1.5796, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.652248875477038e-06, |
|
"loss": 1.6097, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6348136131567537e-06, |
|
"loss": 1.6009, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.617410818902912e-06, |
|
"loss": 1.5418, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.6000405814851245e-06, |
|
"loss": 1.5588, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5827029895069252e-06, |
|
"loss": 1.5421, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5653981314053276e-06, |
|
"loss": 1.6347, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.548126095450375e-06, |
|
"loss": 1.5529, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.530886969744697e-06, |
|
"loss": 1.5782, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.5136808422230328e-06, |
|
"loss": 1.6363, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.4965078006518226e-06, |
|
"loss": 1.5996, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 0.73, |
|
"learning_rate": 3.4793679326287265e-06, |
|
"loss": 1.6076, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.4622613255821912e-06, |
|
"loss": 1.6104, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.4451880667710102e-06, |
|
"loss": 1.6312, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.4281482432838687e-06, |
|
"loss": 1.6261, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.4111419420388904e-06, |
|
"loss": 1.5681, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3941692497832236e-06, |
|
"loss": 1.593, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3772302530925726e-06, |
|
"loss": 1.6042, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.360325038370762e-06, |
|
"loss": 1.6079, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.3434536918493065e-06, |
|
"loss": 1.6029, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.326616299586961e-06, |
|
"loss": 1.5881, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.30981294746927e-06, |
|
"loss": 1.5663, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2930437212081644e-06, |
|
"loss": 1.5707, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2763087063414922e-06, |
|
"loss": 1.5895, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.259607988232587e-06, |
|
"loss": 1.6141, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2429416520698563e-06, |
|
"loss": 1.5599, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 3.2263097828663127e-06, |
|
"loss": 1.5824, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.209712465459165e-06, |
|
"loss": 1.5799, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1931497845093753e-06, |
|
"loss": 1.6093, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.176621824501227e-06, |
|
"loss": 1.6141, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1601286697418985e-06, |
|
"loss": 1.6184, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1436704043610345e-06, |
|
"loss": 1.5944, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1272471123103077e-06, |
|
"loss": 1.553, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.1108588773629976e-06, |
|
"loss": 1.5499, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0945057831135593e-06, |
|
"loss": 1.5646, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.078187912977201e-06, |
|
"loss": 1.5818, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0619053501894614e-06, |
|
"loss": 1.6253, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.045658177805775e-06, |
|
"loss": 1.5251, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.0294464787010515e-06, |
|
"loss": 1.5629, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 3.013270335569265e-06, |
|
"loss": 1.6413, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 0.75, |
|
"learning_rate": 2.99712983092302e-06, |
|
"loss": 1.5881, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.981025047093118e-06, |
|
"loss": 1.5706, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.964956066228177e-06, |
|
"loss": 1.5527, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9489229702941736e-06, |
|
"loss": 1.5805, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.932925841074039e-06, |
|
"loss": 1.6174, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.9169647601672524e-06, |
|
"loss": 1.5999, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.901039808989409e-06, |
|
"loss": 1.4965, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8851510687717998e-06, |
|
"loss": 1.6367, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8692986205610275e-06, |
|
"loss": 1.6031, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.853482545218562e-06, |
|
"loss": 1.6195, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.837702923420339e-06, |
|
"loss": 1.5977, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8219598356563573e-06, |
|
"loss": 1.6094, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.8062533622302512e-06, |
|
"loss": 1.5389, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.7905835832588925e-06, |
|
"loss": 1.576, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 0.76, |
|
"learning_rate": 2.77495057867198e-06, |
|
"loss": 1.6058, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.759354428211628e-06, |
|
"loss": 1.5677, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7437952114319622e-06, |
|
"loss": 1.5902, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.728273007698722e-06, |
|
"loss": 1.5861, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.7127878961888378e-06, |
|
"loss": 1.5793, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6973399558900404e-06, |
|
"loss": 1.6068, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6819292656004547e-06, |
|
"loss": 1.6245, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6665559039281963e-06, |
|
"loss": 1.5969, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6512199492909774e-06, |
|
"loss": 1.5975, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.635921479915694e-06, |
|
"loss": 1.5819, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6206605738380364e-06, |
|
"loss": 1.5847, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.6054373089020946e-06, |
|
"loss": 1.6426, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.590251762759951e-06, |
|
"loss": 1.5668, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.57510401287128e-06, |
|
"loss": 1.5983, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.55999413650298e-06, |
|
"loss": 1.5819, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 2.5449222107287476e-06, |
|
"loss": 1.5968, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.5298883124287e-06, |
|
"loss": 1.5801, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.514892518288988e-06, |
|
"loss": 1.6173, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.499934904801393e-06, |
|
"loss": 1.6081, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4850155482629313e-06, |
|
"loss": 1.598, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4701345247754914e-06, |
|
"loss": 1.5683, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.455291910245422e-06, |
|
"loss": 1.5488, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4404877803831485e-06, |
|
"loss": 1.6097, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.4257222107028e-06, |
|
"loss": 1.5933, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.410995276521807e-06, |
|
"loss": 1.6003, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.396307052960526e-06, |
|
"loss": 1.5962, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.381657614941858e-06, |
|
"loss": 1.5534, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.3670470371908605e-06, |
|
"loss": 1.6086, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.352475394234368e-06, |
|
"loss": 1.5717, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 0.78, |
|
"learning_rate": 2.337942760400621e-06, |
|
"loss": 1.5774, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.323449209818873e-06, |
|
"loss": 1.6426, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.308994816419018e-06, |
|
"loss": 1.6006, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2945796539312147e-06, |
|
"loss": 1.5829, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2802037958855083e-06, |
|
"loss": 1.5573, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.265867315611463e-06, |
|
"loss": 1.6273, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.251570286237774e-06, |
|
"loss": 1.5828, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.237312780691904e-06, |
|
"loss": 1.6103, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.2230948716997137e-06, |
|
"loss": 1.5718, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.208916631785085e-06, |
|
"loss": 1.5699, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1947781332695406e-06, |
|
"loss": 1.5866, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1806794482719074e-06, |
|
"loss": 1.5929, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1666206487079134e-06, |
|
"loss": 1.573, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.152601806289838e-06, |
|
"loss": 1.5964, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 2.1386229925261516e-06, |
|
"loss": 1.6075, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.1246842787211385e-06, |
|
"loss": 1.5819, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.1107857359745264e-06, |
|
"loss": 1.5952, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0969274351811565e-06, |
|
"loss": 1.618, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0831094470305847e-06, |
|
"loss": 1.5723, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.069331842006741e-06, |
|
"loss": 1.5676, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0555946903875712e-06, |
|
"loss": 1.5774, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.041898062244667e-06, |
|
"loss": 1.5715, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.028242027442915e-06, |
|
"loss": 1.5496, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0146266556401405e-06, |
|
"loss": 1.6271, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 2.0010520162867543e-06, |
|
"loss": 1.5748, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9875181786253883e-06, |
|
"loss": 1.5987, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9740252116905632e-06, |
|
"loss": 1.6, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.960573184308312e-06, |
|
"loss": 1.5824, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.947162165095842e-06, |
|
"loss": 1.6137, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 0.8, |
|
"learning_rate": 1.9337922224611815e-06, |
|
"loss": 1.6051, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9204634246028354e-06, |
|
"loss": 1.5634, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.9071758395094353e-06, |
|
"loss": 1.5418, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8939295349593855e-06, |
|
"loss": 1.5276, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8807245785205274e-06, |
|
"loss": 1.5657, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8675610375497866e-06, |
|
"loss": 1.6083, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.854438979192843e-06, |
|
"loss": 1.5652, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8413584703837618e-06, |
|
"loss": 1.5395, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8283195778446861e-06, |
|
"loss": 1.595, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8153223680854693e-06, |
|
"loss": 1.5688, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.8023669074033457e-06, |
|
"loss": 1.556, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7894532618826e-06, |
|
"loss": 1.5831, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7765814973942176e-06, |
|
"loss": 1.5989, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.763751679595548e-06, |
|
"loss": 1.5865, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 1.7509638739299873e-06, |
|
"loss": 1.5855, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7382181456266246e-06, |
|
"loss": 1.5644, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7255145596999167e-06, |
|
"loss": 1.6058, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7128531809493665e-06, |
|
"loss": 1.5988, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.7002340739591717e-06, |
|
"loss": 1.601, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6876573030979138e-06, |
|
"loss": 1.5945, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6751229325182194e-06, |
|
"loss": 1.5738, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6626310261564371e-06, |
|
"loss": 1.5952, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6501816477323073e-06, |
|
"loss": 1.5879, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.637774860748651e-06, |
|
"loss": 1.5641, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6254107284910247e-06, |
|
"loss": 1.5667, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6130893140274107e-06, |
|
"loss": 1.6079, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.6008106802078938e-06, |
|
"loss": 1.5928, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5885748896643383e-06, |
|
"loss": 1.6098, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 0.82, |
|
"learning_rate": 1.5763820048100765e-06, |
|
"loss": 1.5731, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5642320878395744e-06, |
|
"loss": 1.5771, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5521252007281284e-06, |
|
"loss": 1.6423, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5400614052315466e-06, |
|
"loss": 1.6233, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5280407628858273e-06, |
|
"loss": 1.5793, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.516063335006851e-06, |
|
"loss": 1.6072, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.5041291826900717e-06, |
|
"loss": 1.6033, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4922383668101924e-06, |
|
"loss": 1.6111, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4803909480208646e-06, |
|
"loss": 1.5673, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.46858698675438e-06, |
|
"loss": 1.5945, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4568265432213558e-06, |
|
"loss": 1.6094, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4451096774104224e-06, |
|
"loss": 1.5437, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4334364490879416e-06, |
|
"loss": 1.5517, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.4218069177976724e-06, |
|
"loss": 1.5697, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.410221142860485e-06, |
|
"loss": 1.5776, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 0.83, |
|
"learning_rate": 1.3986791833740555e-06, |
|
"loss": 1.5871, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3871810982125632e-06, |
|
"loss": 1.5812, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3757269460263778e-06, |
|
"loss": 1.6255, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3643167852417894e-06, |
|
"loss": 1.5667, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3529506740606823e-06, |
|
"loss": 1.594, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3416286704602487e-06, |
|
"loss": 1.5762, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3303508321927027e-06, |
|
"loss": 1.536, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3191172167849643e-06, |
|
"loss": 1.6234, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.3079278815383856e-06, |
|
"loss": 1.5524, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2967828835284478e-06, |
|
"loss": 1.579, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2856822796044722e-06, |
|
"loss": 1.614, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2746261263893357e-06, |
|
"loss": 1.5966, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2636144802791739e-06, |
|
"loss": 1.5718, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2526473974430952e-06, |
|
"loss": 1.5475, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 1.2417249338228987e-06, |
|
"loss": 1.5881, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2308471451327874e-06, |
|
"loss": 1.6462, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2200140868590759e-06, |
|
"loss": 1.6194, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.2092258142599257e-06, |
|
"loss": 1.5798, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1984823823650427e-06, |
|
"loss": 1.6453, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1877838459754042e-06, |
|
"loss": 1.6049, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.177130259662993e-06, |
|
"loss": 1.5559, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1665216777704946e-06, |
|
"loss": 1.5649, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1559581544110332e-06, |
|
"loss": 1.5893, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1454397434679022e-06, |
|
"loss": 1.5951, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1349664985942755e-06, |
|
"loss": 1.599, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1245384732129427e-06, |
|
"loss": 1.6222, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1141557205160347e-06, |
|
"loss": 1.6087, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.1038182934647523e-06, |
|
"loss": 1.5711, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 0.85, |
|
"learning_rate": 1.0935262447890882e-06, |
|
"loss": 1.6361, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0832796269875757e-06, |
|
"loss": 1.5187, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0730784923270054e-06, |
|
"loss": 1.6045, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.062922892842162e-06, |
|
"loss": 1.5999, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0528128803355676e-06, |
|
"loss": 1.5537, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0427485063772013e-06, |
|
"loss": 1.5887, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0327298223042526e-06, |
|
"loss": 1.6186, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0227568792208475e-06, |
|
"loss": 1.5638, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.012829727997795e-06, |
|
"loss": 1.5997, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 1.0029484192723272e-06, |
|
"loss": 1.5386, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.931130034478343e-07, |
|
"loss": 1.5808, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.833235306936152e-07, |
|
"loss": 1.6269, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.735800509446191e-07, |
|
"loss": 1.5828, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.63882613901187e-07, |
|
"loss": 1.594, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.542312690288035e-07, |
|
"loss": 1.577, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.446260655578465e-07, |
|
"loss": 1.5923, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.35067052483325e-07, |
|
"loss": 1.5896, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.255542785646388e-07, |
|
"loss": 1.5548, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.160877923253298e-07, |
|
"loss": 1.5974, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 9.06667642052832e-07, |
|
"loss": 1.5632, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.972938757982164e-07, |
|
"loss": 1.5737, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.879665413759663e-07, |
|
"loss": 1.6042, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.78685686363715e-07, |
|
"loss": 1.6122, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.694513581020103e-07, |
|
"loss": 1.5825, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.602636036940771e-07, |
|
"loss": 1.5815, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.511224700055676e-07, |
|
"loss": 1.6098, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.420280036643246e-07, |
|
"loss": 1.5809, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.329802510601559e-07, |
|
"loss": 1.5533, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.239792583445794e-07, |
|
"loss": 1.5476, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 0.87, |
|
"learning_rate": 8.150250714305996e-07, |
|
"loss": 1.5622, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 8.06117735992471e-07, |
|
"loss": 1.5803, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.972572974654625e-07, |
|
"loss": 1.5849, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.884438010456274e-07, |
|
"loss": 1.5995, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.796772916895745e-07, |
|
"loss": 1.5516, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.709578141142349e-07, |
|
"loss": 1.5175, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.622854127966373e-07, |
|
"loss": 1.5752, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.536601319736814e-07, |
|
"loss": 1.568, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.450820156419081e-07, |
|
"loss": 1.5458, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.365511075572784e-07, |
|
"loss": 1.5921, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.280674512349517e-07, |
|
"loss": 1.5311, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.196310899490577e-07, |
|
"loss": 1.5691, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.11242066732486e-07, |
|
"loss": 1.6182, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 7.029004243766536e-07, |
|
"loss": 1.6056, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 6.946062054312952e-07, |
|
"loss": 1.5944, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.863594522042494e-07, |
|
"loss": 1.5925, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.781602067612325e-07, |
|
"loss": 1.5635, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.700085109256261e-07, |
|
"loss": 1.6183, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.619044062782754e-07, |
|
"loss": 1.6058, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.538479341572624e-07, |
|
"loss": 1.6122, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.45839135657701e-07, |
|
"loss": 1.6157, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.378780516315342e-07, |
|
"loss": 1.6203, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.29964722687314e-07, |
|
"loss": 1.5581, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.220991891899941e-07, |
|
"loss": 1.5958, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.142814912607409e-07, |
|
"loss": 1.5424, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 6.06511668776708e-07, |
|
"loss": 1.5912, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.98789761370846e-07, |
|
"loss": 1.5947, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.91115808431696e-07, |
|
"loss": 1.5994, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.834898491031893e-07, |
|
"loss": 1.5659, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 0.89, |
|
"learning_rate": 5.759119222844445e-07, |
|
"loss": 1.5669, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.683820666295769e-07, |
|
"loss": 1.6232, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.609003205474917e-07, |
|
"loss": 1.5883, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.534667222016943e-07, |
|
"loss": 1.6414, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.46081309510097e-07, |
|
"loss": 1.5704, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.387441201448195e-07, |
|
"loss": 1.5795, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.314551915320021e-07, |
|
"loss": 1.5997, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.242145608516103e-07, |
|
"loss": 1.5907, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.17022265037247e-07, |
|
"loss": 1.6073, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.0987834077597e-07, |
|
"loss": 1.5701, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 5.027828245080945e-07, |
|
"loss": 1.6031, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.957357524270134e-07, |
|
"loss": 1.5959, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.887371604790136e-07, |
|
"loss": 1.5505, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.817870843630867e-07, |
|
"loss": 1.5855, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 4.748855595307522e-07, |
|
"loss": 1.545, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.6803262118587764e-07, |
|
"loss": 1.6207, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.612283042844945e-07, |
|
"loss": 1.601, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.544726435346203e-07, |
|
"loss": 1.6025, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.477656733960889e-07, |
|
"loss": 1.5926, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.41107428080364e-07, |
|
"loss": 1.5392, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.3449794155036805e-07, |
|
"loss": 1.6043, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.2793724752031807e-07, |
|
"loss": 1.5977, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.214253794555401e-07, |
|
"loss": 1.5908, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.1496237057230493e-07, |
|
"loss": 1.5825, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.085482538376628e-07, |
|
"loss": 1.5973, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 4.021830619692679e-07, |
|
"loss": 1.5748, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.958668274352129e-07, |
|
"loss": 1.5684, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.8959958245386896e-07, |
|
"loss": 1.6402, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.833813589937119e-07, |
|
"loss": 1.592, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 0.91, |
|
"learning_rate": 3.7721218877316836e-07, |
|
"loss": 1.5599, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.7109210326045197e-07, |
|
"loss": 1.5798, |
|
"step": 1314 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.6502113367339554e-07, |
|
"loss": 1.5656, |
|
"step": 1315 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.589993109793022e-07, |
|
"loss": 1.6507, |
|
"step": 1316 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.530266658947801e-07, |
|
"loss": 1.595, |
|
"step": 1317 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.471032288855869e-07, |
|
"loss": 1.5739, |
|
"step": 1318 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.4122903016648e-07, |
|
"loss": 1.5979, |
|
"step": 1319 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.354040997010555e-07, |
|
"loss": 1.605, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.296284672015959e-07, |
|
"loss": 1.5475, |
|
"step": 1321 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.23902162128924e-07, |
|
"loss": 1.6133, |
|
"step": 1322 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.182252136922481e-07, |
|
"loss": 1.5944, |
|
"step": 1323 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.125976508490125e-07, |
|
"loss": 1.5717, |
|
"step": 1324 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.070195023047551e-07, |
|
"loss": 1.5824, |
|
"step": 1325 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 3.014907965129521e-07, |
|
"loss": 1.5811, |
|
"step": 1326 |
|
}, |
|
{ |
|
"epoch": 0.92, |
|
"learning_rate": 2.9601156167488267e-07, |
|
"loss": 1.4796, |
|
"step": 1327 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.905818257394799e-07, |
|
"loss": 1.5754, |
|
"step": 1328 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.8520161640318675e-07, |
|
"loss": 1.5934, |
|
"step": 1329 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.798709611098138e-07, |
|
"loss": 1.567, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.745898870504116e-07, |
|
"loss": 1.5676, |
|
"step": 1331 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.6935842116311505e-07, |
|
"loss": 1.5823, |
|
"step": 1332 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.6417659013301513e-07, |
|
"loss": 1.5752, |
|
"step": 1333 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.590444203920239e-07, |
|
"loss": 1.6168, |
|
"step": 1334 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.53961938118733e-07, |
|
"loss": 1.601, |
|
"step": 1335 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.4892916923828667e-07, |
|
"loss": 1.5519, |
|
"step": 1336 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.4394613942224755e-07, |
|
"loss": 1.5668, |
|
"step": 1337 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.390128740884634e-07, |
|
"loss": 1.5644, |
|
"step": 1338 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.3412939840093853e-07, |
|
"loss": 1.6003, |
|
"step": 1339 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.292957372697102e-07, |
|
"loss": 1.6067, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 2.245119153507136e-07, |
|
"loss": 1.5845, |
|
"step": 1341 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.197779570456615e-07, |
|
"loss": 1.5881, |
|
"step": 1342 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.1509388650191699e-07, |
|
"loss": 1.6118, |
|
"step": 1343 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.104597276123721e-07, |
|
"loss": 1.5527, |
|
"step": 1344 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.0587550401532818e-07, |
|
"loss": 1.5647, |
|
"step": 1345 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 2.013412390943681e-07, |
|
"loss": 1.6079, |
|
"step": 1346 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9685695597824518e-07, |
|
"loss": 1.5682, |
|
"step": 1347 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.9242267754075782e-07, |
|
"loss": 1.5774, |
|
"step": 1348 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8803842640063853e-07, |
|
"loss": 1.5633, |
|
"step": 1349 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.8370422492143712e-07, |
|
"loss": 1.6008, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7942009521140447e-07, |
|
"loss": 1.5973, |
|
"step": 1351 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7518605912338337e-07, |
|
"loss": 1.5985, |
|
"step": 1352 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.7100213825469114e-07, |
|
"loss": 1.5474, |
|
"step": 1353 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.668683539470173e-07, |
|
"loss": 1.5802, |
|
"step": 1354 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.6278472728631035e-07, |
|
"loss": 1.6278, |
|
"step": 1355 |
|
}, |
|
{ |
|
"epoch": 0.94, |
|
"learning_rate": 1.5875127910266462e-07, |
|
"loss": 1.5925, |
|
"step": 1356 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5476802997022812e-07, |
|
"loss": 1.5499, |
|
"step": 1357 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.5083500020708575e-07, |
|
"loss": 1.5955, |
|
"step": 1358 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.4695220987515634e-07, |
|
"loss": 1.5748, |
|
"step": 1359 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.4311967878010035e-07, |
|
"loss": 1.6005, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.393374264712044e-07, |
|
"loss": 1.5821, |
|
"step": 1361 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3560547224129472e-07, |
|
"loss": 1.6163, |
|
"step": 1362 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.3192383512662832e-07, |
|
"loss": 1.6259, |
|
"step": 1363 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.282925339068042e-07, |
|
"loss": 1.5645, |
|
"step": 1364 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.2471158710466113e-07, |
|
"loss": 1.541, |
|
"step": 1365 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.21181012986189e-07, |
|
"loss": 1.6008, |
|
"step": 1366 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1770082956042983e-07, |
|
"loss": 1.5446, |
|
"step": 1367 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1427105457939014e-07, |
|
"loss": 1.555, |
|
"step": 1368 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.1089170553794659e-07, |
|
"loss": 1.5895, |
|
"step": 1369 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 1.075627996737627e-07, |
|
"loss": 1.5976, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0428435396719449e-07, |
|
"loss": 1.5929, |
|
"step": 1371 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 1.0105638514120719e-07, |
|
"loss": 1.5855, |
|
"step": 1372 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.787890966129088e-08, |
|
"loss": 1.5982, |
|
"step": 1373 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.475194373537388e-08, |
|
"loss": 1.5565, |
|
"step": 1374 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 9.167550331374065e-08, |
|
"loss": 1.5263, |
|
"step": 1375 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.864960408895173e-08, |
|
"loss": 1.5731, |
|
"step": 1376 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.567426149576286e-08, |
|
"loss": 1.5575, |
|
"step": 1377 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 8.274949071104599e-08, |
|
"loss": 1.5968, |
|
"step": 1378 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.98753066537128e-08, |
|
"loss": 1.595, |
|
"step": 1379 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.705172398463911e-08, |
|
"loss": 1.5671, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.427875710658616e-08, |
|
"loss": 1.5861, |
|
"step": 1381 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 7.155642016413166e-08, |
|
"loss": 1.5737, |
|
"step": 1382 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.888472704359661e-08, |
|
"loss": 1.5752, |
|
"step": 1383 |
|
}, |
|
{ |
|
"epoch": 0.96, |
|
"learning_rate": 6.626369137297306e-08, |
|
"loss": 1.5806, |
|
"step": 1384 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.369332652185422e-08, |
|
"loss": 1.588, |
|
"step": 1385 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 6.117364560137007e-08, |
|
"loss": 1.5946, |
|
"step": 1386 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.870466146411624e-08, |
|
"loss": 1.5976, |
|
"step": 1387 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.628638670408859e-08, |
|
"loss": 1.612, |
|
"step": 1388 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.391883365662431e-08, |
|
"loss": 1.5435, |
|
"step": 1389 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 5.16020143983309e-08, |
|
"loss": 1.6457, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.933594074703063e-08, |
|
"loss": 1.5453, |
|
"step": 1391 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.712062426170061e-08, |
|
"loss": 1.549, |
|
"step": 1392 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.495607624240728e-08, |
|
"loss": 1.574, |
|
"step": 1393 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.284230773025533e-08, |
|
"loss": 1.5367, |
|
"step": 1394 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 4.07793295073311e-08, |
|
"loss": 1.5497, |
|
"step": 1395 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.8767152096641504e-08, |
|
"loss": 1.5954, |
|
"step": 1396 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.680578576206739e-08, |
|
"loss": 1.5961, |
|
"step": 1397 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.489524050830806e-08, |
|
"loss": 1.5609, |
|
"step": 1398 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 3.303552608082905e-08, |
|
"loss": 1.5851, |
|
"step": 1399 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 3.122665196581554e-08, |
|
"loss": 1.6068, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.946862739012013e-08, |
|
"loss": 1.5744, |
|
"step": 1401 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.7761461321220706e-08, |
|
"loss": 1.5848, |
|
"step": 1402 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.610516246717043e-08, |
|
"loss": 1.6052, |
|
"step": 1403 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.449973927655336e-08, |
|
"loss": 1.59, |
|
"step": 1404 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.2945199938445572e-08, |
|
"loss": 1.5922, |
|
"step": 1405 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 2.1441552382368557e-08, |
|
"loss": 1.5977, |
|
"step": 1406 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.998880427825256e-08, |
|
"loss": 1.6205, |
|
"step": 1407 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.858696303639218e-08, |
|
"loss": 1.5525, |
|
"step": 1408 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.7236035807416397e-08, |
|
"loss": 1.5953, |
|
"step": 1409 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.5936029482246374e-08, |
|
"loss": 1.5664, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.4686950692059943e-08, |
|
"loss": 1.5835, |
|
"step": 1411 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.3488805808262729e-08, |
|
"loss": 1.5709, |
|
"step": 1412 |
|
}, |
|
{ |
|
"epoch": 0.98, |
|
"learning_rate": 1.2341600942449295e-08, |
|
"loss": 1.5872, |
|
"step": 1413 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.1245341946376499e-08, |
|
"loss": 1.6021, |
|
"step": 1414 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.0200034411933513e-08, |
|
"loss": 1.612, |
|
"step": 1415 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.205683671109633e-09, |
|
"loss": 1.5772, |
|
"step": 1416 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 8.26229479597207e-09, |
|
"loss": 1.5805, |
|
"step": 1417 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 7.369872598635974e-09, |
|
"loss": 1.5357, |
|
"step": 1418 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 6.528421631242232e-09, |
|
"loss": 1.5783, |
|
"step": 1419 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 5.737946185933041e-09, |
|
"loss": 1.6011, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.998450294830814e-09, |
|
"loss": 1.5916, |
|
"step": 1421 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 4.309937730015978e-09, |
|
"loss": 1.5637, |
|
"step": 1422 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.6724120035114276e-09, |
|
"loss": 1.5541, |
|
"step": 1423 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 3.085876367260321e-09, |
|
"loss": 1.529, |
|
"step": 1424 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.5503338131116496e-09, |
|
"loss": 1.5896, |
|
"step": 1425 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 2.0657870728046925e-09, |
|
"loss": 1.6168, |
|
"step": 1426 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 1.6322386179556949e-09, |
|
"loss": 1.6004, |
|
"step": 1427 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.2496906600445446e-09, |
|
"loss": 1.5373, |
|
"step": 1428 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 9.181451504036709e-10, |
|
"loss": 1.5639, |
|
"step": 1429 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 6.376037802069413e-10, |
|
"loss": 1.6132, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 4.0806798046300056e-10, |
|
"loss": 1.5546, |
|
"step": 1431 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.2953892200638927e-10, |
|
"loss": 1.6208, |
|
"step": 1432 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 1.0201751549421269e-10, |
|
"loss": 1.5726, |
|
"step": 1433 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 2.550441139725912e-11, |
|
"loss": 1.5562, |
|
"step": 1434 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"learning_rate": 0.0, |
|
"loss": 1.5919, |
|
"step": 1435 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"step": 1435, |
|
"total_flos": 2.764607855473459e+16, |
|
"train_loss": 1.616559085031835, |
|
"train_runtime": 55108.0061, |
|
"train_samples_per_second": 13.332, |
|
"train_steps_per_second": 0.026 |
|
} |
|
], |
|
"logging_steps": 1.0, |
|
"max_steps": 1435, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 2000, |
|
"total_flos": 2.764607855473459e+16, |
|
"train_batch_size": 32, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |