{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 160,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.12,
      "learning_rate": 2e-05,
      "loss": 2.2345,
      "step": 1
    },
    {
      "epoch": 0.25,
      "learning_rate": 4e-05,
      "loss": 1.9907,
      "step": 2
    },
    {
      "epoch": 0.38,
      "learning_rate": 6e-05,
      "loss": 2.0186,
      "step": 3
    },
    {
      "epoch": 0.5,
      "learning_rate": 8e-05,
      "loss": 2.1185,
      "step": 4
    },
    {
      "epoch": 0.62,
      "learning_rate": 0.0001,
      "loss": 2.0802,
      "step": 5
    },
    {
      "epoch": 0.75,
      "learning_rate": 9.935483870967742e-05,
      "loss": 2.1074,
      "step": 6
    },
    {
      "epoch": 0.88,
      "learning_rate": 9.870967741935484e-05,
      "loss": 2.0332,
      "step": 7
    },
    {
      "epoch": 1.0,
      "learning_rate": 9.806451612903226e-05,
      "loss": 1.4836,
      "step": 8
    },
    {
      "epoch": 1.12,
      "learning_rate": 9.741935483870968e-05,
      "loss": 1.8662,
      "step": 9
    },
    {
      "epoch": 1.25,
      "learning_rate": 9.677419354838711e-05,
      "loss": 1.4542,
      "step": 10
    },
    {
      "epoch": 1.38,
      "learning_rate": 9.612903225806452e-05,
      "loss": 1.4218,
      "step": 11
    },
    {
      "epoch": 1.5,
      "learning_rate": 9.548387096774195e-05,
      "loss": 1.265,
      "step": 12
    },
    {
      "epoch": 1.62,
      "learning_rate": 9.483870967741936e-05,
      "loss": 1.2859,
      "step": 13
    },
    {
      "epoch": 1.75,
      "learning_rate": 9.419354838709677e-05,
      "loss": 1.3553,
      "step": 14
    },
    {
      "epoch": 1.88,
      "learning_rate": 9.35483870967742e-05,
      "loss": 1.2128,
      "step": 15
    },
    {
      "epoch": 2.0,
      "learning_rate": 9.290322580645162e-05,
      "loss": 1.3138,
      "step": 16
    },
    {
      "epoch": 2.12,
      "learning_rate": 9.225806451612904e-05,
      "loss": 1.1788,
      "step": 17
    },
    {
      "epoch": 2.25,
      "learning_rate": 9.161290322580646e-05,
      "loss": 0.9879,
      "step": 18
    },
    {
      "epoch": 2.38,
      "learning_rate": 9.096774193548387e-05,
      "loss": 1.1457,
      "step": 19
    },
    {
      "epoch": 2.5,
      "learning_rate": 9.032258064516129e-05,
      "loss": 1.0302,
      "step": 20
    },
    {
      "epoch": 2.62,
      "learning_rate": 8.967741935483871e-05,
      "loss": 1.1569,
      "step": 21
    },
    {
      "epoch": 2.75,
      "learning_rate": 8.903225806451614e-05,
      "loss": 0.931,
      "step": 22
    },
    {
      "epoch": 2.88,
      "learning_rate": 8.838709677419355e-05,
      "loss": 0.9473,
      "step": 23
    },
    {
      "epoch": 3.0,
      "learning_rate": 8.774193548387098e-05,
      "loss": 0.9048,
      "step": 24
    },
    {
      "epoch": 3.12,
      "learning_rate": 8.709677419354839e-05,
      "loss": 1.0565,
      "step": 25
    },
    {
      "epoch": 3.25,
      "learning_rate": 8.645161290322581e-05,
      "loss": 0.8026,
      "step": 26
    },
    {
      "epoch": 3.38,
      "learning_rate": 8.580645161290323e-05,
      "loss": 0.6194,
      "step": 27
    },
    {
      "epoch": 3.5,
      "learning_rate": 8.516129032258064e-05,
      "loss": 0.524,
      "step": 28
    },
    {
      "epoch": 3.62,
      "learning_rate": 8.451612903225808e-05,
      "loss": 0.8018,
      "step": 29
    },
    {
      "epoch": 3.75,
      "learning_rate": 8.387096774193549e-05,
      "loss": 0.7436,
      "step": 30
    },
    {
      "epoch": 3.88,
      "learning_rate": 8.32258064516129e-05,
      "loss": 0.4924,
      "step": 31
    },
    {
      "epoch": 4.0,
      "learning_rate": 8.258064516129033e-05,
      "loss": 0.6367,
      "step": 32
    },
    {
      "epoch": 4.12,
      "learning_rate": 8.193548387096774e-05,
      "loss": 0.3933,
      "step": 33
    },
    {
      "epoch": 4.25,
      "learning_rate": 8.129032258064517e-05,
      "loss": 0.4873,
      "step": 34
    },
    {
      "epoch": 4.38,
      "learning_rate": 8.064516129032258e-05,
      "loss": 0.1927,
      "step": 35
    },
    {
      "epoch": 4.5,
      "learning_rate": 8e-05,
      "loss": 0.4483,
      "step": 36
    },
    {
      "epoch": 4.62,
      "learning_rate": 7.935483870967743e-05,
      "loss": 0.4607,
      "step": 37
    },
    {
      "epoch": 4.75,
      "learning_rate": 7.870967741935484e-05,
      "loss": 0.386,
      "step": 38
    },
    {
      "epoch": 4.88,
      "learning_rate": 7.806451612903226e-05,
      "loss": 0.4068,
      "step": 39
    },
    {
      "epoch": 5.0,
      "learning_rate": 7.741935483870968e-05,
      "loss": 0.4303,
      "step": 40
    },
    {
      "epoch": 5.12,
      "learning_rate": 7.67741935483871e-05,
      "loss": 0.2937,
      "step": 41
    },
    {
      "epoch": 5.25,
      "learning_rate": 7.612903225806451e-05,
      "loss": 0.1873,
      "step": 42
    },
    {
      "epoch": 5.38,
      "learning_rate": 7.548387096774195e-05,
      "loss": 0.1604,
      "step": 43
    },
    {
      "epoch": 5.5,
      "learning_rate": 7.483870967741936e-05,
      "loss": 0.2287,
      "step": 44
    },
    {
      "epoch": 5.62,
      "learning_rate": 7.419354838709677e-05,
      "loss": 0.1159,
      "step": 45
    },
    {
      "epoch": 5.75,
      "learning_rate": 7.35483870967742e-05,
      "loss": 0.1274,
      "step": 46
    },
    {
      "epoch": 5.88,
      "learning_rate": 7.290322580645161e-05,
      "loss": 0.1719,
      "step": 47
    },
    {
      "epoch": 6.0,
      "learning_rate": 7.225806451612904e-05,
      "loss": 0.126,
      "step": 48
    },
    {
      "epoch": 6.12,
      "learning_rate": 7.161290322580646e-05,
      "loss": 0.0905,
      "step": 49
    },
    {
      "epoch": 6.25,
      "learning_rate": 7.096774193548388e-05,
      "loss": 0.1262,
      "step": 50
    },
    {
      "epoch": 6.38,
      "learning_rate": 7.03225806451613e-05,
      "loss": 0.0789,
      "step": 51
    },
    {
      "epoch": 6.5,
      "learning_rate": 6.967741935483871e-05,
      "loss": 0.0773,
      "step": 52
    },
    {
      "epoch": 6.62,
      "learning_rate": 6.903225806451613e-05,
      "loss": 0.1051,
      "step": 53
    },
    {
      "epoch": 6.75,
      "learning_rate": 6.838709677419355e-05,
      "loss": 0.0453,
      "step": 54
    },
    {
      "epoch": 6.88,
      "learning_rate": 6.774193548387096e-05,
      "loss": 0.1433,
      "step": 55
    },
    {
      "epoch": 7.0,
      "learning_rate": 6.709677419354839e-05,
      "loss": 0.1128,
      "step": 56
    },
    {
      "epoch": 7.12,
      "learning_rate": 6.645161290322582e-05,
      "loss": 0.078,
      "step": 57
    },
    {
      "epoch": 7.25,
      "learning_rate": 6.580645161290323e-05,
      "loss": 0.0723,
      "step": 58
    },
    {
      "epoch": 7.38,
      "learning_rate": 6.516129032258065e-05,
      "loss": 0.0853,
      "step": 59
    },
    {
      "epoch": 7.5,
      "learning_rate": 6.451612903225807e-05,
      "loss": 0.0614,
      "step": 60
    },
    {
      "epoch": 7.62,
      "learning_rate": 6.387096774193548e-05,
      "loss": 0.0603,
      "step": 61
    },
    {
      "epoch": 7.75,
      "learning_rate": 6.32258064516129e-05,
      "loss": 0.0463,
      "step": 62
    },
    {
      "epoch": 7.88,
      "learning_rate": 6.258064516129033e-05,
      "loss": 0.0757,
      "step": 63
    },
    {
      "epoch": 8.0,
      "learning_rate": 6.193548387096774e-05,
      "loss": 0.0543,
      "step": 64
    },
    {
      "epoch": 8.12,
      "learning_rate": 6.129032258064517e-05,
      "loss": 0.0434,
      "step": 65
    },
    {
      "epoch": 8.25,
      "learning_rate": 6.064516129032258e-05,
      "loss": 0.067,
      "step": 66
    },
    {
      "epoch": 8.38,
      "learning_rate": 6e-05,
      "loss": 0.0439,
      "step": 67
    },
    {
      "epoch": 8.5,
      "learning_rate": 5.935483870967742e-05,
      "loss": 0.0539,
      "step": 68
    },
    {
      "epoch": 8.62,
      "learning_rate": 5.870967741935483e-05,
      "loss": 0.0502,
      "step": 69
    },
    {
      "epoch": 8.75,
      "learning_rate": 5.8064516129032266e-05,
      "loss": 0.0369,
      "step": 70
    },
    {
      "epoch": 8.88,
      "learning_rate": 5.7419354838709685e-05,
      "loss": 0.0721,
      "step": 71
    },
    {
      "epoch": 9.0,
      "learning_rate": 5.67741935483871e-05,
      "loss": 0.0808,
      "step": 72
    },
    {
      "epoch": 9.12,
      "learning_rate": 5.612903225806452e-05,
      "loss": 0.027,
      "step": 73
    },
    {
      "epoch": 9.25,
      "learning_rate": 5.5483870967741936e-05,
      "loss": 0.0477,
      "step": 74
    },
    {
      "epoch": 9.38,
      "learning_rate": 5.4838709677419355e-05,
      "loss": 0.0591,
      "step": 75
    },
    {
      "epoch": 9.5,
      "learning_rate": 5.419354838709678e-05,
      "loss": 0.0661,
      "step": 76
    },
    {
      "epoch": 9.62,
      "learning_rate": 5.35483870967742e-05,
      "loss": 0.0438,
      "step": 77
    },
    {
      "epoch": 9.75,
      "learning_rate": 5.290322580645162e-05,
      "loss": 0.0669,
      "step": 78
    },
    {
      "epoch": 9.88,
      "learning_rate": 5.225806451612903e-05,
      "loss": 0.0657,
      "step": 79
    },
    {
      "epoch": 10.0,
      "learning_rate": 5.161290322580645e-05,
      "loss": 0.0384,
      "step": 80
    },
    {
      "epoch": 10.12,
      "learning_rate": 5.096774193548387e-05,
      "loss": 0.0416,
      "step": 81
    },
    {
      "epoch": 10.25,
      "learning_rate": 5.032258064516129e-05,
      "loss": 0.0354,
      "step": 82
    },
    {
      "epoch": 10.38,
      "learning_rate": 4.967741935483871e-05,
      "loss": 0.0451,
      "step": 83
    },
    {
      "epoch": 10.5,
      "learning_rate": 4.903225806451613e-05,
      "loss": 0.0482,
      "step": 84
    },
    {
      "epoch": 10.62,
      "learning_rate": 4.8387096774193554e-05,
      "loss": 0.0596,
      "step": 85
    },
    {
      "epoch": 10.75,
      "learning_rate": 4.774193548387097e-05,
      "loss": 0.0475,
      "step": 86
    },
    {
      "epoch": 10.88,
      "learning_rate": 4.7096774193548385e-05,
      "loss": 0.0441,
      "step": 87
    },
    {
      "epoch": 11.0,
      "learning_rate": 4.645161290322581e-05,
      "loss": 0.0459,
      "step": 88
    },
    {
      "epoch": 11.12,
      "learning_rate": 4.580645161290323e-05,
      "loss": 0.0426,
      "step": 89
    },
    {
      "epoch": 11.25,
      "learning_rate": 4.516129032258064e-05,
      "loss": 0.0352,
      "step": 90
    },
    {
      "epoch": 11.38,
      "learning_rate": 4.451612903225807e-05,
      "loss": 0.032,
      "step": 91
    },
    {
      "epoch": 11.5,
      "learning_rate": 4.387096774193549e-05,
      "loss": 0.0408,
      "step": 92
    },
    {
      "epoch": 11.62,
      "learning_rate": 4.322580645161291e-05,
      "loss": 0.0379,
      "step": 93
    },
    {
      "epoch": 11.75,
      "learning_rate": 4.258064516129032e-05,
      "loss": 0.0516,
      "step": 94
    },
    {
      "epoch": 11.88,
      "learning_rate": 4.1935483870967746e-05,
      "loss": 0.0367,
      "step": 95
    },
    {
      "epoch": 12.0,
      "learning_rate": 4.1290322580645165e-05,
      "loss": 0.0503,
      "step": 96
    },
    {
      "epoch": 12.12,
      "learning_rate": 4.0645161290322584e-05,
      "loss": 0.0306,
      "step": 97
    },
    {
      "epoch": 12.25,
      "learning_rate": 4e-05,
      "loss": 0.031,
      "step": 98
    },
    {
      "epoch": 12.38,
      "learning_rate": 3.935483870967742e-05,
      "loss": 0.0339,
      "step": 99
    },
    {
      "epoch": 12.5,
      "learning_rate": 3.870967741935484e-05,
      "loss": 0.046,
      "step": 100
    },
    {
      "epoch": 12.62,
      "learning_rate": 3.8064516129032254e-05,
      "loss": 0.0437,
      "step": 101
    },
    {
      "epoch": 12.75,
      "learning_rate": 3.741935483870968e-05,
      "loss": 0.0336,
      "step": 102
    },
    {
      "epoch": 12.88,
      "learning_rate": 3.67741935483871e-05,
      "loss": 0.0462,
      "step": 103
    },
    {
      "epoch": 13.0,
      "learning_rate": 3.612903225806452e-05,
      "loss": 0.0349,
      "step": 104
    },
    {
      "epoch": 13.12,
      "learning_rate": 3.548387096774194e-05,
      "loss": 0.0558,
      "step": 105
    },
    {
      "epoch": 13.25,
      "learning_rate": 3.483870967741936e-05,
      "loss": 0.0353,
      "step": 106
    },
    {
      "epoch": 13.38,
      "learning_rate": 3.4193548387096776e-05,
      "loss": 0.0334,
      "step": 107
    },
    {
      "epoch": 13.5,
      "learning_rate": 3.3548387096774195e-05,
      "loss": 0.0323,
      "step": 108
    },
    {
      "epoch": 13.62,
      "learning_rate": 3.2903225806451614e-05,
      "loss": 0.03,
      "step": 109
    },
    {
      "epoch": 13.75,
      "learning_rate": 3.2258064516129034e-05,
      "loss": 0.035,
      "step": 110
    },
    {
      "epoch": 13.88,
      "learning_rate": 3.161290322580645e-05,
      "loss": 0.0351,
      "step": 111
    },
    {
      "epoch": 14.0,
      "learning_rate": 3.096774193548387e-05,
      "loss": 0.0429,
      "step": 112
    },
    {
      "epoch": 14.12,
      "learning_rate": 3.032258064516129e-05,
      "loss": 0.0369,
      "step": 113
    },
    {
      "epoch": 14.25,
      "learning_rate": 2.967741935483871e-05,
      "loss": 0.0333,
      "step": 114
    },
    {
      "epoch": 14.38,
      "learning_rate": 2.9032258064516133e-05,
      "loss": 0.0319,
      "step": 115
    },
    {
      "epoch": 14.5,
      "learning_rate": 2.838709677419355e-05,
      "loss": 0.056,
      "step": 116
    },
    {
      "epoch": 14.62,
      "learning_rate": 2.7741935483870968e-05,
      "loss": 0.0358,
      "step": 117
    },
    {
      "epoch": 14.75,
      "learning_rate": 2.709677419354839e-05,
      "loss": 0.0273,
      "step": 118
    },
    {
      "epoch": 14.88,
      "learning_rate": 2.645161290322581e-05,
      "loss": 0.0417,
      "step": 119
    },
    {
      "epoch": 15.0,
      "learning_rate": 2.5806451612903226e-05,
      "loss": 0.0316,
      "step": 120
    },
    {
      "epoch": 15.12,
      "learning_rate": 2.5161290322580645e-05,
      "loss": 0.0517,
      "step": 121
    },
    {
      "epoch": 15.25,
      "learning_rate": 2.4516129032258064e-05,
      "loss": 0.0384,
      "step": 122
    },
    {
      "epoch": 15.38,
      "learning_rate": 2.3870967741935486e-05,
      "loss": 0.0256,
      "step": 123
    },
    {
      "epoch": 15.5,
      "learning_rate": 2.3225806451612906e-05,
      "loss": 0.029,
      "step": 124
    },
    {
      "epoch": 15.62,
      "learning_rate": 2.258064516129032e-05,
      "loss": 0.0408,
      "step": 125
    },
    {
      "epoch": 15.75,
      "learning_rate": 2.1935483870967744e-05,
      "loss": 0.0354,
      "step": 126
    },
    {
      "epoch": 15.88,
      "learning_rate": 2.129032258064516e-05,
      "loss": 0.0272,
      "step": 127
    },
    {
      "epoch": 16.0,
      "learning_rate": 2.0645161290322582e-05,
      "loss": 0.0395,
      "step": 128
    },
    {
      "epoch": 16.12,
      "learning_rate": 2e-05,
      "loss": 0.0353,
      "step": 129
    },
    {
      "epoch": 16.25,
      "learning_rate": 1.935483870967742e-05,
      "loss": 0.0493,
      "step": 130
    },
    {
      "epoch": 16.38,
      "learning_rate": 1.870967741935484e-05,
      "loss": 0.0296,
      "step": 131
    },
    {
      "epoch": 16.5,
      "learning_rate": 1.806451612903226e-05,
      "loss": 0.0272,
      "step": 132
    },
    {
      "epoch": 16.62,
      "learning_rate": 1.741935483870968e-05,
      "loss": 0.0426,
      "step": 133
    },
    {
      "epoch": 16.75,
      "learning_rate": 1.6774193548387098e-05,
      "loss": 0.0241,
      "step": 134
    },
    {
      "epoch": 16.88,
      "learning_rate": 1.6129032258064517e-05,
      "loss": 0.0384,
      "step": 135
    },
    {
      "epoch": 17.0,
      "learning_rate": 1.5483870967741936e-05,
      "loss": 0.0298,
      "step": 136
    },
    {
      "epoch": 17.12,
      "learning_rate": 1.4838709677419355e-05,
      "loss": 0.0322,
      "step": 137
    },
    {
      "epoch": 17.25,
      "learning_rate": 1.4193548387096774e-05,
      "loss": 0.0284,
      "step": 138
    },
    {
      "epoch": 17.38,
      "learning_rate": 1.3548387096774195e-05,
      "loss": 0.0291,
      "step": 139
    },
    {
      "epoch": 17.5,
      "learning_rate": 1.2903225806451613e-05,
      "loss": 0.0296,
      "step": 140
    },
    {
      "epoch": 17.62,
      "learning_rate": 1.2258064516129032e-05,
      "loss": 0.0481,
      "step": 141
    },
    {
      "epoch": 17.75,
      "learning_rate": 1.1612903225806453e-05,
      "loss": 0.0574,
      "step": 142
    },
    {
      "epoch": 17.88,
      "learning_rate": 1.0967741935483872e-05,
      "loss": 0.0228,
      "step": 143
    },
    {
      "epoch": 18.0,
      "learning_rate": 1.0322580645161291e-05,
      "loss": 0.0318,
      "step": 144
    },
    {
      "epoch": 18.12,
      "learning_rate": 9.67741935483871e-06,
      "loss": 0.0395,
      "step": 145
    },
    {
      "epoch": 18.25,
      "learning_rate": 9.03225806451613e-06,
      "loss": 0.024,
      "step": 146
    },
    {
      "epoch": 18.38,
      "learning_rate": 8.387096774193549e-06,
      "loss": 0.0411,
      "step": 147
    },
    {
      "epoch": 18.5,
      "learning_rate": 7.741935483870968e-06,
      "loss": 0.0319,
      "step": 148
    },
    {
      "epoch": 18.62,
      "learning_rate": 7.096774193548387e-06,
      "loss": 0.0357,
      "step": 149
    },
    {
      "epoch": 18.75,
      "learning_rate": 6.451612903225806e-06,
      "loss": 0.0238,
      "step": 150
    },
    {
      "epoch": 18.88,
      "learning_rate": 5.806451612903226e-06,
      "loss": 0.0349,
      "step": 151
    },
    {
      "epoch": 19.0,
      "learning_rate": 5.161290322580646e-06,
      "loss": 0.0335,
      "step": 152
    },
    {
      "epoch": 19.12,
      "learning_rate": 4.516129032258065e-06,
      "loss": 0.029,
      "step": 153
    },
    {
      "epoch": 19.25,
      "learning_rate": 3.870967741935484e-06,
      "loss": 0.0303,
      "step": 154
    },
    {
      "epoch": 19.38,
      "learning_rate": 3.225806451612903e-06,
      "loss": 0.0294,
      "step": 155
    },
    {
      "epoch": 19.5,
      "learning_rate": 2.580645161290323e-06,
      "loss": 0.0395,
      "step": 156
    },
    {
      "epoch": 19.62,
      "learning_rate": 1.935483870967742e-06,
      "loss": 0.0309,
      "step": 157
    },
    {
      "epoch": 19.75,
      "learning_rate": 1.2903225806451614e-06,
      "loss": 0.0277,
      "step": 158
    },
    {
      "epoch": 19.88,
      "learning_rate": 6.451612903225807e-07,
      "loss": 0.0288,
      "step": 159
    },
    {
      "epoch": 20.0,
      "learning_rate": 0.0,
      "loss": 0.0435,
      "step": 160
    },
    {
      "epoch": 20.0,
      "step": 160,
      "total_flos": 8168227523788800.0,
      "train_loss": 0.31843259939923885,
      "train_runtime": 268.7126,
      "train_samples_per_second": 2.382,
      "train_steps_per_second": 0.595
    }
  ],
  "logging_steps": 1,
  "max_steps": 160,
  "num_train_epochs": 20,
  "save_steps": 500,
  "total_flos": 8168227523788800.0,
  "trial_name": null,
  "trial_params": null
}