{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 20.0,
  "eval_steps": 500,
  "global_step": 160,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.12, "learning_rate": 2e-05, "loss": 2.1504, "step": 1 },
    { "epoch": 0.25, "learning_rate": 4e-05, "loss": 2.3459, "step": 2 },
    { "epoch": 0.38, "learning_rate": 6e-05, "loss": 2.222, "step": 3 },
    { "epoch": 0.5, "learning_rate": 8e-05, "loss": 2.3004, "step": 4 },
    { "epoch": 0.62, "learning_rate": 0.0001, "loss": 2.2496, "step": 5 },
    { "epoch": 0.75, "learning_rate": 9.935483870967742e-05, "loss": 1.8725, "step": 6 },
    { "epoch": 0.88, "learning_rate": 9.870967741935484e-05, "loss": 2.1768, "step": 7 },
    { "epoch": 1.0, "learning_rate": 9.806451612903226e-05, "loss": 2.1073, "step": 8 },
    { "epoch": 1.12, "learning_rate": 9.741935483870968e-05, "loss": 1.8229, "step": 9 },
    { "epoch": 1.25, "learning_rate": 9.677419354838711e-05, "loss": 1.6071, "step": 10 },
    { "epoch": 1.38, "learning_rate": 9.612903225806452e-05, "loss": 1.8617, "step": 11 },
    { "epoch": 1.5, "learning_rate": 9.548387096774195e-05, "loss": 1.7659, "step": 12 },
    { "epoch": 1.62, "learning_rate": 9.483870967741936e-05, "loss": 1.533, "step": 13 },
    { "epoch": 1.75, "learning_rate": 9.419354838709677e-05, "loss": 1.4105, "step": 14 },
    { "epoch": 1.88, "learning_rate": 9.35483870967742e-05, "loss": 1.4407, "step": 15 },
    { "epoch": 2.0, "learning_rate": 9.35483870967742e-05, "loss": 1.3275, "step": 16 },
    { "epoch": 2.12, "learning_rate": 9.290322580645162e-05, "loss": 1.2213, "step": 17 },
    { "epoch": 2.25, "learning_rate": 9.225806451612904e-05, "loss": 1.137, "step": 18 },
    { "epoch": 2.38, "learning_rate": 9.161290322580646e-05, "loss": 1.4059, "step": 19 },
    { "epoch": 2.5, "learning_rate": 9.096774193548387e-05, "loss": 1.4018, "step": 20 },
    { "epoch": 2.62, "learning_rate": 9.032258064516129e-05, "loss": 1.4079, "step": 21 },
    { "epoch": 2.75, "learning_rate": 8.967741935483871e-05, "loss": 1.3643, "step": 22 },
    { "epoch": 2.88, "learning_rate": 8.903225806451614e-05, "loss": 1.394, "step": 23 },
    { "epoch": 3.0, "learning_rate": 8.838709677419355e-05, "loss": 1.085, "step": 24 },
    { "epoch": 3.12, "learning_rate": 8.774193548387098e-05, "loss": 1.2241, "step": 25 },
    { "epoch": 3.25, "learning_rate": 8.709677419354839e-05, "loss": 1.2023, "step": 26 },
    { "epoch": 3.38, "learning_rate": 8.645161290322581e-05, "loss": 1.2069, "step": 27 },
    { "epoch": 3.5, "learning_rate": 8.580645161290323e-05, "loss": 0.8765, "step": 28 },
    { "epoch": 3.62, "learning_rate": 8.516129032258064e-05, "loss": 1.0117, "step": 29 },
    { "epoch": 3.75, "learning_rate": 8.451612903225808e-05, "loss": 1.0511, "step": 30 },
    { "epoch": 3.88, "learning_rate": 8.387096774193549e-05, "loss": 0.9173, "step": 31 },
    { "epoch": 4.0, "learning_rate": 8.32258064516129e-05, "loss": 0.8711, "step": 32 },
    { "epoch": 4.12, "learning_rate": 8.258064516129033e-05, "loss": 0.8497, "step": 33 },
    { "epoch": 4.25, "learning_rate": 8.193548387096774e-05, "loss": 0.9193, "step": 34 },
    { "epoch": 4.38, "learning_rate": 8.129032258064517e-05, "loss": 0.768, "step": 35 },
    { "epoch": 4.5, "learning_rate": 8.064516129032258e-05, "loss": 0.6524, "step": 36 },
    { "epoch": 4.62, "learning_rate": 8e-05, "loss": 0.8045, "step": 37 },
    { "epoch": 4.75, "learning_rate": 7.935483870967743e-05, "loss": 0.7216, "step": 38 },
    { "epoch": 4.88, "learning_rate": 7.870967741935484e-05, "loss": 0.5946, "step": 39 },
    { "epoch": 5.0, "learning_rate": 7.806451612903226e-05, "loss": 0.7398, "step": 40 },
    { "epoch": 5.12, "learning_rate": 7.741935483870968e-05, "loss": 0.5371, "step": 41 },
    { "epoch": 5.25, "learning_rate": 7.67741935483871e-05, "loss": 0.5425, "step": 42 },
    { "epoch": 5.38, "learning_rate": 7.612903225806451e-05, "loss": 0.4687, "step": 43 },
    { "epoch": 5.5, "learning_rate": 7.548387096774195e-05, "loss": 0.4539, "step": 44 },
    { "epoch": 5.62, "learning_rate": 7.483870967741936e-05, "loss": 0.3761, "step": 45 },
    { "epoch": 5.75, "learning_rate": 7.419354838709677e-05, "loss": 0.3375, "step": 46 },
    { "epoch": 5.88, "learning_rate": 7.35483870967742e-05, "loss": 0.2759, "step": 47 },
    { "epoch": 6.0, "learning_rate": 7.290322580645161e-05, "loss": 0.3232, "step": 48 },
    { "epoch": 6.12, "learning_rate": 7.225806451612904e-05, "loss": 0.2162, "step": 49 },
    { "epoch": 6.25, "learning_rate": 7.161290322580646e-05, "loss": 0.1798, "step": 50 },
    { "epoch": 6.38, "learning_rate": 7.096774193548388e-05, "loss": 0.1543, "step": 51 },
    { "epoch": 6.5, "learning_rate": 7.03225806451613e-05, "loss": 0.239, "step": 52 },
    { "epoch": 6.62, "learning_rate": 6.967741935483871e-05, "loss": 0.2297, "step": 53 },
    { "epoch": 6.75, "learning_rate": 6.903225806451613e-05, "loss": 0.2006, "step": 54 },
    { "epoch": 6.88, "learning_rate": 6.838709677419355e-05, "loss": 0.1463, "step": 55 },
    { "epoch": 7.0, "learning_rate": 6.774193548387096e-05, "loss": 0.2489, "step": 56 },
    { "epoch": 7.12, "learning_rate": 6.709677419354839e-05, "loss": 0.1289, "step": 57 },
    { "epoch": 7.25, "learning_rate": 6.645161290322582e-05, "loss": 0.1069, "step": 58 },
    { "epoch": 7.38, "learning_rate": 6.580645161290323e-05, "loss": 0.092, "step": 59 },
    { "epoch": 7.5, "learning_rate": 6.516129032258065e-05, "loss": 0.0909, "step": 60 },
    { "epoch": 7.62, "learning_rate": 6.451612903225807e-05, "loss": 0.1182, "step": 61 },
    { "epoch": 7.75, "learning_rate": 6.387096774193548e-05, "loss": 0.0968, "step": 62 },
    { "epoch": 7.88, "learning_rate": 6.32258064516129e-05, "loss": 0.0951, "step": 63 },
    { "epoch": 8.0, "learning_rate": 6.258064516129033e-05, "loss": 0.0895, "step": 64 },
    { "epoch": 8.12, "learning_rate": 6.193548387096774e-05, "loss": 0.0821, "step": 65 },
    { "epoch": 8.25, "learning_rate": 6.129032258064517e-05, "loss": 0.0888, "step": 66 },
    { "epoch": 8.38, "learning_rate": 6.064516129032258e-05, "loss": 0.0455, "step": 67 },
    { "epoch": 8.5, "learning_rate": 6e-05, "loss": 0.0482, "step": 68 },
    { "epoch": 8.62, "learning_rate": 5.935483870967742e-05, "loss": 0.0634, "step": 69 },
    { "epoch": 8.75, "learning_rate": 5.870967741935483e-05, "loss": 0.06, "step": 70 },
    { "epoch": 8.88, "learning_rate": 5.8064516129032266e-05, "loss": 0.0598, "step": 71 },
    { "epoch": 9.0, "learning_rate": 5.7419354838709685e-05, "loss": 0.0679, "step": 72 },
    { "epoch": 9.12, "learning_rate": 5.67741935483871e-05, "loss": 0.0327, "step": 73 },
    { "epoch": 9.25, "learning_rate": 5.612903225806452e-05, "loss": 0.0443, "step": 74 },
    { "epoch": 9.38, "learning_rate": 5.5483870967741936e-05, "loss": 0.0393, "step": 75 },
    { "epoch": 9.5, "learning_rate": 5.4838709677419355e-05, "loss": 0.0396, "step": 76 },
    { "epoch": 9.62, "learning_rate": 5.419354838709678e-05, "loss": 0.0483, "step": 77 },
    { "epoch": 9.75, "learning_rate": 5.35483870967742e-05, "loss": 0.0541, "step": 78 },
    { "epoch": 9.88, "learning_rate": 5.290322580645162e-05, "loss": 0.0455, "step": 79 },
    { "epoch": 10.0, "learning_rate": 5.225806451612903e-05, "loss": 0.0392, "step": 80 },
    { "epoch": 10.12, "learning_rate": 5.161290322580645e-05, "loss": 0.028, "step": 81 },
    { "epoch": 10.25, "learning_rate": 5.096774193548387e-05, "loss": 0.0393, "step": 82 },
    { "epoch": 10.38, "learning_rate": 5.032258064516129e-05, "loss": 0.0343, "step": 83 },
    { "epoch": 10.5, "learning_rate": 4.967741935483871e-05, "loss": 0.026, "step": 84 },
    { "epoch": 10.62, "learning_rate": 4.903225806451613e-05, "loss": 0.0258, "step": 85 },
    { "epoch": 10.75, "learning_rate": 4.8387096774193554e-05, "loss": 0.024, "step": 86 },
    { "epoch": 10.88, "learning_rate": 4.774193548387097e-05, "loss": 0.0457, "step": 87 },
    { "epoch": 11.0, "learning_rate": 4.7096774193548385e-05, "loss": 0.0348, "step": 88 },
    { "epoch": 11.12, "learning_rate": 4.645161290322581e-05, "loss": 0.0277, "step": 89 },
    { "epoch": 11.25, "learning_rate": 4.580645161290323e-05, "loss": 0.0247, "step": 90 },
    { "epoch": 11.38, "learning_rate": 4.516129032258064e-05, "loss": 0.0331, "step": 91 },
    { "epoch": 11.5, "learning_rate": 4.451612903225807e-05, "loss": 0.02, "step": 92 },
    { "epoch": 11.62, "learning_rate": 4.387096774193549e-05, "loss": 0.0259, "step": 93 },
    { "epoch": 11.75, "learning_rate": 4.322580645161291e-05, "loss": 0.0229, "step": 94 },
    { "epoch": 11.88, "learning_rate": 4.258064516129032e-05, "loss": 0.0326, "step": 95 },
    { "epoch": 12.0, "learning_rate": 4.1935483870967746e-05, "loss": 0.0237, "step": 96 },
    { "epoch": 12.12, "learning_rate": 4.1290322580645165e-05, "loss": 0.0259, "step": 97 },
    { "epoch": 12.25, "learning_rate": 4.0645161290322584e-05, "loss": 0.0216, "step": 98 },
    { "epoch": 12.38, "learning_rate": 4e-05, "loss": 0.0265, "step": 99 },
    { "epoch": 12.5, "learning_rate": 3.935483870967742e-05, "loss": 0.0229, "step": 100 },
    { "epoch": 12.62, "learning_rate": 3.870967741935484e-05, "loss": 0.0185, "step": 101 },
    { "epoch": 12.75, "learning_rate": 3.8064516129032254e-05, "loss": 0.0228, "step": 102 },
    { "epoch": 12.88, "learning_rate": 3.741935483870968e-05, "loss": 0.0246, "step": 103 },
    { "epoch": 13.0, "learning_rate": 3.67741935483871e-05, "loss": 0.0205, "step": 104 },
    { "epoch": 13.12, "learning_rate": 3.612903225806452e-05, "loss": 0.0197, "step": 105 },
    { "epoch": 13.25, "learning_rate": 3.548387096774194e-05, "loss": 0.0213, "step": 106 },
    { "epoch": 13.38, "learning_rate": 3.483870967741936e-05, "loss": 0.0173, "step": 107 },
    { "epoch": 13.5, "learning_rate": 3.4193548387096776e-05, "loss": 0.0244, "step": 108 },
    { "epoch": 13.62, "learning_rate": 3.3548387096774195e-05, "loss": 0.0193, "step": 109 },
    { "epoch": 13.75, "learning_rate": 3.2903225806451614e-05, "loss": 0.0246, "step": 110 },
    { "epoch": 13.88, "learning_rate": 3.2258064516129034e-05, "loss": 0.0276, "step": 111 },
    { "epoch": 14.0, "learning_rate": 3.161290322580645e-05, "loss": 0.0234, "step": 112 },
    { "epoch": 14.12, "learning_rate": 3.096774193548387e-05, "loss": 0.0218, "step": 113 },
    { "epoch": 14.25, "learning_rate": 3.032258064516129e-05, "loss": 0.0212, "step": 114 },
    { "epoch": 14.38, "learning_rate": 2.967741935483871e-05, "loss": 0.0177, "step": 115 },
    { "epoch": 14.5, "learning_rate": 2.9032258064516133e-05, "loss": 0.0221, "step": 116 },
    { "epoch": 14.62, "learning_rate": 2.838709677419355e-05, "loss": 0.0211, "step": 117 },
    { "epoch": 14.75, "learning_rate": 2.7741935483870968e-05, "loss": 0.0235, "step": 118 },
    { "epoch": 14.88, "learning_rate": 2.709677419354839e-05, "loss": 0.0205, "step": 119 },
    { "epoch": 15.0, "learning_rate": 2.645161290322581e-05, "loss": 0.0205, "step": 120 },
    { "epoch": 15.12, "learning_rate": 2.5806451612903226e-05, "loss": 0.0194, "step": 121 },
    { "epoch": 15.25, "learning_rate": 2.5161290322580645e-05, "loss": 0.0209, "step": 122 },
    { "epoch": 15.38, "learning_rate": 2.4516129032258064e-05, "loss": 0.0186, "step": 123 },
    { "epoch": 15.5, "learning_rate": 2.3870967741935486e-05, "loss": 0.019, "step": 124 },
    { "epoch": 15.62, "learning_rate": 2.3225806451612906e-05, "loss": 0.0199, "step": 125 },
    { "epoch": 15.75, "learning_rate": 2.258064516129032e-05, "loss": 0.0253, "step": 126 },
    { "epoch": 15.88, "learning_rate": 2.1935483870967744e-05, "loss": 0.0238, "step": 127 },
    { "epoch": 16.0, "learning_rate": 2.129032258064516e-05, "loss": 0.0243, "step": 128 },
    { "epoch": 16.12, "learning_rate": 2.0645161290322582e-05, "loss": 0.0211, "step": 129 },
    { "epoch": 16.25, "learning_rate": 2e-05, "loss": 0.0193, "step": 130 },
    { "epoch": 16.38, "learning_rate": 1.935483870967742e-05, "loss": 0.0172, "step": 131 },
    { "epoch": 16.5, "learning_rate": 1.870967741935484e-05, "loss": 0.018, "step": 132 },
    { "epoch": 16.62, "learning_rate": 1.806451612903226e-05, "loss": 0.0208, "step": 133 },
    { "epoch": 16.75, "learning_rate": 1.741935483870968e-05, "loss": 0.0221, "step": 134 },
    { "epoch": 16.88, "learning_rate": 1.6774193548387098e-05, "loss": 0.023, "step": 135 },
    { "epoch": 17.0, "learning_rate": 1.6129032258064517e-05, "loss": 0.0242, "step": 136 },
    { "epoch": 17.12, "learning_rate": 1.5483870967741936e-05, "loss": 0.0197, "step": 137 },
    { "epoch": 17.25, "learning_rate": 1.4838709677419355e-05, "loss": 0.0173, "step": 138 },
    { "epoch": 17.38, "learning_rate": 1.4193548387096774e-05, "loss": 0.0196, "step": 139 },
    { "epoch": 17.5, "learning_rate": 1.3548387096774195e-05, "loss": 0.0219, "step": 140 },
    { "epoch": 17.62, "learning_rate": 1.2903225806451613e-05, "loss": 0.0212, "step": 141 },
    { "epoch": 17.75, "learning_rate": 1.2258064516129032e-05, "loss": 0.0228, "step": 142 },
    { "epoch": 17.88, "learning_rate": 1.1612903225806453e-05, "loss": 0.0208, "step": 143 },
    { "epoch": 18.0, "learning_rate": 1.0967741935483872e-05, "loss": 0.0188, "step": 144 },
    { "epoch": 18.12, "learning_rate": 1.0322580645161291e-05, "loss": 0.0198, "step": 145 },
    { "epoch": 18.25, "learning_rate": 9.67741935483871e-06, "loss": 0.0165, "step": 146 },
    { "epoch": 18.38, "learning_rate": 9.03225806451613e-06, "loss": 0.0219, "step": 147 },
    { "epoch": 18.5, "learning_rate": 8.387096774193549e-06, "loss": 0.0216, "step": 148 },
    { "epoch": 18.62, "learning_rate": 7.741935483870968e-06, "loss": 0.0197, "step": 149 },
    { "epoch": 18.75, "learning_rate": 7.096774193548387e-06, "loss": 0.0179, "step": 150 },
    { "epoch": 18.88, "learning_rate": 6.451612903225806e-06, "loss": 0.0209, "step": 151 },
    { "epoch": 19.0, "learning_rate": 5.806451612903226e-06, "loss": 0.0203, "step": 152 },
    { "epoch": 19.12, "learning_rate": 5.161290322580646e-06, "loss": 0.0213, "step": 153 },
    { "epoch": 19.25, "learning_rate": 4.516129032258065e-06, "loss": 0.0216, "step": 154 },
    { "epoch": 19.38, "learning_rate": 3.870967741935484e-06, "loss": 0.0179, "step": 155 },
    { "epoch": 19.5, "learning_rate": 3.225806451612903e-06, "loss": 0.0224, "step": 156 },
    { "epoch": 19.62, "learning_rate": 2.580645161290323e-06, "loss": 0.0183, "step": 157 },
    { "epoch": 19.75, "learning_rate": 1.935483870967742e-06, "loss": 0.0199, "step": 158 },
    { "epoch": 19.88, "learning_rate": 1.2903225806451614e-06, "loss": 0.0186, "step": 159 },
    { "epoch": 20.0, "learning_rate": 6.451612903225807e-07, "loss": 0.0233, "step": 160 },
    {
      "epoch": 20.0,
      "step": 160,
      "total_flos": 1.209918701961216e+16,
      "train_loss": 0.3965469028917141,
      "train_runtime": 374.4336,
      "train_samples_per_second": 1.709,
      "train_steps_per_second": 0.427
    }
  ],
  "logging_steps": 1,
  "max_steps": 160,
  "num_train_epochs": 20,
  "save_steps": 500,
  "total_flos": 1.209918701961216e+16,
  "trial_name": null,
  "trial_params": null
}