{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 1314,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "learning_rate": 5e-06, "loss": 1.6819, "step": 1 },
    { "epoch": 0.0, "learning_rate": 1e-05, "loss": 1.3456, "step": 2 },
    { "epoch": 0.01, "learning_rate": 1.5e-05, "loss": 1.6309, "step": 3 },
    { "epoch": 0.01, "learning_rate": 2e-05, "loss": 1.5144, "step": 4 },
    { "epoch": 0.01, "learning_rate": 2.5e-05, "loss": 1.5809, "step": 5 },
    { "epoch": 0.01, "learning_rate": 3e-05, "loss": 1.4337, "step": 6 },
    { "epoch": 0.02, "learning_rate": 3.5e-05, "loss": 1.4476, "step": 7 },
    { "epoch": 0.02, "learning_rate": 4e-05, "loss": 1.5057, "step": 8 },
    { "epoch": 0.02, "learning_rate": 4.5e-05, "loss": 1.2802, "step": 9 },
    { "epoch": 0.02, "learning_rate": 5e-05, "loss": 1.4187, "step": 10 },
    { "epoch": 0.03, "learning_rate": 5.500000000000001e-05, "loss": 1.3839, "step": 11 },
    { "epoch": 0.03, "learning_rate": 6e-05, "loss": 1.3517, "step": 12 },
    { "epoch": 0.03, "learning_rate": 6.500000000000001e-05, "loss": 1.1437, "step": 13 },
    { "epoch": 0.03, "learning_rate": 7e-05, "loss": 1.432, "step": 14 },
    { "epoch": 0.03, "learning_rate": 7.500000000000001e-05, "loss": 1.3195, "step": 15 },
    { "epoch": 0.04, "learning_rate": 8e-05, "loss": 1.2114, "step": 16 },
    { "epoch": 0.04, "learning_rate": 8.5e-05, "loss": 1.1665, "step": 17 },
    { "epoch": 0.04, "learning_rate": 9e-05, "loss": 1.4073, "step": 18 },
    { "epoch": 0.04, "learning_rate": 9.5e-05, "loss": 1.2103, "step": 19 },
    { "epoch": 0.05, "learning_rate": 0.0001, "loss": 1.3239, "step": 20 },
    { "epoch": 0.05, "learning_rate": 0.000105, "loss": 1.2707, "step": 21 },
    { "epoch": 0.05, "learning_rate": 0.00011000000000000002, "loss": 1.1545, "step": 22 },
    { "epoch": 0.05, "learning_rate": 0.00011499999999999999, "loss": 1.3175, "step": 23 },
    { "epoch": 0.05, "learning_rate": 0.00012, "loss": 1.0579, "step": 24 },
    { "epoch": 0.06, "learning_rate": 0.000125, "loss": 1.3129, "step": 25 },
    { "epoch": 0.06, "learning_rate": 0.00013000000000000002, "loss": 1.165, "step": 26 },
    { "epoch": 0.06, "learning_rate": 0.00013500000000000003, "loss": 1.2197, "step": 27 },
    { "epoch": 0.06, "learning_rate": 0.00014, "loss": 1.2123, "step": 28 },
    { "epoch": 0.07, "learning_rate": 0.000145, "loss": 1.2125, "step": 29 },
    { "epoch": 0.07, "learning_rate": 0.00015000000000000001, "loss": 1.1781, "step": 30 },
    { "epoch": 0.07, "learning_rate": 0.000155, "loss": 1.2041, "step": 31 },
    { "epoch": 0.07, "learning_rate": 0.00016, "loss": 1.1366, "step": 32 },
    { "epoch": 0.08, "learning_rate": 0.000165, "loss": 1.146, "step": 33 },
    { "epoch": 0.08, "learning_rate": 0.00017, "loss": 1.1651, "step": 34 },
    { "epoch": 0.08, "learning_rate": 0.000175, "loss": 1.239, "step": 35 },
    { "epoch": 0.08, "learning_rate": 0.00018, "loss": 1.2837, "step": 36 },
    { "epoch": 0.08, "learning_rate": 0.00018500000000000002, "loss": 1.1846, "step": 37 },
    { "epoch": 0.09, "learning_rate": 0.00019, "loss": 1.3183, "step": 38 },
    { "epoch": 0.09, "learning_rate": 0.000195, "loss": 1.2054, "step": 39 },
    { "epoch": 0.09, "learning_rate": 0.0002, "loss": 1.2084, "step": 40 },
    { "epoch": 0.09, "learning_rate": 0.00019999969596003515, "loss": 1.1971, "step": 41 },
    { "epoch": 0.1, "learning_rate": 0.0001999987838419894, "loss": 1.202, "step": 42 },
    { "epoch": 0.1, "learning_rate": 0.00019999726365140918, "loss": 1.4853, "step": 43 },
    { "epoch": 0.1, "learning_rate": 0.0001999951353975384, "loss": 1.1747, "step": 44 },
    { "epoch": 0.1, "learning_rate": 0.0001999923990933186, "loss": 1.3454, "step": 45 },
    { "epoch": 0.11, "learning_rate": 0.00019998905475538873, "loss": 1.2324, "step": 46 },
    { "epoch": 0.11, "learning_rate": 0.00019998510240408496, "loss": 1.1027, "step": 47 },
    { "epoch": 0.11, "learning_rate": 0.00019998054206344076, "loss": 1.1632, "step": 48 },
    { "epoch": 0.11, "learning_rate": 0.00019997537376118668, "loss": 1.1752, "step": 49 },
    { "epoch": 0.11, "learning_rate": 0.0001999695975287501, "loss": 1.2875, "step": 50 },
    { "epoch": 0.12, "learning_rate": 0.00019996321340125519, "loss": 1.1448, "step": 51 },
    { "epoch": 0.12, "learning_rate": 0.00019995622141752244, "loss": 1.1917, "step": 52 },
    { "epoch": 0.12, "learning_rate": 0.0001999486216200688, "loss": 1.3177, "step": 53 },
    { "epoch": 0.12, "learning_rate": 0.00019994041405510705, "loss": 1.272, "step": 54 },
    { "epoch": 0.13, "learning_rate": 0.00019993159877254577, "loss": 1.1357, "step": 55 },
    { "epoch": 0.13, "learning_rate": 0.00019992217582598894, "loss": 1.2682, "step": 56 },
    { "epoch": 0.13, "learning_rate": 0.00019991214527273558, "loss": 1.1612, "step": 57 },
    { "epoch": 0.13, "learning_rate": 0.00019990150717377948, "loss": 1.1252, "step": 58 },
    { "epoch": 0.13, "learning_rate": 0.0001998902615938088, "loss": 1.125, "step": 59 },
    { "epoch": 0.14, "learning_rate": 0.00019987840860120562, "loss": 1.2295, "step": 60 },
    { "epoch": 0.14, "learning_rate": 0.0001998659482680456, "loss": 1.2334, "step": 61 },
    { "epoch": 0.14, "learning_rate": 0.00019985288067009762, "loss": 1.2692, "step": 62 },
    { "epoch": 0.14, "learning_rate": 0.00019983920588682302, "loss": 1.2275, "step": 63 },
    { "epoch": 0.15, "learning_rate": 0.00019982492400137544, "loss": 1.2909, "step": 64 },
    { "epoch": 0.15, "learning_rate": 0.00019981003510060016, "loss": 1.1595, "step": 65 },
    { "epoch": 0.15, "learning_rate": 0.00019979453927503364, "loss": 1.1614, "step": 66 },
    { "epoch": 0.15, "learning_rate": 0.00019977843661890283, "loss": 1.1937, "step": 67 },
    { "epoch": 0.16, "learning_rate": 0.0001997617272301248, "loss": 1.1761, "step": 68 },
    { "epoch": 0.16, "learning_rate": 0.0001997444112103059, "loss": 1.1969, "step": 69 },
    { "epoch": 0.16, "learning_rate": 0.00019972648866474146, "loss": 1.1487, "step": 70 },
    { "epoch": 0.16, "learning_rate": 0.00019970795970241482, "loss": 1.1874, "step": 71 },
    { "epoch": 0.16, "learning_rate": 0.00019968882443599696, "loss": 1.1191, "step": 72 },
    { "epoch": 0.17, "learning_rate": 0.00019966908298184552, "loss": 1.0698, "step": 73 },
    { "epoch": 0.17, "learning_rate": 0.00019964873546000436, "loss": 1.2601, "step": 74 },
    { "epoch": 0.17, "learning_rate": 0.00019962778199420265, "loss": 1.0591, "step": 75 },
    { "epoch": 0.17, "learning_rate": 0.00019960622271185426, "loss": 1.2135, "step": 76 },
    { "epoch": 0.18, "learning_rate": 0.00019958405774405676, "loss": 1.0993, "step": 77 },
    { "epoch": 0.18, "learning_rate": 0.00019956128722559098, "loss": 1.2044, "step": 78 },
    { "epoch": 0.18, "learning_rate": 0.00019953791129491983, "loss": 1.2691, "step": 79 },
    { "epoch": 0.18, "learning_rate": 0.00019951393009418763, "loss": 1.2178, "step": 80 },
    { "epoch": 0.18, "learning_rate": 0.0001994893437692193, "loss": 1.1314, "step": 81 },
    { "epoch": 0.19, "learning_rate": 0.0001994641524695193, "loss": 1.1364, "step": 82 },
    { "epoch": 0.19, "learning_rate": 0.00019943835634827085, "loss": 1.2325, "step": 83 },
    { "epoch": 0.19, "learning_rate": 0.00019941195556233505, "loss": 1.1242, "step": 84 },
    { "epoch": 0.19, "learning_rate": 0.00019938495027224974, "loss": 1.2255, "step": 85 },
    { "epoch": 0.2, "learning_rate": 0.0001993573406422287, "loss": 1.253, "step": 86 },
    { "epoch": 0.2, "learning_rate": 0.00019932912684016051, "loss": 1.2712, "step": 87 },
    { "epoch": 0.2, "learning_rate": 0.00019930030903760764, "loss": 1.3722, "step": 88 },
    { "epoch": 0.2, "learning_rate": 0.0001992708874098054, "loss": 1.2131, "step": 89 },
    { "epoch": 0.21, "learning_rate": 0.00019924086213566076, "loss": 1.2516, "step": 90 },
    { "epoch": 0.21, "learning_rate": 0.00019921023339775147, "loss": 1.1578, "step": 91 },
    { "epoch": 0.21, "learning_rate": 0.0001991790013823246, "loss": 1.2367, "step": 92 },
    { "epoch": 0.21, "learning_rate": 0.0001991471662792959, "loss": 1.2715, "step": 93 },
    { "epoch": 0.21, "learning_rate": 0.00019911472828224817, "loss": 1.0971, "step": 94 },
    { "epoch": 0.22, "learning_rate": 0.00019908168758843035, "loss": 1.1773, "step": 95 },
    { "epoch": 0.22, "learning_rate": 0.00019904804439875633, "loss": 1.1332, "step": 96 },
    { "epoch": 0.22, "learning_rate": 0.00019901379891780355, "loss": 1.0965, "step": 97 },
    { "epoch": 0.22, "learning_rate": 0.00019897895135381188, "loss": 1.2134, "step": 98 },
    { "epoch": 0.23, "learning_rate": 0.00019894350191868246, "loss": 1.0705, "step": 99 },
    { "epoch": 0.23, "learning_rate": 0.00019890745082797606, "loss": 1.2053, "step": 100 },
    { "epoch": 0.23, "learning_rate": 0.00019887079830091224, "loss": 1.1043, "step": 101 },
    { "epoch": 0.23, "learning_rate": 0.00019883354456036757, "loss": 1.216, "step": 102 },
    { "epoch": 0.24, "learning_rate": 0.00019879568983287467, "loss": 1.2416, "step": 103 },
    { "epoch": 0.24, "learning_rate": 0.00019875723434862043, "loss": 1.1489, "step": 104 },
    { "epoch": 0.24, "learning_rate": 0.00019871817834144504, "loss": 1.2676, "step": 105 },
    { "epoch": 0.24, "learning_rate": 0.00019867852204884015, "loss": 1.298, "step": 106 },
    { "epoch": 0.24, "learning_rate": 0.00019863826571194776, "loss": 1.15, "step": 107 },
    { "epoch": 0.25, "learning_rate": 0.00019859740957555856, "loss": 1.2133, "step": 108 },
    { "epoch": 0.25, "learning_rate": 0.00019855595388811053, "loss": 1.4566, "step": 109 },
    { "epoch": 0.25, "learning_rate": 0.0001985138989016874, "loss": 1.3723, "step": 110 },
    { "epoch": 0.25, "learning_rate": 0.00019847124487201704, "loss": 1.0748, "step": 111 },
    { "epoch": 0.26, "learning_rate": 0.00019842799205847014, "loss": 1.1737, "step": 112 },
    { "epoch": 0.26, "learning_rate": 0.00019838414072405826, "loss": 1.3032, "step": 113 },
    { "epoch": 0.26, "learning_rate": 0.00019833969113543267, "loss": 1.1347, "step": 114 },
    { "epoch": 0.26, "learning_rate": 0.00019829464356288233, "loss": 1.306, "step": 115 },
    { "epoch": 0.26, "learning_rate": 0.00019824899828033252, "loss": 1.0725, "step": 116 },
    { "epoch": 0.27, "learning_rate": 0.00019820275556534304, "loss": 1.3514, "step": 117 },
    { "epoch": 0.27, "learning_rate": 0.00019815591569910654, "loss": 1.0601, "step": 118 },
    { "epoch": 0.27, "learning_rate": 0.00019810847896644686, "loss": 1.1273, "step": 119 },
    { "epoch": 0.27, "learning_rate": 0.00019806044565581727, "loss": 1.0795, "step": 120 },
    { "epoch": 0.28, "learning_rate": 0.00019801181605929864, "loss": 1.3234, "step": 121 },
    { "epoch": 0.28, "learning_rate": 0.00019796259047259783, "loss": 1.1698, "step": 122 },
    { "epoch": 0.28, "learning_rate": 0.00019791276919504572, "loss": 1.1879, "step": 123 },
    { "epoch": 0.28, "learning_rate": 0.00019786235252959553, "loss": 1.1941, "step": 124 },
    { "epoch": 0.29, "learning_rate": 0.00019781134078282087, "loss": 1.2291, "step": 125 },
    { "epoch": 0.29, "learning_rate": 0.00019775973426491394, "loss": 1.1777, "step": 126 },
    { "epoch": 0.29, "learning_rate": 0.0001977075332896836, "loss": 1.2625, "step": 127 },
    { "epoch": 0.29, "learning_rate": 0.00019765473817455356, "loss": 1.133, "step": 128 },
    { "epoch": 0.29, "learning_rate": 0.00019760134924056024, "loss": 1.2002, "step": 129 },
    { "epoch": 0.3, "learning_rate": 0.0001975473668123511, "loss": 1.2641, "step": 130 },
    { "epoch": 0.3, "learning_rate": 0.00019749279121818235, "loss": 1.205, "step": 131 },
    { "epoch": 0.3, "learning_rate": 0.0001974376227899174, "loss": 1.1622, "step": 132 },
    { "epoch": 0.3, "learning_rate": 0.0001973818618630242, "loss": 1.0307, "step": 133 },
    { "epoch": 0.31, "learning_rate": 0.00019732550877657386, "loss": 1.3657, "step": 134 },
    { "epoch": 0.31, "learning_rate": 0.00019726856387323815, "loss": 1.0734, "step": 135 },
    { "epoch": 0.31, "learning_rate": 0.00019721102749928763, "loss": 1.2233, "step": 136 },
    { "epoch": 0.31, "learning_rate": 0.00019715290000458946, "loss": 1.101, "step": 137 },
    { "epoch": 0.32, "learning_rate": 0.0001970941817426052, "loss": 1.2623, "step": 138 },
    { "epoch": 0.32, "learning_rate": 0.0001970348730703889, "loss": 1.1674, "step": 139 },
    { "epoch": 0.32, "learning_rate": 0.0001969749743485846, "loss": 1.0925, "step": 140 },
    { "epoch": 0.32, "learning_rate": 0.00019691448594142446, "loss": 1.2044, "step": 141 },
    { "epoch": 0.32, "learning_rate": 0.00019685340821672633, "loss": 1.1814, "step": 142 },
    { "epoch": 0.33, "learning_rate": 0.00019679174154589162, "loss": 1.1966, "step": 143 },
    { "epoch": 0.33, "learning_rate": 0.00019672948630390294, "loss": 1.1292, "step": 144 },
    { "epoch": 0.33, "learning_rate": 0.00019666664286932198, "loss": 1.1505, "step": 145 },
    { "epoch": 0.33, "learning_rate": 0.00019660321162428697, "loss": 1.0732, "step": 146 },
    { "epoch": 0.34, "learning_rate": 0.00019653919295451063, "loss": 0.9373, "step": 147 },
    { "epoch": 0.34, "learning_rate": 0.00019647458724927766, "loss": 1.1033, "step": 148 },
    { "epoch": 0.34, "learning_rate": 0.0001964093949014423, "loss": 1.1025, "step": 149 },
    { "epoch": 0.34, "learning_rate": 0.0001963436163074263, "loss": 1.2571, "step": 150 },
    { "epoch": 0.34, "learning_rate": 0.0001962772518672159, "loss": 1.2958, "step": 151 },
    { "epoch": 0.35, "learning_rate": 0.00019621030198436006, "loss": 1.1826, "step": 152 },
    { "epoch": 0.35, "learning_rate": 0.00019614276706596755, "loss": 1.0773, "step": 153 },
    { "epoch": 0.35, "learning_rate": 0.00019607464752270464, "loss": 1.0419, "step": 154 },
    { "epoch": 0.35, "learning_rate": 0.0001960059437687926, "loss": 1.1271, "step": 155 },
    { "epoch": 0.36, "learning_rate": 0.00019593665622200522, "loss": 1.338, "step": 156 },
    { "epoch": 0.36, "learning_rate": 0.00019586678530366606, "loss": 1.1358, "step": 157 },
    { "epoch": 0.36, "learning_rate": 0.00019579633143864626, "loss": 1.2576, "step": 158 },
    { "epoch": 0.36, "learning_rate": 0.0001957252950553616, "loss": 1.3208, "step": 159 },
    { "epoch": 0.37, "learning_rate": 0.00019565367658577003, "loss": 1.2977, "step": 160 },
    { "epoch": 0.37, "learning_rate": 0.00019558147646536912, "loss": 1.1039, "step": 161 },
    { "epoch": 0.37, "learning_rate": 0.00019550869513319333, "loss": 1.1451, "step": 162 },
    { "epoch": 0.37, "learning_rate": 0.00019543533303181132, "loss": 1.2214, "step": 163 },
    { "epoch": 0.37, "learning_rate": 0.00019536139060732328, "loss": 1.172, "step": 164 },
    { "epoch": 0.38, "learning_rate": 0.00019528686830935828, "loss": 1.2827, "step": 165 },
    { "epoch": 0.38, "learning_rate": 0.00019521176659107142, "loss": 1.1607, "step": 166 },
    { "epoch": 0.38, "learning_rate": 0.00019513608590914123, "loss": 1.1205, "step": 167 },
    { "epoch": 0.38, "learning_rate": 0.0001950598267237667, "loss": 1.1986, "step": 168 },
    { "epoch": 0.39, "learning_rate": 0.00019498298949866468, "loss": 1.26, "step": 169 },
    { "epoch": 0.39, "learning_rate": 0.00019490557470106686, "loss": 1.198, "step": 170 },
    { "epoch": 0.39, "learning_rate": 0.00019482758280171716, "loss": 1.3148, "step": 171 },
    { "epoch": 0.39, "learning_rate": 0.00019474901427486856, "loss": 1.0349, "step": 172 },
    { "epoch": 0.39, "learning_rate": 0.0001946698695982806, "loss": 1.277, "step": 173 },
    { "epoch": 0.4, "learning_rate": 0.00019459014925321614, "loss": 1.0323, "step": 174 },
    { "epoch": 0.4, "learning_rate": 0.00019450985372443857, "loss": 1.2584, "step": 175 },
    { "epoch": 0.4, "learning_rate": 0.00019442898350020892, "loss": 1.3343, "step": 176 },
    { "epoch": 0.4, "learning_rate": 0.00019434753907228279, "loss": 1.1134, "step": 177 },
    { "epoch": 0.41, "learning_rate": 0.0001942655209359074, "loss": 1.0455, "step": 178 },
    { "epoch": 0.41, "learning_rate": 0.00019418292958981855, "loss": 1.2267, "step": 179 },
    { "epoch": 0.41, "learning_rate": 0.00019409976553623766, "loss": 1.0701, "step": 180 },
    { "epoch": 0.41, "learning_rate": 0.00019401602928086864, "loss": 1.1027, "step": 181 },
    { "epoch": 0.42, "learning_rate": 0.0001939317213328949, "loss": 1.1847, "step": 182 },
    { "epoch": 0.42, "learning_rate": 0.00019384684220497605, "loss": 1.177, "step": 183 },
    { "epoch": 0.42, "learning_rate": 0.0001937613924132451, "loss": 1.1607, "step": 184 },
    { "epoch": 0.42, "learning_rate": 0.0001936753724773051, "loss": 1.3098, "step": 185 },
    { "epoch": 0.42, "learning_rate": 0.000193588782920226, "loss": 1.0301, "step": 186 },
    { "epoch": 0.43, "learning_rate": 0.0001935016242685415, "loss": 1.2378, "step": 187 },
    { "epoch": 0.43, "learning_rate": 0.00019341389705224585, "loss": 1.1978, "step": 188 },
    { "epoch": 0.43, "learning_rate": 0.0001933256018047907, "loss": 1.0652, "step": 189 },
    { "epoch": 0.43, "learning_rate": 0.00019323673906308168, "loss": 1.1856, "step": 190 },
    { "epoch": 0.44, "learning_rate": 0.00019314730936747533, "loss": 1.1039, "step": 191 },
    { "epoch": 0.44, "learning_rate": 0.00019305731326177566, "loss": 1.2479, "step": 192 },
    { "epoch": 0.44, "learning_rate": 0.0001929667512932309, "loss": 1.0944, "step": 193 },
    { "epoch": 0.44, "learning_rate": 0.00019287562401253022, "loss": 1.1903, "step": 194 },
    { "epoch": 0.45, "learning_rate": 0.00019278393197380038, "loss": 1.1799, "step": 195 },
    { "epoch": 0.45, "learning_rate": 0.0001926916757346022, "loss": 1.0663, "step": 196 },
    { "epoch": 0.45, "learning_rate": 0.00019259885585592735, "loss": 1.0972, "step": 197 },
    { "epoch": 0.45, "learning_rate": 0.00019250547290219493, "loss": 0.9872, "step": 198 },
    { "epoch": 0.45, "learning_rate": 0.00019241152744124787, "loss": 1.2429, "step": 199 },
    { "epoch": 0.46, "learning_rate": 0.00019231702004434976, "loss": 1.1506, "step": 200 },
    { "epoch": 0.46, "learning_rate": 0.00019222195128618106, "loss": 1.2485, "step": 201 },
    { "epoch": 0.46, "learning_rate": 0.00019212632174483578, "loss": 1.2173, "step": 202 },
    { "epoch": 0.46, "learning_rate": 0.000192030132001818, "loss": 1.1646, "step": 203 },
    { "epoch": 0.47, "learning_rate": 0.00019193338264203823, "loss": 1.2385, "step": 204 },
    { "epoch": 0.47, "learning_rate": 0.00019183607425380994, "loss": 1.0444, "step": 205 },
    { "epoch": 0.47, "learning_rate": 0.00019173820742884588, "loss": 1.1717, "step": 206 },
    { "epoch": 0.47, "learning_rate": 0.00019163978276225455, "loss": 1.1795, "step": 207 },
    { "epoch": 0.47, "learning_rate": 0.00019154080085253666, "loss": 1.1373, "step": 208 },
    { "epoch": 0.48, "learning_rate": 0.00019144126230158127, "loss": 0.9912, "step": 209 },
    { "epoch": 0.48, "learning_rate": 0.00019134116771466234, "loss": 1.1482, "step": 210 },
    { "epoch": 0.48, "learning_rate": 0.000191240517700435, "loss": 1.1779, "step": 211 },
    { "epoch": 0.48, "learning_rate": 0.00019113931287093177, "loss": 1.1486, "step": 212 },
    { "epoch": 0.49, "learning_rate": 0.0001910375538415589, "loss": 1.2069, "step": 213 },
    { "epoch": 0.49, "learning_rate": 0.00019093524123109262, "loss": 1.2227, "step": 214 },
    { "epoch": 0.49, "learning_rate": 0.0001908323756616754, "loss": 1.1186, "step": 215 },
    { "epoch": 0.49, "learning_rate": 0.0001907289577588121, "loss": 0.9993, "step": 216 },
    { "epoch": 0.5, "learning_rate": 0.00019062498815136626, "loss": 1.037, "step": 217 },
    { "epoch": 0.5, "learning_rate": 0.00019052046747155616, "loss": 1.1127, "step": 218 },
    { "epoch": 0.5, "learning_rate": 0.0001904153963549511, "loss": 1.0655, "step": 219 },
    { "epoch": 0.5, "learning_rate": 0.00019030977544046742, "loss": 1.0131, "step": 220 },
    { "epoch": 0.5, "learning_rate": 0.0001902036053703647, "loss": 1.1767, "step": 221 },
    { "epoch": 0.51, "learning_rate": 0.0001900968867902419, "loss": 1.1909, "step": 222 },
    { "epoch": 0.51, "learning_rate": 0.00018998962034903326, "loss": 1.1264, "step": 223 },
    { "epoch": 0.51, "learning_rate": 0.00018988180669900442, "loss": 1.0607, "step": 224 },
    { "epoch": 0.51, "learning_rate": 0.00018977344649574862, "loss": 0.9862, "step": 225 },
    { "epoch": 0.52, "learning_rate": 0.00018966454039818245, "loss": 1.2387, "step": 226 },
    { "epoch": 0.52, "learning_rate": 0.00018955508906854206, "loss": 1.1928, "step": 227 },
    { "epoch": 0.52, "learning_rate": 0.00018944509317237905, "loss": 1.1949, "step": 228 },
    { "epoch": 0.52, "learning_rate": 0.00018933455337855632, "loss": 1.2275, "step": 229 },
    { "epoch": 0.53, "learning_rate": 0.0001892234703592442, "loss": 1.1021, "step": 230 },
    { "epoch": 0.53, "learning_rate": 0.00018911184478991627, "loss": 1.4385, "step": 231 },
    { "epoch": 0.53, "learning_rate": 0.00018899967734934515, "loss": 1.1383, "step": 232 },
    { "epoch": 0.53, "learning_rate": 0.0001888869687195986, "loss": 1.0239, "step": 233 },
    { "epoch": 0.53, "learning_rate": 0.00018877371958603514, "loss": 1.2352, "step": 234 },
    { "epoch": 0.54, "learning_rate": 0.00018865993063730004, "loss": 1.1515, "step": 235 },
    { "epoch": 0.54, "learning_rate": 0.000188545602565321, "loss": 1.0593, "step": 236 },
    { "epoch": 0.54, "learning_rate": 0.00018843073606530415, "loss": 1.096, "step": 237 },
    { "epoch": 0.54, "learning_rate": 0.0001883153318357296, "loss": 1.2894, "step": 238 },
    { "epoch": 0.55, "learning_rate": 0.0001881993905783473, "loss": 1.0143, "step": 239 },
    { "epoch": 0.55, "learning_rate": 0.00018808291299817278, "loss": 1.2318, "step": 240 },
    { "epoch": 0.55, "learning_rate": 0.00018796589980348282, "loss": 1.0275, "step": 241 },
    { "epoch": 0.55, "learning_rate": 0.00018784835170581116, "loss": 1.1465, "step": 242 },
    { "epoch": 0.55, "learning_rate": 0.0001877302694199442, "loss": 1.147, "step": 243 },
    { "epoch": 0.56, "learning_rate": 0.00018761165366391663, "loss": 1.0989, "step": 244 },
    { "epoch": 0.56, "learning_rate": 0.00018749250515900706, "loss": 1.0905, "step": 245 },
    { "epoch": 0.56, "learning_rate": 0.00018737282462973363, "loss": 0.9672, "step": 246 },
    { "epoch": 0.56, "learning_rate": 0.00018725261280384958, "loss": 1.1728, "step": 247 },
    { "epoch": 0.57, "learning_rate": 0.00018713187041233896, "loss": 1.1453, "step": 248 },
    { "epoch": 0.57, "learning_rate": 0.00018701059818941195, "loss": 1.1036, "step": 249 },
    { "epoch": 0.57, "learning_rate": 0.00018688879687250067, "loss": 1.0362, "step": 250 },
    { "epoch": 0.57, "learning_rate": 0.00018676646720225444, "loss": 1.1521, "step": 251 },
    { "epoch": 0.58, "learning_rate": 0.00018664360992253542, "loss": 0.9757, "step": 252 },
    { "epoch": 0.58, "learning_rate": 0.00018652022578041414, "loss": 1.1481, "step": 253 },
    { "epoch": 0.58, "learning_rate": 0.00018639631552616468, "loss": 1.0764, "step": 254 },
    { "epoch": 0.58, "learning_rate": 0.00018627187991326056, "loss": 1.153, "step": 255 },
    { "epoch": 0.58, "learning_rate": 0.00018614691969836967, "loss": 1.1499, "step": 256 },
    { "epoch": 0.59, "learning_rate": 0.0001860214356413501, "loss": 1.0191, "step": 257 },
    { "epoch": 0.59, "learning_rate": 0.0001858954285052451, "loss": 1.1723, "step": 258 },
    { "epoch": 0.59, "learning_rate": 0.00018576889905627883, "loss": 1.0602, "step": 259 },
    { "epoch": 0.59, "learning_rate": 0.00018564184806385146, "loss": 1.1461, "step": 260 },
    { "epoch": 0.6, "learning_rate": 0.00018551427630053463, "loss": 1.1267, "step": 261 },
    { "epoch": 0.6, "learning_rate": 0.00018538618454206655, "loss": 1.0706, "step": 262 },
    { "epoch": 0.6, "learning_rate": 0.00018525757356734754, "loss": 1.1054, "step": 263 },
    { "epoch": 0.6, "learning_rate": 0.00018512844415843514, "loss": 1.2113, "step": 264 },
    { "epoch": 0.61, "learning_rate": 0.00018499879710053932, "loss": 1.0667, "step": 265 },
    { "epoch": 0.61, "learning_rate": 0.00018486863318201784, "loss": 1.1547, "step": 266 },
    { "epoch": 0.61, "learning_rate": 0.00018473795319437136, "loss": 1.1461, "step": 267 },
    { "epoch": 0.61, "learning_rate": 0.00018460675793223867, "loss": 1.1663, "step": 268 },
    { "epoch": 0.61, "learning_rate": 0.00018447504819339182, "loss": 1.0636, "step": 269 },
    { "epoch": 0.62, "learning_rate": 0.00018434282477873133, "loss": 1.2808, "step": 270 },
    { "epoch": 0.62, "learning_rate": 0.00018421008849228118, "loss": 1.1668, "step": 271 },
    { "epoch": 0.62, "learning_rate": 0.00018407684014118414, "loss": 1.0716, "step": 272 },
    { "epoch": 0.62, "learning_rate": 0.00018394308053569665, "loss": 1.1568, "step": 273 },
    { "epoch": 0.63, "learning_rate": 0.00018380881048918405, "loss": 1.1537, "step": 274 },
    { "epoch": 0.63, "learning_rate": 0.00018367403081811557, "loss": 1.1852, "step": 275 },
    { "epoch": 0.63, "learning_rate": 0.00018353874234205931, "loss": 1.164, "step": 276 },
    { "epoch": 0.63, "learning_rate": 0.00018340294588367734, "loss": 1.2054, "step": 277 },
    { "epoch": 0.63, "learning_rate": 0.00018326664226872065, "loss": 1.1812, "step": 278 },
    { "epoch": 0.64, "learning_rate": 0.00018312983232602418, "loss": 1.1312, "step": 279 },
    { "epoch": 0.64, "learning_rate": 0.00018299251688750175, "loss": 1.1017, "step": 280 },
    { "epoch": 0.64, "learning_rate": 0.000182854696788141, "loss": 1.1522, "step": 281 },
    { "epoch": 0.64, "learning_rate": 0.00018271637286599823, "loss": 1.0512, "step": 282 },
    { "epoch": 0.65, "learning_rate": 0.0001825775459621935, "loss": 1.1148, "step": 283 },
    { "epoch": 0.65, "learning_rate": 0.00018243821692090533, "loss": 1.0796, "step": 284 },
    { "epoch": 0.65, "learning_rate": 0.00018229838658936564, "loss": 1.2327, "step": 285 },
    { "epoch": 0.65, "learning_rate": 0.00018215805581785465, "loss": 1.1625, "step": 286 },
    { "epoch": 0.66, "learning_rate": 0.0001820172254596956, "loss": 1.09, "step": 287 },
    { "epoch": 0.66, "learning_rate": 0.00018187589637124962, "loss": 1.2245, "step": 288 },
    { "epoch": 0.66, "learning_rate": 0.00018173406941191056, "loss": 1.1396, "step": 289 },
    { "epoch": 0.66, "learning_rate": 0.00018159174544409964, "loss": 1.1925, "step": 290 },
    { "epoch": 0.66, "learning_rate": 0.00018144892533326042, "loss": 1.194, "step": 291 },
    { "epoch": 0.67, "learning_rate": 0.00018130560994785325, "loss": 0.9423, "step": 292 },
    { "epoch": 0.67, "learning_rate": 0.00018116180015935029, "loss": 0.9744, "step": 293 },
    { "epoch": 0.67, "learning_rate": 0.00018101749684222996, "loss": 1.059, "step": 294 },
    { "epoch": 0.67, "learning_rate": 0.00018087270087397181, "loss": 1.0716, "step": 295 },
    { "epoch": 0.68, "learning_rate": 0.000180727413135051, "loss": 1.1569, "step": 296 },
    { "epoch": 0.68, "learning_rate": 0.00018058163450893319, "loss": 0.9845, "step": 297 },
    { "epoch": 0.68, "learning_rate": 0.00018043536588206887, "loss": 1.1119, "step": 298 },
    { "epoch": 0.68, "learning_rate": 0.00018028860814388827, "loss": 1.2018, "step": 299 },
    { "epoch": 0.68, "learning_rate": 0.00018014136218679567, "loss": 1.1423, "step": 300 },
    { "epoch": 0.69, "learning_rate": 0.00017999362890616424, "loss": 1.0031, "step": 301 },
    { "epoch": 0.69, "learning_rate": 0.0001798454092003304, "loss": 0.9347, "step": 302 },
    { "epoch": 0.69, "learning_rate": 0.0001796967039705884, "loss": 1.4172, "step": 303 },
    { "epoch": 0.69, "learning_rate": 0.00017954751412118495, "loss": 1.0023, "step": 304 },
    { "epoch": 0.7, "learning_rate": 0.00017939784055931354, "loss": 1.1181, "step": 305 },
    { "epoch": 0.7, "learning_rate": 0.00017924768419510904, "loss": 0.972, "step": 306 },
    { "epoch": 0.7, "learning_rate": 0.00017909704594164222, "loss": 1.0207, "step": 307 },
    { "epoch": 0.7, "learning_rate": 0.00017894592671491405, "loss": 0.9375, "step": 308 },
    { "epoch": 0.71, "learning_rate": 0.0001787943274338502, "loss": 1.1102, "step": 309 },
    { "epoch": 0.71, "learning_rate": 0.00017864224902029547, "loss": 1.2381, "step": 310 },
    { "epoch": 0.71, "learning_rate": 0.0001784896923990082, "loss": 1.0415, "step": 311 },
    { "epoch": 0.71, "learning_rate": 0.00017833665849765457, "loss": 1.0233, "step": 312 },
    { "epoch": 0.71, "learning_rate": 0.000178183148246803, "loss": 1.0763, "step": 313 },
    { "epoch": 0.72, "learning_rate": 0.00017802916257991853, "loss": 1.2626, "step": 314 },
    { "epoch": 0.72, "learning_rate": 0.0001778747024333571, "loss": 0.9793, "step": 315 },
    { "epoch": 0.72, "learning_rate": 0.00017771976874635985, "loss": 1.0299, "step": 316 },
    { "epoch": 0.72, "learning_rate": 0.00017756436246104743, "loss": 1.1348, "step": 317 },
    { "epoch": 0.73, "learning_rate": 0.0001774084845224143, "loss": 1.0651, "step": 318 },
    { "epoch": 0.73, "learning_rate": 0.00017725213587832292, "loss": 1.0831, "step": 319 },
    { "epoch": 0.73, "learning_rate": 0.00017709531747949796, "loss": 1.0118, "step": 320 },
    { "epoch": 0.73, "learning_rate": 0.0001769380302795207, "loss": 1.0746, "step": 321 },
    { "epoch": 0.74, "learning_rate": 0.00017678027523482297, "loss": 1.2129, "step": 322 },
    { "epoch": 0.74, "learning_rate": 0.00017662205330468158, "loss": 1.0312, "step": 323 },
    { "epoch": 0.74, "learning_rate": 0.0001764633654512123, "loss": 1.2271, "step": 324 },
    { "epoch": 0.74, "learning_rate": 0.0001763042126393642, "loss": 1.1998, "step": 325 },
    { "epoch": 0.74, "learning_rate": 0.00017614459583691346, "loss": 1.1123, "step": 326 },
    { "epoch": 0.75, "learning_rate": 0.0001759845160144579, "loss": 1.0907, "step": 327 },
    { "epoch": 0.75, "learning_rate": 0.00017582397414541077, "loss": 1.1867, "step": 328 },
    { "epoch": 0.75, "learning_rate": 0.00017566297120599495, "loss": 1.0127, "step": 329 },
    { "epoch": 0.75, "learning_rate": 0.00017550150817523702, "loss": 1.0484, "step": 330 },
    { "epoch": 0.76, "learning_rate": 0.00017533958603496125, "loss": 0.9896, "step": 331 },
    { "epoch": 0.76, "learning_rate": 0.00017517720576978367, "loss": 1.0358, "step": 332 },
    { "epoch": 0.76, "learning_rate": 0.0001750143683671061, "loss": 1.1482, "step": 333 },
    { "epoch": 0.76, "learning_rate": 0.00017485107481711012, "loss": 1.1702, "step": 334 },
    { "epoch": 0.76, "learning_rate": 0.00017468732611275098, "loss": 1.0627, "step": 335 },
    { "epoch": 0.77, "learning_rate": 0.00017452312324975174, "loss": 1.1448, "step": 336 },
    { "epoch": 0.77, "learning_rate": 0.000174358467226597, "loss": 1.0747, "step": 337 },
    { "epoch": 0.77, "learning_rate": 0.00017419335904452705, "loss": 1.0825, "step": 338 },
    { "epoch": 0.77, "learning_rate": 0.00017402779970753155, "loss": 1.1648, "step": 339 },
    { "epoch": 0.78, "learning_rate": 0.00017386179022234365, "loss": 1.0696, "step": 340 },
    { "epoch": 0.78, "learning_rate": 0.00017369533159843369, "loss": 1.0231, "step": 341 },
    { "epoch": 0.78, "learning_rate": 0.00017352842484800313, "loss": 1.1968, "step": 342 },
    { "epoch": 0.78, "learning_rate": 0.00017336107098597846, "loss": 0.805, "step": 343 },
    { "epoch": 0.79, "learning_rate": 0.00017319327103000492, "loss": 1.0623, "step": 344 },
    { "epoch": 0.79, "learning_rate": 0.00017302502600044032, "loss": 1.1143, "step": 345 },
    { "epoch": 0.79, "learning_rate": 0.00017285633692034897, "loss": 1.0105, "step": 346 },
    { "epoch": 0.79, "learning_rate": 0.00017268720481549527, "loss": 1.0811, "step": 347 },
    { "epoch": 0.79, "learning_rate": 0.00017251763071433765, "loss": 1.0987, "step": 348 },
    { "epoch": 0.8, "learning_rate": 0.00017234761564802214, "loss": 1.1191, "step": 349 },
    { "epoch": 0.8, "learning_rate": 0.00017217716065037628, "loss": 1.3072, "step": 350 },
    { "epoch": 0.8, "learning_rate": 0.00017200626675790268, "loss": 0.9639, "step": 351 },
    { "epoch": 0.8, "learning_rate": 0.00017183493500977278, "loss": 1.0101, "step": 352 },
    { "epoch": 0.81, "learning_rate": 0.00017166316644782055, "loss": 1.2726, "step": 353 },
    { "epoch": 0.81, "learning_rate": 0.00017149096211653618, "loss": 1.1479, "step": 354 },
    { "epoch": 0.81, "learning_rate": 0.00017131832306305965, "loss": 0.9289, "step": 355 },
    { "epoch": 0.81, "learning_rate": 0.00017114525033717435, "loss": 1.0264, "step": 356 },
    { "epoch": 0.82, "learning_rate": 0.0001709717449913008, "loss": 0.9703, "step": 357 },
    { "epoch": 0.82, "learning_rate": 0.00017079780808049023, "loss": 1.0139, "step": 358 },
    { "epoch": 0.82, "learning_rate": 0.000170623440662418, "loss": 1.2518, "step": 359 },
    { "epoch": 0.82, "learning_rate": 0.0001704486437973775, "loss": 1.2049, "step": 360 },
    { "epoch": 0.82, "learning_rate": 0.00017027341854827327, "loss": 1.1969, "step": 361 },
    { "epoch": 0.83, "learning_rate": 0.00017009776598061495, "loss": 1.0092, "step": 362 },
    { "epoch": 0.83, "learning_rate": 0.00016992168716251053, "loss": 0.9765, "step": 363 },
    { "epoch": 0.83, "learning_rate": 0.00016974518316465996, "loss": 0.9931, "step": 364 },
    { "epoch": 0.83, "learning_rate": 0.00016956825506034867, "loss": 1.0219, "step": 365 },
    { "epoch": 0.84, "learning_rate": 0.00016939090392544085, "loss": 1.0712, "step": 366 },
    { "epoch": 0.84, "learning_rate": 0.00016921313083837327, "loss": 1.0711, "step": 367 },
    { "epoch": 0.84, "learning_rate": 0.0001690349368801483, "loss": 1.2672, "step": 368 },
    { "epoch": 0.84, "learning_rate": 0.0001688563231343277, "loss": 1.2043, "step": 369 },
    { "epoch": 0.84, "learning_rate": 0.0001686772906870258, "loss": 1.0933, "step": 370 },
    { "epoch": 0.85, "learning_rate": 0.00016849784062690298, "loss": 1.3335, "step": 371 },
    { "epoch": 0.85, "learning_rate": 0.00016831797404515898, "loss": 1.0658, "step": 372 },
    { "epoch": 0.85, "learning_rate": 0.00016813769203552647, "loss": 0.9472, "step": 373 },
    { "epoch": 0.85, "learning_rate": 0.0001679569956942641, "loss": 1.0334, "step": 374 },
    { "epoch": 0.86, "learning_rate": 0.00016777588612015013, "loss": 1.0628, "step": 375 },
    { "epoch": 0.86, "learning_rate": 0.00016759436441447545, "loss": 1.1018, "step": 376 },
    { "epoch": 0.86, "learning_rate": 0.00016741243168103716, "loss": 1.043, "step": 377 },
    { "epoch": 0.86, "learning_rate": 0.0001672300890261317, "loss": 1.1859, "step": 378 },
    { "epoch": 0.87, "learning_rate": 0.00016704733755854815, "loss": 1.1158, "step": 379 },
    { "epoch": 0.87, "learning_rate": 0.00016686417838956147, "loss": 1.1174, "step": 380 },
    { "epoch": 0.87, "learning_rate": 0.00016668061263292589, "loss": 1.1842, "step": 381 },
    { "epoch": 0.87, "learning_rate": 0.00016649664140486786, "loss": 1.1987, "step": 382 },
    { "epoch": 0.87, "learning_rate": 0.00016631226582407952, "loss": 1.0134, "step": 383 },
    { "epoch": 0.88, "learning_rate": 0.0001661274870117118, "loss": 1.1397, "step": 384 },
    { "epoch": 0.88, "learning_rate": 0.0001659423060913675, "loss": 1.129, "step": 385 },
    { "epoch": 0.88, "learning_rate": 0.0001657567241890947, "loss": 1.1101, "step": 386 },
    { "epoch": 0.88, "learning_rate": 0.0001655707424333797, "loss": 1.1611, "step": 387 },
    { "epoch": 0.89, "learning_rate": 0.00016538436195514015, "loss": 1.0567, "step": 388 },
    { "epoch": 0.89, "learning_rate": 0.00016519758388771843, "loss": 1.1743, "step": 389 },
    { "epoch": 0.89, "learning_rate": 0.00016501040936687443, "loss": 1.2523, "step": 390 },
    { "epoch": 0.89, "learning_rate": 0.00016482283953077887, "loss": 1.2478, "step": 391 },
    { "epoch": 0.89, "learning_rate": 0.00016463487552000622, "loss": 1.2612, "step": 392 },
    { "epoch": 0.9, "learning_rate": 0.00016444651847752798, "loss": 1.0391, "step": 393 },
    { "epoch": 0.9, "learning_rate": 0.00016425776954870545, "loss": 1.2208, "step": 394 },
    { "epoch": 0.9, "learning_rate": 0.00016406862988128303, "loss": 1.1316, "step": 395 },
    { "epoch": 0.9, "learning_rate": 0.00016387910062538107, "loss": 1.1503, "step": 396 },
    { "epoch": 0.91, "learning_rate": 0.00016368918293348892, "loss": 1.022, "step": 397 },
    { "epoch": 0.91, "learning_rate": 0.000163498877960458, "loss": 0.9542, "step": 398 },
    { "epoch": 0.91, "learning_rate": 0.00016330818686349457, "loss": 1.096, "step": 399 },
    { "epoch": 0.91, "learning_rate": 0.00016311711080215297, "loss": 0.8671, "step": 400 },
    { "epoch": 0.92, "learning_rate": 0.0001629256509383284, "loss": 1.2355, "step": 401 },
    { "epoch": 0.92, "learning_rate": 0.00016273380843624983, "loss": 1.0471, "step": 402 },
    { "epoch": 0.92, "learning_rate": 0.000162541584462473, "loss": 1.0837, "step": 403 },
    { "epoch": 0.92, "learning_rate": 0.00016234898018587337, "loss": 1.1093, "step": 404 },
    { "epoch": 0.92, "learning_rate": 0.00016215599677763884, "loss": 1.2198, "step": 405 },
    { "epoch": 0.93, "learning_rate": 0.0001619626354112628, "loss": 1.1788, "step": 406 },
    { "epoch": 0.93, "learning_rate": 0.00016176889726253688, "loss": 1.0887, "step": 407 },
    { "epoch": 0.93, "learning_rate": 0.00016157478350954394, "loss": 1.0198, "step": 408 },
    { "epoch": 0.93, "learning_rate": 0.0001613802953326507, "loss": 1.1771, "step": 409 },
    { "epoch": 0.94, "learning_rate": 0.00016118543391450076, "loss": 0.9436, "step": 410 },
    { "epoch": 0.94, "learning_rate": 0.00016099020044000727, "loss": 1.0344, "step": 411 },
    { "epoch": 0.94, "learning_rate": 0.00016079459609634586, "loss": 1.0577, "step": 412 },
    { "epoch": 0.94, "learning_rate": 0.00016059862207294721, "loss": 0.9137, "step": 413 },
    { "epoch": 0.95, "learning_rate": 0.0001604022795614901, "loss": 1.1897, "step": 414 },
    { "epoch": 0.95, "learning_rate": 0.00016020556975589382, "loss": 1.1896, "step": 415 },
    { "epoch": 0.95, "learning_rate": 0.00016000849385231134, "loss": 1.0696, "step": 416 },
    { "epoch": 0.95, "learning_rate": 0.00015981105304912162, "loss": 0.9894, "step": 417 },
    { "epoch": 0.95, "learning_rate": 0.00015961324854692254, "loss": 1.0098, "step": 418 },
    { "epoch": 0.96, "learning_rate": 0.00015941508154852362, "loss": 1.0692, "step": 419 },
    { "epoch": 0.96, "learning_rate": 0.0001592165532589386, "loss": 1.0551, "step": 420 },
    { "epoch": 0.96, "learning_rate": 0.00015901766488537813, "loss": 1.1279, "step": 421 },
    { "epoch": 0.96, "learning_rate": 0.00015881841763724252, "loss": 0.9259, "step": 422 },
    { "epoch": 0.97, "learning_rate": 0.0001586188127261143, "loss": 1.0716, "step": 423 },
    { "epoch": 0.97, "learning_rate": 0.00015841885136575086, "loss": 1.0828, "step": 424 },
    { "epoch": 0.97, "learning_rate": 0.00015821853477207708, "loss": 0.9241, "step": 425 },
    { "epoch": 0.97, "learning_rate": 0.000158017864163178, "loss": 0.9474, "step": 426 },
    { "epoch": 0.97, "learning_rate": 0.0001578168407592913, "loss": 1.1444, "step": 427 },
    { "epoch": 0.98, "learning_rate": 0.00015761546578279995, "loss": 1.0687, "step": 428 },
    { "epoch": 0.98, "learning_rate": 0.00015741374045822476, "loss": 1.2261, "step": 429 },
    { "epoch": 0.98, "learning_rate": 0.00015721166601221698, "loss": 1.0513, "step": 430 },
    { "epoch": 0.98, "learning_rate": 0.0001570092436735507, "loss": 1.0648, "step": 431 },
    { "epoch": 0.99, "learning_rate": 0.00015680647467311557, "loss": 1.0259, "step": 432 },
    { "epoch": 0.99, "learning_rate": 0.0001566033602439092, "loss": 1.0417, "step": 433 },
    { "epoch": 0.99, "learning_rate": 0.00015639990162102965, "loss": 1.0093, "step": 434 },
    { "epoch": 0.99, "learning_rate": 0.00015619610004166798, "loss": 1.0372, "step": 435 },
    { "epoch": 1.0, "learning_rate": 0.00015599195674510064, "loss": 0.9534, "step": 436 },
    { "epoch": 1.0, "learning_rate": 0.00015578747297268213, "loss": 0.9691, "step": 437 },
    { "epoch": 1.0, "learning_rate": 0.00015558264996783712, "loss": 1.0506, "step": 438 },
    { "epoch": 1.0, "learning_rate": 0.0001553774889760533, "loss": 0.8641, "step": 439 },
    { "epoch": 1.0, "learning_rate": 0.00015517199124487346, "loss": 0.5738, "step": 440 },
    { "epoch": 1.01, "learning_rate": 0.000154966158023888, "loss": 0.6726, "step": 441 },
    { "epoch": 1.01, "learning_rate": 0.00015475999056472752, "loss": 0.6682, "step": 442 },
    { "epoch": 1.01, "learning_rate": 0.00015455349012105486, "loss": 0.5419, "step": 443 },
    { "epoch": 1.01, "learning_rate": 0.00015434665794855786, "loss": 0.5841, "step": 444 },
    { "epoch": 1.02, "learning_rate": 0.0001541394953049414, "loss": 0.6176, "step": 445 },
    { "epoch": 1.02, "learning_rate": 0.00015393200344991995, "loss": 0.6603, "step": 446 },
    { "epoch": 1.02, "learning_rate": 0.0001537241836452098, "loss": 0.5174, "step": 447 },
    { "epoch": 1.02, "learning_rate": 0.00015351603715452154, "loss": 0.6903, "step": 448 },
    { "epoch": 1.03, "learning_rate": 0.00015330756524355217, "loss": 0.5471, "step": 449 },
    { "epoch": 1.03, "learning_rate": 0.0001530987691799775, "loss": 0.6601, "step": 450 },
    { "epoch": 1.03, "learning_rate": 0.00015288965023344458, "loss": 0.4233, "step": 451 },
    { "epoch": 1.03, "learning_rate": 0.00015268020967556368, "loss": 0.5764, "step": 452 },
    { "epoch": 1.03, "learning_rate": 0.0001524704487799008, "loss": 0.6435, "step": 453 },
    { "epoch": 1.04, "learning_rate": 0.00015226036882196988, "loss": 0.5094, "step": 454 },
    { "epoch": 1.04, "learning_rate": 0.00015204997107922497, "loss": 0.6163, "step": 455 },
    { "epoch": 1.04, "learning_rate": 0.00015183925683105254, "loss": 0.5219, "step": 456 },
    { "epoch": 1.04, "learning_rate": 0.00015162822735876358, "loss": 0.5448, "step": 457 },
    { "epoch": 1.05, "learning_rate": 0.00015141688394558597, "loss": 0.6222, "step": 458 },
    { "epoch": 1.05, "learning_rate": 0.0001512052278766566, "loss": 0.6122, "step": 459 },
    { "epoch": 1.05, "learning_rate": 0.0001509932604390136, "loss": 0.6443, "step": 460 },
    { "epoch": 1.05, "learning_rate": 0.00015078098292158835, "loss": 0.5781, "step": 461 },
    { "epoch": 1.05, "learning_rate": 0.00015056839661519784, "loss": 0.5538, "step": 462 },
    { "epoch": 1.06, "learning_rate": 0.00015035550281253674, "loss": 0.6293, "step": 463 },
    { "epoch": 1.06, "learning_rate": 0.00015014230280816954, "loss": 0.4054, "step": 464 },
    { "epoch": 1.06, "learning_rate": 0.0001499287978985227, "loss": 0.6508, "step": 465 },
    { "epoch": 1.06, "learning_rate": 0.00014971498938187667, "loss": 0.7488, "step": 466 },
    { "epoch": 1.07, "learning_rate": 0.00014950087855835815, "loss": 0.5754, "step": 467 },
    { "epoch": 1.07, "learning_rate": 0.00014928646672993212, "loss": 0.6735, "step": 468 },
    { "epoch": 1.07, "learning_rate": 0.0001490717552003938, "loss": 0.5694, "step": 469 },
    { "epoch": 1.07, "learning_rate": 0.00014885674527536098, "loss": 0.5249, "step": 470 },
    { "epoch": 1.08, "learning_rate": 0.00014864143826226584, "loss": 0.5441, "step": 471 },
    { "epoch": 1.08, "learning_rate": 0.00014842583547034708, "loss": 0.5825, "step": 472 },
    { "epoch": 1.08, "learning_rate": 0.00014820993821064207, "loss": 0.5348, "step": 473 },
    { "epoch": 1.08, "learning_rate": 0.00014799374779597867, "loss": 0.5719, "step": 474 },
    { "epoch": 1.08, "learning_rate": 0.00014777726554096737, "loss": 0.4694, "step": 475 },
    { "epoch": 1.09, "learning_rate": 0.0001475604927619934, "loss": 0.6275, "step": 476 },
    { "epoch": 1.09, "learning_rate": 0.00014734343077720845, "loss": 0.7572, "step": 477 },
    { "epoch": 1.09, "learning_rate": 0.00014712608090652294, "loss": 0.6331, "step": 478 },
    { "epoch": 1.09, "learning_rate": 0.00014690844447159775, "loss": 0.6404, "step": 479 },
    { "epoch": 1.1, "learning_rate": 0.0001466905227958364, "loss": 0.6063, "step": 480 },
    { "epoch": 1.1, "learning_rate": 0.00014647231720437686, "loss": 0.7687, "step": 481 },
    { "epoch": 1.1, "learning_rate": 0.00014625382902408356, "loss": 0.6441, "step": 482 },
    { "epoch": 1.1, "learning_rate": 0.00014603505958353917, "loss": 0.5014, "step": 483 },
    { "epoch": 1.11, "learning_rate": 0.0001458160102130369, "loss": 0.5251, "step": 484 },
    { "epoch": 1.11, "learning_rate": 0.00014559668224457193, "loss": 0.6059, "step": 485 },
    { "epoch": 1.11, "learning_rate": 0.00014537707701183357, "loss": 0.5934, "step": 486 },
    { "epoch": 1.11, "learning_rate": 0.00014515719585019726, "loss": 0.5467, "step": 487 },
    { "epoch": 1.11, "learning_rate": 0.00014493704009671613, "loss": 0.6691, "step": 488 },
    { "epoch": 1.12, "learning_rate": 0.00014471661109011322, "loss": 0.5212, "step": 489 },
    { "epoch": 1.12, "learning_rate": 0.000144495910170773, "loss": 0.7621, "step": 490 },
    { "epoch": 1.12, "learning_rate": 0.00014427493868073346, "loss": 0.5422, "step": 491 },
    { "epoch": 1.12, "learning_rate": 0.00014405369796367795, "loss": 0.4715, "step": 492 },
    { "epoch": 1.13, "learning_rate": 0.00014383218936492683, "loss": 0.5964, "step": 493 },
    { "epoch": 1.13, "learning_rate": 0.00014361041423142942, "loss": 0.472, "step": 494 },
    { "epoch": 1.13, "learning_rate": 0.00014338837391175582, "loss": 0.5631, "step": 495 },
    { "epoch": 1.13, "learning_rate": 0.00014316606975608865, "loss": 0.5553, "step": 496 },
    { "epoch": 1.13, "learning_rate": 0.00014294350311621482, "loss": 0.5384, "step": 497 },
    { "epoch": 1.14, "learning_rate": 0.00014272067534551743, "loss": 0.6568, "step": 498 },
    { "epoch": 1.14, "learning_rate": 0.00014249758779896746, "loss": 0.6185, "step": 499 },
    { "epoch": 1.14, "learning_rate": 0.00014227424183311545, "loss": 0.5833, "step": 500 },
    { "epoch": 1.14, "learning_rate": 0.00014205063880608345, "loss": 0.6853, "step": 501 },
    { "epoch": 1.15, "learning_rate": 0.0001418267800775565, "loss": 0.5071, "step": 502 },
    { "epoch": 1.15, "learning_rate": 0.00014160266700877471, "loss": 0.5802, "step": 503 },
    { "epoch": 1.15, "learning_rate": 0.00014137830096252462, "loss": 0.4858, "step": 504 },
    { "epoch": 1.15, "learning_rate": 0.0001411536833031311, "loss": 0.6866, "step": 505 },
    { "epoch": 1.16, "learning_rate": 0.00014092881539644908, "loss": 0.5416, "step": 506 },
    { "epoch": 1.16, "learning_rate": 0.00014070369860985518, "loss": 0.7359, "step": 507 },
    { "epoch": 1.16, "learning_rate": 0.00014047833431223938, "loss": 0.6102, "step": 508 },
    { "epoch": 1.16, "learning_rate": 0.00014025272387399674, "loss": 0.4416, "step": 509 },
    { "epoch": 1.16, "learning_rate": 0.0001400268686670191, "loss": 0.4473, "step": 510 },
    { "epoch": 1.17, "learning_rate": 0.0001398007700646866, "loss": 0.4096, "step": 511 },
    { "epoch": 1.17, "learning_rate": 0.00013957442944185946, "loss": 0.6765, "step": 512 },
    { "epoch": 1.17, "learning_rate": 0.00013934784817486956, "loss": 0.4677, "step": 513 },
    { "epoch": 1.17, "learning_rate": 0.00013912102764151215, "loss": 0.5784, "step": 514 },
    { "epoch": 1.18, "learning_rate": 0.0001388939692210374, "loss": 0.6791, "step": 515 },
    { "epoch": 1.18, "learning_rate": 0.0001386666742941419, "loss": 0.5077, "step": 516 },
    { "epoch": 1.18, "learning_rate": 0.00013843914424296053, "loss": 0.6087, "step": 517 },
    { "epoch": 1.18, "learning_rate": 0.0001382113804510579, "loss": 0.5096, "step": 518 },
    { "epoch": 1.18, "learning_rate": 0.0001379833843034199, "loss": 0.5513, "step": 519 },
    { "epoch": 1.19, "learning_rate": 0.0001377551571864453, "loss": 0.6205, "step": 520 },
    { "epoch": 1.19, "learning_rate": 0.00013752670048793744, "loss": 0.5468, "step": 521 },
    { "epoch": 1.19, "learning_rate": 0.0001372980155970956, "loss": 0.6585, "step": 522 },
    { "epoch": 1.19, "learning_rate": 0.00013706910390450677, "loss": 0.6355, "step": 523 },
    { "epoch": 1.2, "learning_rate": 0.00013683996680213695, "loss": 0.7864, "step": 524 },
    { "epoch": 1.2, "learning_rate": 0.00013661060568332292, "loss": 0.651, "step": 525 },
    { "epoch": 1.2, "learning_rate": 0.00013638102194276356, "loss": 0.5124, "step": 526 },
    { "epoch": 1.2, "learning_rate": 0.00013615121697651156, "loss": 0.7163, "step": 527 },
    { "epoch": 1.21, "learning_rate": 0.0001359211921819648, "loss": 0.5803, "step": 528 },
    { "epoch": 1.21, "learning_rate": 0.00013569094895785782, "loss": 0.541, "step": 529 },
    { "epoch": 1.21, "learning_rate": 0.00013546048870425356, "loss": 0.6183, "step": 530 },
    { "epoch": 1.21, "learning_rate": 0.00013522981282253452, "loss": 0.4994, "step": 531 },
    { "epoch": 1.21, "learning_rate": 0.0001349989227153944, "loss": 0.6614, "step": 532 },
    { "epoch": 1.22, "learning_rate": 0.00013476781978682967, "loss": 0.5829, "step": 533 },
    { "epoch": 1.22, "learning_rate": 0.00013453650544213076, "loss": 0.6755, "step": 534 },
    { "epoch": 1.22, "learning_rate": 0.00013430498108787389, "loss": 0.4746, "step": 535 },
    { "epoch": 1.22, "learning_rate": 0.00013407324813191211, "loss": 0.6413, "step": 536 },
    { "epoch": 1.23, "learning_rate": 0.00013384130798336705, "loss": 0.6338, "step": 537 },
    { "epoch": 1.23, "learning_rate": 0.00013360916205262018, "loss": 0.5983, "step": 538 },
    { "epoch": 1.23, "learning_rate": 0.00013337681175130438, "loss": 0.5958, "step": 539 },
    { "epoch": 1.23, "learning_rate": 0.00013314425849229509, "loss": 0.6361, "step": 540 },
    { "epoch": 1.24,
"epoch": 1.24, |
|
"learning_rate": 0.00013291150368970208, |
|
"loss": 0.4946, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00013267854875886057, |
|
"loss": 0.5745, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.0001324453951163227, |
|
"loss": 0.6291, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00013221204417984908, |
|
"loss": 0.6195, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 0.00013197849736839982, |
|
"loss": 0.5461, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00013174475610212624, |
|
"loss": 0.5355, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.0001315108218023621, |
|
"loss": 0.7082, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00013127669589161484, |
|
"loss": 0.5511, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 1.25, |
|
"learning_rate": 0.00013104237979355722, |
|
"loss": 0.5279, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00013080787493301838, |
|
"loss": 0.6608, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001305731827359753, |
|
"loss": 0.7352, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.00013033830462954414, |
|
"loss": 0.5717, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001301032420419715, |
|
"loss": 0.6129, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 0.0001298679964026258, |
|
"loss": 0.6058, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.0001296325691419886, |
|
"loss": 0.6566, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00012939696169164577, |
|
"loss": 0.597, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00012916117548427897, |
|
"loss": 0.6844, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 1.27, |
|
"learning_rate": 0.00012892521195365678, |
|
"loss": 0.6438, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00012868907253462607, |
|
"loss": 0.6251, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00012845275866310324, |
|
"loss": 0.5319, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.00012821627177606555, |
|
"loss": 0.581, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 1.28, |
|
"learning_rate": 0.0001279796133115423, |
|
"loss": 0.5434, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.0001277427847086061, |
|
"loss": 0.3277, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.0001275057874073641, |
|
"loss": 0.5906, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00012726862284894938, |
|
"loss": 0.7878, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00012703129247551204, |
|
"loss": 0.682, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 0.00012679379773021038, |
|
"loss": 0.7769, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00012655614005720233, |
|
"loss": 0.4693, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.0001263183209016365, |
|
"loss": 0.6305, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.00012608034170964342, |
|
"loss": 0.7404, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.3, |
|
"learning_rate": 0.0001258422039283268, |
|
"loss": 0.5719, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.0001256039090057547, |
|
"loss": 0.6054, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00012536545839095074, |
|
"loss": 0.4711, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.00012512685353388523, |
|
"loss": 0.5795, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 0.0001248880958854664, |
|
"loss": 0.6066, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.0001246491868975316, |
|
"loss": 0.5995, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00012441012802283843, |
|
"loss": 0.4882, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.00012417092071505595, |
|
"loss": 0.5523, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.0001239315664287558, |
|
"loss": 0.6729, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 1.32, |
|
"learning_rate": 0.0001236920666194033, |
|
"loss": 0.6734, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00012345242274334876, |
|
"loss": 0.4987, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.0001232126362578185, |
|
"loss": 0.5735, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.000122972708620906, |
|
"loss": 0.6132, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 0.00012273264129156306, |
|
"loss": 0.4757, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00012249243572959095, |
|
"loss": 0.5593, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00012225209339563145, |
|
"loss": 0.652, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00012201161575115809, |
|
"loss": 0.655, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00012177100425846712, |
|
"loss": 0.64, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 1.34, |
|
"learning_rate": 0.00012153026038066876, |
|
"loss": 0.5883, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00012128938558167824, |
|
"loss": 0.6466, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00012104838132620684, |
|
"loss": 0.5959, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00012080724907975302, |
|
"loss": 0.4044, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 0.00012056599030859366, |
|
"loss": 0.5271, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.0001203246064797749, |
|
"loss": 0.6267, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00012008309906110331, |
|
"loss": 0.5708, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00011984146952113708, |
|
"loss": 0.6444, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 1.36, |
|
"learning_rate": 0.00011959971932917696, |
|
"loss": 0.5842, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00011935784995525732, |
|
"loss": 0.5752, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00011911586287013725, |
|
"loss": 0.4599, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00011887375954529168, |
|
"loss": 0.5428, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00011863154145290234, |
|
"loss": 0.6214, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 1.37, |
|
"learning_rate": 0.00011838921006584884, |
|
"loss": 0.5716, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00011814676685769967, |
|
"loss": 0.5751, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00011790421330270338, |
|
"loss": 0.4826, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.00011766155087577944, |
|
"loss": 0.7021, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 0.0001174187810525093, |
|
"loss": 0.4642, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00011717590530912763, |
|
"loss": 0.4898, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00011693292512251299, |
|
"loss": 0.6179, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00011668984197017919, |
|
"loss": 0.5781, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00011644665733026607, |
|
"loss": 0.6675, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.39, |
|
"learning_rate": 0.00011620337268153061, |
|
"loss": 0.5463, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00011595998950333793, |
|
"loss": 0.6657, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00011571650927565232, |
|
"loss": 0.6032, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00011547293347902812, |
|
"loss": 0.7151, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 0.00011522926359460093, |
|
"loss": 0.593, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00011498550110407836, |
|
"loss": 0.6023, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00011474164748973122, |
|
"loss": 0.6489, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00011449770423438442, |
|
"loss": 0.5674, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 1.41, |
|
"learning_rate": 0.00011425367282140786, |
|
"loss": 0.5911, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00011400955473470764, |
|
"loss": 0.6187, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00011376535145871684, |
|
"loss": 0.5259, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00011352106447838655, |
|
"loss": 0.6097, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.0001132766952791769, |
|
"loss": 0.4309, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 0.00011303224534704793, |
|
"loss": 0.6894, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00011278771616845061, |
|
"loss": 0.4132, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.00011254310923031781, |
|
"loss": 0.5026, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.0001122984260200552, |
|
"loss": 0.4602, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 1.43, |
|
"learning_rate": 0.0001120536680255323, |
|
"loss": 0.5325, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00011180883673507338, |
|
"loss": 0.3364, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.0001115639336374483, |
|
"loss": 0.7636, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.0001113189602218637, |
|
"loss": 0.6035, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 0.00011107391797795373, |
|
"loss": 0.5702, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00011082880839577112, |
|
"loss": 0.6767, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00011058363296577803, |
|
"loss": 0.5896, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00011033839317883701, |
|
"loss": 0.6034, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00011009309052620204, |
|
"loss": 0.5635, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 1.45, |
|
"learning_rate": 0.00010984772649950932, |
|
"loss": 0.6818, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00010960230259076818, |
|
"loss": 0.4099, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00010935682029235222, |
|
"loss": 0.6667, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00010911128109699002, |
|
"loss": 0.664, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.46, |
|
"learning_rate": 0.00010886568649775608, |
|
"loss": 0.4609, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00010862003798806196, |
|
"loss": 0.5718, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00010837433706164688, |
|
"loss": 0.5682, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00010812858521256888, |
|
"loss": 0.5714, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00010788278393519564, |
|
"loss": 0.6165, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 0.00010763693472419538, |
|
"loss": 0.5231, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00010739103907452784, |
|
"loss": 0.5929, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00010714509848143508, |
|
"loss": 0.6055, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00010689911444043248, |
|
"loss": 0.6752, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 1.48, |
|
"learning_rate": 0.00010665308844729967, |
|
"loss": 0.6143, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.0001064070219980713, |
|
"loss": 0.603, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00010616091658902807, |
|
"loss": 0.6289, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.00010591477371668756, |
|
"loss": 0.62, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 0.0001056685948777952, |
|
"loss": 0.6476, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00010542238156931509, |
|
"loss": 0.6157, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00010517613528842097, |
|
"loss": 0.7172, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00010492985753248697, |
|
"loss": 0.6018, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.0001046835497990788, |
|
"loss": 0.5698, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"learning_rate": 0.00010443721358594429, |
|
"loss": 0.5755, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00010419085039100451, |
|
"loss": 0.5586, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00010394446171234466, |
|
"loss": 0.6374, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00010369804904820474, |
|
"loss": 0.4623, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 0.00010345161389697082, |
|
"loss": 0.5365, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00010320515775716555, |
|
"loss": 0.6716, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.0001029586821274392, |
|
"loss": 0.5875, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00010271218850656066, |
|
"loss": 0.4641, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.52, |
|
"learning_rate": 0.00010246567839340817, |
|
"loss": 0.6042, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00010221915328696021, |
|
"loss": 0.6493, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00010197261468628654, |
|
"loss": 0.5813, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00010172606409053886, |
|
"loss": 0.4873, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.00010147950299894185, |
|
"loss": 0.6622, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 0.0001012329329107841, |
|
"loss": 0.5865, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00010098635532540872, |
|
"loss": 0.5075, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00010073977174220459, |
|
"loss": 0.6902, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00010049318366059697, |
|
"loss": 0.5963, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.54, |
|
"learning_rate": 0.00010024659258003848, |
|
"loss": 0.6803, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 0.0001, |
|
"loss": 0.4835, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.975340741996156e-05, |
|
"loss": 0.5966, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.950681633940305e-05, |
|
"loss": 0.43, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.926022825779546e-05, |
|
"loss": 0.5717, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.55, |
|
"learning_rate": 9.901364467459131e-05, |
|
"loss": 0.6375, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.876706708921595e-05, |
|
"loss": 0.5587, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.852049700105815e-05, |
|
"loss": 0.5731, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.827393590946116e-05, |
|
"loss": 0.5774, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 9.802738531371346e-05, |
|
"loss": 0.6526, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.77808467130398e-05, |
|
"loss": 0.6426, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.753432160659185e-05, |
|
"loss": 0.5733, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.728781149343936e-05, |
|
"loss": 0.5751, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.57, |
|
"learning_rate": 9.704131787256083e-05, |
|
"loss": 0.7615, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.679484224283449e-05, |
|
"loss": 0.5689, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.654838610302923e-05, |
|
"loss": 0.6123, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.630195095179527e-05, |
|
"loss": 0.5531, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.605553828765539e-05, |
|
"loss": 0.7153, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.58091496089955e-05, |
|
"loss": 0.5583, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.556278641405573e-05, |
|
"loss": 0.5586, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.53164502009212e-05, |
|
"loss": 0.6765, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.507014246751304e-05, |
|
"loss": 0.5783, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.59, |
|
"learning_rate": 9.482386471157904e-05, |
|
"loss": 0.5004, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.457761843068493e-05, |
|
"loss": 0.5693, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.433140512220483e-05, |
|
"loss": 0.6927, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.408522628331246e-05, |
|
"loss": 0.4887, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 9.383908341097197e-05, |
|
"loss": 0.6689, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.359297800192872e-05, |
|
"loss": 0.5364, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.334691155270036e-05, |
|
"loss": 0.6707, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.31008855595675e-05, |
|
"loss": 0.6863, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.285490151856493e-05, |
|
"loss": 0.721, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.61, |
|
"learning_rate": 9.260896092547216e-05, |
|
"loss": 0.558, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.236306527580464e-05, |
|
"loss": 0.6502, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.211721606480434e-05, |
|
"loss": 0.7737, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.187141478743113e-05, |
|
"loss": 0.5102, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.162566293835316e-05, |
|
"loss": 0.4123, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.137996201193805e-05, |
|
"loss": 0.5964, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.113431350224395e-05, |
|
"loss": 0.4437, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.088871890301001e-05, |
|
"loss": 0.6312, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.06431797076478e-05, |
|
"loss": 0.5999, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 9.039769740923183e-05, |
|
"loss": 0.6335, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 9.015227350049071e-05, |
|
"loss": 0.5792, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.990690947379795e-05, |
|
"loss": 0.5945, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.9661606821163e-05, |
|
"loss": 0.4054, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.64, |
|
"learning_rate": 8.941636703422197e-05, |
|
"loss": 0.6032, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.917119160422889e-05, |
|
"loss": 0.5066, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.892608202204628e-05, |
|
"loss": 0.4883, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.868103977813631e-05, |
|
"loss": 0.5621, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.843606636255174e-05, |
|
"loss": 0.5431, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.819116326492664e-05, |
|
"loss": 0.6483, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.79463319744677e-05, |
|
"loss": 0.6156, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.770157397994481e-05, |
|
"loss": 0.6163, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.745689076968222e-05, |
|
"loss": 0.6626, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.66, |
|
"learning_rate": 8.721228383154939e-05, |
|
"loss": 0.3811, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.69677546529521e-05, |
|
"loss": 0.7575, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.672330472082314e-05, |
|
"loss": 0.5763, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.647893552161347e-05, |
|
"loss": 0.6615, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.62346485412832e-05, |
|
"loss": 0.6341, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.599044526529238e-05, |
|
"loss": 0.5851, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.574632717859219e-05, |
|
"loss": 0.3902, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.55022957656156e-05, |
|
"loss": 0.5242, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.52583525102688e-05, |
|
"loss": 0.5237, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.68, |
|
"learning_rate": 8.501449889592164e-05, |
|
"loss": 0.6457, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.477073640539909e-05, |
|
"loss": 0.4338, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.452706652097186e-05, |
|
"loss": 0.6235, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.42834907243477e-05, |
|
"loss": 0.5244, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.404001049666211e-05, |
|
"loss": 0.6943, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.379662731846942e-05, |
|
"loss": 0.5344, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.355334266973397e-05, |
|
"loss": 0.6037, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.331015802982082e-05, |
|
"loss": 0.4845, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.7, |
|
"learning_rate": 8.306707487748705e-05, |
|
"loss": 0.7717, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.282409469087239e-05, |
|
"loss": 0.5519, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.258121894749073e-05, |
|
"loss": 0.5284, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.23384491242206e-05, |
|
"loss": 0.5785, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.209578669729663e-05, |
|
"loss": 0.5244, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 8.185323314230032e-05, |
|
"loss": 0.5486, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.161078993415117e-05, |
|
"loss": 0.4737, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.13684585470977e-05, |
|
"loss": 0.5918, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.112624045470835e-05, |
|
"loss": 0.6141, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.08841371298628e-05, |
|
"loss": 0.3379, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.064215004474272e-05, |
|
"loss": 0.5141, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.040028067082305e-05, |
|
"loss": 0.4426, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 8.01585304788629e-05, |
|
"loss": 0.661, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.73, |
|
"learning_rate": 7.991690093889671e-05, |
|
"loss": 0.4921, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.967539352022513e-05, |
|
"loss": 0.4858, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.943400969140635e-05, |
|
"loss": 0.5962, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.919275092024697e-05, |
|
"loss": 0.6083, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.89516186737932e-05, |
|
"loss": 0.6439, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 7.87106144183218e-05, |
|
"loss": 0.5193, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.846973961933124e-05, |
|
"loss": 0.2868, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.822899574153291e-05, |
|
"loss": 0.4139, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.798838424884195e-05, |
|
"loss": 0.5663, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.75, |
|
"learning_rate": 7.774790660436858e-05, |
|
"loss": 0.5397, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.750756427040906e-05, |
|
"loss": 0.6151, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.726735870843697e-05, |
|
"loss": 0.5842, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.702729137909401e-05, |
|
"loss": 0.4999, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.678736374218151e-05, |
|
"loss": 0.5844, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 7.654757725665124e-05, |
|
"loss": 0.6323, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.630793338059672e-05, |
|
"loss": 0.4702, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.606843357124426e-05, |
|
"loss": 0.5288, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.582907928494407e-05, |
|
"loss": 0.5858, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.77, |
|
"learning_rate": 7.55898719771616e-05, |
|
"loss": 0.484, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.535081310246841e-05, |
|
"loss": 0.6479, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.511190411453364e-05, |
|
"loss": 0.4571, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.487314646611479e-05, |
|
"loss": 0.4951, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 7.463454160904928e-05, |
|
"loss": 0.5957, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.43960909942453e-05, |
|
"loss": 0.4979, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.415779607167321e-05, |
|
"loss": 0.5733, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.391965829035659e-05, |
|
"loss": 0.5862, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.368167909836352e-05, |
|
"loss": 0.4632, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.79, |
|
"learning_rate": 7.34438599427977e-05, |
|
"loss": 0.5955, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.320620226978965e-05, |
|
"loss": 0.4981, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.296870752448801e-05, |
|
"loss": 0.6223, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.273137715105063e-05, |
|
"loss": 0.5994, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 7.249421259263592e-05, |
|
"loss": 0.731, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.225721529139393e-05, |
|
"loss": 0.5325, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.202038668845773e-05, |
|
"loss": 0.5118, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.178372822393444e-05, |
|
"loss": 0.5064, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 7.154724133689677e-05, |
|
"loss": 0.4535, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.131092746537394e-05, |
|
"loss": 0.6072, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.107478804634325e-05, |
|
"loss": 0.4077, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.083882451572107e-05, |
|
"loss": 0.6195, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.060303830835424e-05, |
|
"loss": 0.5435, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.82, |
|
"learning_rate": 7.036743085801143e-05, |
|
"loss": 0.5076, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 7.01320035973742e-05, |
|
"loss": 0.496, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.989675795802853e-05, |
|
"loss": 0.7021, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.966169537045589e-05, |
|
"loss": 0.5237, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 6.942681726402473e-05, |
|
"loss": 0.4552, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.919212506698163e-05, |
|
"loss": 0.385, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.895762020644279e-05, |
|
"loss": 0.5672, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.872330410838516e-05, |
|
"loss": 0.6631, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.848917819763793e-05, |
|
"loss": 0.5667, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.84, |
|
"learning_rate": 6.82552438978738e-05, |
|
"loss": 0.5194, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.80215026316002e-05, |
|
"loss": 0.6219, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.778795582015097e-05, |
|
"loss": 0.6502, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.755460488367728e-05, |
|
"loss": 0.2574, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 6.732145124113947e-05, |
|
"loss": 0.4165, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.708849631029793e-05, |
|
"loss": 0.4889, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.685574150770495e-05, |
|
"loss": 0.5851, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.662318824869565e-05, |
|
"loss": 0.5219, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.86, |
|
"learning_rate": 6.639083794737983e-05, |
|
"loss": 0.4303, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.615869201663296e-05, |
|
"loss": 0.6366, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.59267518680879e-05, |
|
"loss": 0.5489, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.569501891212615e-05, |
|
"loss": 0.6061, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.546349455786926e-05, |
|
"loss": 0.5419, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 6.52321802131704e-05, |
|
"loss": 0.4464, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.500107728460562e-05, |
|
"loss": 0.613, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.477018717746551e-05, |
|
"loss": 0.3781, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.453951129574644e-05, |
|
"loss": 0.479, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.88, |
|
"learning_rate": 6.430905104214218e-05, |
|
"loss": 0.6029, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.407880781803521e-05, |
|
"loss": 0.6229, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.384878302348845e-05, |
|
"loss": 0.4271, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.361897805723645e-05, |
|
"loss": 0.6708, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.33893943166771e-05, |
|
"loss": 0.5021, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 6.316003319786309e-05, |
|
"loss": 0.4785, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.293089609549325e-05, |
|
"loss": 0.5957, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.270198440290443e-05, |
|
"loss": 0.5599, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.24732995120626e-05, |
|
"loss": 0.4143, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 6.224484281355473e-05, |
|
"loss": 0.5559, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.201661569658012e-05, |
|
"loss": 0.6875, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.178861954894212e-05, |
|
"loss": 0.5779, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.156085575703945e-05, |
|
"loss": 0.3266, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.91, |
|
"learning_rate": 6.133332570585812e-05, |
|
"loss": 0.5173, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.110603077896265e-05, |
|
"loss": 0.312, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.087897235848785e-05, |
|
"loss": 0.4248, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.0652151825130486e-05, |
|
"loss": 0.4151, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.0425570558140574e-05, |
|
"loss": 0.4938, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 6.019922993531345e-05, |
|
"loss": 0.5815, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.9973131332980925e-05, |
|
"loss": 0.4496, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.9747276126003257e-05, |
|
"loss": 0.6376, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.952166568776062e-05, |
|
"loss": 0.6191, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.93, |
|
"learning_rate": 5.9296301390144825e-05, |
|
"loss": 0.5334, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.90711846035509e-05, |
|
"loss": 0.4456, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.884631669686892e-05, |
|
"loss": 0.635, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.8621699037475406e-05, |
|
"loss": 0.596, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 5.8397332991225295e-05, |
|
"loss": 0.5259, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.817321992244351e-05, |
|
"loss": 0.5855, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.794936119391658e-05, |
|
"loss": 0.4387, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.7725758166884546e-05, |
|
"loss": 0.4881, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.750241220103254e-05, |
|
"loss": 0.6042, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.95, |
|
"learning_rate": 5.727932465448258e-05, |
|
"loss": 0.5094, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.705649688378518e-05, |
|
"loss": 0.5251, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.683393024391139e-05, |
|
"loss": 0.4743, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.6611626088244194e-05, |
|
"loss": 0.7013, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 5.638958576857058e-05, |
|
"loss": 0.5063, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.6167810635073207e-05, |
|
"loss": 0.5708, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.594630203632206e-05, |
|
"loss": 0.4183, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.572506131926658e-05, |
|
"loss": 0.549, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.550408982922706e-05, |
|
"loss": 0.4257, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.97, |
|
"learning_rate": 5.528338890988681e-05, |
|
"loss": 0.5504, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.506295990328385e-05, |
|
"loss": 0.4364, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.484280414980277e-05, |
|
"loss": 0.587, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.4622922988166426e-05, |
|
"loss": 0.5226, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 5.440331775542813e-05, |
|
"loss": 0.5032, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.418398978696312e-05, |
|
"loss": 0.588, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.396494041646081e-05, |
|
"loss": 0.6562, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.37461709759165e-05, |
|
"loss": 0.558, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 5.3527682795623146e-05, |
|
"loss": 0.624, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.3309477204163637e-05, |
|
"loss": 0.4503, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.309155552840228e-05, |
|
"loss": 0.413, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.28739190934771e-05, |
|
"loss": 0.4639, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.265656922279154e-05, |
|
"loss": 0.2744, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"learning_rate": 5.243950723800663e-05, |
|
"loss": 0.2639, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.222273445903263e-05, |
|
"loss": 0.1992, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.200625220402139e-05, |
|
"loss": 0.2826, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.179006178935797e-05, |
|
"loss": 0.2146, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 5.157416452965293e-05, |
|
"loss": 0.2251, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.1358561737734214e-05, |
|
"loss": 0.1948, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.114325472463905e-05, |
|
"loss": 0.1372, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.092824479960625e-05, |
|
"loss": 0.1991, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 2.02, |
|
"learning_rate": 5.0713533270067936e-05, |
|
"loss": 0.2826, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.0499121441641864e-05, |
|
"loss": 0.2505, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.028501061812334e-05, |
|
"loss": 0.3082, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 5.0071202101477334e-05, |
|
"loss": 0.2937, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.985769719183045e-05, |
|
"loss": 0.2785, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 4.964449718746328e-05, |
|
"loss": 0.2436, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.943160338480217e-05, |
|
"loss": 0.2034, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.921901707841165e-05, |
|
"loss": 0.2472, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.900673956098644e-05, |
|
"loss": 0.2935, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 2.04, |
|
"learning_rate": 4.87947721233434e-05, |
|
"loss": 0.0893, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.858311605441408e-05, |
|
"loss": 0.266, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.837177264123648e-05, |
|
"loss": 0.1819, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.8160743168947496e-05, |
|
"loss": 0.2738, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.795002892077502e-05, |
|
"loss": 0.2068, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 4.773963117803013e-05, |
|
"loss": 0.1475, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.75295512200992e-05, |
|
"loss": 0.171, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.7319790324436367e-05, |
|
"loss": 0.2322, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.711034976655544e-05, |
|
"loss": 0.1909, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 2.06, |
|
"learning_rate": 4.690123082002249e-05, |
|
"loss": 0.1659, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.6692434756447876e-05, |
|
"loss": 0.275, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.648396284547848e-05, |
|
"loss": 0.2653, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.627581635479024e-05, |
|
"loss": 0.22, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 4.606799655008009e-05, |
|
"loss": 0.17, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.5860504695058625e-05, |
|
"loss": 0.2068, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.565334205144214e-05, |
|
"loss": 0.2213, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.544650987894514e-05, |
|
"loss": 0.1543, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.524000943527249e-05, |
|
"loss": 0.2182, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 4.5033841976112015e-05, |
|
"loss": 0.2541, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.482800875512656e-05, |
|
"loss": 0.2914, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.462251102394669e-05, |
|
"loss": 0.1802, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.4417350032162894e-05, |
|
"loss": 0.2606, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 2.09, |
|
"learning_rate": 4.421252702731791e-05, |
|
"loss": 0.1971, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.40080432548994e-05, |
|
"loss": 0.2834, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.380389995833206e-05, |
|
"loss": 0.2257, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3600098378970365e-05, |
|
"loss": 0.2955, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 4.3396639756090784e-05, |
|
"loss": 0.2264, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.3193525326884435e-05, |
|
"loss": 0.2673, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.29907563264493e-05, |
|
"loss": 0.1629, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.278833398778306e-05, |
|
"loss": 0.1767, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.258625954177525e-05, |
|
"loss": 0.0968, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 2.11, |
|
"learning_rate": 4.238453421720006e-05, |
|
"loss": 0.2329, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.218315924070874e-05, |
|
"loss": 0.2749, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.198213583682202e-05, |
|
"loss": 0.2321, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.1781465227922957e-05, |
|
"loss": 0.2391, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 4.1581148634249176e-05, |
|
"loss": 0.1415, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.1381187273885726e-05, |
|
"loss": 0.1352, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.1181582362757475e-05, |
|
"loss": 0.2324, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.098233511462189e-05, |
|
"loss": 0.1912, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.078344674106141e-05, |
|
"loss": 0.1645, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 2.13, |
|
"learning_rate": 4.058491845147641e-05, |
|
"loss": 0.2053, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.038675145307747e-05, |
|
"loss": 0.2527, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 4.0188946950878404e-05, |
|
"loss": 0.2433, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.999150614768869e-05, |
|
"loss": 0.1732, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 3.9794430244106196e-05, |
|
"loss": 0.1512, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.959772043850998e-05, |
|
"loss": 0.2331, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.9401377927052816e-05, |
|
"loss": 0.2367, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.9205403903654156e-05, |
|
"loss": 0.255, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 2.15, |
|
"learning_rate": 3.900979955999271e-05, |
|
"loss": 0.2429, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.881456608549926e-05, |
|
"loss": 0.1963, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.861970466734931e-05, |
|
"loss": 0.1036, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.8425216490456095e-05, |
|
"loss": 0.1508, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.823110273746313e-05, |
|
"loss": 0.1603, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 3.803736458873722e-05, |
|
"loss": 0.2099, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.78440032223612e-05, |
|
"loss": 0.2079, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.7651019814126654e-05, |
|
"loss": 0.1721, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.745841553752703e-05, |
|
"loss": 0.1555, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 3.72661915637502e-05, |
|
"loss": 0.2968, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.707434906167161e-05, |
|
"loss": 0.1047, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.688288919784702e-05, |
|
"loss": 0.1934, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.669181313650545e-05, |
|
"loss": 0.1916, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.650112203954202e-05, |
|
"loss": 0.1549, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 2.18, |
|
"learning_rate": 3.6310817066511105e-05, |
|
"loss": 0.1003, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.6120899374618955e-05, |
|
"loss": 0.2804, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.593137011871698e-05, |
|
"loss": 0.1772, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.574223045129459e-05, |
|
"loss": 0.1746, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 3.5553481522472066e-05, |
|
"loss": 0.263, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.536512447999383e-05, |
|
"loss": 0.2504, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.517716046922118e-05, |
|
"loss": 0.1442, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.498959063312558e-05, |
|
"loss": 0.1643, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 2.2, |
|
"learning_rate": 3.4802416112281564e-05, |
|
"loss": 0.265, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.461563804485985e-05, |
|
"loss": 0.2275, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.442925756662032e-05, |
|
"loss": 0.2501, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.4243275810905314e-05, |
|
"loss": 0.2823, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.40576939086325e-05, |
|
"loss": 0.2683, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 3.387251298828822e-05, |
|
"loss": 0.2341, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.36877341759205e-05, |
|
"loss": 0.1833, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.350335859513216e-05, |
|
"loss": 0.123, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.331938736707415e-05, |
|
"loss": 0.2159, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 2.22, |
|
"learning_rate": 3.3135821610438545e-05, |
|
"loss": 0.2506, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2952662441451885e-05, |
|
"loss": 0.1828, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.276991097386831e-05, |
|
"loss": 0.2228, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2587568318962866e-05, |
|
"loss": 0.1768, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 3.2405635585524565e-05, |
|
"loss": 0.0988, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.2224113879849915e-05, |
|
"loss": 0.1921, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.20430043057359e-05, |
|
"loss": 0.2149, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.186230796447354e-05, |
|
"loss": 0.2384, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.168202595484103e-05, |
|
"loss": 0.1974, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 2.24, |
|
"learning_rate": 3.1502159373097053e-05, |
|
"loss": 0.2374, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.132270931297423e-05, |
|
"loss": 0.2636, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.114367686567228e-05, |
|
"loss": 0.2145, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.09650631198517e-05, |
|
"loss": 0.1412, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 3.0786869161626744e-05, |
|
"loss": 0.2153, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.060909607455916e-05, |
|
"loss": 0.2129, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0431744939651364e-05, |
|
"loss": 0.2433, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.025481683534006e-05, |
|
"loss": 0.2356, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 3.0078312837489487e-05, |
|
"loss": 0.2208, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 2.9902234019385057e-05, |
|
"loss": 0.2511, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.9726581451726766e-05, |
|
"loss": 0.3181, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.955135620262254e-05, |
|
"loss": 0.1662, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.937655933758202e-05, |
|
"loss": 0.1374, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 2.27, |
|
"learning_rate": 2.920219191950979e-05, |
|
"loss": 0.1709, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.9028255008699213e-05, |
|
"loss": 0.1662, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8854749662825664e-05, |
|
"loss": 0.2204, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.8681676936940393e-05, |
|
"loss": 0.1645, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 2.850903788346383e-05, |
|
"loss": 0.1947, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8336833552179443e-05, |
|
"loss": 0.2298, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.8165064990227252e-05, |
|
"loss": 0.2329, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.799373324209734e-05, |
|
"loss": 0.213, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.7822839349623743e-05, |
|
"loss": 0.253, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 2.29, |
|
"learning_rate": 2.7652384351977856e-05, |
|
"loss": 0.2194, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7482369285662378e-05, |
|
"loss": 0.2741, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.7312795184504715e-05, |
|
"loss": 0.2357, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.714366307965105e-05, |
|
"loss": 0.2567, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 2.6974973999559682e-05, |
|
"loss": 0.2402, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.680672896999512e-05, |
|
"loss": 0.0844, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6638929014021552e-05, |
|
"loss": 0.0055, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6471575151996865e-05, |
|
"loss": 0.1922, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 2.31, |
|
"learning_rate": 2.6304668401566335e-05, |
|
"loss": 0.0577, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.6138209777656352e-05, |
|
"loss": 0.1469, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5972200292468464e-05, |
|
"loss": 0.2158, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5806640955472972e-05, |
|
"loss": 0.2362, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5641532773402998e-05, |
|
"loss": 0.1729, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 2.32, |
|
"learning_rate": 2.5476876750248257e-05, |
|
"loss": 0.2804, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.5312673887249027e-05, |
|
"loss": 0.2605, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.514892518288988e-05, |
|
"loss": 0.1888, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.4985631632893902e-05, |
|
"loss": 0.1773, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 2.4822794230216327e-05, |
|
"loss": 0.2871, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4660413965038753e-05, |
|
"loss": 0.2498, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4498491824763002e-05, |
|
"loss": 0.231, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.4337028794005057e-05, |
|
"loss": 0.2382, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.417602585458927e-05, |
|
"loss": 0.1792, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 2.401548398554213e-05, |
|
"loss": 0.2354, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3855404163086558e-05, |
|
"loss": 0.2113, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3695787360635823e-05, |
|
"loss": 0.1993, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.3536634548787694e-05, |
|
"loss": 0.1822, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 2.337794669531842e-05, |
|
"loss": 0.2427, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.321972476517705e-05, |
|
"loss": 0.2494, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.3061969720479326e-05, |
|
"loss": 0.1765, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.290468252050204e-05, |
|
"loss": 0.224, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 2.36, |
|
"learning_rate": 2.274786412167711e-05, |
|
"loss": 0.2247, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.25915154775857e-05, |
|
"loss": 0.118, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.243563753895259e-05, |
|
"loss": 0.1374, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2280231253640173e-05, |
|
"loss": 0.2533, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.2125297566642922e-05, |
|
"loss": 0.2346, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 2.197083742008148e-05, |
|
"loss": 0.1625, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.181685175319702e-05, |
|
"loss": 0.1928, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1663341502345448e-05, |
|
"loss": 0.2175, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.151030760099182e-05, |
|
"loss": 0.2422, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 2.38, |
|
"learning_rate": 2.1357750979704528e-05, |
|
"loss": 0.1991, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.1205672566149804e-05, |
|
"loss": 0.1839, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.1054073285085962e-05, |
|
"loss": 0.1046, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0902954058357782e-05, |
|
"loss": 0.2108, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0752315804890977e-05, |
|
"loss": 0.2033, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 2.0602159440686497e-05, |
|
"loss": 0.1988, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0452485878815063e-05, |
|
"loss": 0.2709, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0303296029411588e-05, |
|
"loss": 0.2455, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0154590799669627e-05, |
|
"loss": 0.1649, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 2.4, |
|
"learning_rate": 2.0006371093835763e-05, |
|
"loss": 0.0902, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.985863781320435e-05, |
|
"loss": 0.1709, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.971139185611176e-05, |
|
"loss": 0.1915, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9564634117931125e-05, |
|
"loss": 0.1739, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 2.41, |
|
"learning_rate": 1.9418365491066837e-05, |
|
"loss": 0.1766, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.9272586864948993e-05, |
|
"loss": 0.2287, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.912729912602822e-05, |
|
"loss": 0.1076, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.898250315777005e-05, |
|
"loss": 0.1616, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8838199840649716e-05, |
|
"loss": 0.1938, |
|
"step": 1061 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 1.8694390052146737e-05, |
|
"loss": 0.1587, |
|
"step": 1062 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.855107466673961e-05, |
|
"loss": 0.2593, |
|
"step": 1063 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8408254555900363e-05, |
|
"loss": 0.2158, |
|
"step": 1064 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8265930588089476e-05, |
|
"loss": 0.2148, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 1.8124103628750388e-05, |
|
"loss": 0.1833, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7982774540304403e-05, |
|
"loss": 0.2417, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7841944182145365e-05, |
|
"loss": 0.226, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7701613410634365e-05, |
|
"loss": 0.2438, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 1.7561783079094705e-05, |
|
"loss": 0.1615, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7422454037806525e-05, |
|
"loss": 0.2321, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.728362713400178e-05, |
|
"loss": 0.1068, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7145303211859008e-05, |
|
"loss": 0.2583, |
|
"step": 1073 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.7007483112498247e-05, |
|
"loss": 0.1893, |
|
"step": 1074 |
|
}, |
|
{ |
|
"epoch": 2.45, |
|
"learning_rate": 1.687016767397581e-05, |
|
"loss": 0.1601, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6733357731279377e-05, |
|
"loss": 0.2538, |
|
"step": 1076 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6597054116322687e-05, |
|
"loss": 0.26, |
|
"step": 1077 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.64612576579407e-05, |
|
"loss": 0.1101, |
|
"step": 1078 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 1.6325969181884436e-05, |
|
"loss": 0.1726, |
|
"step": 1079 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.619118951081594e-05, |
|
"loss": 0.1489, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.6056919464303367e-05, |
|
"loss": 0.1859, |
|
"step": 1081 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5923159858815885e-05, |
|
"loss": 0.1327, |
|
"step": 1082 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5789911507718826e-05, |
|
"loss": 0.1259, |
|
"step": 1083 |
|
}, |
|
{ |
|
"epoch": 2.47, |
|
"learning_rate": 1.5657175221268684e-05, |
|
"loss": 0.1762, |
|
"step": 1084 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.552495180660818e-05, |
|
"loss": 0.1467, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5393242067761326e-05, |
|
"loss": 0.1864, |
|
"step": 1086 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5262046805628662e-05, |
|
"loss": 0.1327, |
|
"step": 1087 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 1.5131366817982185e-05, |
|
"loss": 0.1625, |
|
"step": 1088 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.5001202899460697e-05, |
|
"loss": 0.1634, |
|
"step": 1089 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4871555841564887e-05, |
|
"loss": 0.2654, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4742426432652457e-05, |
|
"loss": 0.2414, |
|
"step": 1091 |
|
}, |
|
{ |
|
"epoch": 2.49, |
|
"learning_rate": 1.4613815457933467e-05, |
|
"loss": 0.2181, |
|
"step": 1092 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4485723699465392e-05, |
|
"loss": 0.2221, |
|
"step": 1093 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4358151936148533e-05, |
|
"loss": 0.1683, |
|
"step": 1094 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4231100943721175e-05, |
|
"loss": 0.238, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.4104571494754925e-05, |
|
"loss": 0.2, |
|
"step": 1096 |
|
}, |
|
{ |
|
"epoch": 2.5, |
|
"learning_rate": 1.3978564358649927e-05, |
|
"loss": 0.1384, |
|
"step": 1097 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3853080301630338e-05, |
|
"loss": 0.1809, |
|
"step": 1098 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.372812008673946e-05, |
|
"loss": 0.2029, |
|
"step": 1099 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.360368447383532e-05, |
|
"loss": 0.2099, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 1.3479774219585917e-05, |
|
"loss": 0.1694, |
|
"step": 1101 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3356390077464587e-05, |
|
"loss": 0.1675, |
|
"step": 1102 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.3233532797745596e-05, |
|
"loss": 0.1922, |
|
"step": 1103 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.311120312749935e-05, |
|
"loss": 0.175, |
|
"step": 1104 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 1.2989401810588053e-05, |
|
"loss": 0.2115, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.286812958766106e-05, |
|
"loss": 0.2005, |
|
"step": 1106 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.274738719615044e-05, |
|
"loss": 0.2016, |
|
"step": 1107 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2627175370266397e-05, |
|
"loss": 0.2266, |
|
"step": 1108 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2507494840992962e-05, |
|
"loss": 0.1236, |
|
"step": 1109 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 1.2388346336083379e-05, |
|
"loss": 0.2229, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2269730580055805e-05, |
|
"loss": 0.2306, |
|
"step": 1111 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2151648294188866e-05, |
|
"loss": 0.2135, |
|
"step": 1112 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.2034100196517206e-05, |
|
"loss": 0.185, |
|
"step": 1113 |
|
}, |
|
{ |
|
"epoch": 2.54, |
|
"learning_rate": 1.1917087001827254e-05, |
|
"loss": 0.2213, |
|
"step": 1114 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.180060942165272e-05, |
|
"loss": 0.1765, |
|
"step": 1115 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1684668164270418e-05, |
|
"loss": 0.2436, |
|
"step": 1116 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1569263934695851e-05, |
|
"loss": 0.1658, |
|
"step": 1117 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.1454397434679021e-05, |
|
"loss": 0.0351, |
|
"step": 1118 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 1.134006936269999e-05, |
|
"loss": 0.2288, |
|
"step": 1119 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1226280413964884e-05, |
|
"loss": 0.1378, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1113031280401409e-05, |
|
"loss": 0.2044, |
|
"step": 1121 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.1000322650654837e-05, |
|
"loss": 0.1946, |
|
"step": 1122 |
|
}, |
|
{ |
|
"epoch": 2.56, |
|
"learning_rate": 1.088815521008375e-05, |
|
"loss": 0.1979, |
|
"step": 1123 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0776529640755795e-05, |
|
"loss": 0.2315, |
|
"step": 1124 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0665446621443708e-05, |
|
"loss": 0.2385, |
|
"step": 1125 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.0554906827620981e-05, |
|
"loss": 0.1666, |
|
"step": 1126 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 1.044491093145793e-05, |
|
"loss": 0.1671, |
|
"step": 1127 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0335459601817543e-05, |
|
"loss": 0.2662, |
|
"step": 1128 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0226553504251401e-05, |
|
"loss": 0.1882, |
|
"step": 1129 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.0118193300995582e-05, |
|
"loss": 0.1017, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 1.001037965096676e-05, |
|
"loss": 0.1749, |
|
"step": 1131 |
|
}, |
|
{ |
|
"epoch": 2.58, |
|
"learning_rate": 9.903113209758096e-06, |
|
"loss": 0.2557, |
|
"step": 1132 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.796394629635275e-06, |
|
"loss": 0.2487, |
|
"step": 1133 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.690224559532612e-06, |
|
"loss": 0.1444, |
|
"step": 1134 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.58460364504894e-06, |
|
"loss": 0.2408, |
|
"step": 1135 |
|
}, |
|
{ |
|
"epoch": 2.59, |
|
"learning_rate": 9.47953252844388e-06, |
|
"loss": 0.1939, |
|
"step": 1136 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.375011848633764e-06, |
|
"loss": 0.2524, |
|
"step": 1137 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.2710422411879e-06, |
|
"loss": 0.1851, |
|
"step": 1138 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.1676243383246e-06, |
|
"loss": 0.2296, |
|
"step": 1139 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 9.064758768907377e-06, |
|
"loss": 0.2288, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.962446158441108e-06, |
|
"loss": 0.1709, |
|
"step": 1141 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.860687129068245e-06, |
|
"loss": 0.2003, |
|
"step": 1142 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.759482299565003e-06, |
|
"loss": 0.2107, |
|
"step": 1143 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.65883228533766e-06, |
|
"loss": 0.2051, |
|
"step": 1144 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.558737698418761e-06, |
|
"loss": 0.2017, |
|
"step": 1145 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.45919914746337e-06, |
|
"loss": 0.2266, |
|
"step": 1146 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.360217237745472e-06, |
|
"loss": 0.2327, |
|
"step": 1147 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.261792571154148e-06, |
|
"loss": 0.2191, |
|
"step": 1148 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 8.163925746190082e-06, |
|
"loss": 0.1925, |
|
"step": 1149 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 8.06661735796177e-06, |
|
"loss": 0.253, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.969867998182024e-06, |
|
"loss": 0.1904, |
|
"step": 1151 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.87367825516424e-06, |
|
"loss": 0.2178, |
|
"step": 1152 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.778048713818975e-06, |
|
"loss": 0.222, |
|
"step": 1153 |
|
}, |
|
{ |
|
"epoch": 2.63, |
|
"learning_rate": 7.682979955650249e-06, |
|
"loss": 0.1368, |
|
"step": 1154 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.588472558752114e-06, |
|
"loss": 0.2443, |
|
"step": 1155 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.494527097805104e-06, |
|
"loss": 0.1468, |
|
"step": 1156 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.401144144072669e-06, |
|
"loss": 0.2342, |
|
"step": 1157 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.308324265397836e-06, |
|
"loss": 0.1645, |
|
"step": 1158 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.216068026199641e-06, |
|
"loss": 0.1807, |
|
"step": 1159 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.124375987469767e-06, |
|
"loss": 0.1215, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 7.033248706769102e-06, |
|
"loss": 0.1607, |
|
"step": 1161 |
|
}, |
|
{ |
|
"epoch": 2.65, |
|
"learning_rate": 6.9426867382243684e-06, |
|
"loss": 0.2165, |
|
"step": 1162 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.852690632524694e-06, |
|
"loss": 0.1695, |
|
"step": 1163 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.763260936918325e-06, |
|
"loss": 0.1586, |
|
"step": 1164 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.674398195209308e-06, |
|
"loss": 0.2074, |
|
"step": 1165 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.586102947754147e-06, |
|
"loss": 0.1315, |
|
"step": 1166 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 6.498375731458528e-06, |
|
"loss": 0.213, |
|
"step": 1167 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.411217079774012e-06, |
|
"loss": 0.2707, |
|
"step": 1168 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.324627522694903e-06, |
|
"loss": 0.2264, |
|
"step": 1169 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.238607586754896e-06, |
|
"loss": 0.1265, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.67, |
|
"learning_rate": 6.153157795023956e-06, |
|
"loss": 0.1781, |
|
"step": 1171 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 6.068278667105132e-06, |
|
"loss": 0.1526, |
|
"step": 1172 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.9839707191313706e-06, |
|
"loss": 0.1672, |
|
"step": 1173 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.900234463762366e-06, |
|
"loss": 0.1981, |
|
"step": 1174 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.81707041018148e-06, |
|
"loss": 0.1957, |
|
"step": 1175 |
|
}, |
|
{ |
|
"epoch": 2.68, |
|
"learning_rate": 5.734479064092624e-06, |
|
"loss": 0.2222, |
|
"step": 1176 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.652460927717218e-06, |
|
"loss": 0.194, |
|
"step": 1177 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.571016499791093e-06, |
|
"loss": 0.2159, |
|
"step": 1178 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.490146275561437e-06, |
|
"loss": 0.2476, |
|
"step": 1179 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 5.4098507467838955e-06, |
|
"loss": 0.1322, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.3301304017194135e-06, |
|
"loss": 0.2163, |
|
"step": 1181 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.250985725131441e-06, |
|
"loss": 0.1821, |
|
"step": 1182 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.172417198282864e-06, |
|
"loss": 0.1862, |
|
"step": 1183 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 5.094425298933136e-06, |
|
"loss": 0.2165, |
|
"step": 1184 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 5.01701050133534e-06, |
|
"loss": 0.1419, |
|
"step": 1185 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.9401732762333065e-06, |
|
"loss": 0.1452, |
|
"step": 1186 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.86391409085879e-06, |
|
"loss": 0.1948, |
|
"step": 1187 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.788233408928589e-06, |
|
"loss": 0.2447, |
|
"step": 1188 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 4.713131690641759e-06, |
|
"loss": 0.2473, |
|
"step": 1189 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.6386093926767405e-06, |
|
"loss": 0.1738, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.564666968188713e-06, |
|
"loss": 0.1201, |
|
"step": 1191 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.491304866806667e-06, |
|
"loss": 0.1836, |
|
"step": 1192 |
|
}, |
|
{ |
|
"epoch": 2.72, |
|
"learning_rate": 4.4185235346308805e-06, |
|
"loss": 0.198, |
|
"step": 1193 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.34632341422998e-06, |
|
"loss": 0.2354, |
|
"step": 1194 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.27470494463843e-06, |
|
"loss": 0.205, |
|
"step": 1195 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.203668561353757e-06, |
|
"loss": 0.2448, |
|
"step": 1196 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 4.133214696333942e-06, |
|
"loss": 0.1934, |
|
"step": 1197 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 4.063343777994821e-06, |
|
"loss": 0.267, |
|
"step": 1198 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.99405623120741e-06, |
|
"loss": 0.2535, |
|
"step": 1199 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.925352477295385e-06, |
|
"loss": 0.2343, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.857232934032473e-06, |
|
"loss": 0.1343, |
|
"step": 1201 |
|
}, |
|
{ |
|
"epoch": 2.74, |
|
"learning_rate": 3.789698015639953e-06, |
|
"loss": 0.1522, |
|
"step": 1202 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.722748132784093e-06, |
|
"loss": 0.1969, |
|
"step": 1203 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.6563836925737284e-06, |
|
"loss": 0.1712, |
|
"step": 1204 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.5906050985576645e-06, |
|
"loss": 0.1957, |
|
"step": 1205 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 3.525412750722368e-06, |
|
"loss": 0.1356, |
|
"step": 1206 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.4608070454893894e-06, |
|
"loss": 0.1817, |
|
"step": 1207 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.3967883757130447e-06, |
|
"loss": 0.2089, |
|
"step": 1208 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.3333571306780497e-06, |
|
"loss": 0.2297, |
|
"step": 1209 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.270513696097055e-06, |
|
"loss": 0.1586, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.76, |
|
"learning_rate": 3.208258454108393e-06, |
|
"loss": 0.1958, |
|
"step": 1211 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.146591783273667e-06, |
|
"loss": 0.1063, |
|
"step": 1212 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.085514058575556e-06, |
|
"loss": 0.1937, |
|
"step": 1213 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 3.025025651415414e-06, |
|
"loss": 0.1601, |
|
"step": 1214 |
|
}, |
|
{ |
|
"epoch": 2.77, |
|
"learning_rate": 2.9651269296111283e-06, |
|
"loss": 0.1627, |
|
"step": 1215 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.905818257394799e-06, |
|
"loss": 0.185, |
|
"step": 1216 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.8470999954105517e-06, |
|
"loss": 0.2361, |
|
"step": 1217 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.788972500712372e-06, |
|
"loss": 0.1271, |
|
"step": 1218 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 2.7314361267618436e-06, |
|
"loss": 0.2226, |
|
"step": 1219 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.6744912234261677e-06, |
|
"loss": 0.1953, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.618138136975823e-06, |
|
"loss": 0.1943, |
|
"step": 1221 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.562377210082656e-06, |
|
"loss": 0.2166, |
|
"step": 1222 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.5072087818176382e-06, |
|
"loss": 0.1295, |
|
"step": 1223 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 2.4526331876489338e-06, |
|
"loss": 0.2534, |
|
"step": 1224 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.3986507594397687e-06, |
|
"loss": 0.1885, |
|
"step": 1225 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.3452618254464543e-06, |
|
"loss": 0.1965, |
|
"step": 1226 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.2924667103163878e-06, |
|
"loss": 0.1939, |
|
"step": 1227 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 2.240265735086067e-06, |
|
"loss": 0.2037, |
|
"step": 1228 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.1886592171791454e-06, |
|
"loss": 0.1508, |
|
"step": 1229 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.137647470404469e-06, |
|
"loss": 0.1761, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.0872308049542877e-06, |
|
"loss": 0.1381, |
|
"step": 1231 |
|
}, |
|
{ |
|
"epoch": 2.81, |
|
"learning_rate": 2.03740952740219e-06, |
|
"loss": 0.1882, |
|
"step": 1232 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.9881839407013823e-06, |
|
"loss": 0.2602, |
|
"step": 1233 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.9395543441827592e-06, |
|
"loss": 0.1877, |
|
"step": 1234 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8915210335531453e-06, |
|
"loss": 0.2036, |
|
"step": 1235 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.8440843008934561e-06, |
|
"loss": 0.1789, |
|
"step": 1236 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 1.797244434656975e-06, |
|
"loss": 0.1377, |
|
"step": 1237 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.7510017196674777e-06, |
|
"loss": 0.1597, |
|
"step": 1238 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.7053564371176777e-06, |
|
"loss": 0.1465, |
|
"step": 1239 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.6603088645673503e-06, |
|
"loss": 0.2292, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.83, |
|
"learning_rate": 1.6158592759417334e-06, |
|
"loss": 0.2373, |
|
"step": 1241 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.572007941529896e-06, |
|
"loss": 0.1416, |
|
"step": 1242 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.5287551279829504e-06, |
|
"loss": 0.056, |
|
"step": 1243 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.48610109831262e-06, |
|
"loss": 0.2153, |
|
"step": 1244 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.4440461118894743e-06, |
|
"loss": 0.2421, |
|
"step": 1245 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 1.402590424441441e-06, |
|
"loss": 0.2739, |
|
"step": 1246 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.361734288052252e-06, |
|
"loss": 0.1582, |
|
"step": 1247 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.3214779511598664e-06, |
|
"loss": 0.1724, |
|
"step": 1248 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2818216585549825e-06, |
|
"loss": 0.2197, |
|
"step": 1249 |
|
}, |
|
{ |
|
"epoch": 2.85, |
|
"learning_rate": 1.2427656513795739e-06, |
|
"loss": 0.1686, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.2043101671253554e-06, |
|
"loss": 0.226, |
|
"step": 1251 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.166455439632419e-06, |
|
"loss": 0.2176, |
|
"step": 1252 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.1292016990877895e-06, |
|
"loss": 0.1974, |
|
"step": 1253 |
|
}, |
|
{ |
|
"epoch": 2.86, |
|
"learning_rate": 1.092549172023949e-06, |
|
"loss": 0.1465, |
|
"step": 1254 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0564980813175807e-06, |
|
"loss": 0.2335, |
|
"step": 1255 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 1.0210486461881164e-06, |
|
"loss": 0.2136, |
|
"step": 1256 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.862010821964696e-07, |
|
"loss": 0.2296, |
|
"step": 1257 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.519556012436815e-07, |
|
"loss": 0.129, |
|
"step": 1258 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 9.183124115696439e-07, |
|
"loss": 0.1272, |
|
"step": 1259 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.852717177518455e-07, |
|
"loss": 0.1747, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.52833720704116e-07, |
|
"loss": 0.2419, |
|
"step": 1261 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 8.209986176753948e-07, |
|
"loss": 0.1598, |
|
"step": 1262 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 7.897666022485539e-07, |
|
"loss": 0.2341, |
|
"step": 1263 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.591378643392211e-07, |
|
"loss": 0.1968, |
|
"step": 1264 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.291125901946027e-07, |
|
"loss": 0.1352, |
|
"step": 1265 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.996909623923631e-07, |
|
"loss": 0.1498, |
|
"step": 1266 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.708731598395023e-07, |
|
"loss": 0.1641, |
|
"step": 1267 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 6.426593577713136e-07, |
|
"loss": 0.1855, |
|
"step": 1268 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 6.150497277502609e-07, |
|
"loss": 0.228, |
|
"step": 1269 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.880444376649585e-07, |
|
"loss": 0.1935, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.616436517291602e-07, |
|
"loss": 0.2432, |
|
"step": 1271 |
|
}, |
|
{ |
|
"epoch": 2.9, |
|
"learning_rate": 5.358475304807375e-07, |
|
"loss": 0.1484, |
|
"step": 1272 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 5.106562307807261e-07, |
|
"loss": 0.1192, |
|
"step": 1273 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.860699058123697e-07, |
|
"loss": 0.2084, |
|
"step": 1274 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.62088705080177e-07, |
|
"loss": 0.2581, |
|
"step": 1275 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 4.3871277440902246e-07, |
|
"loss": 0.2945, |
|
"step": 1276 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 4.1594225594323575e-07, |
|
"loss": 0.0984, |
|
"step": 1277 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.937772881457691e-07, |
|
"loss": 0.145, |
|
"step": 1278 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.7221800579735346e-07, |
|
"loss": 0.1107, |
|
"step": 1279 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.512645399956549e-07, |
|
"loss": 0.1353, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.92, |
|
"learning_rate": 3.309170181544863e-07, |
|
"loss": 0.1983, |
|
"step": 1281 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 3.1117556400306336e-07, |
|
"loss": 0.1906, |
|
"step": 1282 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.9204029758517216e-07, |
|
"loss": 0.1499, |
|
"step": 1283 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.7351133525855833e-07, |
|
"loss": 0.1461, |
|
"step": 1284 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 2.555887896941167e-07, |
|
"loss": 0.0998, |
|
"step": 1285 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.382727698752474e-07, |
|
"loss": 0.1744, |
|
"step": 1286 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.2156338109717844e-07, |
|
"loss": 0.145, |
|
"step": 1287 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 2.054607249663665e-07, |
|
"loss": 0.2109, |
|
"step": 1288 |
|
}, |
|
{ |
|
"epoch": 2.94, |
|
"learning_rate": 1.899648993998304e-07, |
|
"loss": 0.1631, |
|
"step": 1289 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.7507599862456315e-07, |
|
"loss": 0.0877, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.6079411317698745e-07, |
|
"loss": 0.2158, |
|
"step": 1291 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.4711932990238987e-07, |
|
"loss": 0.2376, |
|
"step": 1292 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.340517319543877e-07, |
|
"loss": 0.1312, |
|
"step": 1293 |
|
}, |
|
{ |
|
"epoch": 2.95, |
|
"learning_rate": 1.2159139879439618e-07, |
|
"loss": 0.2186, |
|
"step": 1294 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 1.0973840619121766e-07, |
|
"loss": 0.2251, |
|
"step": 1295 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 9.849282622053092e-08, |
|
"loss": 0.1674, |
|
"step": 1296 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 8.785472726442479e-08, |
|
"loss": 0.2063, |
|
"step": 1297 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.782417401106523e-08, |
|
"loss": 0.1794, |
|
"step": 1298 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 6.840122745422894e-08, |
|
"loss": 0.1144, |
|
"step": 1299 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.958594489295921e-08, |
|
"loss": 0.1742, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 5.137837993121064e-08, |
|
"loss": 0.0999, |
|
"step": 1301 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 4.37785824775605e-08, |
|
"loss": 0.2051, |
|
"step": 1302 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 3.678659874483126e-08, |
|
"loss": 0.176, |
|
"step": 1303 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 3.0402471249890705e-08, |
|
"loss": 0.1654, |
|
"step": 1304 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 2.4626238813318936e-08, |
|
"loss": 0.2154, |
|
"step": 1305 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.9457936559241775e-08, |
|
"loss": 0.1299, |
|
"step": 1306 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 1.4897595915053242e-08, |
|
"loss": 0.1524, |
|
"step": 1307 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 1.0945244611293426e-08, |
|
"loss": 0.2642, |
|
"step": 1308 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 7.600906681382025e-09, |
|
"loss": 0.1853, |
|
"step": 1309 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 4.8646024615961465e-09, |
|
"loss": 0.2353, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.99, |
|
"learning_rate": 2.7363485908371565e-09, |
|
"loss": 0.0933, |
|
"step": 1311 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 1.2161580105973792e-09, |
|
"loss": 0.1867, |
|
"step": 1312 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 3.0403996484906773e-10, |
|
"loss": 0.2633, |
|
"step": 1313 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 0.0, |
|
"loss": 0.1789, |
|
"step": 1314 |
|
}, |
|
{ "epoch": 3.0, "step": 1314, "total_flos": 4.5566885955528294e+17, "train_loss": 0.6405287658054342, "train_runtime": 9827.3567, "train_samples_per_second": 2.137, "train_steps_per_second": 0.134 }
],
"logging_steps": 1.0,
"max_steps": 1314,
"num_input_tokens_seen": 0,
"num_train_epochs": 3,
"save_steps": 50000,
"total_flos": 4.5566885955528294e+17,
"train_batch_size": 16,
"trial_name": null,
"trial_params": null
}