{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.26737967914438504,
  "eval_steps": 500,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0005347593582887701, "grad_norm": 10.25, "learning_rate": 0.0002, "loss": 5.5244, "step": 1 },
    { "epoch": 0.0010695187165775401, "grad_norm": 5.65625, "learning_rate": 0.0002, "loss": 1.5152, "step": 2 },
    { "epoch": 0.0016042780748663102, "grad_norm": 5.0, "learning_rate": 0.0002, "loss": 0.9678, "step": 3 },
    { "epoch": 0.0021390374331550803, "grad_norm": 1.09375, "learning_rate": 0.0002, "loss": 0.2586, "step": 4 },
    { "epoch": 0.00267379679144385, "grad_norm": 2.21875, "learning_rate": 0.0002, "loss": 0.8375, "step": 5 },
    { "epoch": 0.0032085561497326204, "grad_norm": 2.359375, "learning_rate": 0.0002, "loss": 0.8124, "step": 6 },
    { "epoch": 0.0037433155080213902, "grad_norm": 1.3515625, "learning_rate": 0.0002, "loss": 0.4274, "step": 7 },
    { "epoch": 0.0042780748663101605, "grad_norm": 1.9765625, "learning_rate": 0.0002, "loss": 0.6398, "step": 8 },
    { "epoch": 0.004812834224598931, "grad_norm": 1.1796875, "learning_rate": 0.0002, "loss": 0.4485, "step": 9 },
    { "epoch": 0.0053475935828877, "grad_norm": 0.48828125, "learning_rate": 0.0002, "loss": 0.3639, "step": 10 },
    { "epoch": 0.0058823529411764705, "grad_norm": 0.7265625, "learning_rate": 0.0002, "loss": 0.3139, "step": 11 },
    { "epoch": 0.006417112299465241, "grad_norm": 1.3671875, "learning_rate": 0.0002, "loss": 0.1684, "step": 12 },
    { "epoch": 0.006951871657754011, "grad_norm": 1.359375, "learning_rate": 0.0002, "loss": 0.4753, "step": 13 },
    { "epoch": 0.0074866310160427805, "grad_norm": 0.56640625, "learning_rate": 0.0002, "loss": 0.0907, "step": 14 },
    { "epoch": 0.008021390374331552, "grad_norm": 1.9375, "learning_rate": 0.0002, "loss": 0.3021, "step": 15 },
    { "epoch": 0.008556149732620321, "grad_norm": 1.421875, "learning_rate": 0.0002, "loss": 0.292, "step": 16 },
    { "epoch": 0.00909090909090909, "grad_norm": 0.4453125, "learning_rate": 0.0002, "loss": 0.2465, "step": 17 },
    { "epoch": 0.009625668449197862, "grad_norm": 1.0703125, "learning_rate": 0.0002, "loss": 0.2335, "step": 18 },
    { "epoch": 0.010160427807486631, "grad_norm": 4.84375, "learning_rate": 0.0002, "loss": 0.2976, "step": 19 },
    { "epoch": 0.0106951871657754, "grad_norm": 4.84375, "learning_rate": 0.0002, "loss": 0.6077, "step": 20 },
    { "epoch": 0.011229946524064172, "grad_norm": 2.25, "learning_rate": 0.0002, "loss": 0.3396, "step": 21 },
    { "epoch": 0.011764705882352941, "grad_norm": 1.4453125, "learning_rate": 0.0002, "loss": 0.218, "step": 22 },
    { "epoch": 0.01229946524064171, "grad_norm": 1.1640625, "learning_rate": 0.0002, "loss": 0.0862, "step": 23 },
    { "epoch": 0.012834224598930482, "grad_norm": 2.65625, "learning_rate": 0.0002, "loss": 0.4492, "step": 24 },
    { "epoch": 0.013368983957219251, "grad_norm": 1.1796875, "learning_rate": 0.0002, "loss": 0.1986, "step": 25 },
    { "epoch": 0.013903743315508022, "grad_norm": 1.03125, "learning_rate": 0.0002, "loss": 0.2902, "step": 26 },
    { "epoch": 0.014438502673796792, "grad_norm": 1.5703125, "learning_rate": 0.0002, "loss": 0.6038, "step": 27 },
    { "epoch": 0.014973262032085561, "grad_norm": 1.359375, "learning_rate": 0.0002, "loss": 0.2869, "step": 28 },
    { "epoch": 0.015508021390374332, "grad_norm": 0.455078125, "learning_rate": 0.0002, "loss": 0.2646, "step": 29 },
    { "epoch": 0.016042780748663103, "grad_norm": 0.45703125, "learning_rate": 0.0002, "loss": 0.1653, "step": 30 },
    { "epoch": 0.016577540106951873, "grad_norm": 0.84765625, "learning_rate": 0.0002, "loss": 0.2535, "step": 31 },
    { "epoch": 0.017112299465240642, "grad_norm": 1.1015625, "learning_rate": 0.0002, "loss": 0.2485, "step": 32 },
    { "epoch": 0.01764705882352941, "grad_norm": 0.74609375, "learning_rate": 0.0002, "loss": 0.2425, "step": 33 },
    { "epoch": 0.01818181818181818, "grad_norm": 0.353515625, "learning_rate": 0.0002, "loss": 0.0913, "step": 34 },
    { "epoch": 0.01871657754010695, "grad_norm": 0.546875, "learning_rate": 0.0002, "loss": 0.1082, "step": 35 },
    { "epoch": 0.019251336898395723, "grad_norm": 1.0, "learning_rate": 0.0002, "loss": 0.4729, "step": 36 },
    { "epoch": 0.019786096256684493, "grad_norm": 1.265625, "learning_rate": 0.0002, "loss": 0.3255, "step": 37 },
    { "epoch": 0.020320855614973262, "grad_norm": 2.734375, "learning_rate": 0.0002, "loss": 0.4583, "step": 38 },
    { "epoch": 0.02085561497326203, "grad_norm": 0.203125, "learning_rate": 0.0002, "loss": 0.1099, "step": 39 },
    { "epoch": 0.0213903743315508, "grad_norm": 0.71875, "learning_rate": 0.0002, "loss": 0.1941, "step": 40 },
    { "epoch": 0.021925133689839574, "grad_norm": 0.64453125, "learning_rate": 0.0002, "loss": 0.278, "step": 41 },
    { "epoch": 0.022459893048128343, "grad_norm": 0.69921875, "learning_rate": 0.0002, "loss": 0.4062, "step": 42 },
    { "epoch": 0.022994652406417113, "grad_norm": 1.2109375, "learning_rate": 0.0002, "loss": 0.4801, "step": 43 },
    { "epoch": 0.023529411764705882, "grad_norm": 0.3515625, "learning_rate": 0.0002, "loss": 0.2161, "step": 44 },
    { "epoch": 0.02406417112299465, "grad_norm": 1.109375, "learning_rate": 0.0002, "loss": 0.2423, "step": 45 },
    { "epoch": 0.02459893048128342, "grad_norm": 0.69921875, "learning_rate": 0.0002, "loss": 0.208, "step": 46 },
    { "epoch": 0.025133689839572194, "grad_norm": 0.37890625, "learning_rate": 0.0002, "loss": 0.2149, "step": 47 },
    { "epoch": 0.025668449197860963, "grad_norm": 0.6796875, "learning_rate": 0.0002, "loss": 0.2528, "step": 48 },
    { "epoch": 0.026203208556149733, "grad_norm": 0.2890625, "learning_rate": 0.0002, "loss": 0.2094, "step": 49 },
    { "epoch": 0.026737967914438502, "grad_norm": 0.228515625, "learning_rate": 0.0002, "loss": 0.1544, "step": 50 },
    { "epoch": 0.02727272727272727, "grad_norm": 0.62109375, "learning_rate": 0.0002, "loss": 0.2201, "step": 51 },
    { "epoch": 0.027807486631016044, "grad_norm": 2.859375, "learning_rate": 0.0002, "loss": 0.3119, "step": 52 },
    { "epoch": 0.028342245989304814, "grad_norm": 0.578125, "learning_rate": 0.0002, "loss": 0.2003, "step": 53 },
    { "epoch": 0.028877005347593583, "grad_norm": 0.80078125, "learning_rate": 0.0002, "loss": 0.2708, "step": 54 },
    { "epoch": 0.029411764705882353, "grad_norm": 1.515625, "learning_rate": 0.0002, "loss": 0.2502, "step": 55 },
    { "epoch": 0.029946524064171122, "grad_norm": 0.6796875, "learning_rate": 0.0002, "loss": 0.2379, "step": 56 },
    { "epoch": 0.03048128342245989, "grad_norm": 0.4140625, "learning_rate": 0.0002, "loss": 0.1, "step": 57 },
    { "epoch": 0.031016042780748664, "grad_norm": 0.271484375, "learning_rate": 0.0002, "loss": 0.0705, "step": 58 },
    { "epoch": 0.03155080213903743, "grad_norm": 0.51953125, "learning_rate": 0.0002, "loss": 0.2252, "step": 59 },
    { "epoch": 0.03208556149732621, "grad_norm": 1.8359375, "learning_rate": 0.0002, "loss": 0.2479, "step": 60 },
    { "epoch": 0.032620320855614976, "grad_norm": 1.25, "learning_rate": 0.0002, "loss": 0.3067, "step": 61 },
    { "epoch": 0.033155080213903745, "grad_norm": 1.390625, "learning_rate": 0.0002, "loss": 0.2026, "step": 62 },
    { "epoch": 0.033689839572192515, "grad_norm": 0.255859375, "learning_rate": 0.0002, "loss": 0.1771, "step": 63 },
    { "epoch": 0.034224598930481284, "grad_norm": 1.2421875, "learning_rate": 0.0002, "loss": 0.1886, "step": 64 },
    { "epoch": 0.034759358288770054, "grad_norm": 0.59765625, "learning_rate": 0.0002, "loss": 0.1603, "step": 65 },
    { "epoch": 0.03529411764705882, "grad_norm": 0.298828125, "learning_rate": 0.0002, "loss": 0.1577, "step": 66 },
    { "epoch": 0.03582887700534759, "grad_norm": 0.48046875, "learning_rate": 0.0002, "loss": 0.2385, "step": 67 },
    { "epoch": 0.03636363636363636, "grad_norm": 0.291015625, "learning_rate": 0.0002, "loss": 0.2457, "step": 68 },
    { "epoch": 0.03689839572192513, "grad_norm": 1.203125, "learning_rate": 0.0002, "loss": 0.2403, "step": 69 },
    { "epoch": 0.0374331550802139, "grad_norm": 1.34375, "learning_rate": 0.0002, "loss": 0.2959, "step": 70 },
    { "epoch": 0.03796791443850268, "grad_norm": 0.47265625, "learning_rate": 0.0002, "loss": 0.2408, "step": 71 },
    { "epoch": 0.038502673796791446, "grad_norm": 2.0625, "learning_rate": 0.0002, "loss": 0.5305, "step": 72 },
    { "epoch": 0.039037433155080216, "grad_norm": 0.87109375, "learning_rate": 0.0002, "loss": 0.1871, "step": 73 },
    { "epoch": 0.039572192513368985, "grad_norm": 0.98046875, "learning_rate": 0.0002, "loss": 0.2837, "step": 74 },
    { "epoch": 0.040106951871657755, "grad_norm": 0.515625, "learning_rate": 0.0002, "loss": 0.133, "step": 75 },
    { "epoch": 0.040641711229946524, "grad_norm": 0.59375, "learning_rate": 0.0002, "loss": 0.2151, "step": 76 },
    { "epoch": 0.041176470588235294, "grad_norm": 0.4765625, "learning_rate": 0.0002, "loss": 0.1379, "step": 77 },
    { "epoch": 0.04171122994652406, "grad_norm": 0.62109375, "learning_rate": 0.0002, "loss": 0.1798, "step": 78 },
    { "epoch": 0.04224598930481283, "grad_norm": 0.396484375, "learning_rate": 0.0002, "loss": 0.1735, "step": 79 },
    { "epoch": 0.0427807486631016, "grad_norm": 0.546875, "learning_rate": 0.0002, "loss": 0.2279, "step": 80 },
    { "epoch": 0.04331550802139037, "grad_norm": 0.455078125, "learning_rate": 0.0002, "loss": 0.2083, "step": 81 },
    { "epoch": 0.04385026737967915, "grad_norm": 0.453125, "learning_rate": 0.0002, "loss": 0.2038, "step": 82 },
    { "epoch": 0.04438502673796792, "grad_norm": 0.259765625, "learning_rate": 0.0002, "loss": 0.2185, "step": 83 },
    { "epoch": 0.044919786096256686, "grad_norm": 0.1044921875, "learning_rate": 0.0002, "loss": 0.1184, "step": 84 },
    { "epoch": 0.045454545454545456, "grad_norm": 0.21484375, "learning_rate": 0.0002, "loss": 0.1767, "step": 85 },
    { "epoch": 0.045989304812834225, "grad_norm": 0.10107421875, "learning_rate": 0.0002, "loss": 0.1461, "step": 86 },
    { "epoch": 0.046524064171122995, "grad_norm": 0.283203125, "learning_rate": 0.0002, "loss": 0.1723, "step": 87 },
    { "epoch": 0.047058823529411764, "grad_norm": 0.314453125, "learning_rate": 0.0002, "loss": 0.1796, "step": 88 },
    { "epoch": 0.04759358288770053, "grad_norm": 0.98828125, "learning_rate": 0.0002, "loss": 0.1747, "step": 89 },
    { "epoch": 0.0481283422459893, "grad_norm": 0.419921875, "learning_rate": 0.0002, "loss": 0.1606, "step": 90 },
    { "epoch": 0.04866310160427807, "grad_norm": 0.1640625, "learning_rate": 0.0002, "loss": 0.1675, "step": 91 },
    { "epoch": 0.04919786096256684, "grad_norm": 2.203125, "learning_rate": 0.0002, "loss": 0.3444, "step": 92 },
    { "epoch": 0.04973262032085562, "grad_norm": 0.326171875, "learning_rate": 0.0002, "loss": 0.2217, "step": 93 },
    { "epoch": 0.05026737967914439, "grad_norm": 0.27734375, "learning_rate": 0.0002, "loss": 0.2684, "step": 94 },
    { "epoch": 0.05080213903743316, "grad_norm": 1.4921875, "learning_rate": 0.0002, "loss": 0.2776, "step": 95 },
    { "epoch": 0.051336898395721926, "grad_norm": 0.10107421875, "learning_rate": 0.0002, "loss": 0.1626, "step": 96 },
    { "epoch": 0.051871657754010696, "grad_norm": 0.6953125, "learning_rate": 0.0002, "loss": 0.1733, "step": 97 },
    { "epoch": 0.052406417112299465, "grad_norm": 0.46875, "learning_rate": 0.0002, "loss": 0.2631, "step": 98 },
    { "epoch": 0.052941176470588235, "grad_norm": 5.78125, "learning_rate": 0.0002, "loss": 0.3128, "step": 99 },
    { "epoch": 0.053475935828877004, "grad_norm": 1.7421875, "learning_rate": 0.0002, "loss": 0.3108, "step": 100 },
    { "epoch": 0.05401069518716577, "grad_norm": 0.87890625, "learning_rate": 0.0002, "loss": 0.4231, "step": 101 },
    { "epoch": 0.05454545454545454, "grad_norm": 0.171875, "learning_rate": 0.0002, "loss": 0.1636, "step": 102 },
    { "epoch": 0.05508021390374331, "grad_norm": 0.8203125, "learning_rate": 0.0002, "loss": 0.4548, "step": 103 },
    { "epoch": 0.05561497326203209, "grad_norm": 0.208984375, "learning_rate": 0.0002, "loss": 0.1718, "step": 104 },
    { "epoch": 0.05614973262032086, "grad_norm": 0.28125, "learning_rate": 0.0002, "loss": 0.1438, "step": 105 },
    { "epoch": 0.05668449197860963, "grad_norm": 0.55078125, "learning_rate": 0.0002, "loss": 0.2534, "step": 106 },
    { "epoch": 0.0572192513368984, "grad_norm": 0.412109375, "learning_rate": 0.0002, "loss": 0.2303, "step": 107 },
    { "epoch": 0.057754010695187166, "grad_norm": 0.322265625, "learning_rate": 0.0002, "loss": 0.2488, "step": 108 },
    { "epoch": 0.058288770053475936, "grad_norm": 0.85546875, "learning_rate": 0.0002, "loss": 0.2962, "step": 109 },
    { "epoch": 0.058823529411764705, "grad_norm": 0.357421875, "learning_rate": 0.0002, "loss": 0.2266, "step": 110 },
    { "epoch": 0.059358288770053474, "grad_norm": 0.236328125, "learning_rate": 0.0002, "loss": 0.2208, "step": 111 },
    { "epoch": 0.059893048128342244, "grad_norm": 0.56640625, "learning_rate": 0.0002, "loss": 0.2489, "step": 112 },
    { "epoch": 0.06042780748663101, "grad_norm": 0.2490234375, "learning_rate": 0.0002, "loss": 0.3115, "step": 113 },
    { "epoch": 0.06096256684491978, "grad_norm": 0.498046875, "learning_rate": 0.0002, "loss": 0.2222, "step": 114 },
    { "epoch": 0.06149732620320856, "grad_norm": 1.2421875, "learning_rate": 0.0002, "loss": 0.389, "step": 115 },
    { "epoch": 0.06203208556149733, "grad_norm": 0.58984375, "learning_rate": 0.0002, "loss": 0.2148, "step": 116 },
    { "epoch": 0.06256684491978609, "grad_norm": 0.3515625, "learning_rate": 0.0002, "loss": 0.2527, "step": 117 },
    { "epoch": 0.06310160427807486, "grad_norm": 0.341796875, "learning_rate": 0.0002, "loss": 0.2395, "step": 118 },
    { "epoch": 0.06363636363636363, "grad_norm": 1.1171875, "learning_rate": 0.0002, "loss": 0.3902, "step": 119 },
    { "epoch": 0.06417112299465241, "grad_norm": 0.46484375, "learning_rate": 0.0002, "loss": 0.2973, "step": 120 },
    { "epoch": 0.06470588235294118, "grad_norm": 0.671875, "learning_rate": 0.0002, "loss": 0.2476, "step": 121 },
    { "epoch": 0.06524064171122995, "grad_norm": 0.462890625, "learning_rate": 0.0002, "loss": 0.186, "step": 122 },
    { "epoch": 0.06577540106951872, "grad_norm": 0.244140625, "learning_rate": 0.0002, "loss": 0.165, "step": 123 },
    { "epoch": 0.06631016042780749, "grad_norm": 1.546875, "learning_rate": 0.0002, "loss": 0.5423, "step": 124 },
    { "epoch": 0.06684491978609626, "grad_norm": 0.95703125, "learning_rate": 0.0002, "loss": 0.4157, "step": 125 },
    { "epoch": 0.06737967914438503, "grad_norm": 3.40625, "learning_rate": 0.0002, "loss": 0.5118, "step": 126 },
    { "epoch": 0.0679144385026738, "grad_norm": 0.5546875, "learning_rate": 0.0002, "loss": 0.1738, "step": 127 },
    { "epoch": 0.06844919786096257, "grad_norm": 0.1396484375, "learning_rate": 0.0002, "loss": 0.0812, "step": 128 },
    { "epoch": 0.06898395721925134, "grad_norm": 0.2392578125, "learning_rate": 0.0002, "loss": 0.1073, "step": 129 },
    { "epoch": 0.06951871657754011, "grad_norm": 0.193359375, "learning_rate": 0.0002, "loss": 0.1118, "step": 130 },
    { "epoch": 0.07005347593582888, "grad_norm": 1.015625, "learning_rate": 0.0002, "loss": 0.2042, "step": 131 },
    { "epoch": 0.07058823529411765, "grad_norm": 0.400390625, "learning_rate": 0.0002, "loss": 0.1226, "step": 132 },
    { "epoch": 0.07112299465240642, "grad_norm": 0.234375, "learning_rate": 0.0002, "loss": 0.1684, "step": 133 },
    { "epoch": 0.07165775401069518, "grad_norm": 0.1630859375, "learning_rate": 0.0002, "loss": 0.1795, "step": 134 },
    { "epoch": 0.07219251336898395, "grad_norm": 0.60546875, "learning_rate": 0.0002, "loss": 0.2594, "step": 135 },
    { "epoch": 0.07272727272727272, "grad_norm": 0.138671875, "learning_rate": 0.0002, "loss": 0.0331, "step": 136 },
    { "epoch": 0.0732620320855615, "grad_norm": 0.193359375, "learning_rate": 0.0002, "loss": 0.1815, "step": 137 },
    { "epoch": 0.07379679144385026, "grad_norm": 0.9375, "learning_rate": 0.0002, "loss": 0.458, "step": 138 },
    { "epoch": 0.07433155080213903, "grad_norm": 0.5859375, "learning_rate": 0.0002, "loss": 0.266, "step": 139 },
    { "epoch": 0.0748663101604278, "grad_norm": 0.5859375, "learning_rate": 0.0002, "loss": 0.2798, "step": 140 },
    { "epoch": 0.07540106951871657, "grad_norm": 0.494140625, "learning_rate": 0.0002, "loss": 0.2428, "step": 141 },
    { "epoch": 0.07593582887700535, "grad_norm": 0.2060546875, "learning_rate": 0.0002, "loss": 0.1931, "step": 142 },
    { "epoch": 0.07647058823529412, "grad_norm": 0.49609375, "learning_rate": 0.0002, "loss": 0.2107, "step": 143 },
    { "epoch": 0.07700534759358289, "grad_norm": 0.2490234375, "learning_rate": 0.0002, "loss": 0.1649, "step": 144 },
    { "epoch": 0.07754010695187166, "grad_norm": 0.1396484375, "learning_rate": 0.0002, "loss": 0.1408, "step": 145 },
    { "epoch": 0.07807486631016043, "grad_norm": 1.5234375, "learning_rate": 0.0002, "loss": 0.3663, "step": 146 },
    { "epoch": 0.0786096256684492, "grad_norm": 0.70703125, "learning_rate": 0.0002, "loss": 0.2539, "step": 147 },
    { "epoch": 0.07914438502673797, "grad_norm": 0.51171875, "learning_rate": 0.0002, "loss": 0.2954, "step": 148 },
    { "epoch": 0.07967914438502674, "grad_norm": 0.404296875, "learning_rate": 0.0002, "loss": 0.3071, "step": 149 },
    { "epoch": 0.08021390374331551, "grad_norm": 0.3671875, "learning_rate": 0.0002, "loss": 0.1121, "step": 150 },
    { "epoch": 0.08074866310160428, "grad_norm": 0.251953125, "learning_rate": 0.0002, "loss": 0.1059, "step": 151 },
    { "epoch": 0.08128342245989305, "grad_norm": 0.490234375, "learning_rate": 0.0002, "loss": 0.2265, "step": 152 },
    { "epoch": 0.08181818181818182, "grad_norm": 0.36328125, "learning_rate": 0.0002, "loss": 0.1875, "step": 153 },
    { "epoch": 0.08235294117647059, "grad_norm": 0.51171875, "learning_rate": 0.0002, "loss": 0.2944, "step": 154 },
    { "epoch": 0.08288770053475936, "grad_norm": 0.6796875, "learning_rate": 0.0002, "loss": 0.2663, "step": 155 },
    { "epoch": 0.08342245989304813, "grad_norm": 0.58984375, "learning_rate": 0.0002, "loss": 0.1909, "step": 156 },
    { "epoch": 0.0839572192513369, "grad_norm": 0.24609375, "learning_rate": 0.0002, "loss": 0.1816, "step": 157 },
    { "epoch": 0.08449197860962566, "grad_norm": 0.267578125, "learning_rate": 0.0002, "loss": 0.0903, "step": 158 },
    { "epoch": 0.08502673796791443, "grad_norm": 0.8359375, "learning_rate": 0.0002, "loss": 0.1757, "step": 159 },
    { "epoch": 0.0855614973262032, "grad_norm": 0.283203125, "learning_rate": 0.0002, "loss": 0.1636, "step": 160 },
    { "epoch": 0.08609625668449197, "grad_norm": 0.2451171875, "learning_rate": 0.0002, "loss": 0.1363, "step": 161 },
    { "epoch": 0.08663101604278074, "grad_norm": 0.318359375, "learning_rate": 0.0002, "loss": 0.1262, "step": 162 },
    { "epoch": 0.08716577540106951, "grad_norm": 0.5, "learning_rate": 0.0002, "loss": 0.2162, "step": 163 },
    { "epoch": 0.0877005347593583, "grad_norm": 0.1123046875, "learning_rate": 0.0002, "loss": 0.0586, "step": 164 },
    { "epoch": 0.08823529411764706, "grad_norm": 0.0771484375, "learning_rate": 0.0002, "loss": 0.0908, "step": 165 },
    { "epoch": 0.08877005347593583, "grad_norm": 0.486328125, "learning_rate": 0.0002, "loss": 0.109, "step": 166 },
    { "epoch": 0.0893048128342246, "grad_norm": 0.2333984375, "learning_rate": 0.0002, "loss": 0.1283, "step": 167 },
    { "epoch": 0.08983957219251337, "grad_norm": 0.0023651123046875, "learning_rate": 0.0002, "loss": 0.0003, "step": 168 },
    { "epoch": 0.09037433155080214, "grad_norm": 0.87890625, "learning_rate": 0.0002, "loss": 0.0709, "step": 169 },
    { "epoch": 0.09090909090909091, "grad_norm": 0.1845703125, "learning_rate": 0.0002, "loss": 0.1616, "step": 170 },
    { "epoch": 0.09144385026737968, "grad_norm": 0.83203125, "learning_rate": 0.0002, "loss": 0.1496, "step": 171 },
    { "epoch": 0.09197860962566845, "grad_norm": 1.2421875, "learning_rate": 0.0002, "loss": 0.3648, "step": 172 },
    { "epoch": 0.09251336898395722, "grad_norm": 1.1484375, "learning_rate": 0.0002, "loss": 0.4267, "step": 173 },
    { "epoch": 0.09304812834224599, "grad_norm": 1.71875, "learning_rate": 0.0002, "loss": 0.1812, "step": 174 },
    { "epoch": 0.09358288770053476, "grad_norm": 3.34375, "learning_rate": 0.0002, "loss": 0.3108, "step": 175 },
    { "epoch": 0.09411764705882353, "grad_norm": 1.21875, "learning_rate": 0.0002, "loss": 0.2502, "step": 176 },
    { "epoch": 0.0946524064171123, "grad_norm": 0.8671875, "learning_rate": 0.0002, "loss": 0.3469, "step": 177 },
    { "epoch": 0.09518716577540107, "grad_norm": 0.2353515625, "learning_rate": 0.0002, "loss": 0.1502, "step": 178 },
    { "epoch": 0.09572192513368984, "grad_norm": 0.859375, "learning_rate": 0.0002, "loss": 0.3546, "step": 179 },
    { "epoch": 0.0962566844919786, "grad_norm": 0.42578125, "learning_rate": 0.0002, "loss": 0.2236, "step": 180 },
    { "epoch": 0.09679144385026738, "grad_norm": 0.2109375, "learning_rate": 0.0002, "loss": 0.0742, "step": 181 },
    { "epoch": 0.09732620320855614, "grad_norm": 0.2421875, "learning_rate": 0.0002, "loss": 0.2046, "step": 182 },
    { "epoch": 0.09786096256684491, "grad_norm": 0.2060546875, "learning_rate": 0.0002, "loss": 0.1355, "step": 183 },
    { "epoch": 0.09839572192513368, "grad_norm": 0.40234375, "learning_rate": 0.0002, "loss": 0.2228, "step": 184 },
    { "epoch": 0.09893048128342247, "grad_norm": 0.8046875, "learning_rate": 0.0002, "loss": 0.2756, "step": 185 },
    { "epoch": 0.09946524064171124, "grad_norm": 0.357421875, "learning_rate": 0.0002, "loss": 0.228, "step": 186 },
    { "epoch": 0.1, "grad_norm": 0.201171875, "learning_rate": 0.0002, "loss": 0.122, "step": 187 },
    { "epoch": 0.10053475935828877, "grad_norm": 0.4296875, "learning_rate": 0.0002, "loss": 0.1864, "step": 188 },
    { "epoch": 0.10106951871657754, "grad_norm": 0.2099609375, "learning_rate": 0.0002, "loss": 0.1586, "step": 189 },
    { "epoch": 0.10160427807486631, "grad_norm": 0.2734375, "learning_rate": 0.0002, "loss": 0.1432, "step": 190 },
    { "epoch": 0.10213903743315508, "grad_norm": 0.2265625, "learning_rate": 0.0002, "loss": 0.1971, "step": 191 },
    { "epoch": 0.10267379679144385, "grad_norm": 0.5625, "learning_rate": 0.0002, "loss": 0.2811, "step": 192 },
    { "epoch": 0.10320855614973262, "grad_norm": 0.3203125, "learning_rate": 0.0002, "loss": 0.1839, "step": 193 },
    { "epoch": 0.10374331550802139, "grad_norm": 0.291015625, "learning_rate": 0.0002, "loss": 0.2092, "step": 194 },
    { "epoch": 0.10427807486631016, "grad_norm": 0.6640625, "learning_rate": 0.0002, "loss": 0.2743, "step": 195 },
    { "epoch": 0.10481283422459893, "grad_norm": 0.328125, "learning_rate": 0.0002, "loss": 0.2752, "step": 196 },
    { "epoch": 0.1053475935828877, "grad_norm": 0.8359375, "learning_rate": 0.0002, "loss": 0.1735, "step": 197 },
    { "epoch": 0.10588235294117647, "grad_norm": 0.369140625, "learning_rate": 0.0002, "loss": 0.137, "step": 198 },
    { "epoch": 0.10641711229946524, "grad_norm": 0.53515625, "learning_rate": 0.0002, "loss": 0.2498, "step": 199 },
    { "epoch": 0.10695187165775401, "grad_norm": 0.376953125, "learning_rate": 0.0002, "loss": 0.1308, "step": 200 },
    { "epoch": 0.10748663101604278, "grad_norm": 0.88671875, "learning_rate": 0.0002, "loss": 0.2849, "step": 201 },
    { "epoch": 0.10802139037433155, "grad_norm": 0.291015625, "learning_rate": 0.0002, "loss": 0.1342, "step": 202 },
    { "epoch": 0.10855614973262032, "grad_norm": 0.26953125, "learning_rate": 0.0002, "loss": 0.105, "step": 203 },
    { "epoch": 0.10909090909090909, "grad_norm": 0.478515625, "learning_rate": 0.0002, "loss": 0.1172, "step": 204 },
    { "epoch": 0.10962566844919786, "grad_norm": 0.326171875, "learning_rate": 0.0002, "loss": 0.0579, "step": 205 },
    { "epoch": 0.11016042780748662, "grad_norm": 0.2060546875, "learning_rate": 0.0002, "loss": 0.1235, "step": 206 },
    { "epoch": 0.11069518716577541, "grad_norm": 1.375, "learning_rate": 0.0002, "loss": 0.5436, "step": 207 },
    { "epoch": 0.11122994652406418, "grad_norm": 0.21875, "learning_rate": 0.0002, "loss": 0.2197, "step": 208 },
    { "epoch": 0.11176470588235295, "grad_norm": 0.75, "learning_rate": 0.0002, "loss": 0.2319, "step": 209 },
    { "epoch": 0.11229946524064172, "grad_norm": 0.43359375, "learning_rate": 0.0002, "loss": 0.1091, "step": 210 },
    { "epoch": 0.11283422459893049, "grad_norm": 0.154296875, "learning_rate": 0.0002, "loss": 0.1932, "step": 211 },
    { "epoch": 0.11336898395721925, "grad_norm": 0.10009765625, "learning_rate": 0.0002, "loss": 0.1829, "step": 212 },
    { "epoch": 0.11390374331550802, "grad_norm": 0.0986328125, "learning_rate": 0.0002, "loss": 0.1738, "step": 213 },
    { "epoch": 0.1144385026737968, "grad_norm": 0.53125, "learning_rate": 0.0002, "loss": 0.1746, "step": 214 },
    { "epoch": 0.11497326203208556, "grad_norm": 0.484375, "learning_rate": 0.0002, "loss": 0.1831, "step": 215 },
    { "epoch": 0.11550802139037433, "grad_norm": 1.609375, "learning_rate": 0.0002, "loss": 0.4379, "step": 216 },
    { "epoch": 0.1160427807486631, "grad_norm": 0.265625, "learning_rate": 0.0002, "loss": 0.1707, "step": 217 },
    { "epoch": 0.11657754010695187, "grad_norm": 1.5625, "learning_rate": 0.0002, "loss": 0.1901, "step": 218 },
    { "epoch": 0.11711229946524064, "grad_norm": 0.1904296875, "learning_rate": 0.0002, "loss": 0.2282, "step": 219 },
    { "epoch": 0.11764705882352941, "grad_norm": 0.1982421875, "learning_rate": 0.0002, "loss": 0.1478, "step": 220 },
    { "epoch": 0.11818181818181818, "grad_norm": 0.408203125, "learning_rate": 0.0002, "loss": 0.2262, "step": 221 },
    { "epoch": 0.11871657754010695, "grad_norm": 0.271484375, "learning_rate": 0.0002, "loss": 0.1373, "step": 222 },
    { "epoch": 0.11925133689839572, "grad_norm": 0.34765625, "learning_rate": 0.0002, "loss": 0.123, "step": 223 },
    { "epoch": 0.11978609625668449, "grad_norm": 0.08837890625, "learning_rate": 0.0002, "loss": 0.1712, "step": 224 },
    { "epoch": 0.12032085561497326, "grad_norm": 0.4140625, "learning_rate": 0.0002, "loss": 0.1837, "step": 225 },
    { "epoch": 0.12085561497326203, "grad_norm": 1.171875, "learning_rate": 0.0002, "loss": 0.3184, "step": 226 },
    { "epoch": 0.1213903743315508, "grad_norm": 0.6640625, "learning_rate": 0.0002, "loss": 0.1646, "step": 227 },
    { "epoch": 0.12192513368983957, "grad_norm": 0.765625, "learning_rate": 0.0002, "loss": 0.1659, "step": 228 },
    { "epoch": 0.12245989304812835, "grad_norm": 0.126953125, "learning_rate": 0.0002, "loss": 0.151, "step": 229 },
    { "epoch": 0.12299465240641712, "grad_norm": 0.25, "learning_rate": 0.0002, "loss": 0.1841, "step": 230 },
    { "epoch": 0.12352941176470589, "grad_norm": 1.1171875, "learning_rate": 0.0002, "loss": 0.0834, "step": 231 },
    { "epoch": 0.12406417112299466, "grad_norm": 0.1728515625, "learning_rate": 0.0002, "loss": 0.1417, "step": 232 },
    { "epoch": 0.12459893048128343, "grad_norm": 0.099609375, "learning_rate": 0.0002, "loss": 0.1227, "step": 233 },
    { "epoch": 0.12513368983957218, "grad_norm": 0.8125, "learning_rate": 0.0002, "loss": 0.1835, "step": 234 },
    { "epoch": 0.12566844919786097, "grad_norm": 1.2109375, "learning_rate": 0.0002, "loss": 0.3382, "step": 235 },
    { "epoch": 0.12620320855614972, "grad_norm": 1.4296875, "learning_rate": 0.0002, "loss": 0.4302, "step": 236 },
    { "epoch": 0.1267379679144385, "grad_norm": 0.546875, "learning_rate": 0.0002, "loss": 0.1895, "step": 237 },
    { "epoch": 0.12727272727272726, "grad_norm": 2.5, "learning_rate": 0.0002, "loss": 0.3654, "step": 238 },
    { "epoch": 0.12780748663101604, "grad_norm": 1.0859375, "learning_rate": 0.0002, "loss": 0.4818, "step": 239 },
    { "epoch": 0.12834224598930483, "grad_norm": 0.65625, "learning_rate": 0.0002, "loss": 0.3575, "step": 240 },
    { "epoch": 0.12887700534759358, "grad_norm": 0.25, "learning_rate": 0.0002, "loss": 0.0789, "step": 241 },
    { "epoch": 0.12941176470588237, "grad_norm": 0.4765625, "learning_rate": 0.0002, "loss": 0.2638, "step": 242 },
    { "epoch": 0.12994652406417112, "grad_norm": 0.267578125, "learning_rate": 0.0002, "loss": 0.1225, "step": 243 },
    { "epoch": 0.1304812834224599, "grad_norm": 0.6015625, "learning_rate": 0.0002, "loss": 0.0733, "step": 244 },
    { "epoch": 0.13101604278074866, "grad_norm": 0.2158203125, "learning_rate": 0.0002, "loss": 0.0766, "step": 245 },
    { "epoch": 0.13155080213903744, "grad_norm": 0.7734375, "learning_rate": 0.0002, "loss": 0.3096, "step": 246 },
    { "epoch": 0.1320855614973262, "grad_norm": 0.94921875, "learning_rate": 0.0002, "loss": 0.2276, "step": 247 },
    { "epoch": 0.13262032085561498, "grad_norm": 0.345703125, "learning_rate": 0.0002, "loss": 0.2357, "step": 248 },
    { "epoch": 0.13315508021390374, "grad_norm": 0.134765625, "learning_rate": 0.0002, "loss": 0.1144, "step": 249 },
    { "epoch": 0.13368983957219252, "grad_norm": 0.2373046875, "learning_rate": 0.0002, "loss": 0.1118, "step": 250 },
    { "epoch": 0.13422459893048128, "grad_norm": 0.306640625, "learning_rate": 0.0002, "loss": 0.1682, "step": 251 },
    { "epoch": 0.13475935828877006, "grad_norm": 0.193359375, "learning_rate": 0.0002, "loss": 0.0453, "step": 252 },
    { "epoch": 0.13529411764705881, "grad_norm": 0.388671875, "learning_rate": 0.0002, "loss": 0.1895, "step": 253 },
    { "epoch": 0.1358288770053476, "grad_norm": 0.94921875, "learning_rate": 0.0002, "loss": 0.4819, "step": 254 },
    { "epoch": 0.13636363636363635, "grad_norm": 0.185546875, "learning_rate": 0.0002, "loss": 0.1763, "step": 255 },
    { "epoch": 0.13689839572192514, "grad_norm": 0.34765625, "learning_rate": 0.0002, "loss": 0.268, "step": 256 },
    { "epoch": 0.1374331550802139, "grad_norm": 0.2578125, "learning_rate": 0.0002, "loss": 0.1276, "step": 257 },
    { "epoch": 0.13796791443850268, "grad_norm": 0.2421875, "learning_rate": 0.0002, "loss": 0.1819, "step": 258 },
    { "epoch": 0.13850267379679143, "grad_norm": 0.076171875, "learning_rate": 0.0002, "loss": 0.0893, "step": 259 },
    { "epoch": 0.13903743315508021, "grad_norm": 0.65234375, "learning_rate": 0.0002, "loss": 0.2154, "step": 260 },
    { "epoch": 0.139572192513369, "grad_norm": 0.443359375, "learning_rate": 0.0002, "loss": 0.3956, "step": 261 },
    { "epoch": 0.14010695187165775, "grad_norm": 0.150390625, "learning_rate": 0.0002, "loss": 0.1585, "step": 262 },
    { "epoch": 0.14064171122994654, "grad_norm": 0.125, "learning_rate": 0.0002, "loss": 0.0876, "step": 263 },
    { "epoch": 0.1411764705882353, "grad_norm": 0.80078125, "learning_rate": 0.0002, "loss": 0.0673, "step": 264 },
    { "epoch": 0.14171122994652408, "grad_norm": 4.0625, "learning_rate": 0.0002, "loss": 0.184, "step": 265 },
    { "epoch": 0.14224598930481283, "grad_norm": 0.0830078125, "learning_rate": 0.0002, "loss": 0.1651, "step": 266 },
    { "epoch": 0.14278074866310161, "grad_norm": 0.2470703125, "learning_rate": 0.0002, "loss": 0.0828, "step": 267 },
    { "epoch": 0.14331550802139037, "grad_norm": 0.5625, "learning_rate": 0.0002, "loss": 0.1545, "step": 268 },
    { "epoch": 0.14385026737967915, "grad_norm": 0.30859375, "learning_rate": 0.0002, "loss": 0.1641, "step": 269 },
    { "epoch": 0.1443850267379679, "grad_norm": 0.625, "learning_rate": 0.0002, "loss": 0.3245, "step": 270 },
    { "epoch": 0.1449197860962567, "grad_norm": 0.45703125, "learning_rate": 0.0002, "loss": 0.1592, "step": 271 },
    { "epoch": 0.14545454545454545, "grad_norm": 0.1162109375, "learning_rate": 0.0002, "loss": 0.1406, "step": 272 },
    { "epoch": 0.14598930481283423, "grad_norm": 0.40625, "learning_rate": 0.0002, "loss": 0.1832, "step": 273 },
    { "epoch": 0.146524064171123, "grad_norm": 0.099609375, "learning_rate": 0.0002, "loss": 0.091, "step": 274 },
    { "epoch": 0.14705882352941177, "grad_norm": 0.416015625, "learning_rate": 0.0002, "loss": 0.0943, "step": 275 },
    { "epoch": 0.14759358288770053, "grad_norm": 2.359375, "learning_rate": 0.0002, "loss": 0.3355, "step": 276 },
    { "epoch": 0.1481283422459893, "grad_norm": 0.2138671875, "learning_rate": 0.0002, "loss": 0.1074, "step": 277 },
    { "epoch": 0.14866310160427806, "grad_norm": 0.3828125, "learning_rate": 0.0002, "loss": 0.157, "step": 278 },
    { "epoch": 0.14919786096256685, "grad_norm": 0.357421875, "learning_rate": 0.0002, "loss": 0.149, "step": 279 },
    { "epoch": 0.1497326203208556, "grad_norm": 0.478515625, "learning_rate": 0.0002, "loss": 0.2129, "step": 280 },
    { "epoch": 0.15026737967914439, "grad_norm": 0.73828125, "learning_rate": 0.0002, "loss": 0.3051, "step": 281 },
    { "epoch": 0.15080213903743314, "grad_norm": 1.1953125, "learning_rate": 0.0002, "loss": 0.147, "step": 282 },
    { "epoch": 0.15133689839572192, "grad_norm": 0.4375, "learning_rate": 0.0002, "loss": 0.1934, "step": 283 },
    { "epoch": 0.1518716577540107, "grad_norm": 0.2392578125, "learning_rate": 0.0002, "loss": 0.0722, "step": 284 },
    { "epoch": 0.15240641711229946, "grad_norm": 0.2353515625, "learning_rate": 0.0002, "loss": 0.062, "step": 285 },
    { "epoch": 0.15294117647058825, "grad_norm": 0.71484375, "learning_rate": 0.0002, "loss": 0.2352, "step": 286 },
    { "epoch": 0.153475935828877, "grad_norm": 1.0, "learning_rate": 0.0002, "loss": 0.2244, "step": 287 },
    { "epoch": 0.15401069518716579, "grad_norm": 0.388671875, "learning_rate": 0.0002, "loss": 0.1342, "step": 288 },
    { "epoch": 0.15454545454545454, "grad_norm": 0.1962890625, "learning_rate": 0.0002, "loss": 0.206, "step": 289 },
    { "epoch": 0.15508021390374332, "grad_norm": 1.375, "learning_rate": 0.0002, "loss": 0.3573, "step": 290 },
    { "epoch": 0.15561497326203208, "grad_norm": 0.294921875, "learning_rate": 0.0002, "loss": 0.1673, "step": 291 },
    { "epoch": 0.15614973262032086, "grad_norm": 0.1708984375, "learning_rate": 0.0002, "loss": 0.0897, "step": 292 },
    { "epoch": 0.15668449197860962, "grad_norm": 0.1572265625, "learning_rate": 0.0002, "loss": 0.0698, "step": 293 },
    { "epoch": 0.1572192513368984, "grad_norm": 0.0849609375, "learning_rate": 0.0002, "loss": 0.0981, "step": 294 },
    { "epoch": 0.15775401069518716, "grad_norm": 0.1162109375, "learning_rate": 0.0002, "loss": 0.0483, "step": 295 },
    { "epoch": 0.15828877005347594, "grad_norm": 0.1923828125, "learning_rate": 0.0002, "loss": 0.1486, "step": 296 },
    { "epoch": 0.1588235294117647, "grad_norm": 0.146484375, "learning_rate": 0.0002, "loss": 0.0981, "step": 297 },
    { "epoch": 0.15935828877005348, "grad_norm": 0.94140625, "learning_rate": 0.0002, "loss": 0.282, "step": 298 },
    { "epoch": 0.15989304812834224, "grad_norm": 0.318359375, "learning_rate": 0.0002, "loss": 0.1445, "step": 299 },
    { "epoch": 0.16042780748663102, "grad_norm": 0.25, "learning_rate": 0.0002, "loss": 0.1235, "step": 300 },
    { "epoch": 0.16096256684491977, "grad_norm": 0.10693359375, "learning_rate": 0.0002, "loss": 0.0835, "step": 301 },
    { "epoch": 0.16149732620320856, "grad_norm": 0.375, "learning_rate": 0.0002, "loss": 0.0934, "step": 302 },
    { "epoch": 0.1620320855614973, "grad_norm": 0.59765625, "learning_rate": 0.0002, "loss": 0.3041, "step": 303 },
    { "epoch": 0.1625668449197861, "grad_norm": 0.06640625, "learning_rate": 0.0002, "loss": 0.1435, "step": 304 },
    { "epoch": 0.16310160427807488, "grad_norm": 0.66796875, "learning_rate": 0.0002, "loss": 0.1957, "step": 305 },
    { "epoch": 0.16363636363636364, "grad_norm": 0.84765625, "learning_rate": 0.0002, "loss": 0.3342, "step": 306 },
    { "epoch": 0.16417112299465242, "grad_norm": 0.7265625, "learning_rate": 0.0002, "loss": 0.3299, "step": 307 },
    { "epoch": 0.16470588235294117, "grad_norm": 0.515625, "learning_rate": 0.0002, "loss": 0.1717, "step": 308 },
    { "epoch": 0.16524064171122996, "grad_norm": 0.4296875, "learning_rate": 0.0002, "loss": 0.2635, "step": 309 },
    { "epoch": 0.1657754010695187, "grad_norm": 0.1318359375, "learning_rate": 0.0002, "loss": 0.0933, "step": 310 },
    { "epoch": 0.1663101604278075, "grad_norm": 0.08642578125, "learning_rate": 0.0002, "loss": 0.0547, "step": 311 },
    { "epoch": 0.16684491978609625, "grad_norm": 0.46484375, "learning_rate": 0.0002, "loss": 0.2123, "step": 312 },
    { "epoch": 0.16737967914438504, "grad_norm": 0.095703125, "learning_rate": 0.0002, "loss": 0.1559, "step": 313 },
    { "epoch": 0.1679144385026738, "grad_norm": 1.0703125, "learning_rate": 0.0002, "loss": 0.243, "step": 314 },
    { "epoch": 0.16844919786096257, "grad_norm": 0.2578125, "learning_rate": 0.0002, "loss": 0.2638, "step": 315 },
    { "epoch": 0.16898395721925133, "grad_norm": 0.1435546875, "learning_rate": 0.0002, "loss": 0.1185, "step": 316 },
    { "epoch": 0.1695187165775401, "grad_norm": 0.5703125, "learning_rate": 0.0002, "loss": 0.2719, "step": 317 },
    { "epoch": 0.17005347593582887, "grad_norm": 1.375, "learning_rate": 0.0002, "loss": 0.4247, "step": 318 },
    { "epoch": 0.17058823529411765, "grad_norm": 0.34765625, "learning_rate": 0.0002, "loss": 0.3772, "step": 319 },
    { "epoch": 0.1711229946524064, "grad_norm": 0.10791015625, "learning_rate": 0.0002, "loss": 0.212, "step": 320 },
    { "epoch": 0.1716577540106952, "grad_norm": 0.419921875, "learning_rate": 0.0002, "loss": 0.1685, "step": 321 },
    { "epoch": 0.17219251336898395, "grad_norm": 0.255859375, "learning_rate": 0.0002, "loss": 0.2688, "step": 322 },
    { "epoch": 0.17272727272727273, "grad_norm": 0.11376953125, "learning_rate": 0.0002, "loss": 0.2073, "step": 323 },
    { "epoch": 0.17326203208556148, "grad_norm": 0.51953125, "learning_rate": 0.0002, "loss": 0.2158, "step": 324 },
    { "epoch": 0.17379679144385027, "grad_norm": 1.2734375, "learning_rate": 0.0002, "loss": 0.3103, "step": 325 },
    { "epoch": 0.17433155080213902, "grad_norm": 0.2421875, "learning_rate": 0.0002, "loss": 0.2458, "step": 326 },
    { "epoch": 0.1748663101604278, "grad_norm": 0.193359375, "learning_rate": 0.0002, "loss": 0.2203, "step": 327 },
    { "epoch": 0.1754010695187166, "grad_norm": 0.392578125, "learning_rate": 0.0002, "loss": 0.2319, "step": 328 },
    { "epoch": 0.17593582887700535, "grad_norm": 0.625, "learning_rate": 0.0002, "loss": 0.2094, "step": 329 },
    { "epoch": 0.17647058823529413, "grad_norm": 0.396484375, "learning_rate": 0.0002, "loss": 0.2354, "step": 330 },
    { "epoch": 0.17700534759358288, "grad_norm": 0.73046875, "learning_rate": 0.0002, "loss": 0.3425, "step": 331 },
    { "epoch": 0.17754010695187167, "grad_norm": 0.37109375, "learning_rate": 0.0002, "loss": 0.1983, "step": 332 },
    { "epoch": 0.17807486631016042, "grad_norm": 0.08984375, "learning_rate": 0.0002, "loss": 0.1281, "step": 333 },
    { "epoch": 0.1786096256684492, "grad_norm": 0.103515625, "learning_rate": 0.0002, "loss": 0.1543, "step": 334 },
    { "epoch": 0.17914438502673796, "grad_norm": 0.62890625, "learning_rate": 0.0002, "loss": 0.2649, "step": 335 },
    { "epoch": 0.17967914438502675, "grad_norm": 0.11572265625, "learning_rate": 0.0002, "loss": 0.1132, "step": 336 },
    { "epoch": 0.1802139037433155, "grad_norm": 1.0, "learning_rate": 0.0002, "loss": 0.428, "step": 337 },
    { "epoch": 0.18074866310160428, "grad_norm": 0.283203125, "learning_rate": 0.0002, "loss": 0.2577, "step": 338 },
    { "epoch": 0.18128342245989304, "grad_norm": 0.1357421875, "learning_rate": 0.0002, "loss": 0.1888, "step": 339 },
    { "epoch": 0.18181818181818182, "grad_norm": 0.5859375, "learning_rate": 0.0002, "loss": 0.2504, "step": 340 },
    { "epoch": 0.18235294117647058, "grad_norm": 0.90625, "learning_rate": 0.0002, "loss": 0.1937, "step": 341 },
    { "epoch": 0.18288770053475936, "grad_norm": 0.3671875, "learning_rate": 0.0002, "loss": 0.1778, "step": 342 },
    { "epoch": 0.18342245989304812, "grad_norm": 0.06298828125, "learning_rate": 0.0002, "loss": 0.0472, "step": 343 },
    { "epoch": 0.1839572192513369, "grad_norm": 0.455078125, "learning_rate": 0.0002, "loss": 0.1782, "step": 344 },
    { "epoch": 0.18449197860962566, "grad_norm": 0.494140625, "learning_rate": 0.0002, "loss": 0.2222, "step": 345 },
    { "epoch": 0.18502673796791444, "grad_norm": 0.3359375, "learning_rate": 0.0002, "loss": 0.1891, "step": 346 },
    { "epoch": 0.1855614973262032, "grad_norm": 0.51953125, "learning_rate": 0.0002, "loss": 0.3089, "step": 347 },
    { "epoch": 0.18609625668449198, "grad_norm": 2.734375, "learning_rate": 0.0002, "loss": 0.4823, "step": 348 },
    { "epoch": 0.18663101604278076, "grad_norm": 1.2890625, "learning_rate": 0.0002, "loss": 0.1879, "step": 349 },
    { "epoch": 0.18716577540106952, "grad_norm": 0.10546875, "learning_rate": 0.0002, "loss": 0.1862, "step": 350 },
    { "epoch": 0.1877005347593583, "grad_norm": 1.2578125, "learning_rate": 0.0002, "loss": 0.2416, "step": 351 },
    { "epoch": 0.18823529411764706, "grad_norm": 0.16796875, "learning_rate": 0.0002, "loss": 0.2126, "step": 352 },
    { "epoch": 0.18877005347593584, "grad_norm": 1.1484375, "learning_rate": 0.0002, "loss": 0.1726, "step": 353 },
    { "epoch": 0.1893048128342246, "grad_norm": 0.58203125, "learning_rate": 0.0002, "loss": 0.2085, "step": 354 },
    { "epoch": 0.18983957219251338, "grad_norm": 0.263671875, "learning_rate": 0.0002, "loss": 0.1478, "step": 355 },
    { "epoch": 0.19037433155080213, "grad_norm": 0.328125, "learning_rate": 0.0002, "loss": 0.1796, "step": 356 },
    { "epoch": 0.19090909090909092, "grad_norm": 0.72265625, "learning_rate": 0.0002, "loss": 0.3885, "step": 357 },
    { "epoch": 0.19144385026737967, "grad_norm": 0.2275390625, "learning_rate": 0.0002, "loss": 0.1843, "step": 358 },
    { "epoch": 0.19197860962566846, "grad_norm": 0.333984375, "learning_rate": 0.0002, "loss": 0.1804, "step": 359 },
    { "epoch": 0.1925133689839572, "grad_norm": 0.55859375, "learning_rate": 0.0002, "loss": 0.192, "step": 360 },
    { "epoch": 0.193048128342246, "grad_norm": 0.1220703125, "learning_rate": 0.0002, "loss": 0.0813, "step": 361 },
    { "epoch": 0.19358288770053475, "grad_norm": 0.1171875, "learning_rate": 0.0002, "loss": 0.0699, "step": 362 },
    { "epoch": 0.19411764705882353, "grad_norm": 0.453125, "learning_rate": 0.0002, "loss": 0.2945, "step": 363 },
    { "epoch": 0.1946524064171123, "grad_norm": 0.107421875, "learning_rate": 0.0002, "loss": 0.1997, "step": 364 },
    { "epoch": 0.19518716577540107, "grad_norm": 0.28125, "learning_rate": 0.0002, "loss": 0.2366, "step": 365 },
    { "epoch": 0.19572192513368983, "grad_norm": 0.58984375, "learning_rate": 0.0002, "loss": 0.2421, "step": 366 },
    { "epoch": 0.1962566844919786, "grad_norm": 0.2421875, "learning_rate": 0.0002, "loss": 0.1364, "step": 367 },
    { "epoch": 0.19679144385026737, "grad_norm": 0.45703125, "learning_rate": 0.0002, "loss": 0.1706, "step": 368 },
    { "epoch": 0.19732620320855615, "grad_norm": 0.1259765625, "learning_rate": 0.0002, "loss": 0.1628, "step": 369 },
    { "epoch": 0.19786096256684493, "grad_norm": 0.0927734375, "learning_rate": 0.0002, "loss": 0.1654, "step": 370 },
    { "epoch": 0.1983957219251337, "grad_norm": 0.29296875, "learning_rate": 0.0002, "loss": 0.0769, "step": 371 },
    { "epoch": 0.19893048128342247, "grad_norm": 0.255859375, "learning_rate": 0.0002, "loss": 0.2066, "step": 372 },
    { "epoch": 0.19946524064171123, "grad_norm": 0.376953125, "learning_rate": 0.0002, "loss": 0.12, "step": 373 },
    { "epoch": 0.2, "grad_norm": 0.2080078125, "learning_rate": 0.0002, "loss": 0.164, "step": 374 },
    { "epoch": 0.20053475935828877, "grad_norm": 0.375, "learning_rate": 0.0002, "loss": 0.251, "step": 375 },
    { "epoch": 0.20106951871657755, "grad_norm": 0.6640625, "learning_rate": 0.0002, "loss": 0.1263, "step": 376 },
    { "epoch": 0.2016042780748663, "grad_norm": 0.431640625, "learning_rate": 0.0002, "loss": 0.2442, "step": 377 },
    { "epoch": 0.2021390374331551, "grad_norm": 0.470703125, "learning_rate": 0.0002, "loss": 0.0631, "step": 378 },
    { "epoch": 0.20267379679144384, "grad_norm": 0.326171875, "learning_rate": 0.0002, "loss": 0.2238, "step": 379 },
    { "epoch": 0.20320855614973263, "grad_norm": 0.2890625, "learning_rate": 0.0002, "loss": 0.319, "step": 380 },
    { "epoch": 0.20374331550802138, "grad_norm": 0.298828125, "learning_rate": 0.0002, "loss": 0.1655, "step": 381 },
    { "epoch": 0.20427807486631017, "grad_norm": 0.205078125, "learning_rate": 0.0002, "loss": 0.1586, "step": 382 },
    { "epoch": 0.20481283422459892, "grad_norm": 0.318359375, "learning_rate": 0.0002, "loss": 0.2062, "step": 383 },
    { "epoch": 0.2053475935828877, "grad_norm": 0.365234375, "learning_rate": 0.0002, "loss": 0.2637, "step": 384 },
    { "epoch": 0.20588235294117646, "grad_norm": 0.271484375, "learning_rate": 0.0002, "loss": 0.203, "step": 385 },
    { "epoch": 0.20641711229946524, "grad_norm": 0.4140625, "learning_rate": 0.0002, "loss": 0.3365, "step": 386 },
    { "epoch": 0.206951871657754, "grad_norm": 0.2578125, "learning_rate": 0.0002, "loss": 0.2558, "step": 387 },
    { "epoch": 0.20748663101604278, "grad_norm": 0.380859375, "learning_rate": 0.0002, "loss": 0.1074, "step": 388 },
    { "epoch": 0.20802139037433154, "grad_norm": 0.2470703125, "learning_rate": 0.0002, "loss": 0.1507, "step": 389 },
    { "epoch": 0.20855614973262032, "grad_norm": 0.1474609375, "learning_rate": 0.0002, "loss": 0.2006, "step": 390 },
    { "epoch": 0.20909090909090908, "grad_norm": 0.224609375, "learning_rate": 0.0002, "loss": 0.2195, "step": 391 },
    { "epoch": 0.20962566844919786, "grad_norm": 0.193359375, "learning_rate": 0.0002, "loss": 0.1366, "step": 392 },
    { "epoch": 0.21016042780748664, "grad_norm": 0.3359375, "learning_rate": 0.0002, "loss": 0.2126, "step": 393 },
    { "epoch": 0.2106951871657754, "grad_norm": 0.20703125, "learning_rate": 0.0002, "loss": 0.0699, "step": 394 },
    { "epoch": 0.21122994652406418, "grad_norm": 0.248046875, "learning_rate": 0.0002, "loss": 0.1594, "step": 395 },
    { "epoch": 0.21176470588235294, "grad_norm": 0.265625, "learning_rate": 0.0002, "loss": 0.1362, "step": 396 },
    { "epoch": 0.21229946524064172, "grad_norm": 0.1328125, "learning_rate": 0.0002, "loss": 0.1458, "step": 397 },
    { "epoch": 0.21283422459893048, "grad_norm": 0.67578125, "learning_rate": 0.0002, "loss": 0.2377, "step": 398 },
    { "epoch": 0.21336898395721926, "grad_norm": 1.1171875, "learning_rate": 0.0002, "loss": 0.4138, "step": 399 },
    { "epoch": 0.21390374331550802, "grad_norm": 0.2197265625, "learning_rate": 0.0002, "loss": 0.1331, "step": 400 },
    { "epoch": 0.2144385026737968, "grad_norm": 0.703125, "learning_rate": 0.0002, "loss": 0.3063, "step": 401 },
    { "epoch": 0.21497326203208555, "grad_norm": 0.49609375, "learning_rate": 0.0002, "loss": 0.15, "step": 402 },
    { "epoch": 0.21550802139037434, "grad_norm": 0.59765625, "learning_rate": 0.0002, "loss": 0.1727, "step": 403 },
    { "epoch": 0.2160427807486631, "grad_norm": 0.328125, "learning_rate": 0.0002, "loss": 0.1833, "step": 404 },
    { "epoch": 0.21657754010695188, "grad_norm": 0.146484375, "learning_rate": 0.0002, "loss": 0.0712, "step": 405 },
    { "epoch": 0.21711229946524063, "grad_norm": 0.134765625, "learning_rate": 0.0002, "loss": 0.1766, "step": 406 },
    { "epoch": 0.21764705882352942, "grad_norm": 0.3203125, "learning_rate": 0.0002, "loss": 0.2309, "step": 407 },
    { "epoch": 0.21818181818181817, "grad_norm": 0.1298828125, "learning_rate": 0.0002, "loss": 0.1819, "step": 408 },
    { "epoch": 0.21871657754010695, "grad_norm": 0.146484375, "learning_rate": 0.0002, "loss": 0.1536, "step": 409 },
    { "epoch": 0.2192513368983957, "grad_norm": 0.177734375, "learning_rate": 0.0002, "loss": 0.1115, "step": 410 },
    { "epoch": 0.2197860962566845, "grad_norm": 0.181640625, "learning_rate": 0.0002, "loss": 0.1675, "step": 411 },
    { "epoch": 0.22032085561497325, "grad_norm": 1.171875, "learning_rate": 0.0002, "loss": 0.1975, "step": 412 },
    { "epoch": 0.22085561497326203, "grad_norm": 0.609375, "learning_rate": 0.0002, "loss": 0.2091, "step": 413 },
    { "epoch": 0.22139037433155082, "grad_norm": 0.431640625, "learning_rate": 0.0002, "loss": 0.2046, "step": 414 },
    { "epoch": 0.22192513368983957, "grad_norm": 0.11474609375, "learning_rate": 0.0002, "loss": 0.1062, "step": 415 },
    { "epoch": 0.22245989304812835, "grad_norm": 0.328125, "learning_rate": 0.0002, "loss": 0.252, "step": 416 },
    { "epoch": 0.2229946524064171, "grad_norm": 0.4296875, "learning_rate": 0.0002, "loss": 0.2299, "step": 417 },
    { "epoch": 0.2235294117647059, "grad_norm": 0.453125, "learning_rate": 0.0002, "loss": 0.1805, "step": 418 },
    { "epoch": 0.22406417112299465, "grad_norm": 0.2314453125, "learning_rate": 0.0002, "loss": 0.2925, "step": 419 },
    { "epoch": 0.22459893048128343, "grad_norm": 1.0546875, "learning_rate": 0.0002, "loss": 0.2782, "step": 420 },
    { "epoch": 0.2251336898395722, "grad_norm": 0.162109375, "learning_rate": 0.0002, "loss": 0.1329, "step": 421 },
    { "epoch": 0.22566844919786097, "grad_norm": 0.421875, "learning_rate": 0.0002, "loss": 0.2275, "step": 422 },
    { "epoch": 0.22620320855614973, "grad_norm": 0.2158203125, "learning_rate": 0.0002, "loss": 0.1434, "step": 423 },
    { "epoch": 0.2267379679144385, "grad_norm": 0.2119140625, "learning_rate": 0.0002, "loss": 0.1863, "step": 424 },
    { "epoch": 0.22727272727272727, "grad_norm": 0.69140625, "learning_rate": 0.0002, "loss": 0.2988, "step": 425 },
    { "epoch": 0.22780748663101605, "grad_norm": 0.5, "learning_rate": 0.0002, "loss": 0.2001, "step": 426 },
    { "epoch": 0.2283422459893048, "grad_norm": 0.251953125, "learning_rate": 0.0002, "loss": 0.2114, "step": 427 },
    { "epoch": 0.2288770053475936, "grad_norm": 0.1796875, "learning_rate": 0.0002, "loss": 0.1191, "step": 428 },
    { "epoch": 0.22941176470588234, "grad_norm": 0.52734375, "learning_rate": 0.0002, "loss": 0.2393, "step": 429 },
    {
      "epoch": 0.22994652406417113,
|
"grad_norm": 0.671875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1093, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.23048128342245988, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2828, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.23101604278074866, |
|
"grad_norm": 0.26953125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2073, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.23155080213903742, |
|
"grad_norm": 0.578125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3156, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.2320855614973262, |
|
"grad_norm": 0.2294921875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1862, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.232620320855615, |
|
"grad_norm": 0.10791015625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1061, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.23315508021390374, |
|
"grad_norm": 0.466796875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2721, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.23368983957219253, |
|
"grad_norm": 0.12060546875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0834, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.23422459893048128, |
|
"grad_norm": 0.380859375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1377, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.23475935828877006, |
|
"grad_norm": 0.671875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.21, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.23529411764705882, |
|
"grad_norm": 0.2099609375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1004, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.2358288770053476, |
|
"grad_norm": 0.1611328125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1424, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.23636363636363636, |
|
"grad_norm": 0.1826171875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.12, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.23689839572192514, |
|
"grad_norm": 0.28515625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.155, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.2374331550802139, |
|
"grad_norm": 0.2255859375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1647, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.23796791443850268, |
|
"grad_norm": 0.2890625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1718, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.23850267379679144, |
|
"grad_norm": 0.103515625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0817, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.23903743315508022, |
|
"grad_norm": 0.25390625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1277, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.23957219251336898, |
|
"grad_norm": 0.353515625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1198, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.24010695187165776, |
|
"grad_norm": 0.06787109375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0802, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.24064171122994651, |
|
"grad_norm": 0.2490234375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0657, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.2411764705882353, |
|
"grad_norm": 0.1181640625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1076, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.24171122994652405, |
|
"grad_norm": 0.1181640625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0829, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.24224598930481284, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1498, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.2427807486631016, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0773, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.24331550802139038, |
|
"grad_norm": 0.1357421875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0283, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.24385026737967913, |
|
"grad_norm": 0.73046875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1534, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.24438502673796791, |
|
"grad_norm": 0.25390625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2006, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.2449197860962567, |
|
"grad_norm": 0.09814453125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0201, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.24545454545454545, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3034, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.24598930481283424, |
|
"grad_norm": 0.63671875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3538, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.246524064171123, |
|
"grad_norm": 0.2001953125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.138, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.24705882352941178, |
|
"grad_norm": 1.3671875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2199, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.24759358288770053, |
|
"grad_norm": 0.58984375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3079, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.24812834224598931, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.36, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.24866310160427807, |
|
"grad_norm": 0.91015625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3007, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.24919786096256685, |
|
"grad_norm": 1.0234375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2952, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.2497326203208556, |
|
"grad_norm": 0.2197265625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0584, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.25026737967914436, |
|
"grad_norm": 0.1923828125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0912, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.25080213903743315, |
|
"grad_norm": 0.21875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1412, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.25133689839572193, |
|
"grad_norm": 0.59765625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3287, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.2518716577540107, |
|
"grad_norm": 0.1708984375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2102, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.25240641711229944, |
|
"grad_norm": 0.259765625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2511, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.2529411764705882, |
|
"grad_norm": 0.388671875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2573, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.253475935828877, |
|
"grad_norm": 0.1025390625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1181, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.2540106951871658, |
|
"grad_norm": 0.291015625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1183, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.2545454545454545, |
|
"grad_norm": 0.330078125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1341, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.2550802139037433, |
|
"grad_norm": 0.31640625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.16, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.2556149732620321, |
|
"grad_norm": 0.2392578125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2133, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.25614973262032087, |
|
"grad_norm": 0.373046875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2048, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.25668449197860965, |
|
"grad_norm": 0.2353515625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2218, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.2572192513368984, |
|
"grad_norm": 0.36328125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1959, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.25775401069518716, |
|
"grad_norm": 0.1318359375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1816, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.25828877005347595, |
|
"grad_norm": 0.458984375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1479, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.25882352941176473, |
|
"grad_norm": 0.33984375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.189, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.25935828877005346, |
|
"grad_norm": 1.1953125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2001, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.25989304812834224, |
|
"grad_norm": 0.06591796875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0943, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.260427807486631, |
|
"grad_norm": 0.287109375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1934, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.2609625668449198, |
|
"grad_norm": 0.51171875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2581, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.26149732620320854, |
|
"grad_norm": 0.15625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.0733, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.2620320855614973, |
|
"grad_norm": 0.494140625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2736, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.2625668449197861, |
|
"grad_norm": 0.271484375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1819, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.2631016042780749, |
|
"grad_norm": 0.302734375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.2011, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.2636363636363636, |
|
"grad_norm": 0.3984375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1678, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.2641711229946524, |
|
"grad_norm": 0.94921875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1992, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.2647058823529412, |
|
"grad_norm": 1.0546875, |
|
"learning_rate": 0.0002, |
|
"loss": 0.3075, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.26524064171122996, |
|
"grad_norm": 0.21484375, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1961, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.2657754010695187, |
|
"grad_norm": 0.236328125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1905, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.2663101604278075, |
|
"grad_norm": 0.1015625, |
|
"learning_rate": 0.0002, |
|
"loss": 0.1238, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.26684491978609626, |
|
"grad_norm": 0.1767578125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.122, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.26737967914438504, |
|
"grad_norm": 0.70703125, |
|
"learning_rate": 0.0002, |
|
"loss": 0.192, |
|
"step": 500 |
|
} |
|
  ],
  "logging_steps": 1,
  "max_steps": 500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 100,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.729911211451351e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}