{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9994397759103641,
  "eval_steps": 500,
  "global_step": 446,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.002240896358543417,
      "grad_norm": 0.22982779145240784,
      "learning_rate": 1.1235955056179775e-06,
      "loss": 1.1973,
      "step": 1
    },
    {
      "epoch": 0.004481792717086834,
      "grad_norm": 0.2098255604505539,
      "learning_rate": 2.247191011235955e-06,
      "loss": 1.2685,
      "step": 2
    },
    {
      "epoch": 0.0067226890756302525,
      "grad_norm": 0.2404344230890274,
      "learning_rate": 3.3707865168539327e-06,
      "loss": 1.2292,
      "step": 3
    },
    {
      "epoch": 0.008963585434173669,
      "grad_norm": 0.23824891448020935,
      "learning_rate": 4.49438202247191e-06,
      "loss": 1.1901,
      "step": 4
    },
    {
      "epoch": 0.011204481792717087,
      "grad_norm": 0.22885526716709137,
      "learning_rate": 5.617977528089888e-06,
      "loss": 1.2695,
      "step": 5
    },
    {
      "epoch": 0.013445378151260505,
      "grad_norm": 0.2576353847980499,
      "learning_rate": 6.741573033707865e-06,
      "loss": 1.2872,
      "step": 6
    },
    {
      "epoch": 0.01568627450980392,
      "grad_norm": 0.2190636247396469,
      "learning_rate": 7.865168539325843e-06,
      "loss": 1.1952,
      "step": 7
    },
    {
      "epoch": 0.017927170868347338,
      "grad_norm": 0.21357473731040955,
      "learning_rate": 8.98876404494382e-06,
      "loss": 1.2597,
      "step": 8
    },
    {
      "epoch": 0.020168067226890758,
      "grad_norm": 0.24502670764923096,
      "learning_rate": 1.0112359550561798e-05,
      "loss": 1.279,
      "step": 9
    },
    {
      "epoch": 0.022408963585434174,
      "grad_norm": 0.25532686710357666,
      "learning_rate": 1.1235955056179776e-05,
      "loss": 1.2163,
      "step": 10
    },
    {
      "epoch": 0.02464985994397759,
      "grad_norm": 0.250683456659317,
      "learning_rate": 1.2359550561797752e-05,
      "loss": 1.2735,
      "step": 11
    },
    {
      "epoch": 0.02689075630252101,
      "grad_norm": 0.2523595988750458,
      "learning_rate": 1.348314606741573e-05,
      "loss": 1.3471,
      "step": 12
    },
    {
      "epoch": 0.029131652661064426,
      "grad_norm": 0.23875640332698822,
      "learning_rate": 1.4606741573033709e-05,
      "loss": 1.2595,
      "step": 13
    },
    {
      "epoch": 0.03137254901960784,
      "grad_norm": 0.24998927116394043,
      "learning_rate": 1.5730337078651687e-05,
      "loss": 1.2142,
      "step": 14
    },
    {
      "epoch": 0.03361344537815126,
      "grad_norm": 0.2783028483390808,
      "learning_rate": 1.6853932584269665e-05,
      "loss": 1.185,
      "step": 15
    },
    {
      "epoch": 0.035854341736694675,
      "grad_norm": 0.2796604633331299,
      "learning_rate": 1.797752808988764e-05,
      "loss": 1.226,
      "step": 16
    },
    {
      "epoch": 0.0380952380952381,
      "grad_norm": 0.2899184823036194,
      "learning_rate": 1.9101123595505618e-05,
      "loss": 1.1672,
      "step": 17
    },
    {
      "epoch": 0.040336134453781515,
      "grad_norm": 0.2601150572299957,
      "learning_rate": 2.0224719101123596e-05,
      "loss": 1.1127,
      "step": 18
    },
    {
      "epoch": 0.04257703081232493,
      "grad_norm": 0.3052348792552948,
      "learning_rate": 2.1348314606741574e-05,
      "loss": 1.2865,
      "step": 19
    },
    {
      "epoch": 0.04481792717086835,
      "grad_norm": 0.24783451855182648,
      "learning_rate": 2.2471910112359552e-05,
      "loss": 1.0888,
      "step": 20
    },
    {
      "epoch": 0.047058823529411764,
      "grad_norm": 0.24703693389892578,
      "learning_rate": 2.359550561797753e-05,
      "loss": 1.059,
      "step": 21
    },
    {
      "epoch": 0.04929971988795518,
      "grad_norm": 0.2503679096698761,
      "learning_rate": 2.4719101123595505e-05,
      "loss": 1.1602,
      "step": 22
    },
    {
      "epoch": 0.0515406162464986,
      "grad_norm": 0.2639842927455902,
      "learning_rate": 2.5842696629213486e-05,
      "loss": 1.0911,
      "step": 23
    },
    {
      "epoch": 0.05378151260504202,
      "grad_norm": 0.2942507266998291,
      "learning_rate": 2.696629213483146e-05,
      "loss": 1.0777,
      "step": 24
    },
    {
      "epoch": 0.056022408963585436,
      "grad_norm": 0.28088828921318054,
      "learning_rate": 2.8089887640449443e-05,
      "loss": 1.1111,
      "step": 25
    },
    {
      "epoch": 0.05826330532212885,
      "grad_norm": 0.26791295409202576,
      "learning_rate": 2.9213483146067417e-05,
      "loss": 1.0016,
      "step": 26
    },
    {
      "epoch": 0.06050420168067227,
      "grad_norm": 0.2685791850090027,
      "learning_rate": 3.0337078651685396e-05,
      "loss": 1.1085,
      "step": 27
    },
    {
      "epoch": 0.06274509803921569,
      "grad_norm": 0.2627420127391815,
      "learning_rate": 3.1460674157303374e-05,
      "loss": 1.0704,
      "step": 28
    },
    {
      "epoch": 0.06498599439775911,
      "grad_norm": 0.3000424802303314,
      "learning_rate": 3.258426966292135e-05,
      "loss": 1.0786,
      "step": 29
    },
    {
      "epoch": 0.06722689075630252,
      "grad_norm": 0.3018706738948822,
      "learning_rate": 3.370786516853933e-05,
      "loss": 1.0423,
      "step": 30
    },
    {
      "epoch": 0.06946778711484594,
      "grad_norm": 0.27565667033195496,
      "learning_rate": 3.483146067415731e-05,
      "loss": 0.9834,
      "step": 31
    },
    {
      "epoch": 0.07170868347338935,
      "grad_norm": 0.30244842171669006,
      "learning_rate": 3.595505617977528e-05,
      "loss": 1.0927,
      "step": 32
    },
    {
      "epoch": 0.07394957983193277,
      "grad_norm": 0.3654678165912628,
      "learning_rate": 3.7078651685393264e-05,
      "loss": 0.9812,
      "step": 33
    },
    {
      "epoch": 0.0761904761904762,
      "grad_norm": 0.36483272910118103,
      "learning_rate": 3.8202247191011236e-05,
      "loss": 0.9693,
      "step": 34
    },
    {
      "epoch": 0.0784313725490196,
      "grad_norm": 0.2949022054672241,
      "learning_rate": 3.9325842696629214e-05,
      "loss": 0.9647,
      "step": 35
    },
    {
      "epoch": 0.08067226890756303,
      "grad_norm": 0.36239683628082275,
      "learning_rate": 4.044943820224719e-05,
      "loss": 0.9725,
      "step": 36
    },
    {
      "epoch": 0.08291316526610644,
      "grad_norm": 0.32511067390441895,
      "learning_rate": 4.157303370786517e-05,
      "loss": 1.0136,
      "step": 37
    },
    {
      "epoch": 0.08515406162464986,
      "grad_norm": 0.32111090421676636,
      "learning_rate": 4.269662921348315e-05,
      "loss": 0.9207,
      "step": 38
    },
    {
      "epoch": 0.08739495798319327,
      "grad_norm": 0.3080519735813141,
      "learning_rate": 4.3820224719101126e-05,
      "loss": 0.9729,
      "step": 39
    },
    {
      "epoch": 0.0896358543417367,
      "grad_norm": 0.2933235764503479,
      "learning_rate": 4.4943820224719104e-05,
      "loss": 0.9403,
      "step": 40
    },
    {
      "epoch": 0.09187675070028012,
      "grad_norm": 0.3132348358631134,
      "learning_rate": 4.606741573033708e-05,
      "loss": 0.9587,
      "step": 41
    },
    {
      "epoch": 0.09411764705882353,
      "grad_norm": 0.3013119101524353,
      "learning_rate": 4.719101123595506e-05,
      "loss": 0.9162,
      "step": 42
    },
    {
      "epoch": 0.09635854341736695,
      "grad_norm": 0.300485223531723,
      "learning_rate": 4.831460674157304e-05,
      "loss": 0.8984,
      "step": 43
    },
    {
      "epoch": 0.09859943977591036,
      "grad_norm": 0.2726304233074188,
      "learning_rate": 4.943820224719101e-05,
      "loss": 0.987,
      "step": 44
    },
    {
      "epoch": 0.10084033613445378,
      "grad_norm": 0.2733825445175171,
      "learning_rate": 5.0561797752808995e-05,
      "loss": 0.9438,
      "step": 45
    },
    {
      "epoch": 0.1030812324929972,
      "grad_norm": 0.28922319412231445,
      "learning_rate": 5.168539325842697e-05,
      "loss": 0.8674,
      "step": 46
    },
    {
      "epoch": 0.10532212885154062,
      "grad_norm": 0.2743085026741028,
      "learning_rate": 5.2808988764044944e-05,
      "loss": 0.9041,
      "step": 47
    },
    {
      "epoch": 0.10756302521008404,
      "grad_norm": 0.28649550676345825,
      "learning_rate": 5.393258426966292e-05,
      "loss": 0.9057,
      "step": 48
    },
    {
      "epoch": 0.10980392156862745,
      "grad_norm": 0.2877427339553833,
      "learning_rate": 5.50561797752809e-05,
      "loss": 0.9139,
      "step": 49
    },
    {
      "epoch": 0.11204481792717087,
      "grad_norm": 0.27738648653030396,
      "learning_rate": 5.6179775280898885e-05,
      "loss": 0.8518,
      "step": 50
    },
    {
      "epoch": 0.11428571428571428,
      "grad_norm": 0.2839404046535492,
      "learning_rate": 5.730337078651685e-05,
      "loss": 0.8634,
      "step": 51
    },
    {
      "epoch": 0.1165266106442577,
      "grad_norm": 0.2748688757419586,
      "learning_rate": 5.8426966292134835e-05,
      "loss": 0.9504,
      "step": 52
    },
    {
      "epoch": 0.11876750700280111,
      "grad_norm": 0.2953556776046753,
      "learning_rate": 5.955056179775281e-05,
      "loss": 0.861,
      "step": 53
    },
    {
      "epoch": 0.12100840336134454,
      "grad_norm": 0.2947392165660858,
      "learning_rate": 6.067415730337079e-05,
      "loss": 0.8578,
      "step": 54
    },
    {
      "epoch": 0.12324929971988796,
      "grad_norm": 0.3029733896255493,
      "learning_rate": 6.179775280898876e-05,
      "loss": 0.895,
      "step": 55
    },
    {
      "epoch": 0.12549019607843137,
      "grad_norm": 0.28483161330223083,
      "learning_rate": 6.292134831460675e-05,
      "loss": 0.9316,
      "step": 56
    },
    {
      "epoch": 0.12773109243697478,
      "grad_norm": 0.280499666929245,
      "learning_rate": 6.404494382022472e-05,
      "loss": 0.8763,
      "step": 57
    },
    {
      "epoch": 0.12997198879551822,
      "grad_norm": 0.2687634825706482,
      "learning_rate": 6.51685393258427e-05,
      "loss": 0.9216,
      "step": 58
    },
    {
      "epoch": 0.13221288515406163,
      "grad_norm": 0.2869422435760498,
      "learning_rate": 6.629213483146067e-05,
      "loss": 0.8783,
      "step": 59
    },
    {
      "epoch": 0.13445378151260504,
      "grad_norm": 0.29140859842300415,
      "learning_rate": 6.741573033707866e-05,
      "loss": 0.9613,
      "step": 60
    },
    {
      "epoch": 0.13669467787114845,
      "grad_norm": 0.29342207312583923,
      "learning_rate": 6.853932584269663e-05,
      "loss": 0.982,
      "step": 61
    },
    {
      "epoch": 0.13893557422969188,
      "grad_norm": 0.29143285751342773,
      "learning_rate": 6.966292134831462e-05,
      "loss": 0.8857,
      "step": 62
    },
    {
      "epoch": 0.1411764705882353,
      "grad_norm": 0.2944166660308838,
      "learning_rate": 7.078651685393259e-05,
      "loss": 0.8668,
      "step": 63
    },
    {
      "epoch": 0.1434173669467787,
      "grad_norm": 0.27430447936058044,
      "learning_rate": 7.191011235955056e-05,
      "loss": 0.9403,
      "step": 64
    },
    {
      "epoch": 0.14565826330532214,
      "grad_norm": 0.274800568819046,
      "learning_rate": 7.303370786516854e-05,
      "loss": 0.8689,
      "step": 65
    },
    {
      "epoch": 0.14789915966386555,
      "grad_norm": 0.3063383102416992,
      "learning_rate": 7.415730337078653e-05,
      "loss": 0.9217,
      "step": 66
    },
    {
      "epoch": 0.15014005602240896,
      "grad_norm": 0.26958584785461426,
      "learning_rate": 7.52808988764045e-05,
      "loss": 0.949,
      "step": 67
    },
    {
      "epoch": 0.1523809523809524,
      "grad_norm": 0.3030094504356384,
      "learning_rate": 7.640449438202247e-05,
      "loss": 0.8814,
      "step": 68
    },
    {
      "epoch": 0.1546218487394958,
      "grad_norm": 0.26790764927864075,
      "learning_rate": 7.752808988764046e-05,
      "loss": 0.9814,
      "step": 69
    },
    {
      "epoch": 0.1568627450980392,
      "grad_norm": 0.28143975138664246,
      "learning_rate": 7.865168539325843e-05,
      "loss": 0.8667,
      "step": 70
    },
    {
      "epoch": 0.15910364145658262,
      "grad_norm": 0.32539746165275574,
      "learning_rate": 7.97752808988764e-05,
      "loss": 1.0709,
      "step": 71
    },
    {
      "epoch": 0.16134453781512606,
      "grad_norm": 0.28114452958106995,
      "learning_rate": 8.089887640449438e-05,
      "loss": 0.9253,
      "step": 72
    },
    {
      "epoch": 0.16358543417366947,
      "grad_norm": 0.2680191993713379,
      "learning_rate": 8.202247191011237e-05,
      "loss": 0.9345,
      "step": 73
    },
    {
      "epoch": 0.16582633053221288,
      "grad_norm": 0.2928783893585205,
      "learning_rate": 8.314606741573034e-05,
      "loss": 0.906,
      "step": 74
    },
    {
      "epoch": 0.16806722689075632,
      "grad_norm": 0.2979438900947571,
      "learning_rate": 8.426966292134831e-05,
      "loss": 0.9581,
      "step": 75
    },
    {
      "epoch": 0.17030812324929973,
      "grad_norm": 0.27883946895599365,
      "learning_rate": 8.53932584269663e-05,
      "loss": 0.9203,
      "step": 76
    },
    {
      "epoch": 0.17254901960784313,
      "grad_norm": 0.3945503234863281,
      "learning_rate": 8.651685393258427e-05,
      "loss": 0.8136,
      "step": 77
    },
    {
      "epoch": 0.17478991596638654,
      "grad_norm": 0.27328184247016907,
      "learning_rate": 8.764044943820225e-05,
      "loss": 0.8966,
      "step": 78
    },
    {
      "epoch": 0.17703081232492998,
      "grad_norm": 0.3049486577510834,
      "learning_rate": 8.876404494382022e-05,
      "loss": 0.9658,
      "step": 79
    },
    {
      "epoch": 0.1792717086834734,
      "grad_norm": 0.3143669366836548,
      "learning_rate": 8.988764044943821e-05,
      "loss": 0.9867,
      "step": 80
    },
    {
      "epoch": 0.1815126050420168,
      "grad_norm": 0.30048221349716187,
      "learning_rate": 9.101123595505618e-05,
      "loss": 0.9249,
      "step": 81
    },
    {
      "epoch": 0.18375350140056024,
      "grad_norm": 0.28177568316459656,
      "learning_rate": 9.213483146067416e-05,
      "loss": 0.9899,
      "step": 82
    },
    {
      "epoch": 0.18599439775910365,
      "grad_norm": 0.296478807926178,
      "learning_rate": 9.325842696629214e-05,
      "loss": 0.8476,
      "step": 83
    },
    {
      "epoch": 0.18823529411764706,
      "grad_norm": 0.2855791449546814,
      "learning_rate": 9.438202247191012e-05,
      "loss": 0.8869,
      "step": 84
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 0.2829147279262543,
      "learning_rate": 9.550561797752809e-05,
      "loss": 0.9274,
      "step": 85
    },
    {
      "epoch": 0.1927170868347339,
      "grad_norm": 0.2549046277999878,
      "learning_rate": 9.662921348314608e-05,
      "loss": 0.8535,
      "step": 86
    },
    {
      "epoch": 0.1949579831932773,
      "grad_norm": 0.2655014991760254,
      "learning_rate": 9.775280898876405e-05,
      "loss": 0.8905,
      "step": 87
    },
    {
      "epoch": 0.19719887955182072,
      "grad_norm": 0.27607807517051697,
      "learning_rate": 9.887640449438202e-05,
      "loss": 0.8788,
      "step": 88
    },
    {
      "epoch": 0.19943977591036416,
      "grad_norm": 0.24970707297325134,
      "learning_rate": 0.0001,
      "loss": 0.7632,
      "step": 89
    },
    {
      "epoch": 0.20168067226890757,
      "grad_norm": 0.2821432650089264,
      "learning_rate": 0.00010112359550561799,
      "loss": 0.9314,
      "step": 90
    },
    {
      "epoch": 0.20392156862745098,
      "grad_norm": 0.28368303179740906,
      "learning_rate": 0.00010224719101123596,
      "loss": 0.8424,
      "step": 91
    },
    {
      "epoch": 0.2061624649859944,
      "grad_norm": 0.3087138235569,
      "learning_rate": 0.00010337078651685395,
      "loss": 0.9787,
      "step": 92
    },
    {
      "epoch": 0.20840336134453782,
      "grad_norm": 0.2952103614807129,
      "learning_rate": 0.00010449438202247193,
      "loss": 0.9679,
      "step": 93
    },
    {
      "epoch": 0.21064425770308123,
      "grad_norm": 0.2875281572341919,
      "learning_rate": 0.00010561797752808989,
      "loss": 0.9181,
      "step": 94
    },
    {
      "epoch": 0.21288515406162464,
      "grad_norm": 0.3084465265274048,
      "learning_rate": 0.00010674157303370786,
      "loss": 0.9615,
      "step": 95
    },
    {
      "epoch": 0.21512605042016808,
      "grad_norm": 0.28005871176719666,
      "learning_rate": 0.00010786516853932584,
      "loss": 0.8757,
      "step": 96
    },
    {
      "epoch": 0.2173669467787115,
      "grad_norm": 0.2795560657978058,
      "learning_rate": 0.00010898876404494383,
      "loss": 0.8266,
      "step": 97
    },
    {
      "epoch": 0.2196078431372549,
      "grad_norm": 0.2671497166156769,
      "learning_rate": 0.0001101123595505618,
      "loss": 0.8693,
      "step": 98
    },
    {
      "epoch": 0.2218487394957983,
      "grad_norm": 0.24397605657577515,
      "learning_rate": 0.00011123595505617979,
      "loss": 0.8334,
      "step": 99
    },
    {
      "epoch": 0.22408963585434175,
      "grad_norm": 0.26632094383239746,
      "learning_rate": 0.00011235955056179777,
      "loss": 0.966,
      "step": 100
    },
    {
      "epoch": 0.22633053221288515,
      "grad_norm": 0.2785671055316925,
      "learning_rate": 0.00011348314606741574,
      "loss": 0.8823,
      "step": 101
    },
    {
      "epoch": 0.22857142857142856,
      "grad_norm": 0.26920077204704285,
      "learning_rate": 0.0001146067415730337,
      "loss": 0.9726,
      "step": 102
    },
    {
      "epoch": 0.230812324929972,
      "grad_norm": 0.2633483409881592,
      "learning_rate": 0.00011573033707865168,
      "loss": 0.8849,
      "step": 103
    },
    {
      "epoch": 0.2330532212885154,
      "grad_norm": 0.26563239097595215,
      "learning_rate": 0.00011685393258426967,
      "loss": 0.7896,
      "step": 104
    },
    {
      "epoch": 0.23529411764705882,
      "grad_norm": 0.27475470304489136,
      "learning_rate": 0.00011797752808988764,
      "loss": 0.9659,
      "step": 105
    },
    {
      "epoch": 0.23753501400560223,
      "grad_norm": 0.2691081166267395,
      "learning_rate": 0.00011910112359550563,
      "loss": 0.9758,
      "step": 106
    },
    {
      "epoch": 0.23977591036414567,
      "grad_norm": 0.2714226543903351,
      "learning_rate": 0.00012022471910112361,
      "loss": 0.8736,
      "step": 107
    },
    {
      "epoch": 0.24201680672268908,
      "grad_norm": 0.2638857960700989,
      "learning_rate": 0.00012134831460674158,
      "loss": 0.8792,
      "step": 108
    },
    {
      "epoch": 0.24425770308123249,
      "grad_norm": 0.25696009397506714,
      "learning_rate": 0.00012247191011235955,
      "loss": 0.8813,
      "step": 109
    },
    {
      "epoch": 0.24649859943977592,
      "grad_norm": 0.27648770809173584,
      "learning_rate": 0.00012359550561797752,
      "loss": 0.9341,
      "step": 110
    },
    {
      "epoch": 0.24873949579831933,
      "grad_norm": 0.27543121576309204,
      "learning_rate": 0.0001247191011235955,
      "loss": 0.8102,
      "step": 111
    },
    {
      "epoch": 0.25098039215686274,
      "grad_norm": 0.2804222106933594,
      "learning_rate": 0.0001258426966292135,
      "loss": 0.9004,
      "step": 112
    },
    {
      "epoch": 0.25322128851540615,
      "grad_norm": 0.27446436882019043,
      "learning_rate": 0.00012696629213483147,
      "loss": 0.8432,
      "step": 113
    },
    {
      "epoch": 0.25546218487394956,
      "grad_norm": 0.27675163745880127,
      "learning_rate": 0.00012808988764044944,
      "loss": 0.8868,
      "step": 114
    },
    {
      "epoch": 0.25770308123249297,
      "grad_norm": 0.24772736430168152,
      "learning_rate": 0.00012921348314606744,
      "loss": 0.8711,
      "step": 115
    },
    {
      "epoch": 0.25994397759103643,
      "grad_norm": 0.2736036479473114,
      "learning_rate": 0.0001303370786516854,
      "loss": 0.922,
      "step": 116
    },
    {
      "epoch": 0.26218487394957984,
      "grad_norm": 0.23815171420574188,
      "learning_rate": 0.00013146067415730338,
      "loss": 0.8469,
      "step": 117
    },
    {
      "epoch": 0.26442577030812325,
      "grad_norm": 0.2564987540245056,
      "learning_rate": 0.00013258426966292135,
      "loss": 0.7626,
      "step": 118
    },
    {
      "epoch": 0.26666666666666666,
      "grad_norm": 0.26105883717536926,
      "learning_rate": 0.00013370786516853932,
      "loss": 0.7911,
      "step": 119
    },
    {
      "epoch": 0.2689075630252101,
      "grad_norm": 0.2915550768375397,
      "learning_rate": 0.00013483146067415732,
      "loss": 0.9643,
      "step": 120
    },
    {
      "epoch": 0.2711484593837535,
      "grad_norm": 0.28031301498413086,
      "learning_rate": 0.0001359550561797753,
      "loss": 0.8797,
      "step": 121
    },
    {
      "epoch": 0.2733893557422969,
      "grad_norm": 0.2468908429145813,
      "learning_rate": 0.00013707865168539326,
      "loss": 0.8497,
      "step": 122
    },
    {
      "epoch": 0.27563025210084036,
      "grad_norm": 0.2498752921819687,
      "learning_rate": 0.00013820224719101123,
      "loss": 0.842,
      "step": 123
    },
    {
      "epoch": 0.27787114845938377,
      "grad_norm": 0.2517074942588806,
      "learning_rate": 0.00013932584269662923,
      "loss": 0.8072,
      "step": 124
    },
    {
      "epoch": 0.2801120448179272,
      "grad_norm": 0.24205273389816284,
      "learning_rate": 0.0001404494382022472,
      "loss": 0.796,
      "step": 125
    },
    {
      "epoch": 0.2823529411764706,
      "grad_norm": 0.259343683719635,
      "learning_rate": 0.00014157303370786517,
      "loss": 0.8609,
      "step": 126
    },
    {
      "epoch": 0.284593837535014,
      "grad_norm": 0.2564795911312103,
      "learning_rate": 0.00014269662921348315,
      "loss": 0.8976,
      "step": 127
    },
    {
      "epoch": 0.2868347338935574,
      "grad_norm": 0.2650969922542572,
      "learning_rate": 0.00014382022471910112,
      "loss": 0.8239,
      "step": 128
    },
    {
      "epoch": 0.28907563025210087,
      "grad_norm": 0.26443368196487427,
      "learning_rate": 0.00014494382022471912,
      "loss": 0.8479,
      "step": 129
    },
    {
      "epoch": 0.2913165266106443,
      "grad_norm": 0.26897069811820984,
      "learning_rate": 0.0001460674157303371,
      "loss": 0.9104,
      "step": 130
    },
    {
      "epoch": 0.2935574229691877,
      "grad_norm": 0.2578631341457367,
      "learning_rate": 0.00014719101123595506,
      "loss": 0.8151,
      "step": 131
    },
    {
      "epoch": 0.2957983193277311,
      "grad_norm": 0.2454749345779419,
      "learning_rate": 0.00014831460674157306,
      "loss": 0.7915,
      "step": 132
    },
    {
      "epoch": 0.2980392156862745,
      "grad_norm": 0.25589731335639954,
      "learning_rate": 0.00014943820224719103,
      "loss": 0.8952,
      "step": 133
    },
    {
      "epoch": 0.3002801120448179,
      "grad_norm": 0.2591662108898163,
      "learning_rate": 0.000150561797752809,
      "loss": 0.8112,
      "step": 134
    },
    {
      "epoch": 0.3025210084033613,
      "grad_norm": 0.26816102862358093,
      "learning_rate": 0.00015168539325842697,
      "loss": 0.8893,
      "step": 135
    },
    {
      "epoch": 0.3047619047619048,
      "grad_norm": 0.24405767023563385,
      "learning_rate": 0.00015280898876404494,
      "loss": 0.877,
      "step": 136
    },
    {
      "epoch": 0.3070028011204482,
      "grad_norm": 0.2588540315628052,
      "learning_rate": 0.00015393258426966294,
      "loss": 0.7779,
      "step": 137
    },
    {
      "epoch": 0.3092436974789916,
      "grad_norm": 0.27598896622657776,
      "learning_rate": 0.0001550561797752809,
      "loss": 0.7752,
      "step": 138
    },
    {
      "epoch": 0.311484593837535,
      "grad_norm": 0.24622836709022522,
      "learning_rate": 0.00015617977528089888,
      "loss": 0.8849,
      "step": 139
    },
    {
      "epoch": 0.3137254901960784,
      "grad_norm": 0.2502545118331909,
      "learning_rate": 0.00015730337078651685,
      "loss": 0.8012,
      "step": 140
    },
    {
      "epoch": 0.31596638655462184,
      "grad_norm": 0.25384724140167236,
      "learning_rate": 0.00015842696629213485,
      "loss": 0.8395,
      "step": 141
    },
    {
      "epoch": 0.31820728291316525,
      "grad_norm": 0.2527698874473572,
      "learning_rate": 0.0001595505617977528,
      "loss": 0.831,
      "step": 142
    },
    {
      "epoch": 0.3204481792717087,
      "grad_norm": 0.24567513167858124,
      "learning_rate": 0.0001606741573033708,
      "loss": 0.7959,
      "step": 143
    },
    {
      "epoch": 0.3226890756302521,
      "grad_norm": 0.24283619225025177,
      "learning_rate": 0.00016179775280898877,
      "loss": 0.8146,
      "step": 144
    },
    {
      "epoch": 0.32492997198879553,
      "grad_norm": 0.2708129286766052,
      "learning_rate": 0.00016292134831460674,
      "loss": 0.905,
      "step": 145
    },
    {
      "epoch": 0.32717086834733894,
      "grad_norm": 0.27091729640960693,
      "learning_rate": 0.00016404494382022474,
      "loss": 0.8681,
      "step": 146
    },
    {
      "epoch": 0.32941176470588235,
      "grad_norm": 0.2502163350582123,
      "learning_rate": 0.0001651685393258427,
      "loss": 0.8859,
      "step": 147
    },
    {
      "epoch": 0.33165266106442576,
      "grad_norm": 0.23066553473472595,
      "learning_rate": 0.00016629213483146068,
      "loss": 0.9034,
      "step": 148
    },
    {
      "epoch": 0.33389355742296917,
      "grad_norm": 0.2503213882446289,
      "learning_rate": 0.00016741573033707868,
      "loss": 0.8348,
      "step": 149
    },
    {
      "epoch": 0.33613445378151263,
      "grad_norm": 0.25126519799232483,
      "learning_rate": 0.00016853932584269662,
      "loss": 0.8553,
      "step": 150
    },
    {
      "epoch": 0.33837535014005604,
      "grad_norm": 0.241397425532341,
      "learning_rate": 0.00016966292134831462,
      "loss": 0.9132,
      "step": 151
    },
    {
      "epoch": 0.34061624649859945,
      "grad_norm": 0.25682827830314636,
      "learning_rate": 0.0001707865168539326,
      "loss": 0.9481,
      "step": 152
    },
    {
      "epoch": 0.34285714285714286,
      "grad_norm": 0.24024637043476105,
      "learning_rate": 0.00017191011235955056,
      "loss": 0.943,
      "step": 153
    },
    {
      "epoch": 0.34509803921568627,
      "grad_norm": 0.2626055181026459,
      "learning_rate": 0.00017303370786516853,
      "loss": 0.7863,
      "step": 154
    },
    {
      "epoch": 0.3473389355742297,
      "grad_norm": 0.24571183323860168,
      "learning_rate": 0.00017415730337078653,
      "loss": 0.926,
      "step": 155
    },
    {
      "epoch": 0.3495798319327731,
      "grad_norm": 0.2499912977218628,
      "learning_rate": 0.0001752808988764045,
      "loss": 0.8232,
      "step": 156
    },
    {
      "epoch": 0.35182072829131655,
      "grad_norm": 0.25473934412002563,
      "learning_rate": 0.00017640449438202248,
      "loss": 0.9021,
      "step": 157
    },
    {
      "epoch": 0.35406162464985996,
      "grad_norm": 0.25837019085884094,
      "learning_rate": 0.00017752808988764045,
      "loss": 0.7753,
      "step": 158
    },
    {
      "epoch": 0.3563025210084034,
      "grad_norm": 0.255958616733551,
      "learning_rate": 0.00017865168539325842,
      "loss": 0.9155,
      "step": 159
    },
    {
      "epoch": 0.3585434173669468,
      "grad_norm": 0.24315786361694336,
      "learning_rate": 0.00017977528089887642,
      "loss": 0.8039,
      "step": 160
    },
    {
      "epoch": 0.3607843137254902,
      "grad_norm": 0.24614644050598145,
      "learning_rate": 0.0001808988764044944,
      "loss": 0.8857,
      "step": 161
    },
    {
      "epoch": 0.3630252100840336,
      "grad_norm": 0.24280671775341034,
      "learning_rate": 0.00018202247191011236,
      "loss": 0.8354,
      "step": 162
    },
    {
      "epoch": 0.365266106442577,
      "grad_norm": 0.2597411572933197,
      "learning_rate": 0.00018314606741573036,
      "loss": 0.8852,
      "step": 163
    },
    {
      "epoch": 0.3675070028011205,
      "grad_norm": 0.2597702145576477,
      "learning_rate": 0.00018426966292134833,
      "loss": 0.9029,
      "step": 164
    },
    {
      "epoch": 0.3697478991596639,
      "grad_norm": 0.23551709949970245,
      "learning_rate": 0.0001853932584269663,
      "loss": 0.9118,
      "step": 165
    },
    {
      "epoch": 0.3719887955182073,
      "grad_norm": 0.2516990303993225,
      "learning_rate": 0.00018651685393258427,
      "loss": 0.7997,
      "step": 166
    },
    {
      "epoch": 0.3742296918767507,
      "grad_norm": 0.2297232747077942,
      "learning_rate": 0.00018764044943820224,
      "loss": 0.7823,
      "step": 167
    },
    {
      "epoch": 0.3764705882352941,
      "grad_norm": 0.22287945449352264,
      "learning_rate": 0.00018876404494382024,
      "loss": 0.8609,
      "step": 168
    },
    {
      "epoch": 0.3787114845938375,
      "grad_norm": 0.2428523749113083,
      "learning_rate": 0.0001898876404494382,
      "loss": 0.8842,
      "step": 169
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 0.25949063897132874,
      "learning_rate": 0.00019101123595505618,
      "loss": 0.7737,
      "step": 170
    },
    {
      "epoch": 0.3831932773109244,
      "grad_norm": 0.2532324492931366,
      "learning_rate": 0.00019213483146067416,
      "loss": 0.7548,
      "step": 171
    },
    {
      "epoch": 0.3854341736694678,
      "grad_norm": 0.2657643258571625,
      "learning_rate": 0.00019325842696629215,
      "loss": 0.8375,
      "step": 172
    },
    {
      "epoch": 0.3876750700280112,
      "grad_norm": 0.2578238546848297,
      "learning_rate": 0.00019438202247191013,
      "loss": 0.8591,
      "step": 173
    },
    {
      "epoch": 0.3899159663865546,
      "grad_norm": 0.24303969740867615,
      "learning_rate": 0.0001955056179775281,
      "loss": 0.8131,
      "step": 174
    },
    {
      "epoch": 0.39215686274509803,
      "grad_norm": 0.271139919757843,
      "learning_rate": 0.00019662921348314607,
      "loss": 0.8234,
      "step": 175
    },
    {
      "epoch": 0.39439775910364144,
      "grad_norm": 0.2569217085838318,
      "learning_rate": 0.00019775280898876404,
      "loss": 0.8907,
      "step": 176
    },
    {
      "epoch": 0.39663865546218485,
      "grad_norm": 0.23687879741191864,
      "learning_rate": 0.00019887640449438204,
      "loss": 0.8082,
      "step": 177
    },
    {
      "epoch": 0.3988795518207283,
      "grad_norm": 0.24828100204467773,
      "learning_rate": 0.0002,
      "loss": 0.8575,
      "step": 178
    },
    {
      "epoch": 0.4011204481792717,
      "grad_norm": 0.24300478398799896,
      "learning_rate": 0.00019999980867200105,
      "loss": 0.7963,
      "step": 179
    },
    {
      "epoch": 0.40336134453781514,
      "grad_norm": 0.2662373483181,
      "learning_rate": 0.00019999923468873635,
      "loss": 0.8871,
      "step": 180
    },
    {
      "epoch": 0.40560224089635855,
      "grad_norm": 0.2600310146808624,
      "learning_rate": 0.00019999827805240226,
      "loss": 0.8477,
      "step": 181
    },
    {
      "epoch": 0.40784313725490196,
      "grad_norm": 0.26801761984825134,
      "learning_rate": 0.00019999693876665938,
      "loss": 0.8978,
      "step": 182
    },
    {
      "epoch": 0.41008403361344536,
      "grad_norm": 0.26510775089263916,
      "learning_rate": 0.00019999521683663262,
      "loss": 0.8387,
      "step": 183
    },
    {
      "epoch": 0.4123249299719888,
      "grad_norm": 0.25927454233169556,
      "learning_rate": 0.00019999311226891103,
      "loss": 0.884,
      "step": 184
    },
    {
      "epoch": 0.41456582633053224,
      "grad_norm": 0.2489653080701828,
      "learning_rate": 0.00019999062507154784,
      "loss": 0.9473,
      "step": 185
    },
    {
      "epoch": 0.41680672268907565,
      "grad_norm": 0.2461009919643402,
      "learning_rate": 0.0001999877552540605,
      "loss": 0.8609,
      "step": 186
    },
    {
      "epoch": 0.41904761904761906,
      "grad_norm": 0.24591152369976044,
      "learning_rate": 0.00019998450282743052,
      "loss": 0.9284,
      "step": 187
    },
    {
      "epoch": 0.42128851540616247,
      "grad_norm": 0.23085853457450867,
      "learning_rate": 0.00019998086780410353,
      "loss": 0.8898,
      "step": 188
    },
    {
      "epoch": 0.4235294117647059,
      "grad_norm": 0.2512655556201935,
      "learning_rate": 0.00019997685019798912,
      "loss": 0.8275,
      "step": 189
    },
    {
      "epoch": 0.4257703081232493,
      "grad_norm": 0.24720723927021027,
      "learning_rate": 0.0001999724500244609,
      "loss": 0.8809,
      "step": 190
    },
    {
      "epoch": 0.4280112044817927,
      "grad_norm": 0.25972652435302734,
      "learning_rate": 0.00019996766730035642,
      "loss": 0.891,
      "step": 191
    },
    {
      "epoch": 0.43025210084033616,
      "grad_norm": 0.26756009459495544,
      "learning_rate": 0.0001999625020439771,
      "loss": 0.923,
      "step": 192
    },
    {
      "epoch": 0.43249299719887957,
      "grad_norm": 0.24553890526294708,
      "learning_rate": 0.000199956954275088,
      "loss": 0.8222,
      "step": 193
    },
    {
      "epoch": 0.434733893557423,
      "grad_norm": 0.24937503039836884,
      "learning_rate": 0.0001999510240149181,
      "loss": 0.788,
      "step": 194
    },
    {
      "epoch": 0.4369747899159664,
      "grad_norm": 0.2749722898006439,
      "learning_rate": 0.00019994471128615985,
      "loss": 0.9383,
      "step": 195
    },
    {
      "epoch": 0.4392156862745098,
      "grad_norm": 0.25508439540863037,
      "learning_rate": 0.00019993801611296923,
      "loss": 0.8234,
      "step": 196
    },
    {
      "epoch": 0.4414565826330532,
      "grad_norm": 0.26953256130218506,
      "learning_rate": 0.00019993093852096582,
      "loss": 0.9895,
      "step": 197
    },
    {
      "epoch": 0.4436974789915966,
      "grad_norm": 0.26215213537216187,
      "learning_rate": 0.0001999234785372324,
      "loss": 0.8432,
      "step": 198
    },
    {
      "epoch": 0.4459383753501401,
      "grad_norm": 0.24869990348815918,
      "learning_rate": 0.00019991563619031508,
      "loss": 0.8786,
      "step": 199
    },
    {
      "epoch": 0.4481792717086835,
      "grad_norm": 0.23765677213668823,
      "learning_rate": 0.00019990741151022301,
      "loss": 0.9402,
      "step": 200
    },
    {
      "epoch": 0.4504201680672269,
      "grad_norm": 0.24099862575531006,
      "learning_rate": 0.00019989880452842847,
      "loss": 0.9192,
      "step": 201
    },
    {
      "epoch": 0.4526610644257703,
      "grad_norm": 0.2570018172264099,
      "learning_rate": 0.00019988981527786654,
      "loss": 0.846,
      "step": 202
    },
    {
      "epoch": 0.4549019607843137,
      "grad_norm": 0.2420913279056549,
      "learning_rate": 0.00019988044379293523,
      "loss": 0.9021,
      "step": 203
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 0.25546249747276306,
      "learning_rate": 0.00019987069010949496,
      "loss": 0.9191,
      "step": 204
    },
    {
      "epoch": 0.45938375350140054,
      "grad_norm": 0.25439539551734924,
      "learning_rate": 0.00019986055426486887,
      "loss": 0.9046,
      "step": 205
    },
    {
      "epoch": 0.461624649859944,
      "grad_norm": 0.23050928115844727,
      "learning_rate": 0.00019985003629784237,
      "loss": 0.7777,
      "step": 206
    },
    {
      "epoch": 0.4638655462184874,
      "grad_norm": 0.25798794627189636,
      "learning_rate": 0.00019983913624866304,
      "loss": 0.9232,
      "step": 207
    },
    {
      "epoch": 0.4661064425770308,
      "grad_norm": 0.2508363127708435,
      "learning_rate": 0.00019982785415904064,
      "loss": 0.9924,
      "step": 208
    },
    {
      "epoch": 0.46834733893557423,
      "grad_norm": 0.25454050302505493,
      "learning_rate": 0.00019981619007214673,
      "loss": 0.9056,
      "step": 209
    },
    {
      "epoch": 0.47058823529411764,
      "grad_norm": 0.24120093882083893,
      "learning_rate": 0.0001998041440326146,
      "loss": 0.8339,
      "step": 210
    },
    {
      "epoch": 0.47282913165266105,
      "grad_norm": 0.2521376609802246,
      "learning_rate": 0.00019979171608653924,
      "loss": 0.7907,
      "step": 211
    },
    {
      "epoch": 0.47507002801120446,
      "grad_norm": 0.2513487637042999,
      "learning_rate": 0.00019977890628147682,
      "loss": 0.9142,
      "step": 212
    },
    {
      "epoch": 0.4773109243697479,
      "grad_norm": 0.2808438241481781,
      "learning_rate": 0.00019976571466644492,
      "loss": 0.8708,
      "step": 213
    },
    {
      "epoch": 0.47955182072829133,
      "grad_norm": 0.2564331293106079,
      "learning_rate": 0.00019975214129192196,
      "loss": 0.8726,
      "step": 214
    },
    {
      "epoch": 0.48179271708683474,
      "grad_norm": 0.2591661214828491,
      "learning_rate": 0.00019973818620984738,
      "loss": 0.791,
      "step": 215
    },
    {
      "epoch": 0.48403361344537815,
      "grad_norm": 0.2555089592933655,
      "learning_rate": 0.00019972384947362101,
      "loss": 0.9129,
      "step": 216
    },
    {
      "epoch": 0.48627450980392156,
      "grad_norm": 0.2542738616466522,
      "learning_rate": 0.00019970913113810334,
      "loss": 0.8097,
      "step": 217
    },
    {
      "epoch": 0.48851540616246497,
      "grad_norm": 0.26333680748939514,
      "learning_rate": 0.0001996940312596149,
      "loss": 0.8342,
      "step": 218
    },
    {
      "epoch": 0.4907563025210084,
      "grad_norm": 0.24918504059314728,
      "learning_rate": 0.00019967854989593633,
      "loss": 0.8441,
      "step": 219
    },
    {
      "epoch": 0.49299719887955185,
      "grad_norm": 0.26157376170158386,
      "learning_rate": 0.00019966268710630797,
      "loss": 0.8891,
      "step": 220
    },
    {
      "epoch": 0.49523809523809526,
      "grad_norm": 0.277658611536026,
      "learning_rate": 0.00019964644295142968,
      "loss": 0.8862,
      "step": 221
    },
    {
      "epoch": 0.49747899159663866,
      "grad_norm": 0.2798251211643219,
      "learning_rate": 0.00019962981749346078,
      "loss": 0.8742,
      "step": 222
    },
    {
      "epoch": 0.4997198879551821,
      "grad_norm": 0.28391703963279724,
      "learning_rate": 0.00019961281079601957,
      "loss": 0.8928,
      "step": 223
    },
    {
      "epoch": 0.5019607843137255,
      "grad_norm": 0.2569884955883026,
      "learning_rate": 0.00019959542292418317,
      "loss": 0.8289,
      "step": 224
    },
    {
      "epoch": 0.5042016806722689,
      "grad_norm": 0.2689199447631836,
      "learning_rate": 0.00019957765394448728,
      "loss": 0.8002,
      "step": 225
    },
    {
      "epoch": 0.5064425770308123,
      "grad_norm": 0.2564886510372162,
      "learning_rate": 0.00019955950392492604,
      "loss": 0.8817,
      "step": 226
    },
    {
      "epoch": 0.5086834733893557,
      "grad_norm": 0.2417922168970108,
      "learning_rate": 0.00019954097293495155,
      "loss": 0.8838,
      "step": 227
    },
    {
      "epoch": 0.5109243697478991,
      "grad_norm": 0.22530929744243622,
      "learning_rate": 0.00019952206104547376,
      "loss": 0.7967,
      "step": 228
    },
    {
      "epoch": 0.5131652661064425,
      "grad_norm": 0.23360571265220642,
      "learning_rate": 0.00019950276832886017,
      "loss": 0.8364,
      "step": 229
    },
    {
      "epoch": 0.5154061624649859,
      "grad_norm": 0.2611772418022156,
      "learning_rate": 0.00019948309485893549,
      "loss": 0.8786,
      "step": 230
    },
    {
      "epoch": 0.5176470588235295,
      "grad_norm": 0.24788020551204681,
      "learning_rate": 0.00019946304071098142,
      "loss": 0.86,
      "step": 231
    },
    {
      "epoch": 0.5198879551820729,
      "grad_norm": 0.23259863257408142,
      "learning_rate": 0.00019944260596173641,
      "loss": 0.822,
      "step": 232
    },
    {
      "epoch": 0.5221288515406163,
      "grad_norm": 0.2678159773349762,
      "learning_rate": 0.0001994217906893952,
      "loss": 0.8522,
      "step": 233
    },
    {
      "epoch": 0.5243697478991597,
      "grad_norm": 0.268537312746048,
      "learning_rate": 0.00019940059497360873,
      "loss": 0.8638,
      "step": 234
    },
    {
      "epoch": 0.5266106442577031,
      "grad_norm": 0.26122671365737915,
      "learning_rate": 0.0001993790188954836,
      "loss": 0.8509,
      "step": 235
    },
    {
      "epoch": 0.5288515406162465,
      "grad_norm": 0.38401761651039124,
      "learning_rate": 0.00019935706253758207,
      "loss": 0.8184,
      "step": 236
    },
    {
      "epoch": 0.5310924369747899,
      "grad_norm": 0.2459888607263565,
      "learning_rate": 0.00019933472598392138,
      "loss": 0.8042,
      "step": 237
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 0.27041634917259216,
      "learning_rate": 0.0001993120093199737,
      "loss": 0.9426,
      "step": 238
    },
    {
      "epoch": 0.5355742296918767,
      "grad_norm": 0.2503630220890045,
      "learning_rate": 0.00019928891263266578,
      "loss": 0.901,
      "step": 239
    },
    {
      "epoch": 0.5378151260504201,
      "grad_norm": 0.2358655333518982,
      "learning_rate": 0.00019926543601037842,
      "loss": 0.8165,
      "step": 240
    },
    {
      "epoch": 0.5400560224089636,
      "grad_norm": 0.24630171060562134,
      "learning_rate": 0.00019924157954294628,
      "loss": 0.8083,
      "step": 241
    },
    {
      "epoch": 0.542296918767507,
      "grad_norm": 0.2504923939704895,
      "learning_rate": 0.00019921734332165766,
      "loss": 0.7973,
      "step": 242
    },
    {
      "epoch": 0.5445378151260504,
      "grad_norm": 0.2728249132633209,
      "learning_rate": 0.00019919272743925385,
      "loss": 0.8817,
      "step": 243
    },
    {
      "epoch": 0.5467787114845938,
      "grad_norm": 0.2694825828075409,
      "learning_rate": 0.000199167731989929,
      "loss": 0.8458,
      "step": 244
    },
    {
      "epoch": 0.5490196078431373,
      "grad_norm": 0.31109780073165894,
      "learning_rate": 0.00019914235706932972,
      "loss": 0.939,
      "step": 245
    },
    {
      "epoch": 0.5512605042016807,
      "grad_norm": 0.25847890973091125,
      "learning_rate": 0.0001991166027745547,
      "loss": 0.8704,
      "step": 246
    },
    {
      "epoch": 0.5535014005602241,
      "grad_norm": 0.2678435742855072,
      "learning_rate": 0.00019909046920415423,
      "loss": 0.7445,
      "step": 247
    },
    {
      "epoch": 0.5557422969187675,
      "grad_norm": 0.25951921939849854,
      "learning_rate": 0.00019906395645812998,
      "loss": 0.8869,
      "step": 248
    },
    {
      "epoch": 0.5579831932773109,
      "grad_norm": 0.25794005393981934,
      "learning_rate": 0.00019903706463793462,
      "loss": 0.9506,
      "step": 249
    },
    {
      "epoch": 0.5602240896358543,
      "grad_norm": 0.2340375930070877,
      "learning_rate": 0.00019900979384647127,
      "loss": 0.811,
      "step": 250
    },
    {
      "epoch": 0.5624649859943978,
      "grad_norm": 0.25867462158203125,
      "learning_rate": 0.0001989821441880933,
      "loss": 0.8621,
      "step": 251
    },
    {
      "epoch": 0.5647058823529412,
      "grad_norm": 0.2387334555387497,
      "learning_rate": 0.0001989541157686037,
      "loss": 0.8514,
      "step": 252
    },
    {
      "epoch": 0.5669467787114846,
      "grad_norm": 0.26006007194519043,
      "learning_rate": 0.00019892570869525496,
      "loss": 0.8946,
      "step": 253
    },
    {
      "epoch": 0.569187675070028,
      "grad_norm": 0.2735587954521179,
      "learning_rate": 0.00019889692307674845,
      "loss": 0.8605,
      "step": 254
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.28292015194892883,
      "learning_rate": 0.00019886775902323405,
      "loss": 1.0231,
      "step": 255
    },
    {
      "epoch": 0.5736694677871148,
      "grad_norm": 0.2500753402709961,
      "learning_rate": 0.00019883821664630976,
      "loss": 0.9155,
      "step": 256
    },
    {
      "epoch": 0.5759103641456582,
      "grad_norm": 0.23072321712970734,
      "learning_rate": 0.00019880829605902126,
      "loss": 0.8268,
      "step": 257
    },
    {
      "epoch": 0.5781512605042017,
      "grad_norm": 0.25353720784187317,
      "learning_rate": 0.0001987779973758615,
      "loss": 0.9315,
      "step": 258
    },
    {
      "epoch": 0.5803921568627451,
      "grad_norm": 0.24684756994247437,
      "learning_rate": 0.00019874732071277013,
      "loss": 0.8652,
      "step": 259
    },
    {
      "epoch": 0.5826330532212886,
      "grad_norm": 0.2583218812942505,
      "learning_rate": 0.0001987162661871333,
      "loss": 0.7722,
      "step": 260
    },
    {
      "epoch": 0.584873949579832,
      "grad_norm": 0.23789426684379578,
      "learning_rate": 0.00019868483391778302,
      "loss": 0.9084,
      "step": 261
    },
    {
      "epoch": 0.5871148459383754,
      "grad_norm": 0.24503661692142487,
      "learning_rate": 0.00019865302402499678,
      "loss": 0.8683,
      "step": 262
    },
    {
      "epoch": 0.5893557422969188,
      "grad_norm": 0.2724620997905731,
      "learning_rate": 0.00019862083663049694,
      "loss": 0.8323,
      "step": 263
    },
    {
      "epoch": 0.5915966386554622,
      "grad_norm": 0.27704504132270813,
      "learning_rate": 0.0001985882718574506,
      "loss": 0.9132,
      "step": 264
    },
    {
      "epoch": 0.5938375350140056,
      "grad_norm": 0.2760598063468933,
      "learning_rate": 0.00019855532983046876,
      "loss": 0.7416,
      "step": 265
    },
    {
      "epoch": 0.596078431372549,
      "grad_norm": 0.26945289969444275,
      "learning_rate": 0.00019852201067560606,
      "loss": 0.965,
      "step": 266
    },
    {
      "epoch": 0.5983193277310924,
      "grad_norm": 0.2512185573577881,
      "learning_rate": 0.0001984883145203603,
      "loss": 0.8582,
      "step": 267
    },
    {
      "epoch": 0.6005602240896358,
      "grad_norm": 0.24201013147830963,
      "learning_rate": 0.00019845424149367177,
      "loss": 0.8433,
      "step": 268
    },
    {
      "epoch": 0.6028011204481792,
      "grad_norm": 0.24099834263324738,
      "learning_rate": 0.000198419791725923,
      "loss": 0.9501,
      "step": 269
    },
    {
      "epoch": 0.6050420168067226,
      "grad_norm": 0.24521470069885254,
      "learning_rate": 0.00019838496534893806,
      "loss": 0.8458,
      "step": 270
    },
    {
      "epoch": 0.6072829131652661,
      "grad_norm": 0.2377120554447174,
      "learning_rate": 0.00019834976249598221,
      "loss": 0.8934,
      "step": 271
    },
    {
      "epoch": 0.6095238095238096,
      "grad_norm": 0.2444024235010147,
      "learning_rate": 0.00019831418330176125,
      "loss": 0.8326,
      "step": 272
    },
    {
      "epoch": 0.611764705882353,
      "grad_norm": 0.23458163440227509,
      "learning_rate": 0.0001982782279024211,
      "loss": 0.8743,
      "step": 273
    },
    {
      "epoch": 0.6140056022408964,
      "grad_norm": 0.2571447491645813,
      "learning_rate": 0.00019824189643554725,
      "loss": 0.8265,
      "step": 274
    },
    {
      "epoch": 0.6162464985994398,
      "grad_norm": 0.25711604952812195,
      "learning_rate": 0.00019820518904016426,
      "loss": 0.8418,
      "step": 275
    },
    {
      "epoch": 0.6184873949579832,
      "grad_norm": 0.26878827810287476,
      "learning_rate": 0.00019816810585673514,
      "loss": 0.8007,
      "step": 276
    },
    {
      "epoch": 0.6207282913165266,
      "grad_norm": 0.2555851638317108,
      "learning_rate": 0.00019813064702716094,
      "loss": 0.8536,
      "step": 277
    },
    {
      "epoch": 0.62296918767507,
      "grad_norm": 0.2718588411808014,
      "learning_rate": 0.00019809281269478012,
      "loss": 0.8884,
      "step": 278
    },
    {
      "epoch": 0.6252100840336134,
      "grad_norm": 0.25706928968429565,
      "learning_rate": 0.00019805460300436803,
      "loss": 0.8729,
      "step": 279
    },
    {
      "epoch": 0.6274509803921569,
      "grad_norm": 0.23707084357738495,
      "learning_rate": 0.00019801601810213635,
      "loss": 0.8268,
      "step": 280
    },
    {
      "epoch": 0.6296918767507003,
      "grad_norm": 0.24094390869140625,
      "learning_rate": 0.00019797705813573245,
      "loss": 0.8457,
      "step": 281
    },
    {
      "epoch": 0.6319327731092437,
      "grad_norm": 0.24794656038284302,
      "learning_rate": 0.00019793772325423908,
      "loss": 0.8495,
      "step": 282
    },
    {
      "epoch": 0.6341736694677871,
      "grad_norm": 0.2355436086654663,
      "learning_rate": 0.00019789801360817346,
      "loss": 0.8167,
      "step": 283
    },
    {
      "epoch": 0.6364145658263305,
      "grad_norm": 0.24097253382205963,
      "learning_rate": 0.00019785792934948695,
      "loss": 0.8887,
      "step": 284
    },
    {
      "epoch": 0.6386554621848739,
      "grad_norm": 0.23715715110301971,
      "learning_rate": 0.00019781747063156435,
      "loss": 0.8205,
      "step": 285
    },
    {
      "epoch": 0.6408963585434174,
      "grad_norm": 0.2627374529838562,
      "learning_rate": 0.00019777663760922343,
      "loss": 0.9175,
      "step": 286
    },
    {
      "epoch": 0.6431372549019608,
      "grad_norm": 0.25734400749206543,
      "learning_rate": 0.00019773543043871412,
      "loss": 0.8104,
      "step": 287
    },
    {
      "epoch": 0.6453781512605042,
      "grad_norm": 0.2477787584066391,
      "learning_rate": 0.0001976938492777182,
      "loss": 0.9969,
      "step": 288
    },
    {
      "epoch": 0.6476190476190476,
      "grad_norm": 0.2440386414527893,
      "learning_rate": 0.0001976518942853484,
      "loss": 0.8084,
      "step": 289
    },
    {
      "epoch": 0.6498599439775911,
      "grad_norm": 0.2703987956047058,
      "learning_rate": 0.00019760956562214806,
      "loss": 0.7827,
      "step": 290
    },
    {
      "epoch": 0.6521008403361345,
      "grad_norm": 0.29450032114982605,
      "learning_rate": 0.0001975668634500904,
      "loss": 0.8807,
      "step": 291
    },
    {
      "epoch": 0.6543417366946779,
      "grad_norm": 0.27439677715301514,
      "learning_rate": 0.00019752378793257776,
      "loss": 0.8152,
      "step": 292
    },
    {
      "epoch": 0.6565826330532213,
      "grad_norm": 0.2940295338630676,
      "learning_rate": 0.00019748033923444122,
      "loss": 0.9051,
      "step": 293
    },
    {
      "epoch": 0.6588235294117647,
      "grad_norm": 0.2529405355453491,
      "learning_rate": 0.00019743651752193982,
      "loss": 0.7429,
      "step": 294
    },
    {
      "epoch": 0.6610644257703081,
      "grad_norm": 0.26964515447616577,
      "learning_rate": 0.00019739232296276003,
      "loss": 0.8584,
      "step": 295
    },
    {
      "epoch": 0.6633053221288515,
      "grad_norm": 0.24733929336071014,
      "learning_rate": 0.00019734775572601487,
      "loss": 0.8465,
      "step": 296
    },
    {
      "epoch": 0.6655462184873949,
      "grad_norm": 0.2559841275215149,
      "learning_rate": 0.00019730281598224364,
      "loss": 0.883,
      "step": 297
    },
    {
      "epoch": 0.6677871148459383,
      "grad_norm": 0.24375739693641663,
      "learning_rate": 0.00019725750390341094,
      "loss": 0.8042,
      "step": 298
    },
    {
      "epoch": 0.6700280112044817,
      "grad_norm": 0.25603991746902466,
      "learning_rate": 0.00019721181966290613,
      "loss": 0.9363,
      "step": 299
    },
    {
      "epoch": 0.6722689075630253,
      "grad_norm": 0.2391873151063919,
      "learning_rate": 0.00019716576343554274,
      "loss": 0.9364,
      "step": 300
    },
    {
      "epoch": 0.6745098039215687,
      "grad_norm": 0.24241115152835846,
      "learning_rate": 0.00019711933539755765,
      "loss": 0.8168,
      "step": 301
    },
    {
      "epoch": 0.6767507002801121,
      "grad_norm": 0.2441554218530655,
      "learning_rate": 0.00019707253572661055,
      "loss": 0.9179,
      "step": 302
    },
    {
      "epoch": 0.6789915966386555,
      "grad_norm": 0.2645583748817444,
      "learning_rate": 0.00019702536460178318,
      "loss": 0.7971,
      "step": 303
    },
    {
      "epoch": 0.6812324929971989,
      "grad_norm": 0.2557383179664612,
      "learning_rate": 0.0001969778222035787,
      "loss": 0.9112,
      "step": 304
    },
    {
      "epoch": 0.6834733893557423,
      "grad_norm": 0.25000718235969543,
      "learning_rate": 0.0001969299087139209,
      "loss": 0.7842,
      "step": 305
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 0.23475897312164307,
      "learning_rate": 0.00019688162431615367,
      "loss": 0.7806,
      "step": 306
    },
    {
      "epoch": 0.6879551820728291,
      "grad_norm": 0.2505607306957245,
      "learning_rate": 0.00019683296919504012,
      "loss": 0.922,
      "step": 307
    },
    {
      "epoch": 0.6901960784313725,
      "grad_norm": 0.2522655725479126,
      "learning_rate": 0.00019678394353676203,
      "loss": 0.7934,
      "step": 308
    },
    {
      "epoch": 0.692436974789916,
      "grad_norm": 0.2722029387950897,
      "learning_rate": 0.000196734547528919,
      "loss": 0.8775,
      "step": 309
    },
    {
      "epoch": 0.6946778711484594,
      "grad_norm": 0.26442402601242065,
      "learning_rate": 0.00019668478136052774,
      "loss": 0.8696,
      "step": 310
    },
    {
      "epoch": 0.6969187675070028,
      "grad_norm": 0.26388052105903625,
      "learning_rate": 0.00019663464522202162,
      "loss": 0.8569,
      "step": 311
    },
    {
      "epoch": 0.6991596638655462,
      "grad_norm": 0.26141613721847534,
      "learning_rate": 0.00019658413930524952,
      "loss": 0.8485,
      "step": 312
    },
    {
      "epoch": 0.7014005602240896,
      "grad_norm": 0.23456323146820068,
      "learning_rate": 0.00019653326380347533,
      "loss": 0.8913,
      "step": 313
    },
    {
      "epoch": 0.7036414565826331,
      "grad_norm": 0.24977454543113708,
      "learning_rate": 0.00019648201891137723,
      "loss": 0.8729,
      "step": 314
    },
    {
      "epoch": 0.7058823529411765,
      "grad_norm": 0.22706039249897003,
      "learning_rate": 0.0001964304048250469,
      "loss": 0.7885,
      "step": 315
    },
    {
      "epoch": 0.7081232492997199,
      "grad_norm": 0.24601607024669647,
      "learning_rate": 0.00019637842174198867,
      "loss": 0.8387,
      "step": 316
    },
    {
      "epoch": 0.7103641456582633,
      "grad_norm": 0.2545998990535736,
      "learning_rate": 0.000196326069861119,
      "loss": 0.8114,
      "step": 317
    },
    {
      "epoch": 0.7126050420168067,
      "grad_norm": 0.24375082552433014,
      "learning_rate": 0.00019627334938276546,
      "loss": 0.843,
      "step": 318
    },
    {
      "epoch": 0.7148459383753502,
      "grad_norm": 0.2519029378890991,
      "learning_rate": 0.00019622026050866614,
      "loss": 0.9088,
      "step": 319
    },
    {
      "epoch": 0.7170868347338936,
      "grad_norm": 0.27405208349227905,
      "learning_rate": 0.0001961668034419688,
      "loss": 0.958,
      "step": 320
    },
    {
      "epoch": 0.719327731092437,
      "grad_norm": 0.25491318106651306,
      "learning_rate": 0.0001961129783872301,
      "loss": 0.7692,
      "step": 321
    },
    {
      "epoch": 0.7215686274509804,
      "grad_norm": 0.250347375869751,
      "learning_rate": 0.00019605878555041485,
      "loss": 0.8321,
      "step": 322
    },
    {
      "epoch": 0.7238095238095238,
      "grad_norm": 0.24250715970993042,
      "learning_rate": 0.00019600422513889516,
      "loss": 0.7832,
      "step": 323
    },
    {
      "epoch": 0.7260504201680672,
      "grad_norm": 0.2599019706249237,
      "learning_rate": 0.00019594929736144976,
      "loss": 0.8736,
      "step": 324
    },
    {
      "epoch": 0.7282913165266106,
      "grad_norm": 0.25766611099243164,
      "learning_rate": 0.00019589400242826305,
      "loss": 0.7419,
      "step": 325
    },
    {
      "epoch": 0.730532212885154,
      "grad_norm": 0.254658967256546,
      "learning_rate": 0.00019583834055092445,
      "loss": 0.9058,
      "step": 326
    },
    {
      "epoch": 0.7327731092436974,
      "grad_norm": 0.2731349468231201,
      "learning_rate": 0.00019578231194242743,
      "loss": 0.9683,
      "step": 327
    },
    {
      "epoch": 0.735014005602241,
      "grad_norm": 0.22153563797473907,
      "learning_rate": 0.00019572591681716887,
      "loss": 0.8146,
      "step": 328
    },
    {
      "epoch": 0.7372549019607844,
      "grad_norm": 0.24707463383674622,
      "learning_rate": 0.00019566915539094803,
      "loss": 0.8204,
      "step": 329
    },
    {
      "epoch": 0.7394957983193278,
      "grad_norm": 0.2541884779930115,
      "learning_rate": 0.00019561202788096597,
      "loss": 0.7638,
      "step": 330
    },
    {
      "epoch": 0.7417366946778712,
      "grad_norm": 0.2314218282699585,
      "learning_rate": 0.00019555453450582452,
      "loss": 0.7502,
      "step": 331
    },
    {
      "epoch": 0.7439775910364146,
      "grad_norm": 0.24334260821342468,
      "learning_rate": 0.00019549667548552556,
      "loss": 0.912,
      "step": 332
    },
    {
      "epoch": 0.746218487394958,
      "grad_norm": 0.24473878741264343,
      "learning_rate": 0.00019543845104147,
      "loss": 0.7558,
      "step": 333
    },
    {
      "epoch": 0.7484593837535014,
      "grad_norm": 0.25403597950935364,
      "learning_rate": 0.00019537986139645726,
      "loss": 0.8406,
      "step": 334
    },
    {
      "epoch": 0.7507002801120448,
      "grad_norm": 0.24638508260250092,
      "learning_rate": 0.0001953209067746841,
      "loss": 0.736,
      "step": 335
    },
    {
      "epoch": 0.7529411764705882,
      "grad_norm": 0.25604501366615295,
      "learning_rate": 0.00019526158740174393,
      "loss": 0.867,
      "step": 336
    },
    {
      "epoch": 0.7551820728291316,
      "grad_norm": 0.2617231011390686,
      "learning_rate": 0.00019520190350462584,
      "loss": 0.8654,
      "step": 337
    },
    {
      "epoch": 0.757422969187675,
      "grad_norm": 0.2620690166950226,
      "learning_rate": 0.0001951418553117139,
      "loss": 0.8428,
      "step": 338
    },
    {
      "epoch": 0.7596638655462185,
      "grad_norm": 0.25454917550086975,
      "learning_rate": 0.0001950814430527861,
      "loss": 0.8676,
      "step": 339
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 0.26449549198150635,
      "learning_rate": 0.00019502066695901358,
      "loss": 0.8709,
      "step": 340
    },
    {
      "epoch": 0.7641456582633053,
      "grad_norm": 0.24709953367710114,
      "learning_rate": 0.0001949595272629597,
      "loss": 0.8312,
      "step": 341
    },
    {
      "epoch": 0.7663865546218488,
      "grad_norm": 0.2593318819999695,
      "learning_rate": 0.00019489802419857917,
      "loss": 0.8274,
      "step": 342
    },
    {
      "epoch": 0.7686274509803922,
      "grad_norm": 0.2448015958070755,
      "learning_rate": 0.00019483615800121716,
      "loss": 0.8226,
      "step": 343
    },
    {
      "epoch": 0.7708683473389356,
      "grad_norm": 0.26445460319519043,
      "learning_rate": 0.00019477392890760839,
      "loss": 0.8492,
      "step": 344
    },
    {
      "epoch": 0.773109243697479,
      "grad_norm": 0.2573208808898926,
      "learning_rate": 0.00019471133715587622,
      "loss": 0.8294,
      "step": 345
    },
    {
      "epoch": 0.7753501400560224,
      "grad_norm": 0.26753151416778564,
      "learning_rate": 0.00019464838298553173,
      "loss": 0.8446,
      "step": 346
    },
    {
      "epoch": 0.7775910364145658,
      "grad_norm": 0.2667967975139618,
      "learning_rate": 0.00019458506663747285,
      "loss": 0.8388,
      "step": 347
    },
    {
      "epoch": 0.7798319327731092,
      "grad_norm": 0.25934740900993347,
      "learning_rate": 0.00019452138835398332,
      "loss": 0.8149,
      "step": 348
    },
    {
      "epoch": 0.7820728291316527,
      "grad_norm": 0.2525850832462311,
      "learning_rate": 0.00019445734837873202,
      "loss": 0.8325,
      "step": 349
    },
    {
      "epoch": 0.7843137254901961,
      "grad_norm": 0.2837854325771332,
      "learning_rate": 0.00019439294695677167,
      "loss": 0.8795,
      "step": 350
    },
    {
      "epoch": 0.7865546218487395,
      "grad_norm": 0.26006975769996643,
      "learning_rate": 0.00019432818433453818,
      "loss": 0.8267,
      "step": 351
    },
    {
      "epoch": 0.7887955182072829,
      "grad_norm": 0.24872632324695587,
      "learning_rate": 0.00019426306075984965,
      "loss": 0.8553,
      "step": 352
    },
    {
      "epoch": 0.7910364145658263,
      "grad_norm": 0.23633168637752533,
      "learning_rate": 0.00019419757648190533,
      "loss": 0.8544,
      "step": 353
    },
    {
      "epoch": 0.7932773109243697,
      "grad_norm": 0.2425261288881302,
      "learning_rate": 0.00019413173175128473,
      "loss": 0.8645,
      "step": 354
    },
    {
      "epoch": 0.7955182072829131,
      "grad_norm": 0.23861053586006165,
      "learning_rate": 0.00019406552681994663,
      "loss": 0.8363,
      "step": 355
    },
    {
      "epoch": 0.7977591036414566,
      "grad_norm": 0.24729354679584503,
      "learning_rate": 0.00019399896194122822,
      "loss": 0.8401,
      "step": 356
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.23853589594364166,
      "learning_rate": 0.000193932037369844,
      "loss": 0.8237,
      "step": 357
    },
    {
      "epoch": 0.8022408963585435,
      "grad_norm": 0.2654939293861389,
      "learning_rate": 0.00019386475336188484,
      "loss": 0.7491,
      "step": 358
    },
    {
      "epoch": 0.8044817927170869,
      "grad_norm": 0.27639004588127136,
      "learning_rate": 0.000193797110174817,
      "loss": 0.8755,
      "step": 359
    },
    {
      "epoch": 0.8067226890756303,
      "grad_norm": 0.23675435781478882,
      "learning_rate": 0.00019372910806748125,
      "loss": 0.8292,
      "step": 360
    },
    {
      "epoch": 0.8089635854341737,
      "grad_norm": 0.2371446043252945,
      "learning_rate": 0.0001936607473000917,
      "loss": 0.809,
      "step": 361
    },
    {
      "epoch": 0.8112044817927171,
      "grad_norm": 0.2529555559158325,
      "learning_rate": 0.0001935920281342349,
      "loss": 0.8403,
      "step": 362
    },
    {
      "epoch": 0.8134453781512605,
      "grad_norm": 0.2568047344684601,
      "learning_rate": 0.00019352295083286896,
      "loss": 0.7848,
      "step": 363
    },
    {
      "epoch": 0.8156862745098039,
      "grad_norm": 0.2613024413585663,
      "learning_rate": 0.0001934535156603222,
      "loss": 0.844,
      "step": 364
    },
    {
      "epoch": 0.8179271708683473,
      "grad_norm": 0.24079741537570953,
      "learning_rate": 0.0001933837228822925,
      "loss": 0.8029,
      "step": 365
    },
    {
      "epoch": 0.8201680672268907,
      "grad_norm": 0.24890519678592682,
      "learning_rate": 0.0001933135727658462,
      "loss": 0.8712,
      "step": 366
    },
    {
      "epoch": 0.8224089635854341,
      "grad_norm": 0.25198838114738464,
      "learning_rate": 0.00019324306557941682,
      "loss": 0.8368,
      "step": 367
    },
    {
      "epoch": 0.8246498599439775,
|
"grad_norm": 0.2470208704471588, |
|
"learning_rate": 0.0001931722015928044, |
|
"loss": 0.8896, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.826890756302521, |
|
"grad_norm": 0.25046589970588684, |
|
"learning_rate": 0.00019310098107717418, |
|
"loss": 0.7602, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.8291316526610645, |
|
"grad_norm": 0.28005844354629517, |
|
"learning_rate": 0.0001930294043050558, |
|
"loss": 0.9443, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.8313725490196079, |
|
"grad_norm": 0.6102829575538635, |
|
"learning_rate": 0.00019295747155034202, |
|
"loss": 0.911, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.8336134453781513, |
|
"grad_norm": 0.2472289353609085, |
|
"learning_rate": 0.0001928851830882879, |
|
"loss": 0.8271, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.8358543417366947, |
|
"grad_norm": 0.24470177292823792, |
|
"learning_rate": 0.0001928125391955095, |
|
"loss": 0.7837, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.8380952380952381, |
|
"grad_norm": 0.2458019107580185, |
|
"learning_rate": 0.00019273954014998308, |
|
"loss": 0.8496, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.8403361344537815, |
|
"grad_norm": 0.249577596783638, |
|
"learning_rate": 0.00019266618623104385, |
|
"loss": 0.7729, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.8425770308123249, |
|
"grad_norm": 0.2461290806531906, |
|
"learning_rate": 0.000192592477719385, |
|
"loss": 0.7955, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.8448179271708683, |
|
"grad_norm": 0.27021750807762146, |
|
"learning_rate": 0.00019251841489705655, |
|
"loss": 0.9384, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.8470588235294118, |
|
"grad_norm": 0.2547335624694824, |
|
"learning_rate": 0.00019244399804746435, |
|
"loss": 0.8254, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.8492997198879552, |
|
"grad_norm": 0.2620961368083954, |
|
"learning_rate": 0.0001923692274553689, |
|
"loss": 0.8587, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.8515406162464986, |
|
"grad_norm": 0.25756919384002686, |
|
"learning_rate": 0.0001922941034068844, |
|
"loss": 0.7906, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.853781512605042, |
|
"grad_norm": 0.25694355368614197, |
|
"learning_rate": 0.0001922186261894775, |
|
"loss": 0.7887, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.8560224089635854, |
|
"grad_norm": 0.2538197636604309, |
|
"learning_rate": 0.0001921427960919663, |
|
"loss": 0.8861, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.8582633053221288, |
|
"grad_norm": 0.2463122308254242, |
|
"learning_rate": 0.00019206661340451925, |
|
"loss": 0.8175, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.8605042016806723, |
|
"grad_norm": 0.23905646800994873, |
|
"learning_rate": 0.00019199007841865396, |
|
"loss": 0.7914, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.8627450980392157, |
|
"grad_norm": 0.2590409517288208, |
|
"learning_rate": 0.0001919131914272361, |
|
"loss": 0.8385, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.8649859943977591, |
|
"grad_norm": 0.2520500719547272, |
|
"learning_rate": 0.00019183595272447842, |
|
"loss": 0.7961, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.8672268907563025, |
|
"grad_norm": 0.2568177878856659, |
|
"learning_rate": 0.00019175836260593938, |
|
"loss": 0.8051, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.869467787114846, |
|
"grad_norm": 0.27684271335601807, |
|
"learning_rate": 0.00019168042136852228, |
|
"loss": 0.8953, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.8717086834733894, |
|
"grad_norm": 0.26281246542930603, |
|
"learning_rate": 0.0001916021293104739, |
|
"loss": 0.8602, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.8739495798319328, |
|
"grad_norm": 0.29199978709220886, |
|
"learning_rate": 0.00019152348673138353, |
|
"loss": 0.9776, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.8761904761904762, |
|
"grad_norm": 0.25914543867111206, |
|
"learning_rate": 0.0001914444939321817, |
|
"loss": 0.8394, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.8784313725490196, |
|
"grad_norm": 0.2472960352897644, |
|
"learning_rate": 0.0001913651512151391, |
|
"loss": 0.7995, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.880672268907563, |
|
"grad_norm": 0.24599507451057434, |
|
"learning_rate": 0.00019128545888386536, |
|
"loss": 0.7801, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.8829131652661064, |
|
"grad_norm": 0.23776014149188995, |
|
"learning_rate": 0.00019120541724330803, |
|
"loss": 0.8068, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.8851540616246498, |
|
"grad_norm": 0.27298274636268616, |
|
"learning_rate": 0.0001911250265997512, |
|
"loss": 0.9139, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.8873949579831932, |
|
"grad_norm": 0.2653828561306, |
|
"learning_rate": 0.0001910442872608145, |
|
"loss": 0.8917, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.8896358543417366, |
|
"grad_norm": 0.25794684886932373, |
|
"learning_rate": 0.00019096319953545185, |
|
"loss": 0.819, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.8918767507002802, |
|
"grad_norm": 0.24696165323257446, |
|
"learning_rate": 0.0001908817637339503, |
|
"loss": 0.7535, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.8941176470588236, |
|
"grad_norm": 0.2529006898403168, |
|
"learning_rate": 0.00019079998016792885, |
|
"loss": 0.7379, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.896358543417367, |
|
"grad_norm": 0.24874147772789001, |
|
"learning_rate": 0.00019071784915033717, |
|
"loss": 0.8973, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.8985994397759104, |
|
"grad_norm": 0.27747422456741333, |
|
"learning_rate": 0.00019063537099545455, |
|
"loss": 0.8685, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.9008403361344538, |
|
"grad_norm": 0.25184211134910583, |
|
"learning_rate": 0.00019055254601888866, |
|
"loss": 0.7937, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.9030812324929972, |
|
"grad_norm": 0.2565883994102478, |
|
"learning_rate": 0.00019046937453757413, |
|
"loss": 0.8677, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.9053221288515406, |
|
"grad_norm": 0.24833756685256958, |
|
"learning_rate": 0.00019038585686977167, |
|
"loss": 0.8777, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.907563025210084, |
|
"grad_norm": 0.24929295480251312, |
|
"learning_rate": 0.00019030199333506666, |
|
"loss": 0.8167, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.9098039215686274, |
|
"grad_norm": 0.24500809609889984, |
|
"learning_rate": 0.00019021778425436795, |
|
"loss": 0.8675, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.9120448179271708, |
|
"grad_norm": 0.25895681977272034, |
|
"learning_rate": 0.0001901332299499066, |
|
"loss": 0.8718, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.9142857142857143, |
|
"grad_norm": 0.22836971282958984, |
|
"learning_rate": 0.00019004833074523478, |
|
"loss": 0.8602, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.9165266106442577, |
|
"grad_norm": 0.26318010687828064, |
|
"learning_rate": 0.00018996308696522433, |
|
"loss": 0.838, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.9187675070028011, |
|
"grad_norm": 0.24389663338661194, |
|
"learning_rate": 0.00018987749893606575, |
|
"loss": 0.7798, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.9210084033613445, |
|
"grad_norm": 0.26448702812194824, |
|
"learning_rate": 0.0001897915669852667, |
|
"loss": 0.8053, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.923249299719888, |
|
"grad_norm": 0.25245535373687744, |
|
"learning_rate": 0.000189705291441651, |
|
"loss": 0.7915, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.9254901960784314, |
|
"grad_norm": 0.25623124837875366, |
|
"learning_rate": 0.00018961867263535715, |
|
"loss": 0.9167, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.9277310924369748, |
|
"grad_norm": 0.25271835923194885, |
|
"learning_rate": 0.00018953171089783723, |
|
"loss": 0.8663, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.9299719887955182, |
|
"grad_norm": 0.24757803976535797, |
|
"learning_rate": 0.00018944440656185556, |
|
"loss": 0.8411, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.9322128851540616, |
|
"grad_norm": 0.23879915475845337, |
|
"learning_rate": 0.00018935675996148738, |
|
"loss": 0.8071, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.934453781512605, |
|
"grad_norm": 0.3934721350669861, |
|
"learning_rate": 0.0001892687714321177, |
|
"loss": 0.7911, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.9366946778711485, |
|
"grad_norm": 0.27968111634254456, |
|
"learning_rate": 0.00018918044131043985, |
|
"loss": 0.8452, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.9389355742296919, |
|
"grad_norm": 0.2701101303100586, |
|
"learning_rate": 0.00018909176993445442, |
|
"loss": 0.8723, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.9411764705882353, |
|
"grad_norm": 0.2606533169746399, |
|
"learning_rate": 0.00018900275764346768, |
|
"loss": 0.7908, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.9434173669467787, |
|
"grad_norm": 0.2672193944454193, |
|
"learning_rate": 0.00018891340477809055, |
|
"loss": 0.9491, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.9456582633053221, |
|
"grad_norm": 0.2676648795604706, |
|
"learning_rate": 0.00018882371168023706, |
|
"loss": 0.8352, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.9478991596638655, |
|
"grad_norm": 0.265023410320282, |
|
"learning_rate": 0.0001887336786931233, |
|
"loss": 0.7894, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.9501400560224089, |
|
"grad_norm": 0.24014434218406677, |
|
"learning_rate": 0.00018864330616126586, |
|
"loss": 0.8394, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.9523809523809523, |
|
"grad_norm": 0.24389366805553436, |
|
"learning_rate": 0.00018855259443048067, |
|
"loss": 0.7857, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.9546218487394958, |
|
"grad_norm": 0.2471270114183426, |
|
"learning_rate": 0.00018846154384788162, |
|
"loss": 0.8576, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.9568627450980393, |
|
"grad_norm": 0.2577723264694214, |
|
"learning_rate": 0.00018837015476187916, |
|
"loss": 0.8377, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.9591036414565827, |
|
"grad_norm": 0.3224787414073944, |
|
"learning_rate": 0.00018827842752217917, |
|
"loss": 0.8801, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.9613445378151261, |
|
"grad_norm": 0.25494757294654846, |
|
"learning_rate": 0.00018818636247978145, |
|
"loss": 0.8173, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.9635854341736695, |
|
"grad_norm": 0.24220331013202667, |
|
"learning_rate": 0.00018809395998697833, |
|
"loss": 0.7747, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.9658263305322129, |
|
"grad_norm": 0.2741996645927429, |
|
"learning_rate": 0.00018800122039735358, |
|
"loss": 0.8636, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.9680672268907563, |
|
"grad_norm": 0.2558667063713074, |
|
"learning_rate": 0.0001879081440657807, |
|
"loss": 0.8456, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.9703081232492997, |
|
"grad_norm": 0.25905126333236694, |
|
"learning_rate": 0.00018781473134842197, |
|
"loss": 0.7961, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.9725490196078431, |
|
"grad_norm": 0.23688547313213348, |
|
"learning_rate": 0.00018772098260272667, |
|
"loss": 0.7555, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.9747899159663865, |
|
"grad_norm": 0.2701859176158905, |
|
"learning_rate": 0.00018762689818743007, |
|
"loss": 0.9353, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.9770308123249299, |
|
"grad_norm": 0.24914319813251495, |
|
"learning_rate": 0.00018753247846255174, |
|
"loss": 0.7678, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.9792717086834734, |
|
"grad_norm": 0.26182475686073303, |
|
"learning_rate": 0.00018743772378939448, |
|
"loss": 0.8374, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.9815126050420168, |
|
"grad_norm": 0.2661891281604767, |
|
"learning_rate": 0.00018734263453054273, |
|
"loss": 0.8816, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.9837535014005602, |
|
"grad_norm": 0.24797801673412323, |
|
"learning_rate": 0.0001872472110498612, |
|
"loss": 0.8069, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.9859943977591037, |
|
"grad_norm": 0.24370808899402618, |
|
"learning_rate": 0.0001871514537124936, |
|
"loss": 0.7405, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.9882352941176471, |
|
"grad_norm": 0.2744685709476471, |
|
"learning_rate": 0.00018705536288486118, |
|
"loss": 0.7706, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.9904761904761905, |
|
"grad_norm": 0.265438437461853, |
|
"learning_rate": 0.0001869589389346611, |
|
"loss": 0.8649, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.9927170868347339, |
|
"grad_norm": 0.24661187827587128, |
|
"learning_rate": 0.0001868621822308655, |
|
"loss": 0.8138, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.9949579831932773, |
|
"grad_norm": 0.2580495774745941, |
|
"learning_rate": 0.00018676509314371974, |
|
"loss": 0.7765, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.9971988795518207, |
|
"grad_norm": 0.2546556293964386, |
|
"learning_rate": 0.00018666767204474094, |
|
"loss": 0.8873, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.9994397759103641, |
|
"grad_norm": 0.25944411754608154, |
|
"learning_rate": 0.00018656991930671686, |
|
"loss": 0.8651, |
|
"step": 446 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1784, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 4, |
|
"save_steps": 446, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": false |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 2.310622116052992e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|