{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.3260780957039211,
  "eval_steps": 500,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00032607809570392107,
      "grad_norm": 0.0851932093501091,
      "learning_rate": 0.00019999950652018584,
      "loss": 1.0001,
      "step": 1
    },
    {
      "epoch": 0.0006521561914078421,
      "grad_norm": 0.09610063582658768,
      "learning_rate": 0.0001999980260856137,
      "loss": 0.8702,
      "step": 2
    },
    {
      "epoch": 0.0009782342871117633,
      "grad_norm": 0.14597254991531372,
      "learning_rate": 0.000199995558710895,
      "loss": 1.0361,
      "step": 3
    },
    {
      "epoch": 0.0013043123828156843,
      "grad_norm": 0.2123030424118042,
      "learning_rate": 0.00019999210442038162,
      "loss": 0.9474,
      "step": 4
    },
    {
      "epoch": 0.0016303904785196055,
      "grad_norm": 0.2290404736995697,
      "learning_rate": 0.00019998766324816607,
      "loss": 0.859,
      "step": 5
    },
    {
      "epoch": 0.0019564685742235266,
      "grad_norm": 0.20041389763355255,
      "learning_rate": 0.0001999822352380809,
      "loss": 0.8485,
      "step": 6
    },
    {
      "epoch": 0.0022825466699274476,
      "grad_norm": 0.32096758484840393,
      "learning_rate": 0.00019997582044369843,
      "loss": 0.9402,
      "step": 7
    },
    {
      "epoch": 0.0026086247656313686,
      "grad_norm": 0.16999490559101105,
      "learning_rate": 0.00019996841892833,
      "loss": 0.8291,
      "step": 8
    },
    {
      "epoch": 0.00293470286133529,
      "grad_norm": 0.2701761722564697,
      "learning_rate": 0.00019996003076502565,
      "loss": 0.8865,
      "step": 9
    },
    {
      "epoch": 0.003260780957039211,
      "grad_norm": 0.18380843102931976,
      "learning_rate": 0.00019995065603657316,
      "loss": 0.7624,
      "step": 10
    },
    {
      "epoch": 0.003586859052743132,
      "grad_norm": 0.19820477068424225,
      "learning_rate": 0.0001999402948354973,
      "loss": 0.6386,
      "step": 11
    },
    {
      "epoch": 0.003912937148447053,
      "grad_norm": 0.3971559703350067,
      "learning_rate": 0.00019992894726405893,
      "loss": 0.817,
      "step": 12
    },
    {
      "epoch": 0.004239015244150974,
      "grad_norm": 0.26180964708328247,
      "learning_rate": 0.000199916613434254,
      "loss": 0.7921,
      "step": 13
    },
    {
      "epoch": 0.004565093339854895,
      "grad_norm": 0.31556156277656555,
      "learning_rate": 0.0001999032934678125,
      "loss": 0.8024,
      "step": 14
    },
    {
      "epoch": 0.004891171435558816,
      "grad_norm": 0.22404684126377106,
      "learning_rate": 0.00019988898749619702,
      "loss": 0.8024,
      "step": 15
    },
    {
      "epoch": 0.005217249531262737,
      "grad_norm": 0.2191840410232544,
      "learning_rate": 0.00019987369566060176,
      "loss": 0.7378,
      "step": 16
    },
    {
      "epoch": 0.005543327626966659,
      "grad_norm": 0.21089725196361542,
      "learning_rate": 0.00019985741811195097,
      "loss": 0.8019,
      "step": 17
    },
    {
      "epoch": 0.00586940572267058,
      "grad_norm": 0.2955284118652344,
      "learning_rate": 0.00019984015501089752,
      "loss": 0.8887,
      "step": 18
    },
    {
      "epoch": 0.006195483818374501,
      "grad_norm": 0.23684082925319672,
      "learning_rate": 0.0001998219065278212,
      "loss": 0.817,
      "step": 19
    },
    {
      "epoch": 0.006521561914078422,
      "grad_norm": 0.2309175729751587,
      "learning_rate": 0.00019980267284282717,
      "loss": 0.7905,
      "step": 20
    },
    {
      "epoch": 0.006847640009782343,
      "grad_norm": 0.22210851311683655,
      "learning_rate": 0.00019978245414574417,
      "loss": 0.7685,
      "step": 21
    },
    {
      "epoch": 0.007173718105486264,
      "grad_norm": 0.18386678397655487,
      "learning_rate": 0.00019976125063612252,
      "loss": 0.6951,
      "step": 22
    },
    {
      "epoch": 0.007499796201190185,
      "grad_norm": 0.2344907522201538,
      "learning_rate": 0.00019973906252323238,
      "loss": 0.8061,
      "step": 23
    },
    {
      "epoch": 0.007825874296894107,
      "grad_norm": 0.23887653648853302,
      "learning_rate": 0.0001997158900260614,
      "loss": 0.8196,
      "step": 24
    },
    {
      "epoch": 0.008151952392598028,
      "grad_norm": 0.21526898443698883,
      "learning_rate": 0.0001996917333733128,
      "loss": 0.7566,
      "step": 25
    },
    {
      "epoch": 0.008478030488301948,
      "grad_norm": 0.28801462054252625,
      "learning_rate": 0.00019966659280340297,
      "loss": 0.8329,
      "step": 26
    },
    {
      "epoch": 0.00880410858400587,
      "grad_norm": 0.2439056932926178,
      "learning_rate": 0.00019964046856445924,
      "loss": 0.6542,
      "step": 27
    },
    {
      "epoch": 0.00913018667970979,
      "grad_norm": 0.33892586827278137,
      "learning_rate": 0.00019961336091431727,
      "loss": 0.669,
      "step": 28
    },
    {
      "epoch": 0.009456264775413711,
      "grad_norm": 0.37088584899902344,
      "learning_rate": 0.00019958527012051857,
      "loss": 0.7722,
      "step": 29
    },
    {
      "epoch": 0.009782342871117632,
      "grad_norm": 0.27336689829826355,
      "learning_rate": 0.00019955619646030802,
      "loss": 0.6662,
      "step": 30
    },
    {
      "epoch": 0.010108420966821553,
      "grad_norm": 0.24305802583694458,
      "learning_rate": 0.00019952614022063084,
      "loss": 0.7181,
      "step": 31
    },
    {
      "epoch": 0.010434499062525474,
      "grad_norm": 0.2589680552482605,
      "learning_rate": 0.00019949510169813003,
      "loss": 0.7044,
      "step": 32
    },
    {
      "epoch": 0.010760577158229395,
      "grad_norm": 0.27365782856941223,
      "learning_rate": 0.00019946308119914323,
      "loss": 0.7446,
      "step": 33
    },
    {
      "epoch": 0.011086655253933318,
      "grad_norm": 0.34046655893325806,
      "learning_rate": 0.0001994300790396999,
      "loss": 0.7853,
      "step": 34
    },
    {
      "epoch": 0.011412733349637239,
      "grad_norm": 0.19071121513843536,
      "learning_rate": 0.000199396095545518,
      "loss": 0.5924,
      "step": 35
    },
    {
      "epoch": 0.01173881144534116,
      "grad_norm": 0.36323902010917664,
      "learning_rate": 0.00019936113105200085,
      "loss": 0.8158,
      "step": 36
    },
    {
      "epoch": 0.01206488954104508,
      "grad_norm": 0.19574415683746338,
      "learning_rate": 0.00019932518590423394,
      "loss": 0.5983,
      "step": 37
    },
    {
      "epoch": 0.012390967636749002,
      "grad_norm": 0.24282293021678925,
      "learning_rate": 0.00019928826045698136,
      "loss": 0.8028,
      "step": 38
    },
    {
      "epoch": 0.012717045732452923,
      "grad_norm": 0.30920177698135376,
      "learning_rate": 0.0001992503550746824,
      "loss": 0.7311,
      "step": 39
    },
    {
      "epoch": 0.013043123828156844,
      "grad_norm": 0.3591444194316864,
      "learning_rate": 0.0001992114701314478,
      "loss": 0.7909,
      "step": 40
    },
    {
      "epoch": 0.013369201923860765,
      "grad_norm": 0.30144479870796204,
      "learning_rate": 0.0001991716060110563,
      "loss": 0.6631,
      "step": 41
    },
    {
      "epoch": 0.013695280019564686,
      "grad_norm": 0.25601276755332947,
      "learning_rate": 0.00019913076310695068,
      "loss": 0.7587,
      "step": 42
    },
    {
      "epoch": 0.014021358115268607,
      "grad_norm": 0.269055038690567,
      "learning_rate": 0.00019908894182223388,
      "loss": 0.7726,
      "step": 43
    },
    {
      "epoch": 0.014347436210972528,
      "grad_norm": 0.35874077677726746,
      "learning_rate": 0.00019904614256966512,
      "loss": 0.7229,
      "step": 44
    },
    {
      "epoch": 0.014673514306676448,
      "grad_norm": 0.2554594576358795,
      "learning_rate": 0.00019900236577165576,
      "loss": 0.6733,
      "step": 45
    },
    {
      "epoch": 0.01499959240238037,
      "grad_norm": 0.20462395250797272,
      "learning_rate": 0.0001989576118602651,
      "loss": 0.6097,
      "step": 46
    },
    {
      "epoch": 0.01532567049808429,
      "grad_norm": 0.31137871742248535,
      "learning_rate": 0.00019891188127719618,
      "loss": 0.7929,
      "step": 47
    },
    {
      "epoch": 0.015651748593788213,
      "grad_norm": 0.22744540870189667,
      "learning_rate": 0.0001988651744737914,
      "loss": 0.7205,
      "step": 48
    },
    {
      "epoch": 0.015977826689492134,
      "grad_norm": 0.23255527019500732,
      "learning_rate": 0.00019881749191102808,
      "loss": 0.6804,
      "step": 49
    },
    {
      "epoch": 0.016303904785196055,
      "grad_norm": 0.2959457337856293,
      "learning_rate": 0.00019876883405951377,
      "loss": 0.8133,
      "step": 50
    },
    {
      "epoch": 0.016629982880899976,
      "grad_norm": 0.2753635346889496,
      "learning_rate": 0.00019871920139948192,
      "loss": 0.6824,
      "step": 51
    },
    {
      "epoch": 0.016956060976603897,
      "grad_norm": 0.23834997415542603,
      "learning_rate": 0.0001986685944207868,
      "loss": 0.5754,
      "step": 52
    },
    {
      "epoch": 0.017282139072307818,
      "grad_norm": 0.385995477437973,
      "learning_rate": 0.0001986170136228989,
      "loss": 0.7432,
      "step": 53
    },
    {
      "epoch": 0.01760821716801174,
      "grad_norm": 0.26523032784461975,
      "learning_rate": 0.00019856445951489982,
      "loss": 0.6528,
      "step": 54
    },
    {
      "epoch": 0.01793429526371566,
      "grad_norm": 0.25099095702171326,
      "learning_rate": 0.0001985109326154774,
      "loss": 0.6592,
      "step": 55
    },
    {
      "epoch": 0.01826037335941958,
      "grad_norm": 0.2450903058052063,
      "learning_rate": 0.00019845643345292054,
      "loss": 0.7029,
      "step": 56
    },
    {
      "epoch": 0.018586451455123502,
      "grad_norm": 0.21463742852210999,
      "learning_rate": 0.00019840096256511398,
      "loss": 0.6694,
      "step": 57
    },
    {
      "epoch": 0.018912529550827423,
      "grad_norm": 0.2570534348487854,
      "learning_rate": 0.00019834452049953297,
      "loss": 0.7292,
      "step": 58
    },
    {
      "epoch": 0.019238607646531344,
      "grad_norm": 0.263324111700058,
      "learning_rate": 0.00019828710781323792,
      "loss": 0.6538,
      "step": 59
    },
    {
      "epoch": 0.019564685742235265,
      "grad_norm": 0.33475950360298157,
      "learning_rate": 0.0001982287250728689,
      "loss": 0.725,
      "step": 60
    },
    {
      "epoch": 0.019890763837939186,
      "grad_norm": 0.30244898796081543,
      "learning_rate": 0.0001981693728546399,
      "loss": 0.7368,
      "step": 61
    },
    {
      "epoch": 0.020216841933643107,
      "grad_norm": 0.3100570738315582,
      "learning_rate": 0.0001981090517443334,
      "loss": 0.7252,
      "step": 62
    },
    {
      "epoch": 0.020542920029347028,
      "grad_norm": 0.21203063428401947,
      "learning_rate": 0.00019804776233729444,
      "loss": 0.6033,
      "step": 63
    },
    {
      "epoch": 0.02086899812505095,
      "grad_norm": 0.3419407904148102,
      "learning_rate": 0.0001979855052384247,
      "loss": 0.7105,
      "step": 64
    },
    {
      "epoch": 0.02119507622075487,
      "grad_norm": 0.23826122283935547,
      "learning_rate": 0.00019792228106217658,
      "loss": 0.6479,
      "step": 65
    },
    {
      "epoch": 0.02152115431645879,
      "grad_norm": 0.26379406452178955,
      "learning_rate": 0.00019785809043254722,
      "loss": 0.6375,
      "step": 66
    },
    {
      "epoch": 0.02184723241216271,
      "grad_norm": 0.27263978123664856,
      "learning_rate": 0.0001977929339830722,
      "loss": 0.6962,
      "step": 67
    },
    {
      "epoch": 0.022173310507866636,
      "grad_norm": 0.23443619906902313,
      "learning_rate": 0.00019772681235681936,
      "loss": 0.6485,
      "step": 68
    },
    {
      "epoch": 0.022499388603570557,
      "grad_norm": 0.22951410710811615,
      "learning_rate": 0.00019765972620638248,
      "loss": 0.6682,
      "step": 69
    },
    {
      "epoch": 0.022825466699274478,
      "grad_norm": 0.27327480912208557,
      "learning_rate": 0.00019759167619387476,
      "loss": 0.6075,
      "step": 70
    },
    {
      "epoch": 0.0231515447949784,
      "grad_norm": 0.2695823013782501,
      "learning_rate": 0.00019752266299092236,
      "loss": 0.6932,
      "step": 71
    },
    {
      "epoch": 0.02347762289068232,
      "grad_norm": 0.25518912076950073,
      "learning_rate": 0.00019745268727865774,
      "loss": 0.7467,
      "step": 72
    },
    {
      "epoch": 0.02380370098638624,
      "grad_norm": 0.20592662692070007,
      "learning_rate": 0.0001973817497477129,
      "loss": 0.6205,
      "step": 73
    },
    {
      "epoch": 0.02412977908209016,
      "grad_norm": 0.26220276951789856,
      "learning_rate": 0.00019730985109821266,
      "loss": 0.6708,
      "step": 74
    },
    {
      "epoch": 0.024455857177794083,
      "grad_norm": 0.21277664601802826,
      "learning_rate": 0.00019723699203976766,
      "loss": 0.6089,
      "step": 75
    },
    {
      "epoch": 0.024781935273498003,
      "grad_norm": 0.24941107630729675,
      "learning_rate": 0.0001971631732914674,
      "loss": 0.653,
      "step": 76
    },
    {
      "epoch": 0.025108013369201924,
      "grad_norm": 0.2536955177783966,
      "learning_rate": 0.0001970883955818731,
      "loss": 0.6835,
      "step": 77
    },
    {
      "epoch": 0.025434091464905845,
      "grad_norm": 0.2956002354621887,
      "learning_rate": 0.0001970126596490106,
      "loss": 0.7187,
      "step": 78
    },
    {
      "epoch": 0.025760169560609766,
      "grad_norm": 0.4782562553882599,
      "learning_rate": 0.00019693596624036292,
      "loss": 0.7792,
      "step": 79
    },
    {
      "epoch": 0.026086247656313687,
      "grad_norm": 0.2989657521247864,
      "learning_rate": 0.0001968583161128631,
      "loss": 0.6652,
      "step": 80
    },
    {
      "epoch": 0.02641232575201761,
      "grad_norm": 0.2501319348812103,
      "learning_rate": 0.00019677971003288655,
      "loss": 0.6437,
      "step": 81
    },
    {
      "epoch": 0.02673840384772153,
      "grad_norm": 0.25894472002983093,
      "learning_rate": 0.00019670014877624353,
      "loss": 0.643,
      "step": 82
    },
    {
      "epoch": 0.02706448194342545,
      "grad_norm": 0.26122599840164185,
      "learning_rate": 0.00019661963312817148,
      "loss": 0.6002,
      "step": 83
    },
    {
      "epoch": 0.02739056003912937,
      "grad_norm": 0.30422499775886536,
      "learning_rate": 0.0001965381638833274,
      "loss": 0.7274,
      "step": 84
    },
    {
      "epoch": 0.027716638134833292,
      "grad_norm": 0.22934012115001678,
      "learning_rate": 0.00019645574184577982,
      "loss": 0.6188,
      "step": 85
    },
    {
      "epoch": 0.028042716230537213,
      "grad_norm": 0.2155739665031433,
      "learning_rate": 0.000196372367829001,
      "loss": 0.6079,
      "step": 86
    },
    {
      "epoch": 0.028368794326241134,
      "grad_norm": 0.2555270791053772,
      "learning_rate": 0.00019628804265585877,
      "loss": 0.6143,
      "step": 87
    },
    {
      "epoch": 0.028694872421945055,
      "grad_norm": 0.2770369350910187,
      "learning_rate": 0.0001962027671586086,
      "loss": 0.7382,
      "step": 88
    },
    {
      "epoch": 0.029020950517648976,
      "grad_norm": 0.2100551426410675,
      "learning_rate": 0.0001961165421788852,
      "loss": 0.609,
      "step": 89
    },
    {
      "epoch": 0.029347028613352897,
      "grad_norm": 0.29590874910354614,
      "learning_rate": 0.0001960293685676943,
      "loss": 0.7482,
      "step": 90
    },
    {
      "epoch": 0.029673106709056818,
      "grad_norm": 0.33272069692611694,
      "learning_rate": 0.0001959412471854043,
      "loss": 0.7156,
      "step": 91
    },
    {
      "epoch": 0.02999918480476074,
      "grad_norm": 0.2670280337333679,
      "learning_rate": 0.0001958521789017376,
      "loss": 0.7449,
      "step": 92
    },
    {
      "epoch": 0.03032526290046466,
      "grad_norm": 0.2555425763130188,
      "learning_rate": 0.00019576216459576222,
      "loss": 0.7356,
      "step": 93
    },
    {
      "epoch": 0.03065134099616858,
      "grad_norm": 0.2829780578613281,
      "learning_rate": 0.00019567120515588308,
      "loss": 0.6172,
      "step": 94
    },
    {
      "epoch": 0.030977419091872502,
      "grad_norm": 0.24343392252922058,
      "learning_rate": 0.00019557930147983302,
      "loss": 0.575,
      "step": 95
    },
    {
      "epoch": 0.031303497187576426,
      "grad_norm": 0.27918389439582825,
      "learning_rate": 0.00019548645447466431,
      "loss": 0.6214,
      "step": 96
    },
    {
      "epoch": 0.031629575283280344,
      "grad_norm": 0.4070862829685211,
      "learning_rate": 0.00019539266505673938,
      "loss": 0.7078,
      "step": 97
    },
    {
      "epoch": 0.03195565337898427,
      "grad_norm": 0.27973341941833496,
      "learning_rate": 0.00019529793415172192,
      "loss": 0.6759,
      "step": 98
    },
    {
      "epoch": 0.032281731474688186,
      "grad_norm": 0.3154214322566986,
      "learning_rate": 0.00019520226269456768,
      "loss": 0.7701,
      "step": 99
    },
    {
      "epoch": 0.03260780957039211,
      "grad_norm": 0.2046702355146408,
      "learning_rate": 0.00019510565162951537,
      "loss": 0.5448,
      "step": 100
    },
    {
      "epoch": 0.03293388766609603,
      "grad_norm": 0.2343735247850418,
      "learning_rate": 0.00019500810191007718,
      "loss": 0.6245,
      "step": 101
    },
    {
      "epoch": 0.03325996576179995,
      "grad_norm": 0.2645024061203003,
      "learning_rate": 0.00019490961449902946,
      "loss": 0.646,
      "step": 102
    },
    {
      "epoch": 0.03358604385750387,
      "grad_norm": 0.28727737069129944,
      "learning_rate": 0.0001948101903684032,
      "loss": 0.6788,
      "step": 103
    },
    {
      "epoch": 0.033912121953207794,
      "grad_norm": 0.2755887806415558,
      "learning_rate": 0.00019470983049947444,
      "loss": 0.6203,
      "step": 104
    },
    {
      "epoch": 0.03423820004891171,
      "grad_norm": 0.2064685970544815,
      "learning_rate": 0.00019460853588275454,
      "loss": 0.6794,
      "step": 105
    },
    {
      "epoch": 0.034564278144615636,
      "grad_norm": 0.2290899008512497,
      "learning_rate": 0.00019450630751798048,
      "loss": 0.5644,
      "step": 106
    },
    {
      "epoch": 0.03489035624031955,
      "grad_norm": 0.26528871059417725,
      "learning_rate": 0.000194403146414105,
      "loss": 0.6283,
      "step": 107
    },
    {
      "epoch": 0.03521643433602348,
      "grad_norm": 0.28655117750167847,
      "learning_rate": 0.00019429905358928646,
      "loss": 0.7008,
      "step": 108
    },
    {
      "epoch": 0.0355425124317274,
      "grad_norm": 0.2962978780269623,
      "learning_rate": 0.00019419403007087907,
      "loss": 0.5863,
      "step": 109
    },
    {
      "epoch": 0.03586859052743132,
      "grad_norm": 0.2612760365009308,
      "learning_rate": 0.00019408807689542257,
      "loss": 0.6873,
      "step": 110
    },
    {
      "epoch": 0.036194668623135244,
      "grad_norm": 0.2672303020954132,
      "learning_rate": 0.00019398119510863197,
      "loss": 0.6674,
      "step": 111
    },
    {
      "epoch": 0.03652074671883916,
      "grad_norm": 0.27370524406433105,
      "learning_rate": 0.00019387338576538744,
      "loss": 0.716,
      "step": 112
    },
    {
      "epoch": 0.036846824814543086,
      "grad_norm": 0.23076298832893372,
      "learning_rate": 0.00019376464992972356,
      "loss": 0.6218,
      "step": 113
    },
    {
      "epoch": 0.037172902910247003,
      "grad_norm": 0.31532204151153564,
      "learning_rate": 0.00019365498867481923,
      "loss": 0.7019,
      "step": 114
    },
    {
      "epoch": 0.03749898100595093,
      "grad_norm": 0.2647252678871155,
      "learning_rate": 0.00019354440308298675,
      "loss": 0.7017,
      "step": 115
    },
    {
      "epoch": 0.037825059101654845,
      "grad_norm": 0.283130407333374,
      "learning_rate": 0.00019343289424566122,
      "loss": 0.6234,
      "step": 116
    },
    {
      "epoch": 0.03815113719735877,
      "grad_norm": 0.25351446866989136,
      "learning_rate": 0.00019332046326338986,
      "loss": 0.5254,
      "step": 117
    },
    {
      "epoch": 0.03847721529306269,
      "grad_norm": 0.3242204785346985,
      "learning_rate": 0.0001932071112458211,
      "loss": 0.7043,
      "step": 118
    },
    {
      "epoch": 0.03880329338876661,
      "grad_norm": 0.34721121191978455,
      "learning_rate": 0.00019309283931169356,
      "loss": 0.7116,
      "step": 119
    },
    {
      "epoch": 0.03912937148447053,
      "grad_norm": 0.3425745964050293,
      "learning_rate": 0.00019297764858882514,
      "loss": 0.7402,
      "step": 120
    },
    {
      "epoch": 0.039455449580174454,
      "grad_norm": 0.2215355932712555,
      "learning_rate": 0.00019286154021410173,
      "loss": 0.583,
      "step": 121
    },
    {
      "epoch": 0.03978152767587837,
      "grad_norm": 0.2788958251476288,
      "learning_rate": 0.00019274451533346615,
      "loss": 0.6151,
      "step": 122
    },
    {
      "epoch": 0.040107605771582296,
      "grad_norm": 0.22468440234661102,
      "learning_rate": 0.00019262657510190666,
      "loss": 0.5505,
      "step": 123
    },
    {
      "epoch": 0.04043368386728621,
      "grad_norm": 0.23359987139701843,
      "learning_rate": 0.0001925077206834458,
      "loss": 0.6524,
      "step": 124
    },
    {
      "epoch": 0.04075976196299014,
      "grad_norm": 0.2379453182220459,
      "learning_rate": 0.0001923879532511287,
      "loss": 0.5616,
      "step": 125
    },
    {
      "epoch": 0.041085840058694055,
      "grad_norm": 0.24692785739898682,
      "learning_rate": 0.0001922672739870115,
      "loss": 0.6027,
      "step": 126
    },
    {
      "epoch": 0.04141191815439798,
      "grad_norm": 0.2335415929555893,
      "learning_rate": 0.00019214568408214985,
      "loss": 0.5839,
      "step": 127
    },
    {
      "epoch": 0.0417379962501019,
      "grad_norm": 0.34272462129592896,
      "learning_rate": 0.00019202318473658705,
      "loss": 0.6932,
      "step": 128
    },
    {
      "epoch": 0.04206407434580582,
      "grad_norm": 0.23207567632198334,
      "learning_rate": 0.00019189977715934213,
      "loss": 0.6163,
      "step": 129
    },
    {
      "epoch": 0.04239015244150974,
      "grad_norm": 0.18426033854484558,
      "learning_rate": 0.00019177546256839812,
      "loss": 0.522,
      "step": 130
    },
    {
      "epoch": 0.04271623053721366,
      "grad_norm": 0.2792806625366211,
      "learning_rate": 0.0001916502421906898,
      "loss": 0.6846,
      "step": 131
    },
    {
      "epoch": 0.04304230863291758,
      "grad_norm": 0.2858756184577942,
      "learning_rate": 0.00019152411726209176,
      "loss": 0.7003,
      "step": 132
    },
    {
      "epoch": 0.043368386728621505,
      "grad_norm": 0.22560660541057587,
      "learning_rate": 0.00019139708902740613,
      "loss": 0.5148,
      "step": 133
    },
    {
      "epoch": 0.04369446482432542,
      "grad_norm": 0.2962760329246521,
      "learning_rate": 0.0001912691587403503,
      "loss": 0.7421,
      "step": 134
    },
    {
      "epoch": 0.04402054292002935,
      "grad_norm": 0.23213566839694977,
      "learning_rate": 0.00019114032766354453,
      "loss": 0.5798,
      "step": 135
    },
    {
      "epoch": 0.04434662101573327,
      "grad_norm": 0.3144775629043579,
      "learning_rate": 0.00019101059706849957,
      "loss": 0.6473,
      "step": 136
    },
    {
      "epoch": 0.04467269911143719,
      "grad_norm": 0.27858781814575195,
      "learning_rate": 0.00019087996823560402,
      "loss": 0.6913,
      "step": 137
    },
    {
      "epoch": 0.044998777207141114,
      "grad_norm": 0.24840930104255676,
      "learning_rate": 0.0001907484424541117,
      "loss": 0.5928,
      "step": 138
    },
    {
      "epoch": 0.04532485530284503,
      "grad_norm": 0.20698444545269012,
      "learning_rate": 0.00019061602102212898,
      "loss": 0.5884,
      "step": 139
    },
    {
      "epoch": 0.045650933398548955,
      "grad_norm": 0.3082457482814789,
      "learning_rate": 0.00019048270524660196,
      "loss": 0.6686,
      "step": 140
    },
    {
      "epoch": 0.04597701149425287,
      "grad_norm": 0.3713861107826233,
      "learning_rate": 0.0001903484964433035,
      "loss": 0.7604,
      "step": 141
    },
    {
      "epoch": 0.0463030895899568,
      "grad_norm": 0.41255638003349304,
      "learning_rate": 0.00019021339593682028,
      "loss": 0.698,
      "step": 142
    },
    {
      "epoch": 0.046629167685660715,
      "grad_norm": 0.3440454602241516,
      "learning_rate": 0.00019007740506053983,
      "loss": 0.5728,
      "step": 143
    },
    {
      "epoch": 0.04695524578136464,
      "grad_norm": 0.2812284529209137,
      "learning_rate": 0.0001899405251566371,
      "loss": 0.6911,
      "step": 144
    },
    {
      "epoch": 0.04728132387706856,
      "grad_norm": 0.29430341720581055,
      "learning_rate": 0.00018980275757606157,
      "loss": 0.682,
      "step": 145
    },
    {
      "epoch": 0.04760740197277248,
      "grad_norm": 0.39614227414131165,
      "learning_rate": 0.00018966410367852362,
      "loss": 0.6881,
      "step": 146
    },
    {
      "epoch": 0.0479334800684764,
      "grad_norm": 0.2873314917087555,
      "learning_rate": 0.00018952456483248119,
      "loss": 0.6422,
      "step": 147
    },
    {
      "epoch": 0.04825955816418032,
      "grad_norm": 0.2976369857788086,
      "learning_rate": 0.0001893841424151264,
      "loss": 0.5745,
      "step": 148
    },
    {
      "epoch": 0.04858563625988424,
      "grad_norm": 0.3633524477481842,
      "learning_rate": 0.0001892428378123718,
      "loss": 0.69,
      "step": 149
    },
    {
      "epoch": 0.048911714355588165,
      "grad_norm": 0.275623619556427,
      "learning_rate": 0.0001891006524188368,
      "loss": 0.7673,
      "step": 150
    },
    {
      "epoch": 0.04923779245129208,
      "grad_norm": 0.26629534363746643,
      "learning_rate": 0.00018895758763783383,
      "loss": 0.7725,
      "step": 151
    },
    {
      "epoch": 0.04956387054699601,
      "grad_norm": 0.2281135469675064,
      "learning_rate": 0.00018881364488135448,
      "loss": 0.6021,
      "step": 152
    },
    {
      "epoch": 0.049889948642699924,
      "grad_norm": 0.24734225869178772,
      "learning_rate": 0.00018866882557005567,
      "loss": 0.6097,
      "step": 153
    },
    {
      "epoch": 0.05021602673840385,
      "grad_norm": 0.2616749405860901,
      "learning_rate": 0.00018852313113324552,
      "loss": 0.7009,
      "step": 154
    },
    {
      "epoch": 0.050542104834107766,
      "grad_norm": 0.2619054913520813,
      "learning_rate": 0.00018837656300886937,
      "loss": 0.6613,
      "step": 155
    },
    {
      "epoch": 0.05086818292981169,
      "grad_norm": 0.3681729733943939,
      "learning_rate": 0.00018822912264349534,
      "loss": 0.6548,
      "step": 156
    },
    {
      "epoch": 0.05119426102551561,
      "grad_norm": 0.2739261984825134,
      "learning_rate": 0.00018808081149230036,
      "loss": 0.6548,
      "step": 157
    },
    {
      "epoch": 0.05152033912121953,
      "grad_norm": 0.28752514719963074,
      "learning_rate": 0.00018793163101905563,
      "loss": 0.6172,
      "step": 158
    },
    {
      "epoch": 0.05184641721692345,
      "grad_norm": 0.249834805727005,
      "learning_rate": 0.00018778158269611218,
      "loss": 0.6068,
      "step": 159
    },
    {
      "epoch": 0.052172495312627375,
      "grad_norm": 0.3328039348125458,
      "learning_rate": 0.00018763066800438636,
      "loss": 0.6898,
      "step": 160
    },
    {
      "epoch": 0.05249857340833129,
      "grad_norm": 0.3795255422592163,
      "learning_rate": 0.0001874788884333453,
      "loss": 0.6397,
      "step": 161
    },
    {
      "epoch": 0.05282465150403522,
      "grad_norm": 0.30090928077697754,
      "learning_rate": 0.00018732624548099204,
      "loss": 0.6447,
      "step": 162
    },
    {
      "epoch": 0.053150729599739134,
      "grad_norm": 0.22852292656898499,
      "learning_rate": 0.0001871727406538509,
      "loss": 0.5784,
      "step": 163
    },
    {
      "epoch": 0.05347680769544306,
      "grad_norm": 0.24613617360591888,
      "learning_rate": 0.0001870183754669526,
      "loss": 0.6584,
      "step": 164
    },
    {
      "epoch": 0.05380288579114698,
      "grad_norm": 0.3243715465068817,
      "learning_rate": 0.00018686315144381913,
      "loss": 0.6972,
      "step": 165
    },
    {
      "epoch": 0.0541289638868509,
      "grad_norm": 0.29082372784614563,
      "learning_rate": 0.000186707070116449,
      "loss": 0.5996,
      "step": 166
    },
    {
      "epoch": 0.054455041982554825,
      "grad_norm": 0.19261658191680908,
      "learning_rate": 0.0001865501330253019,
      "loss": 0.5629,
      "step": 167
    },
    {
      "epoch": 0.05478112007825874,
      "grad_norm": 0.3714055120944977,
      "learning_rate": 0.00018639234171928353,
      "loss": 0.7309,
      "step": 168
    },
    {
      "epoch": 0.05510719817396267,
      "grad_norm": 0.21879787743091583,
      "learning_rate": 0.0001862336977557304,
      "loss": 0.5907,
      "step": 169
    },
    {
      "epoch": 0.055433276269666584,
      "grad_norm": 0.2829175293445587,
      "learning_rate": 0.0001860742027003944,
      "loss": 0.6705,
      "step": 170
    },
    {
      "epoch": 0.05575935436537051,
      "grad_norm": 0.2817269563674927,
      "learning_rate": 0.00018591385812742725,
      "loss": 0.6662,
      "step": 171
    },
    {
      "epoch": 0.056085432461074426,
      "grad_norm": 0.21578969061374664,
      "learning_rate": 0.00018575266561936523,
      "loss": 0.5573,
      "step": 172
    },
    {
      "epoch": 0.05641151055677835,
      "grad_norm": 0.2769645154476166,
      "learning_rate": 0.00018559062676711332,
      "loss": 0.5742,
      "step": 173
    },
    {
      "epoch": 0.05673758865248227,
      "grad_norm": 0.221250519156456,
      "learning_rate": 0.0001854277431699295,
      "loss": 0.6006,
      "step": 174
    },
    {
      "epoch": 0.05706366674818619,
      "grad_norm": 0.391968309879303,
      "learning_rate": 0.00018526401643540922,
      "loss": 0.7147,
      "step": 175
    },
    {
      "epoch": 0.05738974484389011,
      "grad_norm": 0.20950183272361755,
      "learning_rate": 0.00018509944817946922,
      "loss": 0.5665,
      "step": 176
    },
    {
      "epoch": 0.057715822939594034,
      "grad_norm": 0.4676218330860138,
      "learning_rate": 0.00018493404002633166,
      "loss": 0.6862,
      "step": 177
    },
    {
      "epoch": 0.05804190103529795,
      "grad_norm": 0.30444085597991943,
      "learning_rate": 0.00018476779360850832,
      "loss": 0.6271,
      "step": 178
    },
    {
      "epoch": 0.058367979131001876,
      "grad_norm": 0.30755650997161865,
      "learning_rate": 0.00018460071056678422,
      "loss": 0.7104,
      "step": 179
    },
    {
      "epoch": 0.058694057226705794,
      "grad_norm": 0.266366571187973,
      "learning_rate": 0.00018443279255020152,
      "loss": 0.7298,
      "step": 180
    },
    {
      "epoch": 0.05902013532240972,
      "grad_norm": 0.3003389537334442,
      "learning_rate": 0.00018426404121604323,
      "loss": 0.7506,
      "step": 181
    },
    {
      "epoch": 0.059346213418113636,
      "grad_norm": 0.30031818151474,
      "learning_rate": 0.00018409445822981693,
      "loss": 0.6368,
      "step": 182
    },
    {
      "epoch": 0.05967229151381756,
      "grad_norm": 0.23968857526779175,
      "learning_rate": 0.00018392404526523817,
      "loss": 0.5467,
      "step": 183
    },
    {
      "epoch": 0.05999836960952148,
      "grad_norm": 0.24690750241279602,
      "learning_rate": 0.0001837528040042142,
      "loss": 0.6422,
      "step": 184
    },
    {
      "epoch": 0.0603244477052254,
      "grad_norm": 0.2936251759529114,
      "learning_rate": 0.00018358073613682706,
      "loss": 0.7603,
      "step": 185
    },
    {
      "epoch": 0.06065052580092932,
      "grad_norm": 0.33426618576049805,
      "learning_rate": 0.00018340784336131713,
      "loss": 0.82,
      "step": 186
    },
    {
      "epoch": 0.060976603896633244,
      "grad_norm": 0.29302123188972473,
      "learning_rate": 0.00018323412738406635,
      "loss": 0.643,
      "step": 187
    },
    {
      "epoch": 0.06130268199233716,
      "grad_norm": 0.23355039954185486,
      "learning_rate": 0.00018305958991958127,
      "loss": 0.6233,
      "step": 188
    },
    {
      "epoch": 0.061628760088041086,
      "grad_norm": 0.503938615322113,
      "learning_rate": 0.0001828842326904762,
      "loss": 0.717,
      "step": 189
    },
    {
      "epoch": 0.061954838183745004,
      "grad_norm": 0.23740191757678986,
      "learning_rate": 0.00018270805742745617,
      "loss": 0.6435,
      "step": 190
    },
    {
      "epoch": 0.06228091627944893,
      "grad_norm": 0.23052096366882324,
      "learning_rate": 0.00018253106586929997,
      "loss": 0.6395,
      "step": 191
    },
    {
      "epoch": 0.06260699437515285,
      "grad_norm": 0.2588057219982147,
      "learning_rate": 0.00018235325976284275,
      "loss": 0.5851,
      "step": 192
    },
    {
      "epoch": 0.06293307247085678,
      "grad_norm": 0.26309555768966675,
      "learning_rate": 0.00018217464086295904,
      "loss": 0.623,
      "step": 193
    },
    {
      "epoch": 0.06325915056656069,
      "grad_norm": 0.30716729164123535,
      "learning_rate": 0.00018199521093254523,
      "loss": 0.7346,
      "step": 194
    },
    {
      "epoch": 0.06358522866226461,
      "grad_norm": 0.2664608061313629,
      "learning_rate": 0.00018181497174250236,
      "loss": 0.737,
      "step": 195
    },
    {
      "epoch": 0.06391130675796854,
      "grad_norm": 0.33862313628196716,
      "learning_rate": 0.00018163392507171842,
      "loss": 0.6553,
      "step": 196
    },
    {
      "epoch": 0.06423738485367246,
      "grad_norm": 0.29159292578697205,
      "learning_rate": 0.00018145207270705096,
      "loss": 0.6077,
      "step": 197
    },
    {
      "epoch": 0.06456346294937637,
      "grad_norm": 0.28656378388404846,
      "learning_rate": 0.0001812694164433094,
      "loss": 0.7094,
      "step": 198
    },
    {
      "epoch": 0.0648895410450803,
      "grad_norm": 0.3513023853302002,
      "learning_rate": 0.00018108595808323736,
      "loss": 0.6811,
      "step": 199
    },
    {
      "epoch": 0.06521561914078422,
      "grad_norm": 0.44914510846138,
      "learning_rate": 0.00018090169943749476,
      "loss": 0.5998,
      "step": 200
    },
    {
      "epoch": 0.06554169723648814,
      "grad_norm": 0.25120270252227783,
      "learning_rate": 0.00018071664232464002,
      "loss": 0.5754,
      "step": 201
    },
    {
      "epoch": 0.06586777533219206,
      "grad_norm": 0.3265678584575653,
      "learning_rate": 0.0001805307885711122,
      "loss": 0.6652,
      "step": 202
    },
    {
      "epoch": 0.06619385342789598,
      "grad_norm": 0.2386000156402588,
      "learning_rate": 0.00018034414001121278,
      "loss": 0.6503,
      "step": 203
    },
    {
      "epoch": 0.0665199315235999,
      "grad_norm": 0.3131890892982483,
      "learning_rate": 0.00018015669848708767,
      "loss": 0.6924,
      "step": 204
    },
    {
      "epoch": 0.06684600961930383,
      "grad_norm": 0.2541196048259735,
      "learning_rate": 0.00017996846584870908,
      "loss": 0.7512,
      "step": 205
    },
    {
      "epoch": 0.06717208771500774,
      "grad_norm": 0.2935178577899933,
      "learning_rate": 0.0001797794439538571,
      "loss": 0.6095,
      "step": 206
    },
    {
      "epoch": 0.06749816581071166,
      "grad_norm": 0.25794175267219543,
      "learning_rate": 0.0001795896346681016,
      "loss": 0.5873,
      "step": 207
    },
    {
      "epoch": 0.06782424390641559,
      "grad_norm": 0.34597495198249817,
      "learning_rate": 0.00017939903986478355,
      "loss": 0.6594,
      "step": 208
    },
    {
      "epoch": 0.06815032200211951,
      "grad_norm": 0.26853707432746887,
      "learning_rate": 0.00017920766142499672,
      "loss": 0.6388,
      "step": 209
    },
    {
      "epoch": 0.06847640009782342,
      "grad_norm": 0.2943996787071228,
      "learning_rate": 0.00017901550123756906,
      "loss": 0.6592,
      "step": 210
    },
    {
      "epoch": 0.06880247819352735,
      "grad_norm": 0.2579059600830078,
      "learning_rate": 0.00017882256119904403,
      "loss": 0.7697,
      "step": 211
    },
    {
      "epoch": 0.06912855628923127,
      "grad_norm": 0.30710938572883606,
      "learning_rate": 0.00017862884321366188,
      "loss": 0.6289,
      "step": 212
    },
    {
      "epoch": 0.0694546343849352,
      "grad_norm": 0.5300430059432983,
      "learning_rate": 0.000178434349193341,
      "loss": 0.7128,
      "step": 213
    },
    {
      "epoch": 0.0697807124806391,
      "grad_norm": 0.34696683287620544,
      "learning_rate": 0.0001782390810576588,
      "loss": 0.7006,
      "step": 214
    },
    {
      "epoch": 0.07010679057634303,
      "grad_norm": 0.2758086621761322,
      "learning_rate": 0.000178043040733833,
      "loss": 0.6286,
      "step": 215
    },
    {
      "epoch": 0.07043286867204696,
      "grad_norm": 0.3609292507171631,
      "learning_rate": 0.00017784623015670238,
      "loss": 0.6675,
      "step": 216
    },
    {
      "epoch": 0.07075894676775088,
      "grad_norm": 0.2829347848892212,
      "learning_rate": 0.00017764865126870786,
      "loss": 0.6893,
      "step": 217
    },
    {
      "epoch": 0.0710850248634548,
      "grad_norm": 0.5950068235397339,
      "learning_rate": 0.00017745030601987337,
      "loss": 0.7254,
      "step": 218
    },
    {
      "epoch": 0.07141110295915871,
      "grad_norm": 0.30742985010147095,
      "learning_rate": 0.00017725119636778644,
      "loss": 0.6998,
      "step": 219
    },
    {
      "epoch": 0.07173718105486264,
      "grad_norm": 0.4018792510032654,
      "learning_rate": 0.00017705132427757895,
      "loss": 0.8801,
      "step": 220
    },
    {
      "epoch": 0.07206325915056656,
      "grad_norm": 0.3693302273750305,
      "learning_rate": 0.00017685069172190766,
      "loss": 0.6576,
      "step": 221
    },
    {
      "epoch": 0.07238933724627049,
      "grad_norm": 0.23466476798057556,
      "learning_rate": 0.00017664930068093498,
      "loss": 0.6179,
      "step": 222
    },
    {
      "epoch": 0.0727154153419744,
      "grad_norm": 0.4448927938938141,
      "learning_rate": 0.00017644715314230918,
      "loss": 0.7786,
      "step": 223
    },
    {
      "epoch": 0.07304149343767832,
      "grad_norm": 0.25553828477859497,
      "learning_rate": 0.0001762442511011448,
      "loss": 0.5989,
      "step": 224
    },
    {
      "epoch": 0.07336757153338225,
      "grad_norm": 0.253617525100708,
      "learning_rate": 0.0001760405965600031,
      "loss": 0.6312,
      "step": 225
    },
    {
      "epoch": 0.07369364962908617,
      "grad_norm": 0.24342387914657593,
      "learning_rate": 0.0001758361915288722,
      "loss": 0.5719,
      "step": 226
    },
    {
      "epoch": 0.07401972772479008,
      "grad_norm": 0.26518356800079346,
      "learning_rate": 0.0001756310380251472,
      "loss": 0.5901,
      "step": 227
    },
    {
      "epoch": 0.07434580582049401,
      "grad_norm": 0.22742488980293274,
      "learning_rate": 0.00017542513807361037,
      "loss": 0.5329,
      "step": 228
    },
    {
      "epoch": 0.07467188391619793,
      "grad_norm": 0.3179355263710022,
      "learning_rate": 0.00017521849370641114,
      "loss": 0.6908,
      "step": 229
    },
    {
      "epoch": 0.07499796201190186,
      "grad_norm": 0.2648365795612335,
      "learning_rate": 0.00017501110696304596,
      "loss": 0.6078,
      "step": 230
    },
    {
      "epoch": 0.07532404010760577,
      "grad_norm": 0.2643827497959137,
      "learning_rate": 0.00017480297989033825,
      "loss": 0.7111,
      "step": 231
    },
    {
      "epoch": 0.07565011820330969,
      "grad_norm": 0.2998198866844177,
      "learning_rate": 0.00017459411454241822,
      "loss": 0.6759,
      "step": 232
    },
    {
      "epoch": 0.07597619629901362,
      "grad_norm": 0.2904861867427826,
      "learning_rate": 0.00017438451298070252,
      "loss": 0.6876,
      "step": 233
    },
    {
      "epoch": 0.07630227439471754,
      "grad_norm": 0.33539697527885437,
      "learning_rate": 0.00017417417727387394,
      "loss": 0.6744,
      "step": 234
    },
    {
      "epoch": 0.07662835249042145,
      "grad_norm": 0.2617104947566986,
      "learning_rate": 0.000173963109497861,
      "loss": 0.5529,
      "step": 235
    },
    {
      "epoch": 0.07695443058612537,
      "grad_norm": 0.23713330924510956,
      "learning_rate": 0.0001737513117358174,
      "loss": 0.606,
      "step": 236
    },
    {
      "epoch": 0.0772805086818293,
      "grad_norm": 0.2559371888637543,
      "learning_rate": 0.0001735387860781016,
      "loss": 0.5811,
      "step": 237
    },
    {
      "epoch": 0.07760658677753322,
      "grad_norm": 0.27742600440979004,
      "learning_rate": 0.00017332553462225602,
      "loss": 0.6415,
      "step": 238
    },
    {
      "epoch": 0.07793266487323713,
      "grad_norm": 0.26602035760879517,
      "learning_rate": 0.00017311155947298643,
      "loss": 0.6722,
      "step": 239
    },
    {
      "epoch": 0.07825874296894106,
      "grad_norm": 0.2551373839378357,
      "learning_rate": 0.00017289686274214118,
      "loss": 0.5842,
      "step": 240
    },
    {
      "epoch": 0.07858482106464498,
      "grad_norm": 0.3161643147468567,
      "learning_rate": 0.0001726814465486903,
      "loss": 0.5979,
      "step": 241
    },
    {
      "epoch": 0.07891089916034891,
      "grad_norm": 0.3652503788471222,
      "learning_rate": 0.0001724653130187047,
      "loss": 0.6652,
      "step": 242
    },
    {
      "epoch": 0.07923697725605282,
      "grad_norm": 0.2684518098831177,
      "learning_rate": 0.00017224846428533499,
      "loss": 0.6001,
      "step": 243
    },
    {
      "epoch": 0.07956305535175674,
      "grad_norm": 0.21544717252254486,
      "learning_rate": 0.0001720309024887907,
      "loss": 0.61,
      "step": 244
    },
    {
      "epoch": 0.07988913344746067,
      "grad_norm": 0.2404128611087799,
      "learning_rate": 0.00017181262977631888,
      "loss": 0.6769,
      "step": 245
    },
    {
      "epoch": 0.08021521154316459,
      "grad_norm": 0.21001021564006805,
      "learning_rate": 0.00017159364830218312,
      "loss": 0.6083,
      "step": 246
    },
    {
      "epoch": 0.08054128963886852,
      "grad_norm": 0.36963921785354614,
      "learning_rate": 0.00017137396022764214,
      "loss": 0.6379,
      "step": 247
    },
    {
      "epoch": 0.08086736773457243,
      "grad_norm": 0.259547621011734,
      "learning_rate": 0.00017115356772092857,
      "loss": 0.6637,
      "step": 248
    },
    {
      "epoch": 0.08119344583027635,
      "grad_norm": 0.31841689348220825,
      "learning_rate": 0.0001709324729572274,
      "loss": 0.7091,
      "step": 249
    },
    {
      "epoch": 0.08151952392598028,
      "grad_norm": 0.35306358337402344,
      "learning_rate": 0.00017071067811865476,
      "loss": 0.6489,
      "step": 250
    },
    {
      "epoch": 0.0818456020216842,
      "grad_norm": 0.21508830785751343,
      "learning_rate": 0.00017048818539423615,
      "loss": 0.5831,
      "step": 251
    },
    {
      "epoch": 0.08217168011738811,
      "grad_norm": 0.2714361250400543,
      "learning_rate": 0.00017026499697988493,
      "loss": 0.6528,
      "step": 252
    },
    {
      "epoch": 0.08249775821309203,
      "grad_norm": 0.2910292148590088,
      "learning_rate": 0.00017004111507838064,
      "loss": 0.7083,
      "step": 253
    },
    {
      "epoch": 0.08282383630879596,
      "grad_norm": 0.2184605747461319,
      "learning_rate": 0.00016981654189934727,
      "loss": 0.5559,
      "step": 254
    },
    {
      "epoch": 0.08314991440449988,
      "grad_norm": 0.222558856010437,
      "learning_rate": 0.00016959127965923142,
      "loss": 0.6332,
      "step": 255
    },
    {
      "epoch": 0.0834759925002038,
      "grad_norm": 0.37371590733528137,
      "learning_rate": 0.0001693653305812805,
      "loss": 0.5877,
      "step": 256
    },
    {
      "epoch": 0.08380207059590772,
      "grad_norm": 0.3450126051902771,
      "learning_rate": 0.00016913869689552064,
      "loss": 0.7029,
      "step": 257
    },
    {
      "epoch": 0.08412814869161164,
      "grad_norm": 0.24566428363323212,
      "learning_rate": 0.00016891138083873487,
      "loss": 0.5703,
      "step": 258
    },
    {
      "epoch": 0.08445422678731557,
      "grad_norm": 0.21838568150997162,
      "learning_rate": 0.00016868338465444085,
      "loss": 0.5863,
      "step": 259
    },
    {
      "epoch": 0.08478030488301948,
      "grad_norm": 0.2161954939365387,
      "learning_rate": 0.00016845471059286887,
      "loss": 0.5707,
      "step": 260
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 0.29614922404289246,
      "learning_rate": 0.00016822536091093965,
      "loss": 0.6503,
      "step": 261
    },
    {
      "epoch": 0.08543246107442733,
      "grad_norm": 0.32689064741134644,
      "learning_rate": 0.00016799533787224192,
      "loss": 0.6301,
      "step": 262
    },
    {
      "epoch": 0.08575853917013125,
      "grad_norm": 0.2884279787540436,
      "learning_rate": 0.00016776464374701025,
      "loss": 0.5587,
      "step": 263
    },
    {
      "epoch": 0.08608461726583516,
      "grad_norm": 0.435344398021698,
      "learning_rate": 0.00016753328081210245,
      "loss": 0.8083,
      "step": 264
    },
    {
      "epoch": 0.08641069536153909,
      "grad_norm": 0.2473522573709488,
      "learning_rate": 0.00016730125135097735,
      "loss": 0.6562,
      "step": 265
    },
    {
      "epoch": 0.08673677345724301,
      "grad_norm": 0.3048084080219269,
      "learning_rate": 0.000167068557653672,
      "loss": 0.6652,
      "step": 266
    },
    {
      "epoch": 0.08706285155294693,
      "grad_norm": 0.3015248775482178,
      "learning_rate": 0.0001668352020167793,
      "loss": 0.5716,
      "step": 267
    },
    {
      "epoch": 0.08738892964865085,
      "grad_norm": 0.26765477657318115,
      "learning_rate": 0.00016660118674342517,
      "loss": 0.6112,
      "step": 268
    },
    {
      "epoch": 0.08771500774435477,
      "grad_norm": 0.2710655629634857,
      "learning_rate": 0.00016636651414324587,
      "loss": 0.6236,
      "step": 269
    },
    {
      "epoch": 0.0880410858400587,
      "grad_norm": 0.3481336534023285,
      "learning_rate": 0.00016613118653236518,
      "loss": 0.6561,
      "step": 270
    },
    {
      "epoch": 0.08836716393576262,
      "grad_norm": 0.39478129148483276,
      "learning_rate": 0.0001658952062333717,
      "loss": 0.6904,
      "step": 271
    },
    {
      "epoch": 0.08869324203146654,
      "grad_norm": 0.24365971982479095,
      "learning_rate": 0.00016565857557529566,
      "loss": 0.7372,
      "step": 272
    },
    {
      "epoch": 0.08901932012717045,
      "grad_norm": 0.2448200285434723,
      "learning_rate": 0.00016542129689358612,
      "loss": 0.6072,
      "step": 273
    },
    {
      "epoch": 0.08934539822287438,
      "grad_norm": 0.23193484544754028,
      "learning_rate": 0.0001651833725300879,
      "loss": 0.6249,
      "step": 274
    },
    {
      "epoch": 0.0896714763185783,
      "grad_norm": 0.3657950460910797,
      "learning_rate": 0.00016494480483301836,
      "loss": 0.7056,
      "step": 275
    },
    {
      "epoch": 0.08999755441428223,
      "grad_norm": 0.22652854025363922,
      "learning_rate": 0.00016470559615694446,
      "loss": 0.6169,
      "step": 276
    },
    {
      "epoch": 0.09032363250998614,
      "grad_norm": 0.23606394231319427,
      "learning_rate": 0.00016446574886275913,
      "loss": 0.6428,
      "step": 277
    },
    {
      "epoch": 0.09064971060569006,
      "grad_norm": 0.21306492388248444,
      "learning_rate": 0.00016422526531765846,
      "loss": 0.6148,
      "step": 278
    },
    {
      "epoch": 0.09097578870139399,
      "grad_norm": 0.2294398844242096,
      "learning_rate": 0.00016398414789511786,
      "loss": 0.6457,
      "step": 279
    },
    {
      "epoch": 0.09130186679709791,
      "grad_norm": 0.3088754117488861,
      "learning_rate": 0.000163742398974869,
      "loss": 0.6488,
      "step": 280
    },
    {
      "epoch": 0.09162794489280182,
      "grad_norm": 0.32271504402160645,
      "learning_rate": 0.00016350002094287609,
      "loss": 0.6858,
      "step": 281
    },
    {
      "epoch": 0.09195402298850575,
      "grad_norm": 0.348728746175766,
      "learning_rate": 0.00016325701619131246,
      "loss": 0.671,
      "step": 282
    },
    {
      "epoch": 0.09228010108420967,
      "grad_norm": 0.28784212470054626,
      "learning_rate": 0.00016301338711853693,
      "loss": 0.6244,
      "step": 283
    },
    {
      "epoch": 0.0926061791799136,
      "grad_norm": 0.3205079734325409,
      "learning_rate": 0.00016276913612907007,
      "loss": 0.6944,
      "step": 284
    },
    {
      "epoch": 0.0929322572756175,
      "grad_norm": 0.23461277782917023,
      "learning_rate": 0.00016252426563357055,
      "loss": 0.5726,
      "step": 285
    },
    {
      "epoch": 0.09325833537132143,
      "grad_norm": 0.2635505795478821,
      "learning_rate": 0.00016227877804881127,
      "loss": 0.6212,
      "step": 286
    },
    {
      "epoch": 0.09358441346702535,
      "grad_norm": 0.24859635531902313,
      "learning_rate": 0.00016203267579765563,
      "loss": 0.6139,
      "step": 287
    },
    {
      "epoch": 0.09391049156272928,
      "grad_norm": 0.32381847500801086,
      "learning_rate": 0.00016178596130903344,
      "loss": 0.7499,
      "step": 288
    },
    {
      "epoch": 0.09423656965843319,
      "grad_norm": 0.22918426990509033,
      "learning_rate": 0.00016153863701791717,
      "loss": 0.6352,
      "step": 289
    },
    {
      "epoch": 0.09456264775413711,
      "grad_norm": 0.2531386911869049,
      "learning_rate": 0.00016129070536529766,
      "loss": 0.6116,
      "step": 290
    },
    {
      "epoch": 0.09488872584984104,
      "grad_norm": 0.31397831439971924,
      "learning_rate": 0.00016104216879816026,
      "loss": 0.6613,
      "step": 291
    },
    {
      "epoch": 0.09521480394554496,
      "grad_norm": 0.26814454793930054,
      "learning_rate": 0.00016079302976946055,
      "loss": 0.5927,
      "step": 292
    },
    {
      "epoch": 0.09554088204124887,
      "grad_norm": 0.2622220516204834,
      "learning_rate": 0.00016054329073810015,
      "loss": 0.5552,
      "step": 293
    },
    {
      "epoch": 0.0958669601369528,
      "grad_norm": 0.20141161978244781,
      "learning_rate": 0.00016029295416890248,
      "loss": 0.5583,
      "step": 294
    },
    {
      "epoch": 0.09619303823265672,
      "grad_norm": 0.24076247215270996,
      "learning_rate": 0.00016004202253258842,
      "loss": 0.6295,
      "step": 295
    },
    {
      "epoch": 0.09651911632836065,
      "grad_norm": 0.28313252329826355,
      "learning_rate": 0.0001597904983057519,
      "loss": 0.655,
      "step": 296
    },
    {
      "epoch": 0.09684519442406456,
      "grad_norm": 0.3352673351764679,
      "learning_rate": 0.00015953838397083552,
      "loss": 0.6478,
      "step": 297
    },
    {
      "epoch": 0.09717127251976848,
      "grad_norm": 0.2615090012550354,
      "learning_rate": 0.00015928568201610595,
      "loss": 0.5878,
      "step": 298
    },
    {
      "epoch": 0.0974973506154724,
      "grad_norm": 0.22464829683303833,
      "learning_rate": 0.00015903239493562948,
      "loss": 0.5686,
      "step": 299
    },
    {
      "epoch": 0.09782342871117633,
      "grad_norm": 0.24365228414535522,
      "learning_rate": 0.00015877852522924732,
      "loss": 0.4556,
      "step": 300
    },
    {
      "epoch": 0.09814950680688025,
      "grad_norm": 0.22546721994876862,
      "learning_rate": 0.00015852407540255104,
      "loss": 0.5845,
      "step": 301
    },
    {
      "epoch": 0.09847558490258417,
      "grad_norm": 0.3008763790130615,
      "learning_rate": 0.00015826904796685762,
      "loss": 0.5832,
      "step": 302
    },
    {
      "epoch": 0.09880166299828809,
      "grad_norm": 0.37207353115081787,
      "learning_rate": 0.00015801344543918495,
      "loss": 0.8106,
      "step": 303
    },
    {
      "epoch": 0.09912774109399201,
      "grad_norm": 0.27629679441452026,
      "learning_rate": 0.00015775727034222675,
      "loss": 0.6696,
      "step": 304
    },
    {
      "epoch": 0.09945381918969594,
      "grad_norm": 0.32836827635765076,
      "learning_rate": 0.00015750052520432787,
      "loss": 0.6924,
      "step": 305
    },
    {
      "epoch": 0.09977989728539985,
      "grad_norm": 0.2459554374217987,
      "learning_rate": 0.0001572432125594591,
      "loss": 0.6045,
      "step": 306
    },
    {
      "epoch": 0.10010597538110377,
      "grad_norm": 0.2423304170370102,
      "learning_rate": 0.00015698533494719238,
      "loss": 0.6067,
      "step": 307
    },
    {
      "epoch": 0.1004320534768077,
      "grad_norm": 0.33577221632003784,
      "learning_rate": 0.00015672689491267567,
      "loss": 0.6626,
      "step": 308
    },
    {
      "epoch": 0.10075813157251162,
      "grad_norm": 0.2271413505077362,
      "learning_rate": 0.00015646789500660773,
      "loss": 0.6213,
      "step": 309
    },
    {
      "epoch": 0.10108420966821553,
      "grad_norm": 0.3947584629058838,
      "learning_rate": 0.00015620833778521307,
      "loss": 0.738,
      "step": 310
    },
    {
      "epoch": 0.10141028776391946,
      "grad_norm": 0.2767365872859955,
      "learning_rate": 0.0001559482258102167,
      "loss": 0.563,
      "step": 311
    },
    {
      "epoch": 0.10173636585962338,
      "grad_norm": 0.26970767974853516,
      "learning_rate": 0.00015568756164881882,
      "loss": 0.7104,
      "step": 312
    },
    {
      "epoch": 0.1020624439553273,
      "grad_norm": 0.26086539030075073,
      "learning_rate": 0.00015542634787366942,
      "loss": 0.6614,
      "step": 313
    },
    {
      "epoch": 0.10238852205103122,
      "grad_norm": 0.17937611043453217,
      "learning_rate": 0.00015516458706284303,
      "loss": 0.5259,
      "step": 314
    },
    {
      "epoch": 0.10271460014673514,
      "grad_norm": 0.17191629111766815,
      "learning_rate": 0.0001549022817998132,
      "loss": 0.5171,
      "step": 315
    },
    {
      "epoch": 0.10304067824243907,
      "grad_norm": 0.2208365947008133,
      "learning_rate": 0.00015463943467342693,
      "loss": 0.5459,
      "step": 316
    },
    {
      "epoch": 0.10336675633814299,
      "grad_norm": 0.28957873582839966,
      "learning_rate": 0.00015437604827787927,
      "loss": 0.6093,
      "step": 317
    },
    {
      "epoch": 0.1036928344338469,
      "grad_norm": 0.2496960610151291,
      "learning_rate": 0.00015411212521268758,
      "loss": 0.6104,
      "step": 318
    },
    {
      "epoch": 0.10401891252955082,
      "grad_norm": 0.2421676069498062,
      "learning_rate": 0.00015384766808266602,
      "loss": 0.6567,
      "step": 319
    },
    {
      "epoch": 0.10434499062525475,
      "grad_norm": 0.2969132363796234,
      "learning_rate": 0.00015358267949789966,
      "loss": 0.6265,
      "step": 320
    },
    {
      "epoch": 0.10467106872095867,
      "grad_norm": 0.2575230300426483,
      "learning_rate": 0.00015331716207371888,
      "loss": 0.6724,
      "step": 321
    },
    {
      "epoch": 0.10499714681666258,
      "grad_norm": 0.364106148481369,
      "learning_rate": 0.0001530511184306734,
      "loss": 0.7101,
      "step": 322
    },
    {
      "epoch": 0.10532322491236651,
      "grad_norm": 0.26639074087142944,
      "learning_rate": 0.00015278455119450664,
      "loss": 0.572,
      "step": 323
    },
    {
      "epoch": 0.10564930300807043,
      "grad_norm": 0.24254579842090607,
      "learning_rate": 0.0001525174629961296,
      "loss": 0.5839,
      "step": 324
    },
    {
      "epoch": 0.10597538110377436,
      "grad_norm": 0.2786118984222412,
      "learning_rate": 0.0001522498564715949,
      "loss": 0.6216,
      "step": 325
    },
    {
      "epoch": 0.10630145919947827,
      "grad_norm": 0.2908596396446228,
      "learning_rate": 0.00015198173426207094,
      "loss": 0.5082,
      "step": 326
    },
    {
      "epoch": 0.10662753729518219,
      "grad_norm": 0.26790931820869446,
      "learning_rate": 0.00015171309901381572,
      "loss": 0.6836,
      "step": 327
    },
    {
      "epoch": 0.10695361539088612,
      "grad_norm": 0.25367841124534607,
      "learning_rate": 0.00015144395337815064,
      "loss": 0.5351,
      "step": 328
    },
    {
      "epoch": 0.10727969348659004,
      "grad_norm": 0.23655357956886292,
      "learning_rate": 0.00015117430001143452,
      "loss": 0.6234,
      "step": 329
    },
    {
      "epoch": 0.10760577158229397,
      "grad_norm": 0.29328015446662903,
      "learning_rate": 0.00015090414157503714,
      "loss": 0.6051,
      "step": 330
    },
    {
      "epoch": 0.10793184967799788,
      "grad_norm": 0.2480703741312027,
      "learning_rate": 0.00015063348073531324,
      "loss": 0.6228,
      "step": 331
    },
    {
      "epoch": 0.1082579277737018,
      "grad_norm": 0.21856042742729187,
      "learning_rate": 0.0001503623201635761,
      "loss": 0.5922,
      "step": 332
    },
    {
      "epoch": 0.10858400586940573,
      "grad_norm": 0.27874556183815,
      "learning_rate": 0.000150090662536071,
      "loss": 0.648,
      "step": 333
    },
    {
      "epoch": 0.10891008396510965,
      "grad_norm": 0.3144196569919586,
      "learning_rate": 0.0001498185105339491,
      "loss": 0.6663,
      "step": 334
    },
    {
      "epoch": 0.10923616206081356,
      "grad_norm": 0.24514897167682648,
      "learning_rate": 0.00014954586684324078,
      "loss": 0.5493,
      "step": 335
    },
    {
      "epoch": 0.10956224015651748,
      "grad_norm": 0.33484047651290894,
      "learning_rate": 0.00014927273415482915,
      "loss": 0.6464,
      "step": 336
    },
    {
      "epoch": 0.10988831825222141,
      "grad_norm": 0.26239341497421265,
      "learning_rate": 0.00014899911516442365,
      "loss": 0.6216,
      "step": 337
    },
    {
      "epoch": 0.11021439634792533,
      "grad_norm": 0.2449112832546234,
      "learning_rate": 0.00014872501257253323,
      "loss": 0.6243,
      "step": 338
    },
    {
      "epoch": 0.11054047444362924,
      "grad_norm": 0.25176045298576355,
      "learning_rate": 0.0001484504290844398,
      "loss": 0.5852,
      "step": 339
    },
    {
      "epoch": 0.11086655253933317,
      "grad_norm": 0.3558025360107422,
      "learning_rate": 0.00014817536741017152,
      "loss": 0.6305,
      "step": 340
    },
    {
      "epoch": 0.11119263063503709,
      "grad_norm": 0.3823552131652832,
      "learning_rate": 0.00014789983026447612,
      "loss": 0.6243,
      "step": 341
    },
    {
      "epoch": 0.11151870873074102,
      "grad_norm": 0.30289652943611145,
      "learning_rate": 0.0001476238203667939,
      "loss": 0.5996,
      "step": 342
    },
    {
      "epoch": 0.11184478682644493,
      "grad_norm": 0.27371859550476074,
      "learning_rate": 0.0001473473404412312,
      "loss": 0.6582,
      "step": 343
    },
    {
      "epoch": 0.11217086492214885,
      "grad_norm": 0.26179400086402893,
      "learning_rate": 0.0001470703932165333,
      "loss": 0.7194,
      "step": 344
    },
    {
      "epoch": 0.11249694301785278,
      "grad_norm": 0.32942402362823486,
      "learning_rate": 0.00014679298142605734,
      "loss": 0.6949,
      "step": 345
    },
    {
      "epoch": 0.1128230211135567,
      "grad_norm": 0.2867915630340576,
      "learning_rate": 0.00014651510780774583,
      "loss": 0.6307,
      "step": 346
    },
    {
      "epoch": 0.11314909920926061,
      "grad_norm": 0.23128168284893036,
      "learning_rate": 0.00014623677510409918,
      "loss": 0.5856,
      "step": 347
    },
    {
      "epoch": 0.11347517730496454,
      "grad_norm": 0.23641528189182281,
      "learning_rate": 0.00014595798606214882,
      "loss": 0.693,
      "step": 348
    },
    {
      "epoch": 0.11380125540066846,
      "grad_norm": 0.23140428960323334,
      "learning_rate": 0.00014567874343342997,
      "loss": 0.6498,
      "step": 349
    },
    {
      "epoch": 0.11412733349637239,
      "grad_norm": 0.3104979991912842,
      "learning_rate": 0.00014539904997395468,
      "loss": 0.6793,
      "step": 350
    },
    {
      "epoch": 0.1144534115920763,
      "grad_norm": 0.28645816445350647,
      "learning_rate": 0.00014511890844418453,
      "loss": 0.7425,
      "step": 351
    },
    {
      "epoch": 0.11477948968778022,
      "grad_norm": 0.31605294346809387,
      "learning_rate": 0.00014483832160900326,
      "loss": 0.5866,
      "step": 352
    },
    {
      "epoch": 0.11510556778348414,
      "grad_norm": 0.27439796924591064,
      "learning_rate": 0.00014455729223768966,
      "loss": 0.5785,
      "step": 353
    },
    {
      "epoch": 0.11543164587918807,
      "grad_norm": 0.3302454650402069,
      "learning_rate": 0.0001442758231038902,
      "loss": 0.658,
      "step": 354
    },
    {
      "epoch": 0.115757723974892,
      "grad_norm": 0.18580487370491028,
      "learning_rate": 0.00014399391698559152,
      "loss": 0.439,
      "step": 355
    },
    {
      "epoch": 0.1160838020705959,
      "grad_norm": 0.2431912124156952,
      "learning_rate": 0.0001437115766650933,
      "loss": 0.6042,
      "step": 356
    },
    {
      "epoch": 0.11640988016629983,
      "grad_norm": 0.2991044223308563,
      "learning_rate": 0.00014342880492898048,
      "loss": 0.649,
      "step": 357
    },
    {
      "epoch": 0.11673595826200375,
      "grad_norm": 0.31540584564208984,
      "learning_rate": 0.0001431456045680959,
      "loss": 0.5912,
      "step": 358
    },
    {
      "epoch": 0.11706203635770768,
      "grad_norm": 0.2192750871181488,
      "learning_rate": 0.00014286197837751286,
      "loss": 0.6253,
      "step": 359
    },
    {
      "epoch": 0.11738811445341159,
      "grad_norm": 0.23064061999320984,
      "learning_rate": 0.00014257792915650728,
      "loss": 0.5753,
      "step": 360
    },
    {
      "epoch": 0.11771419254911551,
      "grad_norm": 0.29536232352256775,
      "learning_rate": 0.00014229345970853032,
      "loss": 0.6757,
      "step": 361
    },
    {
      "epoch": 0.11804027064481944,
      "grad_norm": 0.30984580516815186,
      "learning_rate": 0.00014200857284118066,
      "loss": 0.601,
      "step": 362
    },
    {
      "epoch": 0.11836634874052336,
      "grad_norm": 0.2990546226501465,
      "learning_rate": 0.00014172327136617656,
      "loss": 0.6723,
      "step": 363
    },
    {
      "epoch": 0.11869242683622727,
      "grad_norm": 0.2658703029155731,
      "learning_rate": 0.00014143755809932845,
      "loss": 0.6146,
      "step": 364
    },
    {
      "epoch": 0.1190185049319312,
      "grad_norm": 0.20527562499046326,
      "learning_rate": 0.00014115143586051088,
      "loss": 0.5963,
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 0.11934458302763512, |
|
"grad_norm": 0.24419987201690674, |
|
"learning_rate": 0.00014086490747363493, |
|
"loss": 0.6174, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 0.11967066112333904, |
|
"grad_norm": 0.2944442629814148, |
|
"learning_rate": 0.00014057797576662, |
|
"loss": 0.6109, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 0.11999673921904296, |
|
"grad_norm": 0.26260483264923096, |
|
"learning_rate": 0.00014029064357136628, |
|
"loss": 0.681, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 0.12032281731474688, |
|
"grad_norm": 0.2430226057767868, |
|
"learning_rate": 0.00014000291372372647, |
|
"loss": 0.5597, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.1206488954104508, |
|
"grad_norm": 0.3209778666496277, |
|
"learning_rate": 0.00013971478906347806, |
|
"loss": 0.7486, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.12097497350615473, |
|
"grad_norm": 0.36402198672294617, |
|
"learning_rate": 0.00013942627243429512, |
|
"loss": 0.7214, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.12130105160185864, |
|
"grad_norm": 0.4271096885204315, |
|
"learning_rate": 0.00013913736668372026, |
|
"loss": 0.7041, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.12162712969756256, |
|
"grad_norm": 0.26176485419273376, |
|
"learning_rate": 0.00013884807466313663, |
|
"loss": 0.6308, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.12195320779326649, |
|
"grad_norm": 0.30541542172431946, |
|
"learning_rate": 0.00013855839922773968, |
|
"loss": 0.6901, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.12227928588897041, |
|
"grad_norm": 0.2745092511177063, |
|
"learning_rate": 0.000138268343236509, |
|
"loss": 0.7374, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.12260536398467432, |
|
"grad_norm": 0.23461657762527466, |
|
"learning_rate": 0.00013797790955218014, |
|
"loss": 0.5745, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.12293144208037825, |
|
"grad_norm": 0.2348318099975586, |
|
"learning_rate": 0.00013768710104121627, |
|
"loss": 0.5153, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.12325752017608217, |
|
"grad_norm": 0.22238482534885406, |
|
"learning_rate": 0.00013739592057378003, |
|
"loss": 0.6065, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.1235835982717861, |
|
"grad_norm": 0.34176740050315857, |
|
"learning_rate": 0.0001371043710237051, |
|
"loss": 0.6059, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.12390967636749001, |
|
"grad_norm": 0.33095425367355347, |
|
"learning_rate": 0.00013681245526846783, |
|
"loss": 0.6373, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.12423575446319393, |
|
"grad_norm": 0.28466686606407166, |
|
"learning_rate": 0.0001365201761891588, |
|
"loss": 0.5869, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.12456183255889786, |
|
"grad_norm": 0.37012436985969543, |
|
"learning_rate": 0.00013622753667045457, |
|
"loss": 0.6946, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.12488791065460178, |
|
"grad_norm": 0.2527792453765869, |
|
"learning_rate": 0.00013593453960058908, |
|
"loss": 0.5629, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.1252139887503057, |
|
"grad_norm": 0.3835316002368927, |
|
"learning_rate": 0.00013564118787132506, |
|
"loss": 0.7292, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.12554006684600963, |
|
"grad_norm": 0.22863373160362244, |
|
"learning_rate": 0.00013534748437792573, |
|
"loss": 0.5662, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.12586614494171355, |
|
"grad_norm": 0.2078106552362442, |
|
"learning_rate": 0.0001350534320191259, |
|
"loss": 0.614, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.12619222303741745, |
|
"grad_norm": 0.2273118942975998, |
|
"learning_rate": 0.0001347590336971037, |
|
"loss": 0.6229, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.12651830113312137, |
|
"grad_norm": 0.26143860816955566, |
|
"learning_rate": 0.0001344642923174517, |
|
"loss": 0.5764, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.1268443792288253, |
|
"grad_norm": 0.33049219846725464, |
|
"learning_rate": 0.00013416921078914835, |
|
"loss": 0.6867, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.12717045732452922, |
|
"grad_norm": 0.3913268446922302, |
|
"learning_rate": 0.00013387379202452917, |
|
"loss": 0.6019, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.12749653542023315, |
|
"grad_norm": 0.35554900765419006, |
|
"learning_rate": 0.00013357803893925807, |
|
"loss": 0.6319, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.12782261351593707, |
|
"grad_norm": 0.322226345539093, |
|
"learning_rate": 0.00013328195445229868, |
|
"loss": 0.6853, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.128148691611641, |
|
"grad_norm": 0.33744320273399353, |
|
"learning_rate": 0.00013298554148588528, |
|
"loss": 0.5545, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.12847476970734492, |
|
"grad_norm": 0.2647642493247986, |
|
"learning_rate": 0.00013268880296549425, |
|
"loss": 0.6286, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.12880084780304882, |
|
"grad_norm": 0.34832268953323364, |
|
"learning_rate": 0.00013239174181981495, |
|
"loss": 0.7018, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.12912692589875274, |
|
"grad_norm": 0.2576987147331238, |
|
"learning_rate": 0.00013209436098072095, |
|
"loss": 0.6339, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.12945300399445667, |
|
"grad_norm": 0.2219506800174713, |
|
"learning_rate": 0.00013179666338324108, |
|
"loss": 0.5791, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.1297790820901606, |
|
"grad_norm": 0.2386060357093811, |
|
"learning_rate": 0.0001314986519655305, |
|
"loss": 0.6095, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.13010516018586452, |
|
"grad_norm": 0.2350076287984848, |
|
"learning_rate": 0.0001312003296688415, |
|
"loss": 0.5839, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.13043123828156844, |
|
"grad_norm": 0.27746814489364624, |
|
"learning_rate": 0.00013090169943749476, |
|
"loss": 0.6154, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.13075731637727236, |
|
"grad_norm": 0.2833282947540283, |
|
"learning_rate": 0.0001306027642188501, |
|
"loss": 0.658, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.1310833944729763, |
|
"grad_norm": 0.444374680519104, |
|
"learning_rate": 0.00013030352696327742, |
|
"loss": 0.5924, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.13140947256868019, |
|
"grad_norm": 0.30098557472229004, |
|
"learning_rate": 0.00013000399062412763, |
|
"loss": 0.7752, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.1317355506643841, |
|
"grad_norm": 0.3449711501598358, |
|
"learning_rate": 0.0001297041581577035, |
|
"loss": 0.6651, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.13206162876008803, |
|
"grad_norm": 0.273874431848526, |
|
"learning_rate": 0.0001294040325232304, |
|
"loss": 0.5093, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.13238770685579196, |
|
"grad_norm": 0.23658856749534607, |
|
"learning_rate": 0.00012910361668282719, |
|
"loss": 0.6612, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.13271378495149588, |
|
"grad_norm": 0.3019971251487732, |
|
"learning_rate": 0.00012880291360147693, |
|
"loss": 0.6271, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.1330398630471998, |
|
"grad_norm": 0.29895782470703125, |
|
"learning_rate": 0.0001285019262469976, |
|
"loss": 0.641, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.13336594114290373, |
|
"grad_norm": 0.2666492164134979, |
|
"learning_rate": 0.00012820065759001293, |
|
"loss": 0.6078, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.13369201923860766, |
|
"grad_norm": 0.28037238121032715, |
|
"learning_rate": 0.00012789911060392294, |
|
"loss": 0.6904, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.13401809733431158, |
|
"grad_norm": 0.34532639384269714, |
|
"learning_rate": 0.0001275972882648746, |
|
"loss": 0.6884, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.13434417543001548, |
|
"grad_norm": 0.2206190675497055, |
|
"learning_rate": 0.00012729519355173254, |
|
"loss": 0.5713, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.1346702535257194, |
|
"grad_norm": 0.2744779586791992, |
|
"learning_rate": 0.00012699282944604967, |
|
"loss": 0.6322, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.13499633162142333, |
|
"grad_norm": 0.2511173486709595, |
|
"learning_rate": 0.00012669019893203759, |
|
"loss": 0.7319, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.13532240971712725, |
|
"grad_norm": 0.24852187931537628, |
|
"learning_rate": 0.0001263873049965373, |
|
"loss": 0.6683, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.13564848781283118, |
|
"grad_norm": 0.2592228651046753, |
|
"learning_rate": 0.00012608415062898972, |
|
"loss": 0.5839, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.1359745659085351, |
|
"grad_norm": 0.34304317831993103, |
|
"learning_rate": 0.000125780738821406, |
|
"loss": 0.7071, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.13630064400423902, |
|
"grad_norm": 0.3506695628166199, |
|
"learning_rate": 0.00012547707256833823, |
|
"loss": 0.6805, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.13662672209994295, |
|
"grad_norm": 0.2388596534729004, |
|
"learning_rate": 0.00012517315486684972, |
|
"loss": 0.5318, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.13695280019564685, |
|
"grad_norm": 0.22909346222877502, |
|
"learning_rate": 0.0001248689887164855, |
|
"loss": 0.4896, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.13727887829135077, |
|
"grad_norm": 0.2821944057941437, |
|
"learning_rate": 0.00012456457711924266, |
|
"loss": 0.7472, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.1376049563870547, |
|
"grad_norm": 0.3464897572994232, |
|
"learning_rate": 0.00012425992307954075, |
|
"loss": 0.6315, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.13793103448275862, |
|
"grad_norm": 0.513347864151001, |
|
"learning_rate": 0.0001239550296041922, |
|
"loss": 0.84, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.13825711257846254, |
|
"grad_norm": 0.2560180425643921, |
|
"learning_rate": 0.00012364989970237248, |
|
"loss": 0.647, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.13858319067416647, |
|
"grad_norm": 0.3197629749774933, |
|
"learning_rate": 0.00012334453638559057, |
|
"loss": 0.7108, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.1389092687698704, |
|
"grad_norm": 0.23635463416576385, |
|
"learning_rate": 0.00012303894266765908, |
|
"loss": 0.6198, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.13923534686557432, |
|
"grad_norm": 0.26680633425712585, |
|
"learning_rate": 0.00012273312156466464, |
|
"loss": 0.6366, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.1395614249612782, |
|
"grad_norm": 0.23483791947364807, |
|
"learning_rate": 0.00012242707609493814, |
|
"loss": 0.7021, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.13988750305698214, |
|
"grad_norm": 0.3056071996688843, |
|
"learning_rate": 0.00012212080927902474, |
|
"loss": 0.632, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.14021358115268606, |
|
"grad_norm": 0.24552787840366364, |
|
"learning_rate": 0.00012181432413965428, |
|
"loss": 0.5489, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.14053965924839, |
|
"grad_norm": 0.32417768239974976, |
|
"learning_rate": 0.00012150762370171136, |
|
"loss": 0.5433, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.1408657373440939, |
|
"grad_norm": 0.2855895161628723, |
|
"learning_rate": 0.00012120071099220549, |
|
"loss": 0.6434, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.14119181543979784, |
|
"grad_norm": 0.26909559965133667, |
|
"learning_rate": 0.00012089358904024117, |
|
"loss": 0.6011, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.14151789353550176, |
|
"grad_norm": 0.3203361928462982, |
|
"learning_rate": 0.00012058626087698814, |
|
"loss": 0.5739, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.14184397163120568, |
|
"grad_norm": 0.2709220349788666, |
|
"learning_rate": 0.00012027872953565125, |
|
"loss": 0.6286, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.1421700497269096, |
|
"grad_norm": 0.26857098937034607, |
|
"learning_rate": 0.00011997099805144069, |
|
"loss": 0.5723, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.1424961278226135, |
|
"grad_norm": 0.24100883305072784, |
|
"learning_rate": 0.000119663069461542, |
|
"loss": 0.5598, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.14282220591831743, |
|
"grad_norm": 0.21419620513916016, |
|
"learning_rate": 0.00011935494680508606, |
|
"loss": 0.5755, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.14314828401402135, |
|
"grad_norm": 0.28282371163368225, |
|
"learning_rate": 0.00011904663312311901, |
|
"loss": 0.6764, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.14347436210972528, |
|
"grad_norm": 0.18891361355781555, |
|
"learning_rate": 0.00011873813145857249, |
|
"loss": 0.5354, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.1438004402054292, |
|
"grad_norm": 0.245451420545578, |
|
"learning_rate": 0.00011842944485623335, |
|
"loss": 0.619, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.14412651830113313, |
|
"grad_norm": 0.2450558841228485, |
|
"learning_rate": 0.00011812057636271374, |
|
"loss": 0.7004, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.14445259639683705, |
|
"grad_norm": 0.27985724806785583, |
|
"learning_rate": 0.000117811529026421, |
|
"loss": 0.628, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.14477867449254098, |
|
"grad_norm": 0.2647560238838196, |
|
"learning_rate": 0.00011750230589752762, |
|
"loss": 0.6445, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.14510475258824487, |
|
"grad_norm": 0.28567782044410706, |
|
"learning_rate": 0.00011719291002794096, |
|
"loss": 0.5969, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.1454308306839488, |
|
"grad_norm": 0.26054808497428894, |
|
"learning_rate": 0.00011688334447127338, |
|
"loss": 0.6482, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.14575690877965272, |
|
"grad_norm": 0.2926684617996216, |
|
"learning_rate": 0.00011657361228281199, |
|
"loss": 0.5566, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.14608298687535665, |
|
"grad_norm": 0.24087432026863098, |
|
"learning_rate": 0.00011626371651948838, |
|
"loss": 0.651, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.14640906497106057, |
|
"grad_norm": 0.1882346272468567, |
|
"learning_rate": 0.00011595366023984864, |
|
"loss": 0.5317, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.1467351430667645, |
|
"grad_norm": 0.26859766244888306, |
|
"learning_rate": 0.0001156434465040231, |
|
"loss": 0.6414, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.14706122116246842, |
|
"grad_norm": 0.264344185590744, |
|
"learning_rate": 0.00011533307837369607, |
|
"loss": 0.5283, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.14738729925817234, |
|
"grad_norm": 0.2349400818347931, |
|
"learning_rate": 0.00011502255891207572, |
|
"loss": 0.5494, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.14771337735387624, |
|
"grad_norm": 0.23789772391319275, |
|
"learning_rate": 0.00011471189118386375, |
|
"loss": 0.6511, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.14803945544958017, |
|
"grad_norm": 0.2902562916278839, |
|
"learning_rate": 0.00011440107825522521, |
|
"loss": 0.5956, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.1483655335452841, |
|
"grad_norm": 0.33084043860435486, |
|
"learning_rate": 0.00011409012319375827, |
|
"loss": 0.7127, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.14869161164098801, |
|
"grad_norm": 0.2966511845588684, |
|
"learning_rate": 0.0001137790290684638, |
|
"loss": 0.6293, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.14901768973669194, |
|
"grad_norm": 0.2903098464012146, |
|
"learning_rate": 0.00011346779894971527, |
|
"loss": 0.5653, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.14934376783239586, |
|
"grad_norm": 0.30501604080200195, |
|
"learning_rate": 0.00011315643590922827, |
|
"loss": 0.6074, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.1496698459280998, |
|
"grad_norm": 0.24236589670181274, |
|
"learning_rate": 0.0001128449430200303, |
|
"loss": 0.5867, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.1499959240238037, |
|
"grad_norm": 0.2912566363811493, |
|
"learning_rate": 0.00011253332335643043, |
|
"loss": 0.6263, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.1503220021195076, |
|
"grad_norm": 0.2772620916366577, |
|
"learning_rate": 0.00011222157999398895, |
|
"loss": 0.6283, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.15064808021521153, |
|
"grad_norm": 0.2634068727493286, |
|
"learning_rate": 0.00011190971600948699, |
|
"loss": 0.6163, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.15097415831091546, |
|
"grad_norm": 0.2546554207801819, |
|
"learning_rate": 0.00011159773448089614, |
|
"loss": 0.6658, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.15130023640661938, |
|
"grad_norm": 0.24717013537883759, |
|
"learning_rate": 0.00011128563848734816, |
|
"loss": 0.6137, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.1516263145023233, |
|
"grad_norm": 0.3441718816757202, |
|
"learning_rate": 0.00011097343110910452, |
|
"loss": 0.6057, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.15195239259802723, |
|
"grad_norm": 0.24183110892772675, |
|
"learning_rate": 0.000110661115427526, |
|
"loss": 0.4939, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.15227847069373115, |
|
"grad_norm": 0.30775824189186096, |
|
"learning_rate": 0.00011034869452504226, |
|
"loss": 0.6185, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.15260454878943508, |
|
"grad_norm": 0.28553271293640137, |
|
"learning_rate": 0.00011003617148512149, |
|
"loss": 0.6158, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.152930626885139, |
|
"grad_norm": 0.2486485242843628, |
|
"learning_rate": 0.00010972354939223996, |
|
"loss": 0.5079, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.1532567049808429, |
|
"grad_norm": 0.30782243609428406, |
|
"learning_rate": 0.00010941083133185146, |
|
"loss": 0.5769, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.15358278307654682, |
|
"grad_norm": 0.3133150339126587, |
|
"learning_rate": 0.00010909802039035701, |
|
"loss": 0.679, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.15390886117225075, |
|
"grad_norm": 0.23745952546596527, |
|
"learning_rate": 0.00010878511965507434, |
|
"loss": 0.487, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.15423493926795467, |
|
"grad_norm": 0.29805904626846313, |
|
"learning_rate": 0.00010847213221420736, |
|
"loss": 0.7348, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.1545610173636586, |
|
"grad_norm": 0.22502878308296204, |
|
"learning_rate": 0.00010815906115681578, |
|
"loss": 0.5491, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.15488709545936252, |
|
"grad_norm": 0.4142134487628937, |
|
"learning_rate": 0.0001078459095727845, |
|
"loss": 0.644, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.15521317355506645, |
|
"grad_norm": 0.23912709951400757, |
|
"learning_rate": 0.00010753268055279329, |
|
"loss": 0.6337, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.15553925165077037, |
|
"grad_norm": 0.2665061354637146, |
|
"learning_rate": 0.0001072193771882861, |
|
"loss": 0.5108, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.15586532974647427, |
|
"grad_norm": 0.276652991771698, |
|
"learning_rate": 0.00010690600257144061, |
|
"loss": 0.6463, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.1561914078421782, |
|
"grad_norm": 0.2724633812904358, |
|
"learning_rate": 0.0001065925597951378, |
|
"loss": 0.6021, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.15651748593788212, |
|
"grad_norm": 0.2907373309135437, |
|
"learning_rate": 0.00010627905195293135, |
|
"loss": 0.68, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.15684356403358604, |
|
"grad_norm": 0.2480301856994629, |
|
"learning_rate": 0.00010596548213901708, |
|
"loss": 0.5805, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.15716964212928997, |
|
"grad_norm": 0.31199586391448975, |
|
"learning_rate": 0.00010565185344820247, |
|
"loss": 0.6244, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.1574957202249939, |
|
"grad_norm": 0.23156419396400452, |
|
"learning_rate": 0.00010533816897587606, |
|
"loss": 0.6027, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.15782179832069781, |
|
"grad_norm": 0.1928403526544571, |
|
"learning_rate": 0.00010502443181797697, |
|
"loss": 0.4903, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.15814787641640174, |
|
"grad_norm": 0.2800855338573456, |
|
"learning_rate": 0.00010471064507096426, |
|
"loss": 0.6735, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.15847395451210564, |
|
"grad_norm": 0.325988233089447, |
|
"learning_rate": 0.0001043968118317865, |
|
"loss": 0.6988, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.15880003260780956, |
|
"grad_norm": 0.26524752378463745, |
|
"learning_rate": 0.00010408293519785101, |
|
"loss": 0.6748, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.15912611070351348, |
|
"grad_norm": 0.368566632270813, |
|
"learning_rate": 0.00010376901826699348, |
|
"loss": 0.6704, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.1594521887992174, |
|
"grad_norm": 0.36456090211868286, |
|
"learning_rate": 0.00010345506413744726, |
|
"loss": 0.575, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.15977826689492133, |
|
"grad_norm": 0.2953360974788666, |
|
"learning_rate": 0.00010314107590781284, |
|
"loss": 0.5831, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.16010434499062526, |
|
"grad_norm": 0.22688160836696625, |
|
"learning_rate": 0.00010282705667702734, |
|
"loss": 0.5816, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.16043042308632918, |
|
"grad_norm": 0.34496596455574036, |
|
"learning_rate": 0.00010251300954433376, |
|
"loss": 0.6377, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.1607565011820331, |
|
"grad_norm": 0.249411940574646, |
|
"learning_rate": 0.00010219893760925052, |
|
"loss": 0.5299, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.16108257927773703, |
|
"grad_norm": 0.33367717266082764, |
|
"learning_rate": 0.00010188484397154084, |
|
"loss": 0.5045, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.16140865737344093, |
|
"grad_norm": 0.22329236567020416, |
|
"learning_rate": 0.00010157073173118208, |
|
"loss": 0.5531, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.16173473546914485, |
|
"grad_norm": 0.2595437467098236, |
|
"learning_rate": 0.00010125660398833528, |
|
"loss": 0.5787, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.16206081356484878, |
|
"grad_norm": 0.30315423011779785, |
|
"learning_rate": 0.00010094246384331442, |
|
"loss": 0.6634, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.1623868916605527, |
|
"grad_norm": 0.3824993073940277, |
|
"learning_rate": 0.00010062831439655591, |
|
"loss": 0.6479, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.16271296975625663, |
|
"grad_norm": 0.28295570611953735, |
|
"learning_rate": 0.00010031415874858797, |
|
"loss": 0.5965, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.16303904785196055, |
|
"grad_norm": 0.26669177412986755, |
|
"learning_rate": 0.0001, |
|
"loss": 0.5936, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.16336512594766447, |
|
"grad_norm": 0.2724348306655884, |
|
"learning_rate": 9.968584125141204e-05, |
|
"loss": 0.655, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.1636912040433684, |
|
"grad_norm": 0.2577010691165924, |
|
"learning_rate": 9.937168560344412e-05, |
|
"loss": 0.5863, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.1640172821390723, |
|
"grad_norm": 0.2395378202199936, |
|
"learning_rate": 9.90575361566856e-05, |
|
"loss": 0.5955, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.16434336023477622, |
|
"grad_norm": 0.2337670922279358, |
|
"learning_rate": 9.874339601166473e-05, |
|
"loss": 0.5532, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.16466943833048014, |
|
"grad_norm": 0.23461255431175232, |
|
"learning_rate": 9.842926826881796e-05, |
|
"loss": 0.4932, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.16499551642618407, |
|
"grad_norm": 0.3790539503097534, |
|
"learning_rate": 9.81151560284592e-05, |
|
"loss": 0.6278, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.165321594521888, |
|
"grad_norm": 0.24186423420906067, |
|
"learning_rate": 9.78010623907495e-05, |
|
"loss": 0.6043, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.16564767261759192, |
|
"grad_norm": 0.18181544542312622, |
|
"learning_rate": 9.748699045566626e-05, |
|
"loss": 0.5098, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.16597375071329584, |
|
"grad_norm": 0.26670199632644653, |
|
"learning_rate": 9.717294332297268e-05, |
|
"loss": 0.5501, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.16629982880899977, |
|
"grad_norm": 0.31869029998779297, |
|
"learning_rate": 9.685892409218717e-05, |
|
"loss": 0.6554, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.16662590690470366, |
|
"grad_norm": 0.2595357298851013, |
|
"learning_rate": 9.654493586255278e-05, |
|
"loss": 0.603, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.1669519850004076, |
|
"grad_norm": 0.23499582707881927, |
|
"learning_rate": 9.623098173300654e-05, |
|
"loss": 0.58, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.1672780630961115, |
|
"grad_norm": 0.2135399580001831, |
|
"learning_rate": 9.591706480214901e-05, |
|
"loss": 0.5097, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.16760414119181544, |
|
"grad_norm": 0.37453222274780273, |
|
"learning_rate": 9.560318816821353e-05, |
|
"loss": 0.7076, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.16793021928751936, |
|
"grad_norm": 0.23250311613082886, |
|
"learning_rate": 9.528935492903575e-05, |
|
"loss": 0.5374, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.16825629738322329, |
|
"grad_norm": 0.3057493567466736, |
|
"learning_rate": 9.497556818202306e-05, |
|
"loss": 0.7232, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.1685823754789272, |
|
"grad_norm": 0.3576625883579254, |
|
"learning_rate": 9.466183102412395e-05, |
|
"loss": 0.557, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.16890845357463113, |
|
"grad_norm": 0.24760064482688904, |
|
"learning_rate": 9.434814655179755e-05, |
|
"loss": 0.5358, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.16923453167033506, |
|
"grad_norm": 0.25141358375549316, |
|
"learning_rate": 9.403451786098294e-05, |
|
"loss": 0.5919, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.16956060976603896, |
|
"grad_norm": 0.31347140669822693, |
|
"learning_rate": 9.372094804706867e-05, |
|
"loss": 0.6379, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.16988668786174288, |
|
"grad_norm": 0.31334182620048523, |
|
"learning_rate": 9.340744020486222e-05, |
|
"loss": 0.6163, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.1702127659574468, |
|
"grad_norm": 0.30304384231567383, |
|
"learning_rate": 9.309399742855942e-05, |
|
"loss": 0.7165, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.17053884405315073, |
|
"grad_norm": 0.48817434906959534, |
|
"learning_rate": 9.278062281171393e-05, |
|
"loss": 0.6673, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.17086492214885465, |
|
"grad_norm": 0.3089975118637085, |
|
"learning_rate": 9.246731944720675e-05, |
|
"loss": 0.7226, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.17119100024455858, |
|
"grad_norm": 0.2963530123233795, |
|
"learning_rate": 9.215409042721552e-05, |
|
"loss": 0.5804, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.1715170783402625, |
|
"grad_norm": 0.2515937387943268, |
|
"learning_rate": 9.184093884318425e-05, |
|
"loss": 0.5827, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.17184315643596643, |
|
"grad_norm": 0.2320161759853363, |
|
"learning_rate": 9.152786778579267e-05, |
|
"loss": 0.5849, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.17216923453167032, |
|
"grad_norm": 0.4115968644618988, |
|
"learning_rate": 9.121488034492569e-05, |
|
"loss": 0.5939, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.17249531262737425, |
|
"grad_norm": 0.24110546708106995, |
|
"learning_rate": 9.090197960964301e-05, |
|
"loss": 0.5599, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.17282139072307817, |
|
"grad_norm": 0.2787460684776306, |
|
"learning_rate": 9.058916866814858e-05, |
|
"loss": 0.6877, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.1731474688187821, |
|
"grad_norm": 0.2140059471130371, |
|
"learning_rate": 9.027645060776006e-05, |
|
"loss": 0.5474, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.17347354691448602, |
|
"grad_norm": 0.20941783487796783, |
|
"learning_rate": 8.99638285148785e-05, |
|
"loss": 0.6098, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.17379962501018995, |
|
"grad_norm": 0.3070947527885437, |
|
"learning_rate": 8.965130547495776e-05, |
|
"loss": 0.7024, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.17412570310589387, |
|
"grad_norm": 0.4354031980037689, |
|
"learning_rate": 8.933888457247402e-05, |
|
"loss": 0.6728, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.1744517812015978, |
|
"grad_norm": 0.22803445160388947, |
|
"learning_rate": 8.902656889089548e-05, |
|
"loss": 0.6227, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.1747778592973017, |
|
"grad_norm": 0.2915150821208954, |
|
"learning_rate": 8.871436151265184e-05, |
|
"loss": 0.6151, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.17510393739300562, |
|
"grad_norm": 0.2954872250556946, |
|
"learning_rate": 8.840226551910387e-05, |
|
"loss": 0.6851, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.17543001548870954, |
|
"grad_norm": 0.2495146542787552, |
|
"learning_rate": 8.809028399051302e-05, |
|
"loss": 0.5932, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.17575609358441346, |
|
"grad_norm": 0.2785921096801758, |
|
"learning_rate": 8.777842000601105e-05, |
|
"loss": 0.6403, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.1760821716801174, |
|
"grad_norm": 0.258619487285614, |
|
"learning_rate": 8.746667664356956e-05, |
|
"loss": 0.5545, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.1764082497758213, |
|
"grad_norm": 0.2323625683784485, |
|
"learning_rate": 8.715505697996971e-05, |
|
"loss": 0.6349, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.17673432787152524, |
|
"grad_norm": 0.2878716289997101, |
|
"learning_rate": 8.684356409077176e-05, |
|
"loss": 0.6123, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.17706040596722916, |
|
"grad_norm": 0.5154581665992737, |
|
"learning_rate": 8.653220105028474e-05, |
|
"loss": 0.8217, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.1773864840629331, |
|
"grad_norm": 0.2401517629623413, |
|
"learning_rate": 8.62209709315362e-05, |
|
"loss": 0.5884, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.17771256215863698, |
|
"grad_norm": 0.3214757442474365, |
|
"learning_rate": 8.590987680624174e-05, |
|
"loss": 0.6488, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.1780386402543409, |
|
"grad_norm": 0.2335442304611206, |
|
"learning_rate": 8.559892174477479e-05, |
|
"loss": 0.5544, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.17836471835004483, |
|
"grad_norm": 0.23834139108657837, |
|
"learning_rate": 8.528810881613626e-05, |
|
"loss": 0.5983, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.17869079644574876, |
|
"grad_norm": 0.23438434302806854, |
|
"learning_rate": 8.497744108792429e-05, |
|
"loss": 0.566, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.17901687454145268, |
|
"grad_norm": 0.37170785665512085, |
|
"learning_rate": 8.466692162630392e-05, |
|
"loss": 0.6824, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.1793429526371566, |
|
"grad_norm": 0.263026624917984, |
|
"learning_rate": 8.435655349597689e-05, |
|
"loss": 0.6363, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.17966903073286053, |
|
"grad_norm": 0.3249225616455078, |
|
"learning_rate": 8.404633976015134e-05, |
|
"loss": 0.618, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.17999510882856445, |
|
"grad_norm": 0.26690465211868286, |
|
"learning_rate": 8.373628348051165e-05, |
|
"loss": 0.6547, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.18032118692426835, |
|
"grad_norm": 0.22415734827518463, |
|
"learning_rate": 8.342638771718802e-05, |
|
"loss": 0.5189, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.18064726501997228, |
|
"grad_norm": 0.21609823405742645, |
|
"learning_rate": 8.311665552872662e-05, |
|
"loss": 0.6112, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.1809733431156762, |
|
"grad_norm": 0.3650791049003601, |
|
"learning_rate": 8.280708997205904e-05, |
|
"loss": 0.6109, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.18129942121138012, |
|
"grad_norm": 0.2111487239599228, |
|
"learning_rate": 8.249769410247239e-05, |
|
"loss": 0.4548, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.18162549930708405, |
|
"grad_norm": 0.3168624937534332, |
|
"learning_rate": 8.218847097357898e-05, |
|
"loss": 0.6538, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.18195157740278797, |
|
"grad_norm": 0.27414387464523315, |
|
"learning_rate": 8.187942363728625e-05, |
|
"loss": 0.688, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.1822776554984919, |
|
"grad_norm": 0.29763197898864746, |
|
"learning_rate": 8.157055514376666e-05, |
|
"loss": 0.673, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.18260373359419582, |
|
"grad_norm": 0.26111218333244324, |
|
"learning_rate": 8.126186854142752e-05, |
|
"loss": 0.6391, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.18292981168989972, |
|
"grad_norm": 0.34893324971199036, |
|
"learning_rate": 8.095336687688102e-05, |
|
"loss": 0.638, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.18325588978560364, |
|
"grad_norm": 0.279060423374176, |
|
"learning_rate": 8.064505319491398e-05, |
|
"loss": 0.6277, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.18358196788130757, |
|
"grad_norm": 0.30398038029670715, |
|
"learning_rate": 8.033693053845801e-05, |
|
"loss": 0.5687, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.1839080459770115, |
|
"grad_norm": 0.2892376780509949, |
|
"learning_rate": 8.002900194855932e-05, |
|
"loss": 0.6962, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.18423412407271542, |
|
"grad_norm": 0.34928885102272034, |
|
"learning_rate": 7.972127046434878e-05, |
|
"loss": 0.747, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.18456020216841934, |
|
"grad_norm": 0.27299997210502625, |
|
"learning_rate": 7.941373912301189e-05, |
|
"loss": 0.6286, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.18488628026412326, |
|
"grad_norm": 0.34455394744873047, |
|
"learning_rate": 7.910641095975886e-05, |
|
"loss": 0.667, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.1852123583598272, |
|
"grad_norm": 0.2987124025821686, |
|
"learning_rate": 7.879928900779456e-05, |
|
"loss": 0.7059, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.18553843645553109, |
|
"grad_norm": 0.24651587009429932, |
|
"learning_rate": 7.849237629828869e-05, |
|
"loss": 0.5836, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.185864514551235, |
|
"grad_norm": 0.24579085409641266, |
|
"learning_rate": 7.818567586034577e-05, |
|
"loss": 0.5498, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.18619059264693893, |
|
"grad_norm": 0.31513264775276184, |
|
"learning_rate": 7.787919072097531e-05, |
|
"loss": 0.6475, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.18651667074264286, |
|
"grad_norm": 0.2718188464641571, |
|
"learning_rate": 7.75729239050619e-05, |
|
"loss": 0.6896, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.18684274883834678, |
|
"grad_norm": 0.3963709771633148, |
|
"learning_rate": 7.726687843533538e-05, |
|
"loss": 0.7449, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.1871688269340507, |
|
"grad_norm": 0.2776338756084442, |
|
"learning_rate": 7.696105733234098e-05, |
|
"loss": 0.6492, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.18749490502975463, |
|
"grad_norm": 0.20792852342128754, |
|
"learning_rate": 7.66554636144095e-05, |
|
"loss": 0.536, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.18782098312545856, |
|
"grad_norm": 0.29838401079177856, |
|
"learning_rate": 7.635010029762756e-05, |
|
"loss": 0.6181, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.18814706122116248, |
|
"grad_norm": 0.2557230293750763, |
|
"learning_rate": 7.604497039580785e-05, |
|
"loss": 0.5459, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.18847313931686638, |
|
"grad_norm": 0.283884733915329, |
|
"learning_rate": 7.574007692045928e-05, |
|
"loss": 0.5817, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.1887992174125703, |
|
"grad_norm": 0.26855558156967163, |
|
"learning_rate": 7.543542288075739e-05, |
|
"loss": 0.6018, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.18912529550827423, |
|
"grad_norm": 0.2804105579853058, |
|
"learning_rate": 7.513101128351454e-05, |
|
"loss": 0.5872, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.18945137360397815, |
|
"grad_norm": 0.40271496772766113, |
|
"learning_rate": 7.48268451331503e-05, |
|
"loss": 0.7282, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.18977745169968208, |
|
"grad_norm": 0.27677327394485474, |
|
"learning_rate": 7.45229274316618e-05, |
|
"loss": 0.7078, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.190103529795386, |
|
"grad_norm": 0.28340569138526917, |
|
"learning_rate": 7.421926117859403e-05, |
|
"loss": 0.5747, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.19042960789108992, |
|
"grad_norm": 0.2599877715110779, |
|
"learning_rate": 7.391584937101033e-05, |
|
"loss": 0.5895, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.19075568598679385, |
|
"grad_norm": 0.2972414791584015, |
|
"learning_rate": 7.361269500346274e-05, |
|
"loss": 0.5901, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.19108176408249775, |
|
"grad_norm": 0.2536401152610779, |
|
"learning_rate": 7.330980106796246e-05, |
|
"loss": 0.5877, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.19140784217820167, |
|
"grad_norm": 0.2621424198150635, |
|
"learning_rate": 7.300717055395039e-05, |
|
"loss": 0.7006, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.1917339202739056, |
|
"grad_norm": 0.3111814558506012, |
|
"learning_rate": 7.270480644826749e-05, |
|
"loss": 0.6821, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.19205999836960952, |
|
"grad_norm": 0.3220475912094116, |
|
"learning_rate": 7.240271173512546e-05, |
|
"loss": 0.6543, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.19238607646531344, |
|
"grad_norm": 0.3028581142425537, |
|
"learning_rate": 7.210088939607708e-05, |
|
"loss": 0.6336, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.19271215456101737, |
|
"grad_norm": 0.2866508364677429, |
|
"learning_rate": 7.179934240998706e-05, |
|
"loss": 0.6173, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.1930382326567213, |
|
"grad_norm": 0.26337945461273193, |
|
"learning_rate": 7.149807375300239e-05, |
|
"loss": 0.5194, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.19336431075242522, |
|
"grad_norm": 0.31201040744781494, |
|
"learning_rate": 7.119708639852312e-05, |
|
"loss": 0.5724, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.1936903888481291, |
|
"grad_norm": 0.2324737012386322, |
|
"learning_rate": 7.089638331717284e-05, |
|
"loss": 0.562, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.19401646694383304, |
|
"grad_norm": 0.3376362919807434, |
|
"learning_rate": 7.059596747676962e-05, |
|
"loss": 0.5773, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.19434254503953696, |
|
"grad_norm": 0.26487740874290466, |
|
"learning_rate": 7.029584184229653e-05, |
|
"loss": 0.5336, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.1946686231352409, |
|
"grad_norm": 0.35700923204421997, |
|
"learning_rate": 6.999600937587239e-05, |
|
"loss": 0.592, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.1949947012309448, |
|
"grad_norm": 0.33490103483200073, |
|
"learning_rate": 6.969647303672262e-05, |
|
"loss": 0.6538, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.19532077932664874, |
|
"grad_norm": 0.27960991859436035, |
|
"learning_rate": 6.939723578114993e-05, |
|
"loss": 0.5769, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.19564685742235266, |
|
"grad_norm": 0.2691517472267151, |
|
"learning_rate": 6.909830056250527e-05, |
|
"loss": 0.6472, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.19597293551805658, |
|
"grad_norm": 0.2690907120704651, |
|
"learning_rate": 6.879967033115853e-05, |
|
"loss": 0.6053, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.1962990136137605, |
|
"grad_norm": 0.2998523712158203, |
|
"learning_rate": 6.850134803446954e-05, |
|
"loss": 0.6843, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.1966250917094644, |
|
"grad_norm": 0.2649098336696625, |
|
"learning_rate": 6.820333661675893e-05, |
|
"loss": 0.4998, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.19695116980516833, |
|
"grad_norm": 0.2461191862821579, |
|
"learning_rate": 6.790563901927907e-05, |
|
"loss": 0.5161, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.19727724790087225, |
|
"grad_norm": 0.221087247133255, |
|
"learning_rate": 6.760825818018508e-05, |
|
"loss": 0.5993, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.19760332599657618, |
|
"grad_norm": 0.3097057640552521, |
|
"learning_rate": 6.731119703450577e-05, |
|
"loss": 0.6602, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.1979294040922801, |
|
"grad_norm": 0.27952730655670166, |
|
"learning_rate": 6.701445851411472e-05, |
|
"loss": 0.6358, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.19825548218798403, |
|
"grad_norm": 0.29174724221229553, |
|
"learning_rate": 6.671804554770135e-05, |
|
"loss": 0.6346, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.19858156028368795, |
|
"grad_norm": 0.3168889582157135, |
|
"learning_rate": 6.642196106074194e-05, |
|
"loss": 0.7173, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.19890763837939188, |
|
"grad_norm": 0.3026885390281677, |
|
"learning_rate": 6.612620797547087e-05, |
|
"loss": 0.6518, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.19923371647509577, |
|
"grad_norm": 0.27130576968193054, |
|
"learning_rate": 6.583078921085167e-05, |
|
"loss": 0.5877, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.1995597945707997, |
|
"grad_norm": 0.2264910638332367, |
|
"learning_rate": 6.55357076825483e-05, |
|
"loss": 0.5495, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.19988587266650362, |
|
"grad_norm": 0.2999652326107025, |
|
"learning_rate": 6.52409663028963e-05, |
|
"loss": 0.7243, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.20021195076220755, |
|
"grad_norm": 0.3234879970550537, |
|
"learning_rate": 6.494656798087412e-05, |
|
"loss": 0.5561, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.20053802885791147, |
|
"grad_norm": 0.3255922198295593, |
|
"learning_rate": 6.465251562207431e-05, |
|
"loss": 0.7417, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.2008641069536154, |
|
"grad_norm": 0.3037979304790497, |
|
"learning_rate": 6.435881212867493e-05, |
|
"loss": 0.6452, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.20119018504931932, |
|
"grad_norm": 0.3250497281551361, |
|
"learning_rate": 6.406546039941094e-05, |
|
"loss": 0.6424, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.20151626314502324, |
|
"grad_norm": 0.28922873735427856, |
|
"learning_rate": 6.377246332954544e-05, |
|
"loss": 0.5418, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.20184234124072714, |
|
"grad_norm": 0.31836992502212524, |
|
"learning_rate": 6.347982381084123e-05, |
|
"loss": 0.7, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.20216841933643107, |
|
"grad_norm": 0.242594376206398, |
|
"learning_rate": 6.318754473153221e-05, |
|
"loss": 0.5934, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.202494497432135, |
|
"grad_norm": 0.2678995132446289, |
|
"learning_rate": 6.289562897629492e-05, |
|
"loss": 0.6335, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.20282057552783891, |
|
"grad_norm": 0.2010084092617035, |
|
"learning_rate": 6.260407942621998e-05, |
|
"loss": 0.4732, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.20314665362354284, |
|
"grad_norm": 0.2803085446357727, |
|
"learning_rate": 6.231289895878375e-05, |
|
"loss": 0.6144, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.20347273171924676, |
|
"grad_norm": 0.29869019985198975, |
|
"learning_rate": 6.20220904478199e-05, |
|
"loss": 0.6179, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.2037988098149507, |
|
"grad_norm": 0.2984759509563446, |
|
"learning_rate": 6.173165676349103e-05, |
|
"loss": 0.6074, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.2041248879106546, |
|
"grad_norm": 0.24173347651958466, |
|
"learning_rate": 6.144160077226036e-05, |
|
"loss": 0.5561, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.20445096600635854, |
|
"grad_norm": 0.25193408131599426, |
|
"learning_rate": 6.11519253368634e-05, |
|
"loss": 0.5448, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.20477704410206243, |
|
"grad_norm": 0.29573777318000793, |
|
"learning_rate": 6.086263331627976e-05, |
|
"loss": 0.6412, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.20510312219776636, |
|
"grad_norm": 0.27745357155799866, |
|
"learning_rate": 6.05737275657049e-05, |
|
"loss": 0.6071, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.20542920029347028, |
|
"grad_norm": 0.2653226852416992, |
|
"learning_rate": 6.0285210936521955e-05, |
|
"loss": 0.57, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.2057552783891742, |
|
"grad_norm": 0.32329148054122925, |
|
"learning_rate": 5.999708627627354e-05, |
|
"loss": 0.6697, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.20608135648487813, |
|
"grad_norm": 0.2372172623872757, |
|
"learning_rate": 5.9709356428633746e-05, |
|
"loss": 0.5417, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.20640743458058206, |
|
"grad_norm": 0.2067628800868988, |
|
"learning_rate": 5.9422024233380013e-05, |
|
"loss": 0.5155, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.20673351267628598, |
|
"grad_norm": 0.27399685978889465, |
|
"learning_rate": 5.913509252636511e-05, |
|
"loss": 0.5758, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.2070595907719899, |
|
"grad_norm": 0.2475450485944748, |
|
"learning_rate": 5.884856413948913e-05, |
|
"loss": 0.5713, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.2073856688676938, |
|
"grad_norm": 0.23439644277095795, |
|
"learning_rate": 5.856244190067159e-05, |
|
"loss": 0.5979, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.20771174696339773, |
|
"grad_norm": 0.22557716071605682, |
|
"learning_rate": 5.82767286338235e-05, |
|
"loss": 0.5951, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.20803782505910165, |
|
"grad_norm": 0.3093213140964508, |
|
"learning_rate": 5.799142715881938e-05, |
|
"loss": 0.6018, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.20836390315480557, |
|
"grad_norm": 0.3633502125740051, |
|
"learning_rate": 5.770654029146969e-05, |
|
"loss": 0.7025, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.2086899812505095, |
|
"grad_norm": 0.2910383343696594, |
|
"learning_rate": 5.7422070843492734e-05, |
|
"loss": 0.6501, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.20901605934621342, |
|
"grad_norm": 0.36639973521232605, |
|
"learning_rate": 5.713802162248718e-05, |
|
"loss": 0.6685, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.20934213744191735, |
|
"grad_norm": 0.3218449652194977, |
|
"learning_rate": 5.6854395431904094e-05, |
|
"loss": 0.5902, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.20966821553762127, |
|
"grad_norm": 0.2687339186668396, |
|
"learning_rate": 5.657119507101954e-05, |
|
"loss": 0.6373, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.20999429363332517, |
|
"grad_norm": 0.25556617975234985, |
|
"learning_rate": 5.6288423334906735e-05, |
|
"loss": 0.5198, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.2103203717290291, |
|
"grad_norm": 0.20101326704025269, |
|
"learning_rate": 5.6006083014408484e-05, |
|
"loss": 0.5569, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.21064644982473302, |
|
"grad_norm": 0.27940645813941956, |
|
"learning_rate": 5.572417689610987e-05, |
|
"loss": 0.6211, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.21097252792043694, |
|
"grad_norm": 0.24627013504505157, |
|
"learning_rate": 5.544270776231038e-05, |
|
"loss": 0.5832, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.21129860601614087, |
|
"grad_norm": 0.2317035049200058, |
|
"learning_rate": 5.5161678390996796e-05, |
|
"loss": 0.5157, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.2116246841118448, |
|
"grad_norm": 0.36045899987220764, |
|
"learning_rate": 5.488109155581549e-05, |
|
"loss": 0.6291, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.21195076220754872, |
|
"grad_norm": 0.24675047397613525, |
|
"learning_rate": 5.4600950026045326e-05, |
|
"loss": 0.5645, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.21227684030325264, |
|
"grad_norm": 0.29204484820365906, |
|
"learning_rate": 5.4321256566570036e-05, |
|
"loss": 0.6878, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.21260291839895654, |
|
"grad_norm": 0.2781514823436737, |
|
"learning_rate": 5.404201393785122e-05, |
|
"loss": 0.6069, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.21292899649466046, |
|
"grad_norm": 0.27885934710502625, |
|
"learning_rate": 5.3763224895900846e-05, |
|
"loss": 0.6134, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.21325507459036439, |
|
"grad_norm": 0.3275914788246155, |
|
"learning_rate": 5.348489219225416e-05, |
|
"loss": 0.6795, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.2135811526860683, |
|
"grad_norm": 0.2829757034778595, |
|
"learning_rate": 5.320701857394268e-05, |
|
"loss": 0.6291, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.21390723078177223, |
|
"grad_norm": 0.31990575790405273, |
|
"learning_rate": 5.292960678346675e-05, |
|
"loss": 0.6298, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.21423330887747616, |
|
"grad_norm": 0.2904050350189209, |
|
"learning_rate": 5.265265955876879e-05, |
|
"loss": 0.6357, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.21455938697318008, |
|
"grad_norm": 0.2836860418319702, |
|
"learning_rate": 5.237617963320608e-05, |
|
"loss": 0.6394, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.214885465068884, |
|
"grad_norm": 0.2522071301937103, |
|
"learning_rate": 5.210016973552391e-05, |
|
"loss": 0.6214, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.21521154316458793, |
|
"grad_norm": 0.2518799901008606, |
|
"learning_rate": 5.182463258982846e-05, |
|
"loss": 0.6404, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.21553762126029183, |
|
"grad_norm": 0.21216027438640594, |
|
"learning_rate": 5.1549570915560206e-05, |
|
"loss": 0.4331, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.21586369935599575, |
|
"grad_norm": 0.34990862011909485, |
|
"learning_rate": 5.127498742746675e-05, |
|
"loss": 0.5828, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.21618977745169968, |
|
"grad_norm": 0.2469099760055542, |
|
"learning_rate": 5.100088483557634e-05, |
|
"loss": 0.5904, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.2165158555474036, |
|
"grad_norm": 0.33630579710006714, |
|
"learning_rate": 5.072726584517086e-05, |
|
"loss": 0.5634, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.21684193364310753, |
|
"grad_norm": 0.2696487307548523, |
|
"learning_rate": 5.045413315675924e-05, |
|
"loss": 0.5211, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.21716801173881145, |
|
"grad_norm": 0.30904674530029297, |
|
"learning_rate": 5.018148946605092e-05, |
|
"loss": 0.624, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.21749408983451538, |
|
"grad_norm": 0.3870840072631836, |
|
"learning_rate": 4.990933746392899e-05, |
|
"loss": 0.6745, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.2178201679302193, |
|
"grad_norm": 0.3054164946079254, |
|
"learning_rate": 4.9637679836423924e-05, |
|
"loss": 0.6029, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.2181462460259232, |
|
"grad_norm": 0.24271585047245026, |
|
"learning_rate": 4.9366519264686725e-05, |
|
"loss": 0.5774, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.21847232412162712, |
|
"grad_norm": 0.2951551377773285, |
|
"learning_rate": 4.909585842496287e-05, |
|
"loss": 0.6597, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.21879840221733104, |
|
"grad_norm": 0.26155775785446167, |
|
"learning_rate": 4.8825699988565485e-05, |
|
"loss": 0.6342, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.21912448031303497, |
|
"grad_norm": 0.2630130648612976, |
|
"learning_rate": 4.8556046621849346e-05, |
|
"loss": 0.5336, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.2194505584087389, |
|
"grad_norm": 0.21812640130519867, |
|
"learning_rate": 4.828690098618429e-05, |
|
"loss": 0.6072, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.21977663650444282, |
|
"grad_norm": 0.25095483660697937, |
|
"learning_rate": 4.8018265737929044e-05, |
|
"loss": 0.6364, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.22010271460014674, |
|
"grad_norm": 0.20363305509090424, |
|
"learning_rate": 4.7750143528405126e-05, |
|
"loss": 0.5275, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.22042879269585067, |
|
"grad_norm": 0.29310062527656555, |
|
"learning_rate": 4.748253700387042e-05, |
|
"loss": 0.6871, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.22075487079155456, |
|
"grad_norm": 0.24926213920116425, |
|
"learning_rate": 4.721544880549337e-05, |
|
"loss": 0.5561, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.2210809488872585, |
|
"grad_norm": 0.3037482798099518, |
|
"learning_rate": 4.694888156932658e-05, |
|
"loss": 0.6398, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.2214070269829624, |
|
"grad_norm": 0.2365943193435669, |
|
"learning_rate": 4.668283792628114e-05, |
|
"loss": 0.5932, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.22173310507866634, |
|
"grad_norm": 0.2507438361644745, |
|
"learning_rate": 4.6417320502100316e-05, |
|
"loss": 0.6336, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.22205918317437026, |
|
"grad_norm": 0.2556549310684204, |
|
"learning_rate": 4.615233191733398e-05, |
|
"loss": 0.5695, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.22238526127007419, |
|
"grad_norm": 0.3738943040370941, |
|
"learning_rate": 4.588787478731242e-05, |
|
"loss": 0.6191, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.2227113393657781, |
|
"grad_norm": 0.2213764786720276, |
|
"learning_rate": 4.5623951722120736e-05, |
|
"loss": 0.4365, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.22303741746148203, |
|
"grad_norm": 0.24922537803649902, |
|
"learning_rate": 4.5360565326573104e-05, |
|
"loss": 0.5419, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.22336349555718596, |
|
"grad_norm": 0.27778947353363037, |
|
"learning_rate": 4.5097718200186814e-05, |
|
"loss": 0.5728, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.22368957365288986, |
|
"grad_norm": 0.273242324590683, |
|
"learning_rate": 4.483541293715698e-05, |
|
"loss": 0.547, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.22401565174859378, |
|
"grad_norm": 0.325244277715683, |
|
"learning_rate": 4.457365212633058e-05, |
|
"loss": 0.5824, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.2243417298442977, |
|
"grad_norm": 0.24153675138950348, |
|
"learning_rate": 4.431243835118124e-05, |
|
"loss": 0.5492, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.22466780794000163, |
|
"grad_norm": 0.2707926332950592, |
|
"learning_rate": 4.4051774189783315e-05, |
|
"loss": 0.5661, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.22499388603570555, |
|
"grad_norm": 0.22814729809761047, |
|
"learning_rate": 4.379166221478697e-05, |
|
"loss": 0.5473, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.22531996413140948, |
|
"grad_norm": 0.3418341279029846, |
|
"learning_rate": 4.3532104993392306e-05, |
|
"loss": 0.6342, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.2256460422271134, |
|
"grad_norm": 0.2698429822921753, |
|
"learning_rate": 4.327310508732437e-05, |
|
"loss": 0.6629, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.22597212032281733, |
|
"grad_norm": 0.25924572348594666, |
|
"learning_rate": 4.301466505280762e-05, |
|
"loss": 0.6371, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.22629819841852122, |
|
"grad_norm": 0.24862578511238098, |
|
"learning_rate": 4.2756787440540936e-05, |
|
"loss": 0.6055, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.22662427651422515, |
|
"grad_norm": 0.2641250491142273, |
|
"learning_rate": 4.249947479567218e-05, |
|
"loss": 0.5953, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.22695035460992907, |
|
"grad_norm": 0.34082797169685364, |
|
"learning_rate": 4.224272965777326e-05, |
|
"loss": 0.6108, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.227276432705633, |
|
"grad_norm": 0.26527827978134155, |
|
"learning_rate": 4.1986554560815096e-05, |
|
"loss": 0.5238, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.22760251080133692, |
|
"grad_norm": 0.2595398426055908, |
|
"learning_rate": 4.173095203314241e-05, |
|
"loss": 0.5896, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.22792858889704085, |
|
"grad_norm": 0.317978173494339, |
|
"learning_rate": 4.1475924597449024e-05, |
|
"loss": 0.6303, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.22825466699274477, |
|
"grad_norm": 0.28211531043052673, |
|
"learning_rate": 4.12214747707527e-05, |
|
"loss": 0.5635, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.2285807450884487, |
|
"grad_norm": 0.2978782653808594, |
|
"learning_rate": 4.096760506437057e-05, |
|
"loss": 0.6719, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.2289068231841526, |
|
"grad_norm": 0.22416797280311584, |
|
"learning_rate": 4.071431798389408e-05, |
|
"loss": 0.6223, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.22923290127985652, |
|
"grad_norm": 0.25638195872306824, |
|
"learning_rate": 4.0461616029164526e-05, |
|
"loss": 0.5414, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.22955897937556044, |
|
"grad_norm": 0.3361692428588867, |
|
"learning_rate": 4.020950169424815e-05, |
|
"loss": 0.6814, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.22988505747126436, |
|
"grad_norm": 0.45392799377441406, |
|
"learning_rate": 3.9957977467411615e-05, |
|
"loss": 0.7154, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.2302111355669683, |
|
"grad_norm": 0.31325310468673706, |
|
"learning_rate": 3.9707045831097555e-05, |
|
"loss": 0.73, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.2305372136626722, |
|
"grad_norm": 0.31382250785827637, |
|
"learning_rate": 3.945670926189987e-05, |
|
"loss": 0.5842, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.23086329175837614, |
|
"grad_norm": 0.31560176610946655, |
|
"learning_rate": 3.920697023053949e-05, |
|
"loss": 0.6322, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.23118936985408006, |
|
"grad_norm": 0.28578540682792664, |
|
"learning_rate": 3.895783120183976e-05, |
|
"loss": 0.5562, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.231515447949784, |
|
"grad_norm": 0.3130005896091461, |
|
"learning_rate": 3.8709294634702376e-05, |
|
"loss": 0.5816, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.23184152604548788, |
|
"grad_norm": 0.21266983449459076, |
|
"learning_rate": 3.846136298208285e-05, |
|
"loss": 0.6178, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.2321676041411918, |
|
"grad_norm": 0.28499025106430054, |
|
"learning_rate": 3.821403869096658e-05, |
|
"loss": 0.5944, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.23249368223689573, |
|
"grad_norm": 0.2727053761482239, |
|
"learning_rate": 3.796732420234443e-05, |
|
"loss": 0.5336, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.23281976033259966, |
|
"grad_norm": 0.4271116554737091, |
|
"learning_rate": 3.7721221951188765e-05, |
|
"loss": 0.5785, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.23314583842830358, |
|
"grad_norm": 0.2158544808626175, |
|
"learning_rate": 3.747573436642951e-05, |
|
"loss": 0.5463, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.2334719165240075, |
|
"grad_norm": 0.23925550282001495, |
|
"learning_rate": 3.7230863870929964e-05, |
|
"loss": 0.5424, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.23379799461971143, |
|
"grad_norm": 0.2507498562335968, |
|
"learning_rate": 3.698661288146311e-05, |
|
"loss": 0.5701, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.23412407271541535, |
|
"grad_norm": 0.3710465133190155, |
|
"learning_rate": 3.674298380868756e-05, |
|
"loss": 0.5927, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.23445015081111925, |
|
"grad_norm": 0.28400731086730957, |
|
"learning_rate": 3.649997905712396e-05, |
|
"loss": 0.5458, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.23477622890682318, |
|
"grad_norm": 0.2662917375564575, |
|
"learning_rate": 3.6257601025131026e-05, |
|
"loss": 0.5832, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 0.2351023070025271, |
|
"grad_norm": 0.28415587544441223, |
|
"learning_rate": 3.601585210488218e-05, |
|
"loss": 0.5417, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 0.23542838509823102, |
|
"grad_norm": 0.27368491888046265, |
|
"learning_rate": 3.577473468234156e-05, |
|
"loss": 0.6384, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 0.23575446319393495, |
|
"grad_norm": 0.2925770878791809, |
|
"learning_rate": 3.553425113724088e-05, |
|
"loss": 0.6143, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 0.23608054128963887, |
|
"grad_norm": 0.3160930871963501, |
|
"learning_rate": 3.52944038430556e-05, |
|
"loss": 0.5924, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 0.2364066193853428, |
|
"grad_norm": 0.3158300518989563, |
|
"learning_rate": 3.5055195166981645e-05, |
|
"loss": 0.4604, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 0.23673269748104672, |
|
"grad_norm": 0.2738193869590759, |
|
"learning_rate": 3.481662746991214e-05, |
|
"loss": 0.5122, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 0.23705877557675062, |
|
"grad_norm": 0.39256951212882996, |
|
"learning_rate": 3.4578703106413904e-05, |
|
"loss": 0.6385, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 0.23738485367245454, |
|
"grad_norm": 0.2841635048389435, |
|
"learning_rate": 3.4341424424704375e-05, |
|
"loss": 0.5826, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 0.23771093176815847, |
|
"grad_norm": 0.33367428183555603, |
|
"learning_rate": 3.4104793766628304e-05, |
|
"loss": 0.5943, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 0.2380370098638624, |
|
"grad_norm": 0.324015349149704, |
|
"learning_rate": 3.386881346763483e-05, |
|
"loss": 0.5332, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 0.23836308795956632, |
|
"grad_norm": 0.30556005239486694, |
|
"learning_rate": 3.363348585675414e-05, |
|
"loss": 0.536, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 0.23868916605527024, |
|
"grad_norm": 0.236478790640831, |
|
"learning_rate": 3.339881325657484e-05, |
|
"loss": 0.4597, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 0.23901524415097417, |
|
"grad_norm": 0.3337570130825043, |
|
"learning_rate": 3.316479798322072e-05, |
|
"loss": 0.5152, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 0.2393413222466781, |
|
"grad_norm": 0.3730219006538391, |
|
"learning_rate": 3.2931442346328004e-05, |
|
"loss": 0.7172, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 0.23966740034238201, |
|
"grad_norm": 0.3032122254371643, |
|
"learning_rate": 3.269874864902269e-05, |
|
"loss": 0.6797, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 0.2399934784380859, |
|
"grad_norm": 0.2508755624294281, |
|
"learning_rate": 3.246671918789755e-05, |
|
"loss": 0.5697, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 0.24031955653378984, |
|
"grad_norm": 0.26841485500335693, |
|
"learning_rate": 3.223535625298979e-05, |
|
"loss": 0.5376, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 0.24064563462949376, |
|
"grad_norm": 0.21499516069889069, |
|
"learning_rate": 3.200466212775808e-05, |
|
"loss": 0.5711, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 0.24097171272519768, |
|
"grad_norm": 0.2505497336387634, |
|
"learning_rate": 3.1774639089060363e-05, |
|
"loss": 0.5365, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 0.2412977908209016, |
|
"grad_norm": 0.2722863256931305, |
|
"learning_rate": 3.154528940713113e-05, |
|
"loss": 0.5966, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 0.24162386891660553, |
|
"grad_norm": 0.26614895462989807, |
|
"learning_rate": 3.1316615345559185e-05, |
|
"loss": 0.6171, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 0.24194994701230946, |
|
"grad_norm": 0.29123929142951965, |
|
"learning_rate": 3.108861916126518e-05, |
|
"loss": 0.6223, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 0.24227602510801338, |
|
"grad_norm": 0.2488095760345459, |
|
"learning_rate": 3.086130310447937e-05, |
|
"loss": 0.5843, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 0.24260210320371728, |
|
"grad_norm": 0.4080983102321625, |
|
"learning_rate": 3.063466941871952e-05, |
|
"loss": 0.649, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 0.2429281812994212, |
|
"grad_norm": 0.278074711561203, |
|
"learning_rate": 3.0408720340768572e-05, |
|
"loss": 0.6112, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 0.24325425939512513, |
|
"grad_norm": 0.22914646565914154, |
|
"learning_rate": 3.018345810065275e-05, |
|
"loss": 0.5193, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 0.24358033749082905, |
|
"grad_norm": 0.2255176156759262, |
|
"learning_rate": 2.9958884921619367e-05, |
|
"loss": 0.4962, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 0.24390641558653298, |
|
"grad_norm": 0.24137818813323975, |
|
"learning_rate": 2.9735003020115092e-05, |
|
"loss": 0.4636, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 0.2442324936822369, |
|
"grad_norm": 0.2586713433265686, |
|
"learning_rate": 2.9511814605763855e-05, |
|
"loss": 0.6269, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 0.24455857177794083, |
|
"grad_norm": 0.29199546575546265, |
|
"learning_rate": 2.9289321881345254e-05, |
|
"loss": 0.6608, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 0.24488464987364475, |
|
"grad_norm": 0.2844295799732208, |
|
"learning_rate": 2.9067527042772636e-05, |
|
"loss": 0.6155, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 0.24521072796934865, |
|
"grad_norm": 0.25179925560951233, |
|
"learning_rate": 2.8846432279071467e-05, |
|
"loss": 0.529, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 0.24553680606505257, |
|
"grad_norm": 0.2996719181537628, |
|
"learning_rate": 2.8626039772357882e-05, |
|
"loss": 0.626, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 0.2458628841607565, |
|
"grad_norm": 0.2830248177051544, |
|
"learning_rate": 2.840635169781688e-05, |
|
"loss": 0.5863, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 0.24618896225646042, |
|
"grad_norm": 0.32690149545669556, |
|
"learning_rate": 2.8187370223681132e-05, |
|
"loss": 0.6031, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 0.24651504035216434, |
|
"grad_norm": 0.2886967658996582, |
|
"learning_rate": 2.7969097511209308e-05, |
|
"loss": 0.6521, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 0.24684111844786827, |
|
"grad_norm": 0.2926975190639496, |
|
"learning_rate": 2.775153571466502e-05, |
|
"loss": 0.6166, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 0.2471671965435722, |
|
"grad_norm": 0.25793954730033875, |
|
"learning_rate": 2.753468698129533e-05, |
|
"loss": 0.6065, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 0.24749327463927612, |
|
"grad_norm": 0.30857494473457336, |
|
"learning_rate": 2.7318553451309726e-05, |
|
"loss": 0.6399, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 0.24781935273498001, |
|
"grad_norm": 0.3076545298099518, |
|
"learning_rate": 2.7103137257858868e-05, |
|
"loss": 0.58, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 0.24814543083068394, |
|
"grad_norm": 0.28424713015556335, |
|
"learning_rate": 2.688844052701359e-05, |
|
"loss": 0.5929, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 0.24847150892638786, |
|
"grad_norm": 0.26437848806381226, |
|
"learning_rate": 2.6674465377744017e-05, |
|
"loss": 0.5904, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 0.2487975870220918, |
|
"grad_norm": 0.35582268238067627, |
|
"learning_rate": 2.646121392189841e-05, |
|
"loss": 0.6299, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 0.2491236651177957, |
|
"grad_norm": 0.3153063654899597, |
|
"learning_rate": 2.624868826418262e-05, |
|
"loss": 0.6415, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 0.24944974321349964, |
|
"grad_norm": 0.23518048226833344, |
|
"learning_rate": 2.603689050213902e-05, |
|
"loss": 0.5852, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 0.24977582130920356, |
|
"grad_norm": 0.3225111663341522, |
|
"learning_rate": 2.582582272612609e-05, |
|
"loss": 0.6137, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 0.25010189940490746, |
|
"grad_norm": 0.28972920775413513, |
|
"learning_rate": 2.561548701929749e-05, |
|
"loss": 0.6576, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 0.2504279775006114, |
|
"grad_norm": 0.2671186625957489, |
|
"learning_rate": 2.540588545758179e-05, |
|
"loss": 0.557, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 0.2507540555963153, |
|
"grad_norm": 0.29164326190948486, |
|
"learning_rate": 2.5197020109661772e-05, |
|
"loss": 0.5875, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 0.25108013369201926, |
|
"grad_norm": 0.39768028259277344, |
|
"learning_rate": 2.4988893036954043e-05, |
|
"loss": 0.6615, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 0.25140621178772316, |
|
"grad_norm": 0.44214484095573425, |
|
"learning_rate": 2.4781506293588873e-05, |
|
"loss": 0.5663, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 0.2517322898834271, |
|
"grad_norm": 0.3203497529029846, |
|
"learning_rate": 2.4574861926389615e-05, |
|
"loss": 0.6532, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 0.252058367979131, |
|
"grad_norm": 0.30829140543937683, |
|
"learning_rate": 2.436896197485282e-05, |
|
"loss": 0.6116, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 0.2523844460748349, |
|
"grad_norm": 0.3043927550315857, |
|
"learning_rate": 2.4163808471127812e-05, |
|
"loss": 0.6069, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 0.25271052417053885, |
|
"grad_norm": 0.2910175323486328, |
|
"learning_rate": 2.3959403439996907e-05, |
|
"loss": 0.5352, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 0.25303660226624275, |
|
"grad_norm": 0.29559823870658875, |
|
"learning_rate": 2.37557488988552e-05, |
|
"loss": 0.5325, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 0.2533626803619467, |
|
"grad_norm": 0.26113376021385193, |
|
"learning_rate": 2.3552846857690846e-05, |
|
"loss": 0.6223, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 0.2536887584576506, |
|
"grad_norm": 0.3582890033721924, |
|
"learning_rate": 2.3350699319065026e-05, |
|
"loss": 0.6703, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 0.25401483655335455, |
|
"grad_norm": 0.25528156757354736, |
|
"learning_rate": 2.3149308278092342e-05, |
|
"loss": 0.5875, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 0.25434091464905845, |
|
"grad_norm": 0.2476278692483902, |
|
"learning_rate": 2.2948675722421086e-05, |
|
"loss": 0.5797, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 0.25466699274476234, |
|
"grad_norm": 0.2513749897480011, |
|
"learning_rate": 2.2748803632213557e-05, |
|
"loss": 0.5855, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 0.2549930708404663, |
|
"grad_norm": 0.23542198538780212, |
|
"learning_rate": 2.254969398012663e-05, |
|
"loss": 0.5742, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 0.2553191489361702, |
|
"grad_norm": 0.3274197578430176, |
|
"learning_rate": 2.235134873129213e-05, |
|
"loss": 0.6306, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 0.25564522703187414, |
|
"grad_norm": 0.3096897602081299, |
|
"learning_rate": 2.2153769843297667e-05, |
|
"loss": 0.5931, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 0.25597130512757804, |
|
"grad_norm": 0.22587507963180542, |
|
"learning_rate": 2.195695926616702e-05, |
|
"loss": 0.4754, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 0.256297383223282, |
|
"grad_norm": 0.22682449221611023, |
|
"learning_rate": 2.1760918942341192e-05, |
|
"loss": 0.4945, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 0.2566234613189859, |
|
"grad_norm": 0.24025246500968933, |
|
"learning_rate": 2.1565650806658975e-05, |
|
"loss": 0.5782, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 0.25694953941468984, |
|
"grad_norm": 0.2858300805091858, |
|
"learning_rate": 2.137115678633811e-05, |
|
"loss": 0.6889, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 0.25727561751039374, |
|
"grad_norm": 0.24029089510440826, |
|
"learning_rate": 2.1177438800956007e-05, |
|
"loss": 0.6623, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 0.25760169560609764, |
|
"grad_norm": 0.2904897928237915, |
|
"learning_rate": 2.098449876243096e-05, |
|
"loss": 0.6324, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 0.2579277737018016, |
|
"grad_norm": 0.38068699836730957, |
|
"learning_rate": 2.07923385750033e-05, |
|
"loss": 0.6569, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 0.2582538517975055, |
|
"grad_norm": 0.37293192744255066, |
|
"learning_rate": 2.0600960135216462e-05, |
|
"loss": 0.7311, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 0.25857992989320944, |
|
"grad_norm": 0.43448659777641296, |
|
"learning_rate": 2.0410365331898416e-05, |
|
"loss": 0.5977, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 0.25890600798891333, |
|
"grad_norm": 0.3264097273349762, |
|
"learning_rate": 2.0220556046142893e-05, |
|
"loss": 0.6446, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 0.2592320860846173, |
|
"grad_norm": 0.31544405221939087, |
|
"learning_rate": 2.0031534151290943e-05, |
|
"loss": 0.63, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 0.2595581641803212, |
|
"grad_norm": 0.2896043658256531, |
|
"learning_rate": 1.9843301512912327e-05, |
|
"loss": 0.6207, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 0.25988424227602513, |
|
"grad_norm": 0.22150889039039612, |
|
"learning_rate": 1.965585998878724e-05, |
|
"loss": 0.5631, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 0.26021032037172903, |
|
"grad_norm": 0.31330549716949463, |
|
"learning_rate": 1.946921142888781e-05, |
|
"loss": 0.5656, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 0.26053639846743293, |
|
"grad_norm": 0.2872115969657898, |
|
"learning_rate": 1.928335767535997e-05, |
|
"loss": 0.5956, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 0.2608624765631369, |
|
"grad_norm": 0.23877061903476715, |
|
"learning_rate": 1.9098300562505266e-05, |
|
"loss": 0.5805, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 0.2611885546588408, |
|
"grad_norm": 0.4347909688949585, |
|
"learning_rate": 1.891404191676265e-05, |
|
"loss": 0.7054, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 0.26151463275454473, |
|
"grad_norm": 0.28407391905784607, |
|
"learning_rate": 1.8730583556690605e-05, |
|
"loss": 0.5604, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 0.2618407108502486, |
|
"grad_norm": 0.26138556003570557, |
|
"learning_rate": 1.854792729294905e-05, |
|
"loss": 0.6588, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 0.2621667889459526, |
|
"grad_norm": 0.2357640415430069, |
|
"learning_rate": 1.8366074928281607e-05, |
|
"loss": 0.578, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 0.2624928670416565, |
|
"grad_norm": 0.45062610507011414, |
|
"learning_rate": 1.818502825749764e-05, |
|
"loss": 0.6529, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 0.26281894513736037, |
|
"grad_norm": 0.2354976236820221, |
|
"learning_rate": 1.8004789067454764e-05, |
|
"loss": 0.5139, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 0.2631450232330643, |
|
"grad_norm": 0.23322217166423798, |
|
"learning_rate": 1.7825359137040988e-05, |
|
"loss": 0.5918, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 0.2634711013287682, |
|
"grad_norm": 0.2705889046192169, |
|
"learning_rate": 1.7646740237157256e-05, |
|
"loss": 0.5813, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 0.2637971794244722, |
|
"grad_norm": 0.2796480655670166, |
|
"learning_rate": 1.7468934130700044e-05, |
|
"loss": 0.624, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 0.26412325752017607, |
|
"grad_norm": 0.4603550136089325, |
|
"learning_rate": 1.7291942572543807e-05, |
|
"loss": 0.6995, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 0.26444933561588, |
|
"grad_norm": 0.30643367767333984, |
|
"learning_rate": 1.7115767309523812e-05, |
|
"loss": 0.6055, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 0.2647754137115839, |
|
"grad_norm": 0.207069531083107, |
|
"learning_rate": 1.6940410080418723e-05, |
|
"loss": 0.4537, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 0.26510149180728787, |
|
"grad_norm": 0.2721695005893707, |
|
"learning_rate": 1.6765872615933677e-05, |
|
"loss": 0.5403, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 0.26542756990299177, |
|
"grad_norm": 0.20679564774036407, |
|
"learning_rate": 1.6592156638682886e-05, |
|
"loss": 0.506, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 0.26575364799869566, |
|
"grad_norm": 0.33122187852859497, |
|
"learning_rate": 1.6419263863172997e-05, |
|
"loss": 0.7106, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 0.2660797260943996, |
|
"grad_norm": 0.2706076204776764, |
|
"learning_rate": 1.6247195995785837e-05, |
|
"loss": 0.6488, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 0.2664058041901035, |
|
"grad_norm": 0.2812751233577728, |
|
"learning_rate": 1.6075954734761845e-05, |
|
"loss": 0.5977, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 0.26673188228580746, |
|
"grad_norm": 0.2687004804611206, |
|
"learning_rate": 1.5905541770183096e-05, |
|
"loss": 0.5824, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 0.26705796038151136, |
|
"grad_norm": 0.2888942062854767, |
|
"learning_rate": 1.5735958783956794e-05, |
|
"loss": 0.604, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 0.2673840384772153, |
|
"grad_norm": 0.26342645287513733, |
|
"learning_rate": 1.5567207449798515e-05, |
|
"loss": 0.5679, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 0.2677101165729192, |
|
"grad_norm": 0.2661057412624359, |
|
"learning_rate": 1.539928943321579e-05, |
|
"loss": 0.5325, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 0.26803619466862316, |
|
"grad_norm": 0.21790291368961334, |
|
"learning_rate": 1.5232206391491699e-05, |
|
"loss": 0.5227, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 0.26836227276432706, |
|
"grad_norm": 0.2722076177597046, |
|
"learning_rate": 1.5065959973668353e-05, |
|
"loss": 0.5633, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 0.26868835086003096, |
|
"grad_norm": 0.24521185457706451, |
|
"learning_rate": 1.4900551820530828e-05, |
|
"loss": 0.5588, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 0.2690144289557349, |
|
"grad_norm": 0.3043542802333832, |
|
"learning_rate": 1.4735983564590783e-05, |
|
"loss": 0.6736, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 0.2693405070514388, |
|
"grad_norm": 0.2543700933456421, |
|
"learning_rate": 1.4572256830070497e-05, |
|
"loss": 0.4988, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 0.26966658514714276, |
|
"grad_norm": 0.30012184381484985, |
|
"learning_rate": 1.4409373232886702e-05, |
|
"loss": 0.5979, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 0.26999266324284665, |
|
"grad_norm": 0.2988223135471344, |
|
"learning_rate": 1.4247334380634792e-05, |
|
"loss": 0.5374, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 0.2703187413385506, |
|
"grad_norm": 0.29680508375167847, |
|
"learning_rate": 1.4086141872572789e-05, |
|
"loss": 0.6135, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 0.2706448194342545, |
|
"grad_norm": 0.3112308382987976, |
|
"learning_rate": 1.3925797299605647e-05, |
|
"loss": 0.5622, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 0.2709708975299584, |
|
"grad_norm": 0.23930135369300842, |
|
"learning_rate": 1.3766302244269624e-05, |
|
"loss": 0.6004, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 0.27129697562566235, |
|
"grad_norm": 0.28775033354759216, |
|
"learning_rate": 1.3607658280716473e-05, |
|
"loss": 0.562, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 0.27162305372136625, |
|
"grad_norm": 0.32726067304611206, |
|
"learning_rate": 1.3449866974698122e-05, |
|
"loss": 0.5781, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 0.2719491318170702, |
|
"grad_norm": 0.25154900550842285, |
|
"learning_rate": 1.3292929883550998e-05, |
|
"loss": 0.5886, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 0.2722752099127741, |
|
"grad_norm": 0.27102169394493103, |
|
"learning_rate": 1.3136848556180892e-05, |
|
"loss": 0.5878, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 0.27260128800847805, |
|
"grad_norm": 0.26259303092956543, |
|
"learning_rate": 1.2981624533047432e-05, |
|
"loss": 0.6176, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 0.27292736610418195, |
|
"grad_norm": 0.3033280372619629, |
|
"learning_rate": 1.2827259346149122e-05, |
|
"loss": 0.541, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 0.2732534441998859, |
|
"grad_norm": 0.32570040225982666, |
|
"learning_rate": 1.2673754519008008e-05, |
|
"loss": 0.6198, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 0.2735795222955898, |
|
"grad_norm": 0.2976042628288269, |
|
"learning_rate": 1.2521111566654731e-05, |
|
"loss": 0.5765, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 0.2739056003912937, |
|
"grad_norm": 0.29010704159736633, |
|
"learning_rate": 1.2369331995613665e-05, |
|
"loss": 0.5873, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 0.27423167848699764, |
|
"grad_norm": 0.2495676726102829, |
|
"learning_rate": 1.2218417303887842e-05, |
|
"loss": 0.533, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 0.27455775658270154, |
|
"grad_norm": 0.25514352321624756, |
|
"learning_rate": 1.206836898094439e-05, |
|
"loss": 0.4843, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 0.2748838346784055, |
|
"grad_norm": 0.3069244921207428, |
|
"learning_rate": 1.191918850769964e-05, |
|
"loss": 0.5408, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 0.2752099127741094, |
|
"grad_norm": 0.31279370188713074, |
|
"learning_rate": 1.1770877356504683e-05, |
|
"loss": 0.6755, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 0.27553599086981334, |
|
"grad_norm": 0.2172246128320694, |
|
"learning_rate": 1.1623436991130654e-05, |
|
"loss": 0.5187, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 0.27586206896551724, |
|
"grad_norm": 0.3862195611000061, |
|
"learning_rate": 1.1476868866754486e-05, |
|
"loss": 0.6153, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 0.2761881470612212, |
|
"grad_norm": 0.27538466453552246, |
|
"learning_rate": 1.1331174429944347e-05, |
|
"loss": 0.5874, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 0.2765142251569251, |
|
"grad_norm": 0.2538582980632782, |
|
"learning_rate": 1.1186355118645554e-05, |
|
"loss": 0.5447, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 0.276840303252629, |
|
"grad_norm": 0.34064552187919617, |
|
"learning_rate": 1.1042412362166222e-05, |
|
"loss": 0.5341, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 0.27716638134833294, |
|
"grad_norm": 0.239769846200943, |
|
"learning_rate": 1.0899347581163221e-05, |
|
"loss": 0.5149, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 0.27749245944403683, |
|
"grad_norm": 0.3682393431663513, |
|
"learning_rate": 1.0757162187628222e-05, |
|
"loss": 0.6912, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 0.2778185375397408, |
|
"grad_norm": 0.2674311697483063, |
|
"learning_rate": 1.0615857584873623e-05, |
|
"loss": 0.6053, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 0.2781446156354447, |
|
"grad_norm": 0.26375558972358704, |
|
"learning_rate": 1.0475435167518843e-05, |
|
"loss": 0.5715, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 0.27847069373114863, |
|
"grad_norm": 0.2378285825252533, |
|
"learning_rate": 1.0335896321476413e-05, |
|
"loss": 0.5853, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 0.27879677182685253, |
|
"grad_norm": 0.2970817983150482, |
|
"learning_rate": 1.0197242423938446e-05, |
|
"loss": 0.5929, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 0.2791228499225564, |
|
"grad_norm": 0.30819419026374817, |
|
"learning_rate": 1.0059474843362892e-05, |
|
"loss": 0.577, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 0.2794489280182604, |
|
"grad_norm": 0.2650148868560791, |
|
"learning_rate": 9.922594939460194e-06, |
|
"loss": 0.5986, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 0.2797750061139643, |
|
"grad_norm": 0.33730190992355347, |
|
"learning_rate": 9.786604063179728e-06, |
|
"loss": 0.596, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 0.2801010842096682, |
|
"grad_norm": 0.2908703088760376, |
|
"learning_rate": 9.651503556696516e-06, |
|
"loss": 0.6086, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 0.2804271623053721, |
|
"grad_norm": 0.3620714545249939, |
|
"learning_rate": 9.517294753398064e-06, |
|
"loss": 0.6332, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 0.2807532404010761, |
|
"grad_norm": 0.25597527623176575, |
|
"learning_rate": 9.383978977871021e-06, |
|
"loss": 0.5788, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 0.28107931849678, |
|
"grad_norm": 0.23144716024398804, |
|
"learning_rate": 9.251557545888312e-06, |
|
"loss": 0.5323, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 0.2814053965924839, |
|
"grad_norm": 0.2824934422969818, |
|
"learning_rate": 9.120031764395987e-06, |
|
"loss": 0.5951, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 0.2817314746881878, |
|
"grad_norm": 0.2706288695335388, |
|
"learning_rate": 8.989402931500434e-06, |
|
"loss": 0.5499, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 0.2820575527838917, |
|
"grad_norm": 0.2830828130245209, |
|
"learning_rate": 8.85967233645547e-06, |
|
"loss": 0.6568, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 0.28238363087959567, |
|
"grad_norm": 0.2941051423549652, |
|
"learning_rate": 8.730841259649725e-06, |
|
"loss": 0.5747, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 0.28270970897529957, |
|
"grad_norm": 0.20262877643108368, |
|
"learning_rate": 8.602910972593892e-06, |
|
"loss": 0.6067, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 0.2830357870710035, |
|
"grad_norm": 0.25369739532470703, |
|
"learning_rate": 8.475882737908248e-06, |
|
"loss": 0.4986, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 0.2833618651667074, |
|
"grad_norm": 0.27120092511177063, |
|
"learning_rate": 8.34975780931021e-06, |
|
"loss": 0.632, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 0.28368794326241137, |
|
"grad_norm": 0.2529854476451874, |
|
"learning_rate": 8.224537431601886e-06, |
|
"loss": 0.556, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 0.28401402135811527, |
|
"grad_norm": 0.347957968711853, |
|
"learning_rate": 8.100222840657878e-06, |
|
"loss": 0.5932, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 0.2843400994538192, |
|
"grad_norm": 0.34991616010665894, |
|
"learning_rate": 7.976815263412963e-06, |
|
"loss": 0.6714, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 0.2846661775495231, |
|
"grad_norm": 0.27129024267196655, |
|
"learning_rate": 7.854315917850163e-06, |
|
"loss": 0.6417, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 0.284992255645227, |
|
"grad_norm": 0.20815640687942505, |
|
"learning_rate": 7.73272601298851e-06, |
|
"loss": 0.5793, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 0.28531833374093096, |
|
"grad_norm": 0.27495530247688293, |
|
"learning_rate": 7.612046748871327e-06, |
|
"loss": 0.4999, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 0.28564441183663486, |
|
"grad_norm": 0.25796929001808167, |
|
"learning_rate": 7.492279316554207e-06, |
|
"loss": 0.5908, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 0.2859704899323388, |
|
"grad_norm": 0.23965142667293549, |
|
"learning_rate": 7.3734248980933395e-06, |
|
"loss": 0.5887, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 0.2862965680280427, |
|
"grad_norm": 0.23252816498279572, |
|
"learning_rate": 7.255484666533874e-06, |
|
"loss": 0.5622, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 0.28662264612374666, |
|
"grad_norm": 0.31792140007019043, |
|
"learning_rate": 7.138459785898266e-06, |
|
"loss": 0.5411, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 0.28694872421945056, |
|
"grad_norm": 0.27042651176452637, |
|
"learning_rate": 7.022351411174866e-06, |
|
"loss": 0.6432, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 0.28727480231515445, |
|
"grad_norm": 0.2737380862236023, |
|
"learning_rate": 6.907160688306425e-06, |
|
"loss": 0.5333, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 0.2876008804108584, |
|
"grad_norm": 0.49083641171455383, |
|
"learning_rate": 6.7928887541789055e-06, |
|
"loss": 0.7366, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 0.2879269585065623, |
|
"grad_norm": 0.2284003347158432, |
|
"learning_rate": 6.679536736610137e-06, |
|
"loss": 0.4846, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 0.28825303660226625, |
|
"grad_norm": 0.27481234073638916, |
|
"learning_rate": 6.5671057543387985e-06, |
|
"loss": 0.5543, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 0.28857911469797015, |
|
"grad_norm": 0.2470434457063675, |
|
"learning_rate": 6.455596917013273e-06, |
|
"loss": 0.5875, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 0.2889051927936741, |
|
"grad_norm": 0.30581676959991455, |
|
"learning_rate": 6.345011325180772e-06, |
|
"loss": 0.565, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 0.289231270889378, |
|
"grad_norm": 0.22337621450424194, |
|
"learning_rate": 6.235350070276447e-06, |
|
"loss": 0.5746, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 0.28955734898508195, |
|
"grad_norm": 0.2908400595188141, |
|
"learning_rate": 6.126614234612593e-06, |
|
"loss": 0.4911, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 0.28988342708078585, |
|
"grad_norm": 0.26192542910575867, |
|
"learning_rate": 6.018804891368035e-06, |
|
"loss": 0.5577, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 0.29020950517648975, |
|
"grad_norm": 0.21913784742355347, |
|
"learning_rate": 5.911923104577455e-06, |
|
"loss": 0.4661, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 0.2905355832721937, |
|
"grad_norm": 0.24812312424182892, |
|
"learning_rate": 5.805969929120947e-06, |
|
"loss": 0.5104, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 0.2908616613678976, |
|
"grad_norm": 0.285163015127182, |
|
"learning_rate": 5.700946410713548e-06, |
|
"loss": 0.5446, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 0.29118773946360155, |
|
"grad_norm": 0.28700485825538635, |
|
"learning_rate": 5.5968535858950345e-06, |
|
"loss": 0.6212, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 0.29151381755930544, |
|
"grad_norm": 0.311657190322876, |
|
"learning_rate": 5.49369248201953e-06, |
|
"loss": 0.6656, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 0.2918398956550094, |
|
"grad_norm": 0.26617059111595154, |
|
"learning_rate": 5.39146411724547e-06, |
|
"loss": 0.6205, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 0.2921659737507133, |
|
"grad_norm": 0.23219060897827148, |
|
"learning_rate": 5.290169500525577e-06, |
|
"loss": 0.5324, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 0.2924920518464172, |
|
"grad_norm": 0.2956946790218353, |
|
"learning_rate": 5.189809631596798e-06, |
|
"loss": 0.5709, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 0.29281812994212114, |
|
"grad_norm": 0.24315470457077026, |
|
"learning_rate": 5.0903855009705514e-06, |
|
"loss": 0.566, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 0.29314420803782504, |
|
"grad_norm": 0.2868155539035797, |
|
"learning_rate": 4.991898089922819e-06, |
|
"loss": 0.6156, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 0.293470286133529, |
|
"grad_norm": 0.29666030406951904, |
|
"learning_rate": 4.8943483704846475e-06, |
|
"loss": 0.5783, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 0.2937963642292329, |
|
"grad_norm": 0.23827630281448364, |
|
"learning_rate": 4.797737305432337e-06, |
|
"loss": 0.5259, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 0.29412244232493684, |
|
"grad_norm": 0.33247071504592896, |
|
"learning_rate": 4.702065848278126e-06, |
|
"loss": 0.6135, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 0.29444852042064074, |
|
"grad_norm": 0.24986249208450317, |
|
"learning_rate": 4.607334943260655e-06, |
|
"loss": 0.5976, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 0.2947745985163447, |
|
"grad_norm": 0.31358757615089417, |
|
"learning_rate": 4.513545525335705e-06, |
|
"loss": 0.6994, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 0.2951006766120486, |
|
"grad_norm": 0.3073498010635376, |
|
"learning_rate": 4.420698520166988e-06, |
|
"loss": 0.5653, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 0.2954267547077525, |
|
"grad_norm": 0.24950246512889862, |
|
"learning_rate": 4.328794844116946e-06, |
|
"loss": 0.6577, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 0.29575283280345643, |
|
"grad_norm": 0.3969646990299225, |
|
"learning_rate": 4.237835404237778e-06, |
|
"loss": 0.5671, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 0.29607891089916033, |
|
"grad_norm": 0.33874574303627014, |
|
"learning_rate": 4.147821098262405e-06, |
|
"loss": 0.7208, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 0.2964049889948643, |
|
"grad_norm": 0.2954644560813904, |
|
"learning_rate": 4.0587528145957235e-06, |
|
"loss": 0.5627, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 0.2967310670905682, |
|
"grad_norm": 0.26160573959350586, |
|
"learning_rate": 3.970631432305694e-06, |
|
"loss": 0.5891, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 0.29705714518627213, |
|
"grad_norm": 0.27814751863479614, |
|
"learning_rate": 3.883457821114811e-06, |
|
"loss": 0.6133, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 0.29738322328197603, |
|
"grad_norm": 0.4170272946357727, |
|
"learning_rate": 3.797232841391407e-06, |
|
"loss": 0.5701, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 0.29770930137768, |
|
"grad_norm": 0.21299175918102264, |
|
"learning_rate": 3.711957344141237e-06, |
|
"loss": 0.5677, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 0.2980353794733839, |
|
"grad_norm": 0.3248412311077118, |
|
"learning_rate": 3.627632170999029e-06, |
|
"loss": 0.6522, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 0.2983614575690878, |
|
"grad_norm": 0.3027745485305786, |
|
"learning_rate": 3.5442581542201923e-06, |
|
"loss": 0.5193, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 0.2986875356647917, |
|
"grad_norm": 0.2598162591457367, |
|
"learning_rate": 3.461836116672612e-06, |
|
"loss": 0.5704, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 0.2990136137604956, |
|
"grad_norm": 0.2748893201351166, |
|
"learning_rate": 3.380366871828522e-06, |
|
"loss": 0.6077, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 0.2993396918561996, |
|
"grad_norm": 0.2935779094696045, |
|
"learning_rate": 3.2998512237565005e-06, |
|
"loss": 0.6494, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 0.29966576995190347, |
|
"grad_norm": 0.28127992153167725, |
|
"learning_rate": 3.2202899671134546e-06, |
|
"loss": 0.5611, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 0.2999918480476074, |
|
"grad_norm": 0.27138617634773254, |
|
"learning_rate": 3.1416838871368924e-06, |
|
"loss": 0.5692, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 0.3003179261433113, |
|
"grad_norm": 0.33780840039253235, |
|
"learning_rate": 3.064033759637064e-06, |
|
"loss": 0.5973, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 0.3006440042390152, |
|
"grad_norm": 0.2962702214717865, |
|
"learning_rate": 2.9873403509894203e-06, |
|
"loss": 0.5803, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 0.30097008233471917, |
|
"grad_norm": 0.3189234435558319, |
|
"learning_rate": 2.9116044181269007e-06, |
|
"loss": 0.6576, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 0.30129616043042307, |
|
"grad_norm": 0.39585644006729126, |
|
"learning_rate": 2.836826708532603e-06, |
|
"loss": 0.6375, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 0.301622238526127, |
|
"grad_norm": 0.37866878509521484, |
|
"learning_rate": 2.7630079602323442e-06, |
|
"loss": 0.6671, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 0.3019483166218309, |
|
"grad_norm": 0.23039337992668152, |
|
"learning_rate": 2.690148901787337e-06, |
|
"loss": 0.4663, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 0.30227439471753487, |
|
"grad_norm": 0.20355091989040375, |
|
"learning_rate": 2.618250252287113e-06, |
|
"loss": 0.5337, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 0.30260047281323876, |
|
"grad_norm": 0.2754460871219635, |
|
"learning_rate": 2.5473127213422763e-06, |
|
"loss": 0.5808, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 0.3029265509089427, |
|
"grad_norm": 0.2289859503507614, |
|
"learning_rate": 2.4773370090776626e-06, |
|
"loss": 0.5963, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 0.3032526290046466, |
|
"grad_norm": 0.3919331729412079, |
|
"learning_rate": 2.4083238061252567e-06, |
|
"loss": 0.5978, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 0.3035787071003505, |
|
"grad_norm": 0.28480133414268494, |
|
"learning_rate": 2.3402737936175425e-06, |
|
"loss": 0.6194, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 0.30390478519605446, |
|
"grad_norm": 0.24756163358688354, |
|
"learning_rate": 2.273187643180652e-06, |
|
"loss": 0.5772, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 0.30423086329175836, |
|
"grad_norm": 0.3628486096858978, |
|
"learning_rate": 2.2070660169278166e-06, |
|
"loss": 0.5918, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 0.3045569413874623, |
|
"grad_norm": 0.38187867403030396, |
|
"learning_rate": 2.141909567452793e-06, |
|
"loss": 0.5799, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 0.3048830194831662, |
|
"grad_norm": 0.32940131425857544, |
|
"learning_rate": 2.0777189378234143e-06, |
|
"loss": 0.5906, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 0.30520909757887016, |
|
"grad_norm": 0.2577744126319885, |
|
"learning_rate": 2.014494761575314e-06, |
|
"loss": 0.5325, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 0.30553517567457406, |
|
"grad_norm": 0.21027593314647675, |
|
"learning_rate": 1.9522376627055583e-06, |
|
"loss": 0.5189, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 0.305861253770278, |
|
"grad_norm": 0.31960952281951904, |
|
"learning_rate": 1.8909482556666024e-06, |
|
"loss": 0.6651, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 0.3061873318659819, |
|
"grad_norm": 0.28659772872924805, |
|
"learning_rate": 1.8306271453601199e-06, |
|
"loss": 0.6643, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 0.3065134099616858, |
|
"grad_norm": 0.3308188021183014, |
|
"learning_rate": 1.771274927131139e-06, |
|
"loss": 0.6336, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 0.30683948805738975, |
|
"grad_norm": 0.23489847779273987, |
|
"learning_rate": 1.712892186762083e-06, |
|
"loss": 0.5784, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 0.30716556615309365, |
|
"grad_norm": 0.2495911717414856, |
|
"learning_rate": 1.6554795004670388e-06, |
|
"loss": 0.4387, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 0.3074916442487976, |
|
"grad_norm": 0.26292648911476135, |
|
"learning_rate": 1.5990374348860305e-06, |
|
"loss": 0.5927, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 0.3078177223445015, |
|
"grad_norm": 0.31785205006599426, |
|
"learning_rate": 1.543566547079467e-06, |
|
"loss": 0.6102, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 0.30814380044020545, |
|
"grad_norm": 0.21520088613033295, |
|
"learning_rate": 1.4890673845226133e-06, |
|
"loss": 0.4613, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 0.30846987853590935, |
|
"grad_norm": 0.23349148035049438, |
|
"learning_rate": 1.4355404851001952e-06, |
|
"loss": 0.4913, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 0.30879595663161324, |
|
"grad_norm": 0.28058943152427673, |
|
"learning_rate": 1.3829863771011253e-06, |
|
"loss": 0.5491, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 0.3091220347273172, |
|
"grad_norm": 0.29834824800491333, |
|
"learning_rate": 1.3314055792131964e-06, |
|
"loss": 0.6751, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 0.3094481128230211, |
|
"grad_norm": 0.29814356565475464, |
|
"learning_rate": 1.280798600518085e-06, |
|
"loss": 0.5418, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 0.30977419091872505, |
|
"grad_norm": 0.24206741154193878, |
|
"learning_rate": 1.231165940486234e-06, |
|
"loss": 0.5177, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 0.31010026901442894, |
|
"grad_norm": 0.24119798839092255, |
|
"learning_rate": 1.1825080889719563e-06, |
|
"loss": 0.6286, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 0.3104263471101329, |
|
"grad_norm": 0.45291998982429504, |
|
"learning_rate": 1.134825526208605e-06, |
|
"loss": 0.6756, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 0.3107524252058368, |
|
"grad_norm": 0.36271461844444275, |
|
"learning_rate": 1.0881187228038215e-06, |
|
"loss": 0.6432, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 0.31107850330154074, |
|
"grad_norm": 0.3073123097419739, |
|
"learning_rate": 1.0423881397349068e-06, |
|
"loss": 0.6344, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 0.31140458139724464, |
|
"grad_norm": 0.29428645968437195, |
|
"learning_rate": 9.976342283442463e-07, |
|
"loss": 0.6755, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 0.31173065949294854, |
|
"grad_norm": 0.2660908102989197, |
|
"learning_rate": 9.538574303348813e-07, |
|
"loss": 0.5873, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 0.3120567375886525, |
|
"grad_norm": 0.27123311161994934, |
|
"learning_rate": 9.110581777661331e-07, |
|
"loss": 0.6825, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 0.3123828156843564, |
|
"grad_norm": 0.24501000344753265, |
|
"learning_rate": 8.692368930493521e-07, |
|
"loss": 0.5176, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 0.31270889378006034, |
|
"grad_norm": 0.19210560619831085, |
|
"learning_rate": 8.283939889437209e-07, |
|
"loss": 0.4684, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 0.31303497187576423, |
|
"grad_norm": 0.2760256230831146, |
|
"learning_rate": 7.885298685522235e-07, |
|
"loss": 0.5789, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 0.3133610499714682, |
|
"grad_norm": 0.3339109420776367, |
|
"learning_rate": 7.496449253176274e-07, |
|
"loss": 0.6615, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 0.3136871280671721, |
|
"grad_norm": 0.2638777196407318, |
|
"learning_rate": 7.117395430186414e-07, |
|
"loss": 0.6077, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 0.31401320616287604, |
|
"grad_norm": 0.34010159969329834, |
|
"learning_rate": 6.748140957660631e-07, |
|
"loss": 0.7114, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 0.31433928425857993, |
|
"grad_norm": 0.26364463567733765, |
|
"learning_rate": 6.388689479991605e-07, |
|
"loss": 0.5443, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 0.31466536235428383, |
|
"grad_norm": 0.24462150037288666, |
|
"learning_rate": 6.039044544820404e-07, |
|
"loss": 0.5761, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 0.3149914404499878, |
|
"grad_norm": 0.28215262293815613, |
|
"learning_rate": 5.699209603001076e-07, |
|
"loss": 0.6064, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 0.3153175185456917, |
|
"grad_norm": 0.23975957930088043, |
|
"learning_rate": 5.369188008567672e-07, |
|
"loss": 0.567, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 0.31564359664139563, |
|
"grad_norm": 0.251577764749527, |
|
"learning_rate": 5.048983018699827e-07, |
|
"loss": 0.6322, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 0.3159696747370995, |
|
"grad_norm": 0.34745168685913086, |
|
"learning_rate": 4.738597793691679e-07, |
|
"loss": 0.6558, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 0.3162957528328035, |
|
"grad_norm": 0.2653641402721405, |
|
"learning_rate": 4.438035396920004e-07, |
|
"loss": 0.5111, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 0.3166218309285074, |
|
"grad_norm": 0.2363329529762268, |
|
"learning_rate": 4.1472987948143473e-07, |
|
"loss": 0.5657, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 0.31694790902421127, |
|
"grad_norm": 0.37044990062713623, |
|
"learning_rate": 3.866390856827495e-07, |
|
"loss": 0.7043, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 0.3172739871199152, |
|
"grad_norm": 0.2447771430015564, |
|
"learning_rate": 3.595314355407609e-07, |
|
"loss": 0.5734, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 0.3176000652156191, |
|
"grad_norm": 0.36151984333992004, |
|
"learning_rate": 3.3340719659701313e-07, |
|
"loss": 0.6627, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 0.3179261433113231, |
|
"grad_norm": 0.256767213344574, |
|
"learning_rate": 3.0826662668720364e-07, |
|
"loss": 0.5589, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 0.31825222140702697, |
|
"grad_norm": 0.2616105079650879, |
|
"learning_rate": 2.841099739386066e-07, |
|
"loss": 0.5758, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 0.3185782995027309, |
|
"grad_norm": 0.20511144399642944, |
|
"learning_rate": 2.609374767676309e-07, |
|
"loss": 0.5115, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 0.3189043775984348, |
|
"grad_norm": 0.2790910601615906, |
|
"learning_rate": 2.387493638774774e-07, |
|
"loss": 0.6598, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 0.31923045569413877, |
|
"grad_norm": 0.2062654048204422, |
|
"learning_rate": 2.175458542558517e-07, |
|
"loss": 0.5254, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 0.31955653378984267, |
|
"grad_norm": 0.3161718249320984, |
|
"learning_rate": 1.973271571728441e-07, |
|
"loss": 0.6436, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 0.31988261188554656, |
|
"grad_norm": 0.24626140296459198, |
|
"learning_rate": 1.7809347217881966e-07, |
|
"loss": 0.5604, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 0.3202086899812505, |
|
"grad_norm": 0.2532305419445038, |
|
"learning_rate": 1.598449891024978e-07, |
|
"loss": 0.633, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 0.3205347680769544, |
|
"grad_norm": 0.2982385456562042, |
|
"learning_rate": 1.425818880490315e-07, |
|
"loss": 0.5065, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 0.32086084617265836, |
|
"grad_norm": 0.3654409646987915, |
|
"learning_rate": 1.2630433939825327e-07, |
|
"loss": 0.699, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 0.32118692426836226, |
|
"grad_norm": 0.3042480945587158, |
|
"learning_rate": 1.1101250380300965e-07, |
|
"loss": 0.6072, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 0.3215130023640662, |
|
"grad_norm": 0.344954252243042, |
|
"learning_rate": 9.670653218752934e-08, |
|
"loss": 0.6552, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 0.3218390804597701, |
|
"grad_norm": 0.23715342581272125, |
|
"learning_rate": 8.33865657459909e-08, |
|
"loss": 0.5171, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 0.32216515855547406, |
|
"grad_norm": 0.22725951671600342, |
|
"learning_rate": 7.105273594107953e-08, |
|
"loss": 0.5189, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 0.32249123665117796, |
|
"grad_norm": 0.3205645680427551, |
|
"learning_rate": 5.970516450271025e-08, |
|
"loss": 0.6342, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 0.32281731474688186, |
|
"grad_norm": 0.22355954349040985, |
|
"learning_rate": 4.934396342684e-08, |
|
"loss": 0.56, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 0.3231433928425858, |
|
"grad_norm": 0.30192822217941284, |
|
"learning_rate": 3.996923497434635e-08, |
|
"loss": 0.6033, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 0.3234694709382897, |
|
"grad_norm": 0.24073319137096405, |
|
"learning_rate": 3.1581071670006015e-08, |
|
"loss": 0.5943, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 0.32379554903399366, |
|
"grad_norm": 0.24182894825935364, |
|
"learning_rate": 2.417955630159563e-08, |
|
"loss": 0.5463, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 0.32412162712969755, |
|
"grad_norm": 0.28949111700057983, |
|
"learning_rate": 1.7764761919103477e-08, |
|
"loss": 0.5812, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 0.3244477052254015, |
|
"grad_norm": 0.2450643628835678, |
|
"learning_rate": 1.2336751833941229e-08, |
|
"loss": 0.5409, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 0.3247737833211054, |
|
"grad_norm": 0.25702303647994995, |
|
"learning_rate": 7.895579618388827e-09, |
|
"loss": 0.5836, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 0.3250998614168093, |
|
"grad_norm": 0.29124775528907776, |
|
"learning_rate": 4.4412891050171765e-09, |
|
"loss": 0.5742, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 0.32542593951251325, |
|
"grad_norm": 0.25140178203582764, |
|
"learning_rate": 1.973914386288467e-09, |
|
"loss": 0.5419, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 0.32575201760821715, |
|
"grad_norm": 0.2724677622318268, |
|
"learning_rate": 4.934798141786879e-10, |
|
"loss": 0.5966, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 0.3260780957039211, |
|
"grad_norm": 0.24416160583496094, |
|
"learning_rate": 0.0, |
|
"loss": 0.6293, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.3260780957039211, |
|
"eval_loss": 0.68351149559021, |
|
"eval_runtime": 58.9294, |
|
"eval_samples_per_second": 43.832, |
|
"eval_steps_per_second": 10.962, |
|
"step": 1000 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1000, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 500, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.53166934979117e+17, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |