Llama-3.2-1B-ultrachat200k / trainer_state.json
commit e4b2f2a: upload
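The JSON below is the raw Hugging Face Trainer state for this run, with one `log_history` entry every 10 optimizer steps recording `epoch`, `step`, `loss`, `grad_norm`, and `learning_rate`. A minimal sketch for inspecting it (not part of the original file, and assuming the file is saved locally as `trainer_state.json` with matplotlib installed):

```python
# Load trainer_state.json and plot the logged training loss and LR schedule.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (eval entries, if any, lack it).
history = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in history]
losses = [e["loss"] for e in history]
lrs = [e["learning_rate"] for e in history]

fig, (ax_loss, ax_lr) = plt.subplots(1, 2, figsize=(10, 4))
ax_loss.plot(steps, losses)
ax_loss.set(xlabel="step", ylabel="train loss")
ax_lr.plot(steps, lrs)
ax_lr.set(xlabel="step", ylabel="learning rate")
fig.tight_layout()
plt.show()
```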
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.9998749194192412,
"eval_steps": 500,
"global_step": 6495,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.0015394533016462528,
"grad_norm": 7.90625,
"learning_rate": 3.0769230769230774e-07,
"loss": 1.7262,
"step": 10
},
{
"epoch": 0.0030789066032925055,
"grad_norm": 8.375,
"learning_rate": 6.153846153846155e-07,
"loss": 1.7972,
"step": 20
},
{
"epoch": 0.004618359904938758,
"grad_norm": 6.8125,
"learning_rate": 9.230769230769232e-07,
"loss": 1.768,
"step": 30
},
{
"epoch": 0.006157813206585011,
"grad_norm": 6.1875,
"learning_rate": 1.230769230769231e-06,
"loss": 1.7936,
"step": 40
},
{
"epoch": 0.007697266508231265,
"grad_norm": 5.8125,
"learning_rate": 1.5384615384615387e-06,
"loss": 1.7506,
"step": 50
},
{
"epoch": 0.009236719809877517,
"grad_norm": 5.25,
"learning_rate": 1.8461538461538465e-06,
"loss": 1.7089,
"step": 60
},
{
"epoch": 0.01077617311152377,
"grad_norm": 4.71875,
"learning_rate": 2.153846153846154e-06,
"loss": 1.6747,
"step": 70
},
{
"epoch": 0.012315626413170022,
"grad_norm": 3.828125,
"learning_rate": 2.461538461538462e-06,
"loss": 1.6836,
"step": 80
},
{
"epoch": 0.013855079714816277,
"grad_norm": 5.78125,
"learning_rate": 2.7692307692307697e-06,
"loss": 1.6626,
"step": 90
},
{
"epoch": 0.01539453301646253,
"grad_norm": 3.578125,
"learning_rate": 3.0769230769230774e-06,
"loss": 1.6116,
"step": 100
},
{
"epoch": 0.01693398631810878,
"grad_norm": 2.953125,
"learning_rate": 3.384615384615385e-06,
"loss": 1.5566,
"step": 110
},
{
"epoch": 0.018473439619755033,
"grad_norm": 2.546875,
"learning_rate": 3.692307692307693e-06,
"loss": 1.53,
"step": 120
},
{
"epoch": 0.020012892921401286,
"grad_norm": 2.328125,
"learning_rate": 4.000000000000001e-06,
"loss": 1.5322,
"step": 130
},
{
"epoch": 0.02155234622304754,
"grad_norm": 2.5625,
"learning_rate": 4.307692307692308e-06,
"loss": 1.5002,
"step": 140
},
{
"epoch": 0.02309179952469379,
"grad_norm": 2.328125,
"learning_rate": 4.615384615384616e-06,
"loss": 1.4738,
"step": 150
},
{
"epoch": 0.024631252826340044,
"grad_norm": 2.3125,
"learning_rate": 4.923076923076924e-06,
"loss": 1.4935,
"step": 160
},
{
"epoch": 0.0261707061279863,
"grad_norm": 2.328125,
"learning_rate": 5.230769230769232e-06,
"loss": 1.446,
"step": 170
},
{
"epoch": 0.027710159429632553,
"grad_norm": 2.296875,
"learning_rate": 5.538461538461539e-06,
"loss": 1.4611,
"step": 180
},
{
"epoch": 0.029249612731278806,
"grad_norm": 2.125,
"learning_rate": 5.846153846153847e-06,
"loss": 1.4191,
"step": 190
},
{
"epoch": 0.03078906603292506,
"grad_norm": 2.140625,
"learning_rate": 6.153846153846155e-06,
"loss": 1.4419,
"step": 200
},
{
"epoch": 0.03232851933457131,
"grad_norm": 2.0625,
"learning_rate": 6.461538461538463e-06,
"loss": 1.4503,
"step": 210
},
{
"epoch": 0.03386797263621756,
"grad_norm": 2.046875,
"learning_rate": 6.76923076923077e-06,
"loss": 1.4216,
"step": 220
},
{
"epoch": 0.035407425937863814,
"grad_norm": 2.53125,
"learning_rate": 7.076923076923078e-06,
"loss": 1.4101,
"step": 230
},
{
"epoch": 0.036946879239510066,
"grad_norm": 2.109375,
"learning_rate": 7.384615384615386e-06,
"loss": 1.4217,
"step": 240
},
{
"epoch": 0.03848633254115632,
"grad_norm": 2.375,
"learning_rate": 7.692307692307694e-06,
"loss": 1.4087,
"step": 250
},
{
"epoch": 0.04002578584280257,
"grad_norm": 2.21875,
"learning_rate": 8.000000000000001e-06,
"loss": 1.3453,
"step": 260
},
{
"epoch": 0.041565239144448825,
"grad_norm": 2.0625,
"learning_rate": 8.307692307692309e-06,
"loss": 1.358,
"step": 270
},
{
"epoch": 0.04310469244609508,
"grad_norm": 2.1875,
"learning_rate": 8.615384615384617e-06,
"loss": 1.377,
"step": 280
},
{
"epoch": 0.04464414574774133,
"grad_norm": 2.109375,
"learning_rate": 8.923076923076925e-06,
"loss": 1.38,
"step": 290
},
{
"epoch": 0.04618359904938758,
"grad_norm": 2.140625,
"learning_rate": 9.230769230769232e-06,
"loss": 1.3745,
"step": 300
},
{
"epoch": 0.047723052351033836,
"grad_norm": 2.0,
"learning_rate": 9.53846153846154e-06,
"loss": 1.3737,
"step": 310
},
{
"epoch": 0.04926250565268009,
"grad_norm": 2.15625,
"learning_rate": 9.846153846153848e-06,
"loss": 1.3393,
"step": 320
},
{
"epoch": 0.05080195895432634,
"grad_norm": 1.9765625,
"learning_rate": 1.0153846153846154e-05,
"loss": 1.3267,
"step": 330
},
{
"epoch": 0.0523414122559726,
"grad_norm": 1.8828125,
"learning_rate": 1.0461538461538463e-05,
"loss": 1.3294,
"step": 340
},
{
"epoch": 0.053880865557618854,
"grad_norm": 2.09375,
"learning_rate": 1.076923076923077e-05,
"loss": 1.3504,
"step": 350
},
{
"epoch": 0.05542031885926511,
"grad_norm": 2.140625,
"learning_rate": 1.1076923076923079e-05,
"loss": 1.3404,
"step": 360
},
{
"epoch": 0.05695977216091136,
"grad_norm": 2.0625,
"learning_rate": 1.1384615384615385e-05,
"loss": 1.3509,
"step": 370
},
{
"epoch": 0.05849922546255761,
"grad_norm": 2.0625,
"learning_rate": 1.1692307692307694e-05,
"loss": 1.3597,
"step": 380
},
{
"epoch": 0.060038678764203865,
"grad_norm": 1.8671875,
"learning_rate": 1.2e-05,
"loss": 1.2981,
"step": 390
},
{
"epoch": 0.06157813206585012,
"grad_norm": 1.9921875,
"learning_rate": 1.230769230769231e-05,
"loss": 1.274,
"step": 400
},
{
"epoch": 0.06311758536749637,
"grad_norm": 2.234375,
"learning_rate": 1.2615384615384616e-05,
"loss": 1.275,
"step": 410
},
{
"epoch": 0.06465703866914262,
"grad_norm": 2.171875,
"learning_rate": 1.2923076923076925e-05,
"loss": 1.298,
"step": 420
},
{
"epoch": 0.06619649197078888,
"grad_norm": 2.609375,
"learning_rate": 1.3230769230769231e-05,
"loss": 1.3105,
"step": 430
},
{
"epoch": 0.06773594527243512,
"grad_norm": 1.8515625,
"learning_rate": 1.353846153846154e-05,
"loss": 1.3525,
"step": 440
},
{
"epoch": 0.06927539857408138,
"grad_norm": 1.953125,
"learning_rate": 1.3846153846153847e-05,
"loss": 1.3378,
"step": 450
},
{
"epoch": 0.07081485187572763,
"grad_norm": 2.0,
"learning_rate": 1.4153846153846156e-05,
"loss": 1.2971,
"step": 460
},
{
"epoch": 0.07235430517737389,
"grad_norm": 2.21875,
"learning_rate": 1.4461538461538462e-05,
"loss": 1.3607,
"step": 470
},
{
"epoch": 0.07389375847902013,
"grad_norm": 2.484375,
"learning_rate": 1.4769230769230772e-05,
"loss": 1.3031,
"step": 480
},
{
"epoch": 0.07543321178066639,
"grad_norm": 1.8125,
"learning_rate": 1.5076923076923078e-05,
"loss": 1.2899,
"step": 490
},
{
"epoch": 0.07697266508231264,
"grad_norm": 1.90625,
"learning_rate": 1.5384615384615387e-05,
"loss": 1.2925,
"step": 500
},
{
"epoch": 0.0785121183839589,
"grad_norm": 2.015625,
"learning_rate": 1.5692307692307693e-05,
"loss": 1.2697,
"step": 510
},
{
"epoch": 0.08005157168560514,
"grad_norm": 1.84375,
"learning_rate": 1.6000000000000003e-05,
"loss": 1.2969,
"step": 520
},
{
"epoch": 0.0815910249872514,
"grad_norm": 1.9140625,
"learning_rate": 1.630769230769231e-05,
"loss": 1.2807,
"step": 530
},
{
"epoch": 0.08313047828889765,
"grad_norm": 1.765625,
"learning_rate": 1.6615384615384618e-05,
"loss": 1.2498,
"step": 540
},
{
"epoch": 0.08466993159054391,
"grad_norm": 1.640625,
"learning_rate": 1.6923076923076924e-05,
"loss": 1.285,
"step": 550
},
{
"epoch": 0.08620938489219016,
"grad_norm": 1.890625,
"learning_rate": 1.7230769230769234e-05,
"loss": 1.268,
"step": 560
},
{
"epoch": 0.08774883819383641,
"grad_norm": 1.84375,
"learning_rate": 1.753846153846154e-05,
"loss": 1.3036,
"step": 570
},
{
"epoch": 0.08928829149548266,
"grad_norm": 1.7734375,
"learning_rate": 1.784615384615385e-05,
"loss": 1.2921,
"step": 580
},
{
"epoch": 0.09082774479712892,
"grad_norm": 1.9453125,
"learning_rate": 1.8153846153846155e-05,
"loss": 1.2433,
"step": 590
},
{
"epoch": 0.09236719809877517,
"grad_norm": 1.9296875,
"learning_rate": 1.8461538461538465e-05,
"loss": 1.2554,
"step": 600
},
{
"epoch": 0.09390665140042143,
"grad_norm": 1.9296875,
"learning_rate": 1.876923076923077e-05,
"loss": 1.248,
"step": 610
},
{
"epoch": 0.09544610470206767,
"grad_norm": 2.03125,
"learning_rate": 1.907692307692308e-05,
"loss": 1.2987,
"step": 620
},
{
"epoch": 0.09698555800371393,
"grad_norm": 1.890625,
"learning_rate": 1.9384615384615386e-05,
"loss": 1.2726,
"step": 630
},
{
"epoch": 0.09852501130536018,
"grad_norm": 1.9921875,
"learning_rate": 1.9692307692307696e-05,
"loss": 1.2511,
"step": 640
},
{
"epoch": 0.10006446460700644,
"grad_norm": 1.84375,
"learning_rate": 2e-05,
"loss": 1.2196,
"step": 650
},
{
"epoch": 0.10160391790865268,
"grad_norm": 1.7265625,
"learning_rate": 1.9999855555944584e-05,
"loss": 1.264,
"step": 660
},
{
"epoch": 0.10314337121029894,
"grad_norm": 1.859375,
"learning_rate": 1.999942222795114e-05,
"loss": 1.2615,
"step": 670
},
{
"epoch": 0.1046828245119452,
"grad_norm": 1.8359375,
"learning_rate": 1.9998700028538e-05,
"loss": 1.291,
"step": 680
},
{
"epoch": 0.10622227781359145,
"grad_norm": 1.7890625,
"learning_rate": 1.9997688978568653e-05,
"loss": 1.2532,
"step": 690
},
{
"epoch": 0.10776173111523771,
"grad_norm": 1.9296875,
"learning_rate": 1.9996389107251126e-05,
"loss": 1.264,
"step": 700
},
{
"epoch": 0.10930118441688395,
"grad_norm": 2.375,
"learning_rate": 1.9994800452137158e-05,
"loss": 1.2498,
"step": 710
},
{
"epoch": 0.11084063771853021,
"grad_norm": 1.8984375,
"learning_rate": 1.9992923059121107e-05,
"loss": 1.2506,
"step": 720
},
{
"epoch": 0.11238009102017646,
"grad_norm": 1.859375,
"learning_rate": 1.999075698243862e-05,
"loss": 1.2375,
"step": 730
},
{
"epoch": 0.11391954432182272,
"grad_norm": 1.609375,
"learning_rate": 1.998830228466508e-05,
"loss": 1.2478,
"step": 740
},
{
"epoch": 0.11545899762346896,
"grad_norm": 1.7734375,
"learning_rate": 1.998555903671379e-05,
"loss": 1.2518,
"step": 750
},
{
"epoch": 0.11699845092511522,
"grad_norm": 1.8984375,
"learning_rate": 1.9982527317833922e-05,
"loss": 1.2227,
"step": 760
},
{
"epoch": 0.11853790422676147,
"grad_norm": 1.8359375,
"learning_rate": 1.9979207215608224e-05,
"loss": 1.2787,
"step": 770
},
{
"epoch": 0.12007735752840773,
"grad_norm": 1.765625,
"learning_rate": 1.9975598825950507e-05,
"loss": 1.2991,
"step": 780
},
{
"epoch": 0.12161681083005398,
"grad_norm": 1.8515625,
"learning_rate": 1.9971702253102856e-05,
"loss": 1.2403,
"step": 790
},
{
"epoch": 0.12315626413170024,
"grad_norm": 1.734375,
"learning_rate": 1.996751760963263e-05,
"loss": 1.2529,
"step": 800
},
{
"epoch": 0.12469571743334648,
"grad_norm": 1.7734375,
"learning_rate": 1.9963045016429202e-05,
"loss": 1.2516,
"step": 810
},
{
"epoch": 0.12623517073499274,
"grad_norm": 1.8125,
"learning_rate": 1.995828460270047e-05,
"loss": 1.2235,
"step": 820
},
{
"epoch": 0.127774624036639,
"grad_norm": 1.7265625,
"learning_rate": 1.995323650596913e-05,
"loss": 1.2124,
"step": 830
},
{
"epoch": 0.12931407733828523,
"grad_norm": 1.84375,
"learning_rate": 1.9947900872068696e-05,
"loss": 1.2891,
"step": 840
},
{
"epoch": 0.1308535306399315,
"grad_norm": 1.7265625,
"learning_rate": 1.9942277855139284e-05,
"loss": 1.2427,
"step": 850
},
{
"epoch": 0.13239298394157775,
"grad_norm": 1.796875,
"learning_rate": 1.993636761762317e-05,
"loss": 1.2534,
"step": 860
},
{
"epoch": 0.133932437243224,
"grad_norm": 1.7734375,
"learning_rate": 1.9930170330260093e-05,
"loss": 1.2416,
"step": 870
},
{
"epoch": 0.13547189054487024,
"grad_norm": 1.9375,
"learning_rate": 1.9923686172082308e-05,
"loss": 1.2763,
"step": 880
},
{
"epoch": 0.1370113438465165,
"grad_norm": 1.9453125,
"learning_rate": 1.991691533040944e-05,
"loss": 1.2747,
"step": 890
},
{
"epoch": 0.13855079714816276,
"grad_norm": 1.6875,
"learning_rate": 1.9909858000843058e-05,
"loss": 1.2531,
"step": 900
},
{
"epoch": 0.14009025044980902,
"grad_norm": 1.78125,
"learning_rate": 1.9902514387261017e-05,
"loss": 1.3041,
"step": 910
},
{
"epoch": 0.14162970375145525,
"grad_norm": 1.7890625,
"learning_rate": 1.989488470181159e-05,
"loss": 1.2637,
"step": 920
},
{
"epoch": 0.14316915705310151,
"grad_norm": 1.7578125,
"learning_rate": 1.9886969164907306e-05,
"loss": 1.2539,
"step": 930
},
{
"epoch": 0.14470861035474777,
"grad_norm": 1.890625,
"learning_rate": 1.987876800521863e-05,
"loss": 1.2786,
"step": 940
},
{
"epoch": 0.14624806365639403,
"grad_norm": 1.75,
"learning_rate": 1.9870281459667304e-05,
"loss": 1.2926,
"step": 950
},
{
"epoch": 0.14778751695804027,
"grad_norm": 1.8203125,
"learning_rate": 1.9861509773419544e-05,
"loss": 1.2558,
"step": 960
},
{
"epoch": 0.14932697025968653,
"grad_norm": 2.046875,
"learning_rate": 1.9852453199878937e-05,
"loss": 1.2615,
"step": 970
},
{
"epoch": 0.15086642356133279,
"grad_norm": 1.6953125,
"learning_rate": 1.9843112000679127e-05,
"loss": 1.2784,
"step": 980
},
{
"epoch": 0.15240587686297905,
"grad_norm": 1.8828125,
"learning_rate": 1.9833486445676245e-05,
"loss": 1.2658,
"step": 990
},
{
"epoch": 0.15394533016462528,
"grad_norm": 1.84375,
"learning_rate": 1.982357681294114e-05,
"loss": 1.2886,
"step": 1000
},
{
"epoch": 0.15548478346627154,
"grad_norm": 1.765625,
"learning_rate": 1.9813383388751313e-05,
"loss": 1.2468,
"step": 1010
},
{
"epoch": 0.1570242367679178,
"grad_norm": 1.7734375,
"learning_rate": 1.9802906467582674e-05,
"loss": 1.2381,
"step": 1020
},
{
"epoch": 0.15856369006956406,
"grad_norm": 1.7265625,
"learning_rate": 1.9792146352101017e-05,
"loss": 1.2694,
"step": 1030
},
{
"epoch": 0.1601031433712103,
"grad_norm": 2.09375,
"learning_rate": 1.9781103353153292e-05,
"loss": 1.2631,
"step": 1040
},
{
"epoch": 0.16164259667285655,
"grad_norm": 1.8203125,
"learning_rate": 1.97697777897586e-05,
"loss": 1.2477,
"step": 1050
},
{
"epoch": 0.1631820499745028,
"grad_norm": 1.890625,
"learning_rate": 1.975816998909901e-05,
"loss": 1.2353,
"step": 1060
},
{
"epoch": 0.16472150327614907,
"grad_norm": 1.90625,
"learning_rate": 1.974628028651007e-05,
"loss": 1.2681,
"step": 1070
},
{
"epoch": 0.1662609565777953,
"grad_norm": 2.078125,
"learning_rate": 1.9734109025471168e-05,
"loss": 1.3015,
"step": 1080
},
{
"epoch": 0.16780040987944156,
"grad_norm": 1.671875,
"learning_rate": 1.9721656557595556e-05,
"loss": 1.25,
"step": 1090
},
{
"epoch": 0.16933986318108782,
"grad_norm": 1.8125,
"learning_rate": 1.9708923242620228e-05,
"loss": 1.2688,
"step": 1100
},
{
"epoch": 0.17087931648273408,
"grad_norm": 1.953125,
"learning_rate": 1.9695909448395515e-05,
"loss": 1.2725,
"step": 1110
},
{
"epoch": 0.1724187697843803,
"grad_norm": 1.671875,
"learning_rate": 1.9682615550874458e-05,
"loss": 1.2741,
"step": 1120
},
{
"epoch": 0.17395822308602657,
"grad_norm": 1.9296875,
"learning_rate": 1.966904193410195e-05,
"loss": 1.272,
"step": 1130
},
{
"epoch": 0.17549767638767283,
"grad_norm": 1.9140625,
"learning_rate": 1.9655188990203648e-05,
"loss": 1.2651,
"step": 1140
},
{
"epoch": 0.1770371296893191,
"grad_norm": 1.8203125,
"learning_rate": 1.9641057119374623e-05,
"loss": 1.2183,
"step": 1150
},
{
"epoch": 0.17857658299096532,
"grad_norm": 1.7265625,
"learning_rate": 1.962664672986783e-05,
"loss": 1.2264,
"step": 1160
},
{
"epoch": 0.18011603629261158,
"grad_norm": 1.78125,
"learning_rate": 1.9611958237982285e-05,
"loss": 1.2697,
"step": 1170
},
{
"epoch": 0.18165548959425784,
"grad_norm": 1.875,
"learning_rate": 1.9596992068051054e-05,
"loss": 1.2831,
"step": 1180
},
{
"epoch": 0.1831949428959041,
"grad_norm": 1.71875,
"learning_rate": 1.9581748652428995e-05,
"loss": 1.2868,
"step": 1190
},
{
"epoch": 0.18473439619755033,
"grad_norm": 1.796875,
"learning_rate": 1.9566228431480256e-05,
"loss": 1.2707,
"step": 1200
},
{
"epoch": 0.1862738494991966,
"grad_norm": 1.859375,
"learning_rate": 1.9550431853565577e-05,
"loss": 1.2572,
"step": 1210
},
{
"epoch": 0.18781330280084285,
"grad_norm": 2.0625,
"learning_rate": 1.9534359375029308e-05,
"loss": 1.2409,
"step": 1220
},
{
"epoch": 0.1893527561024891,
"grad_norm": 2.0,
"learning_rate": 1.9518011460186246e-05,
"loss": 1.2489,
"step": 1230
},
{
"epoch": 0.19089220940413534,
"grad_norm": 2.1875,
"learning_rate": 1.9501388581308215e-05,
"loss": 1.2392,
"step": 1240
},
{
"epoch": 0.1924316627057816,
"grad_norm": 1.859375,
"learning_rate": 1.9484491218610423e-05,
"loss": 1.2299,
"step": 1250
},
{
"epoch": 0.19397111600742786,
"grad_norm": 1.8515625,
"learning_rate": 1.946731986023759e-05,
"loss": 1.2723,
"step": 1260
},
{
"epoch": 0.19551056930907412,
"grad_norm": 1.7265625,
"learning_rate": 1.9449875002249835e-05,
"loss": 1.264,
"step": 1270
},
{
"epoch": 0.19705002261072035,
"grad_norm": 1.796875,
"learning_rate": 1.943215714860838e-05,
"loss": 1.2613,
"step": 1280
},
{
"epoch": 0.19858947591236661,
"grad_norm": 1.8046875,
"learning_rate": 1.941416681116094e-05,
"loss": 1.2534,
"step": 1290
},
{
"epoch": 0.20012892921401287,
"grad_norm": 1.9140625,
"learning_rate": 1.939590450962698e-05,
"loss": 1.2561,
"step": 1300
},
{
"epoch": 0.20166838251565913,
"grad_norm": 1.703125,
"learning_rate": 1.9377370771582677e-05,
"loss": 1.2618,
"step": 1310
},
{
"epoch": 0.20320783581730537,
"grad_norm": 1.6953125,
"learning_rate": 1.935856613244569e-05,
"loss": 1.2488,
"step": 1320
},
{
"epoch": 0.20474728911895163,
"grad_norm": 1.7578125,
"learning_rate": 1.9339491135459683e-05,
"loss": 1.2258,
"step": 1330
},
{
"epoch": 0.20628674242059789,
"grad_norm": 1.8125,
"learning_rate": 1.932014633167864e-05,
"loss": 1.2749,
"step": 1340
},
{
"epoch": 0.20782619572224414,
"grad_norm": 1.7734375,
"learning_rate": 1.930053227995095e-05,
"loss": 1.1903,
"step": 1350
},
{
"epoch": 0.2093656490238904,
"grad_norm": 1.703125,
"learning_rate": 1.928064954690324e-05,
"loss": 1.3004,
"step": 1360
},
{
"epoch": 0.21090510232553664,
"grad_norm": 1.6328125,
"learning_rate": 1.9260498706924036e-05,
"loss": 1.2255,
"step": 1370
},
{
"epoch": 0.2124445556271829,
"grad_norm": 1.7578125,
"learning_rate": 1.924008034214714e-05,
"loss": 1.2626,
"step": 1380
},
{
"epoch": 0.21398400892882916,
"grad_norm": 1.75,
"learning_rate": 1.921939504243484e-05,
"loss": 1.2563,
"step": 1390
},
{
"epoch": 0.21552346223047542,
"grad_norm": 1.828125,
"learning_rate": 1.919844340536085e-05,
"loss": 1.2406,
"step": 1400
},
{
"epoch": 0.21706291553212165,
"grad_norm": 1.703125,
"learning_rate": 1.9177226036193058e-05,
"loss": 1.2178,
"step": 1410
},
{
"epoch": 0.2186023688337679,
"grad_norm": 2.09375,
"learning_rate": 1.9155743547876026e-05,
"loss": 1.2385,
"step": 1420
},
{
"epoch": 0.22014182213541417,
"grad_norm": 1.6484375,
"learning_rate": 1.9133996561013305e-05,
"loss": 1.2643,
"step": 1430
},
{
"epoch": 0.22168127543706043,
"grad_norm": 1.8671875,
"learning_rate": 1.9111985703849493e-05,
"loss": 1.2659,
"step": 1440
},
{
"epoch": 0.22322072873870666,
"grad_norm": 1.78125,
"learning_rate": 1.9089711612252082e-05,
"loss": 1.2504,
"step": 1450
},
{
"epoch": 0.22476018204035292,
"grad_norm": 1.640625,
"learning_rate": 1.9067174929693095e-05,
"loss": 1.2148,
"step": 1460
},
{
"epoch": 0.22629963534199918,
"grad_norm": 1.71875,
"learning_rate": 1.9044376307230496e-05,
"loss": 1.3097,
"step": 1470
},
{
"epoch": 0.22783908864364544,
"grad_norm": 1.8515625,
"learning_rate": 1.9021316403489387e-05,
"loss": 1.2139,
"step": 1480
},
{
"epoch": 0.22937854194529167,
"grad_norm": 1.6953125,
"learning_rate": 1.8997995884642968e-05,
"loss": 1.2255,
"step": 1490
},
{
"epoch": 0.23091799524693793,
"grad_norm": 1.8515625,
"learning_rate": 1.8974415424393302e-05,
"loss": 1.2577,
"step": 1500
},
{
"epoch": 0.2324574485485842,
"grad_norm": 1.8125,
"learning_rate": 1.895057570395185e-05,
"loss": 1.2222,
"step": 1510
},
{
"epoch": 0.23399690185023045,
"grad_norm": 1.921875,
"learning_rate": 1.8926477412019793e-05,
"loss": 1.2298,
"step": 1520
},
{
"epoch": 0.23553635515187668,
"grad_norm": 1.78125,
"learning_rate": 1.8902121244768135e-05,
"loss": 1.2368,
"step": 1530
},
{
"epoch": 0.23707580845352294,
"grad_norm": 1.734375,
"learning_rate": 1.887750790581759e-05,
"loss": 1.2801,
"step": 1540
},
{
"epoch": 0.2386152617551692,
"grad_norm": 1.7109375,
"learning_rate": 1.8852638106218254e-05,
"loss": 1.222,
"step": 1550
},
{
"epoch": 0.24015471505681546,
"grad_norm": 1.7734375,
"learning_rate": 1.8827512564429076e-05,
"loss": 1.2335,
"step": 1560
},
{
"epoch": 0.2416941683584617,
"grad_norm": 1.6640625,
"learning_rate": 1.8802132006297082e-05,
"loss": 1.2756,
"step": 1570
},
{
"epoch": 0.24323362166010795,
"grad_norm": 1.7265625,
"learning_rate": 1.8776497165036415e-05,
"loss": 1.2417,
"step": 1580
},
{
"epoch": 0.2447730749617542,
"grad_norm": 1.65625,
"learning_rate": 1.8750608781207172e-05,
"loss": 1.2156,
"step": 1590
},
{
"epoch": 0.24631252826340047,
"grad_norm": 1.703125,
"learning_rate": 1.8724467602693978e-05,
"loss": 1.2686,
"step": 1600
},
{
"epoch": 0.2478519815650467,
"grad_norm": 1.8125,
"learning_rate": 1.8698074384684403e-05,
"loss": 1.2667,
"step": 1610
},
{
"epoch": 0.24939143486669296,
"grad_norm": 1.75,
"learning_rate": 1.867142988964713e-05,
"loss": 1.2481,
"step": 1620
},
{
"epoch": 0.2509308881683392,
"grad_norm": 1.8671875,
"learning_rate": 1.8644534887309947e-05,
"loss": 1.1874,
"step": 1630
},
{
"epoch": 0.2524703414699855,
"grad_norm": 1.796875,
"learning_rate": 1.8617390154637494e-05,
"loss": 1.2225,
"step": 1640
},
{
"epoch": 0.2540097947716317,
"grad_norm": 1.7265625,
"learning_rate": 1.858999647580883e-05,
"loss": 1.2612,
"step": 1650
},
{
"epoch": 0.255549248073278,
"grad_norm": 1.84375,
"learning_rate": 1.856235464219476e-05,
"loss": 1.2158,
"step": 1660
},
{
"epoch": 0.25708870137492423,
"grad_norm": 1.8203125,
"learning_rate": 1.8534465452335e-05,
"loss": 1.2551,
"step": 1670
},
{
"epoch": 0.25862815467657047,
"grad_norm": 1.734375,
"learning_rate": 1.850632971191508e-05,
"loss": 1.2419,
"step": 1680
},
{
"epoch": 0.26016760797821675,
"grad_norm": 1.9453125,
"learning_rate": 1.8477948233743098e-05,
"loss": 1.2247,
"step": 1690
},
{
"epoch": 0.261707061279863,
"grad_norm": 1.7265625,
"learning_rate": 1.8449321837726207e-05,
"loss": 1.227,
"step": 1700
},
{
"epoch": 0.2632465145815092,
"grad_norm": 1.7578125,
"learning_rate": 1.842045135084696e-05,
"loss": 1.2355,
"step": 1710
},
{
"epoch": 0.2647859678831555,
"grad_norm": 1.6484375,
"learning_rate": 1.8391337607139396e-05,
"loss": 1.2215,
"step": 1720
},
{
"epoch": 0.26632542118480174,
"grad_norm": 1.6875,
"learning_rate": 1.8361981447664955e-05,
"loss": 1.2323,
"step": 1730
},
{
"epoch": 0.267864874486448,
"grad_norm": 1.8125,
"learning_rate": 1.8332383720488188e-05,
"loss": 1.2376,
"step": 1740
},
{
"epoch": 0.26940432778809426,
"grad_norm": 1.765625,
"learning_rate": 1.8302545280652234e-05,
"loss": 1.2654,
"step": 1750
},
{
"epoch": 0.2709437810897405,
"grad_norm": 1.8984375,
"learning_rate": 1.8272466990154155e-05,
"loss": 1.242,
"step": 1760
},
{
"epoch": 0.2724832343913868,
"grad_norm": 1.8125,
"learning_rate": 1.8242149717919993e-05,
"loss": 1.208,
"step": 1770
},
{
"epoch": 0.274022687693033,
"grad_norm": 1.609375,
"learning_rate": 1.8211594339779712e-05,
"loss": 1.2327,
"step": 1780
},
{
"epoch": 0.27556214099467924,
"grad_norm": 1.7265625,
"learning_rate": 1.8180801738441843e-05,
"loss": 1.275,
"step": 1790
},
{
"epoch": 0.2771015942963255,
"grad_norm": 1.7421875,
"learning_rate": 1.814977280346804e-05,
"loss": 1.2041,
"step": 1800
},
{
"epoch": 0.27864104759797176,
"grad_norm": 1.921875,
"learning_rate": 1.8118508431247338e-05,
"loss": 1.2476,
"step": 1810
},
{
"epoch": 0.28018050089961805,
"grad_norm": 1.7734375,
"learning_rate": 1.808700952497028e-05,
"loss": 1.2273,
"step": 1820
},
{
"epoch": 0.2817199542012643,
"grad_norm": 1.8984375,
"learning_rate": 1.8055276994602825e-05,
"loss": 1.2121,
"step": 1830
},
{
"epoch": 0.2832594075029105,
"grad_norm": 1.8984375,
"learning_rate": 1.8023311756860036e-05,
"loss": 1.2607,
"step": 1840
},
{
"epoch": 0.2847988608045568,
"grad_norm": 1.6171875,
"learning_rate": 1.799111473517964e-05,
"loss": 1.2393,
"step": 1850
},
{
"epoch": 0.28633831410620303,
"grad_norm": 1.7890625,
"learning_rate": 1.7958686859695307e-05,
"loss": 1.2149,
"step": 1860
},
{
"epoch": 0.28787776740784926,
"grad_norm": 1.7109375,
"learning_rate": 1.7926029067209808e-05,
"loss": 1.237,
"step": 1870
},
{
"epoch": 0.28941722070949555,
"grad_norm": 1.6328125,
"learning_rate": 1.789314230116794e-05,
"loss": 1.2456,
"step": 1880
},
{
"epoch": 0.2909566740111418,
"grad_norm": 1.8203125,
"learning_rate": 1.786002751162927e-05,
"loss": 1.2625,
"step": 1890
},
{
"epoch": 0.29249612731278807,
"grad_norm": 1.7734375,
"learning_rate": 1.7826685655240702e-05,
"loss": 1.2202,
"step": 1900
},
{
"epoch": 0.2940355806144343,
"grad_norm": 1.71875,
"learning_rate": 1.7793117695208828e-05,
"loss": 1.2714,
"step": 1910
},
{
"epoch": 0.29557503391608053,
"grad_norm": 1.765625,
"learning_rate": 1.7759324601272098e-05,
"loss": 1.2527,
"step": 1920
},
{
"epoch": 0.2971144872177268,
"grad_norm": 1.671875,
"learning_rate": 1.7725307349672826e-05,
"loss": 1.247,
"step": 1930
},
{
"epoch": 0.29865394051937305,
"grad_norm": 1.546875,
"learning_rate": 1.7691066923128962e-05,
"loss": 1.2389,
"step": 1940
},
{
"epoch": 0.3001933938210193,
"grad_norm": 1.8671875,
"learning_rate": 1.765660431080572e-05,
"loss": 1.2449,
"step": 1950
},
{
"epoch": 0.30173284712266557,
"grad_norm": 1.8515625,
"learning_rate": 1.7621920508287e-05,
"loss": 1.2037,
"step": 1960
},
{
"epoch": 0.3032723004243118,
"grad_norm": 1.75,
"learning_rate": 1.7587016517546614e-05,
"loss": 1.2367,
"step": 1970
},
{
"epoch": 0.3048117537259581,
"grad_norm": 1.734375,
"learning_rate": 1.7551893346919363e-05,
"loss": 1.2234,
"step": 1980
},
{
"epoch": 0.3063512070276043,
"grad_norm": 1.703125,
"learning_rate": 1.751655201107189e-05,
"loss": 1.2423,
"step": 1990
},
{
"epoch": 0.30789066032925055,
"grad_norm": 1.75,
"learning_rate": 1.748099353097336e-05,
"loss": 1.2393,
"step": 2000
},
{
"epoch": 0.30943011363089684,
"grad_norm": 1.6484375,
"learning_rate": 1.7445218933865994e-05,
"loss": 1.2416,
"step": 2010
},
{
"epoch": 0.3109695669325431,
"grad_norm": 1.703125,
"learning_rate": 1.7409229253235365e-05,
"loss": 1.2058,
"step": 2020
},
{
"epoch": 0.31250902023418936,
"grad_norm": 1.7421875,
"learning_rate": 1.7373025528780568e-05,
"loss": 1.1996,
"step": 2030
},
{
"epoch": 0.3140484735358356,
"grad_norm": 1.765625,
"learning_rate": 1.733660880638415e-05,
"loss": 1.229,
"step": 2040
},
{
"epoch": 0.3155879268374818,
"grad_norm": 1.765625,
"learning_rate": 1.729998013808192e-05,
"loss": 1.2154,
"step": 2050
},
{
"epoch": 0.3171273801391281,
"grad_norm": 1.703125,
"learning_rate": 1.726314058203257e-05,
"loss": 1.2293,
"step": 2060
},
{
"epoch": 0.31866683344077434,
"grad_norm": 1.796875,
"learning_rate": 1.7226091202487068e-05,
"loss": 1.242,
"step": 2070
},
{
"epoch": 0.3202062867424206,
"grad_norm": 1.7421875,
"learning_rate": 1.718883306975794e-05,
"loss": 1.2323,
"step": 2080
},
{
"epoch": 0.32174574004406686,
"grad_norm": 1.78125,
"learning_rate": 1.7151367260188348e-05,
"loss": 1.1962,
"step": 2090
},
{
"epoch": 0.3232851933457131,
"grad_norm": 5.5625,
"learning_rate": 1.7113694856120983e-05,
"loss": 1.2355,
"step": 2100
},
{
"epoch": 0.3248246466473594,
"grad_norm": 1.6328125,
"learning_rate": 1.7075816945866805e-05,
"loss": 1.2173,
"step": 2110
},
{
"epoch": 0.3263640999490056,
"grad_norm": 1.71875,
"learning_rate": 1.7037734623673616e-05,
"loss": 1.2464,
"step": 2120
},
{
"epoch": 0.32790355325065185,
"grad_norm": 1.765625,
"learning_rate": 1.6999448989694424e-05,
"loss": 1.2087,
"step": 2130
},
{
"epoch": 0.32944300655229813,
"grad_norm": 1.75,
"learning_rate": 1.6960961149955676e-05,
"loss": 1.2423,
"step": 2140
},
{
"epoch": 0.33098245985394437,
"grad_norm": 2.40625,
"learning_rate": 1.6922272216325302e-05,
"loss": 1.2444,
"step": 2150
},
{
"epoch": 0.3325219131555906,
"grad_norm": 2.03125,
"learning_rate": 1.68833833064806e-05,
"loss": 1.2362,
"step": 2160
},
{
"epoch": 0.3340613664572369,
"grad_norm": 1.8515625,
"learning_rate": 1.6844295543875936e-05,
"loss": 1.2213,
"step": 2170
},
{
"epoch": 0.3356008197588831,
"grad_norm": 1.765625,
"learning_rate": 1.6805010057710306e-05,
"loss": 1.2586,
"step": 2180
},
{
"epoch": 0.3371402730605294,
"grad_norm": 1.734375,
"learning_rate": 1.676552798289469e-05,
"loss": 1.2177,
"step": 2190
},
{
"epoch": 0.33867972636217564,
"grad_norm": 1.7421875,
"learning_rate": 1.6725850460019293e-05,
"loss": 1.2562,
"step": 2200
},
{
"epoch": 0.34021917966382187,
"grad_norm": 1.625,
"learning_rate": 1.6685978635320578e-05,
"loss": 1.2212,
"step": 2210
},
{
"epoch": 0.34175863296546816,
"grad_norm": 2.03125,
"learning_rate": 1.6645913660648153e-05,
"loss": 1.2413,
"step": 2220
},
{
"epoch": 0.3432980862671144,
"grad_norm": 1.8671875,
"learning_rate": 1.6605656693431504e-05,
"loss": 1.2449,
"step": 2230
},
{
"epoch": 0.3448375395687606,
"grad_norm": 1.671875,
"learning_rate": 1.6565208896646554e-05,
"loss": 1.2832,
"step": 2240
},
{
"epoch": 0.3463769928704069,
"grad_norm": 1.6640625,
"learning_rate": 1.652457143878206e-05,
"loss": 1.2153,
"step": 2250
},
{
"epoch": 0.34791644617205314,
"grad_norm": 1.8203125,
"learning_rate": 1.648374549380587e-05,
"loss": 1.2656,
"step": 2260
},
{
"epoch": 0.3494558994736994,
"grad_norm": 1.6640625,
"learning_rate": 1.6442732241130987e-05,
"loss": 1.2398,
"step": 2270
},
{
"epoch": 0.35099535277534566,
"grad_norm": 1.859375,
"learning_rate": 1.640153286558153e-05,
"loss": 1.2344,
"step": 2280
},
{
"epoch": 0.3525348060769919,
"grad_norm": 1.671875,
"learning_rate": 1.6360148557358468e-05,
"loss": 1.2415,
"step": 2290
},
{
"epoch": 0.3540742593786382,
"grad_norm": 1.8515625,
"learning_rate": 1.6318580512005266e-05,
"loss": 1.2392,
"step": 2300
},
{
"epoch": 0.3556137126802844,
"grad_norm": 1.71875,
"learning_rate": 1.6276829930373335e-05,
"loss": 1.2433,
"step": 2310
},
{
"epoch": 0.35715316598193064,
"grad_norm": 1.765625,
"learning_rate": 1.6234898018587336e-05,
"loss": 1.2768,
"step": 2320
},
{
"epoch": 0.35869261928357693,
"grad_norm": 1.734375,
"learning_rate": 1.6192785988010354e-05,
"loss": 1.2245,
"step": 2330
},
{
"epoch": 0.36023207258522316,
"grad_norm": 1.7578125,
"learning_rate": 1.6150495055208876e-05,
"loss": 1.2241,
"step": 2340
},
{
"epoch": 0.36177152588686945,
"grad_norm": 1.8125,
"learning_rate": 1.610802644191768e-05,
"loss": 1.2334,
"step": 2350
},
{
"epoch": 0.3633109791885157,
"grad_norm": 1.75,
"learning_rate": 1.60653813750045e-05,
"loss": 1.2278,
"step": 2360
},
{
"epoch": 0.3648504324901619,
"grad_norm": 1.75,
"learning_rate": 1.6022561086434628e-05,
"loss": 1.2763,
"step": 2370
},
{
"epoch": 0.3663898857918082,
"grad_norm": 1.796875,
"learning_rate": 1.5979566813235288e-05,
"loss": 1.2296,
"step": 2380
},
{
"epoch": 0.36792933909345443,
"grad_norm": 1.6484375,
"learning_rate": 1.5936399797459916e-05,
"loss": 1.201,
"step": 2390
},
{
"epoch": 0.36946879239510066,
"grad_norm": 1.703125,
"learning_rate": 1.5893061286152275e-05,
"loss": 1.2525,
"step": 2400
},
{
"epoch": 0.37100824569674695,
"grad_norm": 1.765625,
"learning_rate": 1.5849552531310432e-05,
"loss": 1.217,
"step": 2410
},
{
"epoch": 0.3725476989983932,
"grad_norm": 1.6015625,
"learning_rate": 1.5805874789850586e-05,
"loss": 1.2368,
"step": 2420
},
{
"epoch": 0.37408715230003947,
"grad_norm": 1.78125,
"learning_rate": 1.576202932357076e-05,
"loss": 1.2079,
"step": 2430
},
{
"epoch": 0.3756266056016857,
"grad_norm": 1.9453125,
"learning_rate": 1.571801739911434e-05,
"loss": 1.2388,
"step": 2440
},
{
"epoch": 0.37716605890333194,
"grad_norm": 1.796875,
"learning_rate": 1.5673840287933504e-05,
"loss": 1.2795,
"step": 2450
},
{
"epoch": 0.3787055122049782,
"grad_norm": 1.7890625,
"learning_rate": 1.562949926625247e-05,
"loss": 1.2192,
"step": 2460
},
{
"epoch": 0.38024496550662445,
"grad_norm": 1.734375,
"learning_rate": 1.5584995615030634e-05,
"loss": 1.239,
"step": 2470
},
{
"epoch": 0.3817844188082707,
"grad_norm": 1.7265625,
"learning_rate": 1.554033061992557e-05,
"loss": 1.2019,
"step": 2480
},
{
"epoch": 0.383323872109917,
"grad_norm": 1.5703125,
"learning_rate": 1.5495505571255884e-05,
"loss": 1.2104,
"step": 2490
},
{
"epoch": 0.3848633254115632,
"grad_norm": 1.8671875,
"learning_rate": 1.5450521763963943e-05,
"loss": 1.2426,
"step": 2500
},
{
"epoch": 0.3864027787132095,
"grad_norm": 1.8203125,
"learning_rate": 1.5405380497578445e-05,
"loss": 1.2542,
"step": 2510
},
{
"epoch": 0.3879422320148557,
"grad_norm": 1.7578125,
"learning_rate": 1.536008307617692e-05,
"loss": 1.2149,
"step": 2520
},
{
"epoch": 0.38948168531650196,
"grad_norm": 1.7578125,
"learning_rate": 1.5314630808348013e-05,
"loss": 1.2396,
"step": 2530
},
{
"epoch": 0.39102113861814825,
"grad_norm": 1.765625,
"learning_rate": 1.52690250071537e-05,
"loss": 1.2132,
"step": 2540
},
{
"epoch": 0.3925605919197945,
"grad_norm": 1.84375,
"learning_rate": 1.522326699009136e-05,
"loss": 1.2143,
"step": 2550
},
{
"epoch": 0.3941000452214407,
"grad_norm": 1.8984375,
"learning_rate": 1.5177358079055697e-05,
"loss": 1.2405,
"step": 2560
},
{
"epoch": 0.395639498523087,
"grad_norm": 1.6953125,
"learning_rate": 1.513129960030058e-05,
"loss": 1.2648,
"step": 2570
},
{
"epoch": 0.39717895182473323,
"grad_norm": 1.703125,
"learning_rate": 1.5085092884400689e-05,
"loss": 1.2312,
"step": 2580
},
{
"epoch": 0.3987184051263795,
"grad_norm": 1.765625,
"learning_rate": 1.5038739266213118e-05,
"loss": 1.2176,
"step": 2590
},
{
"epoch": 0.40025785842802575,
"grad_norm": 1.984375,
"learning_rate": 1.4992240084838786e-05,
"loss": 1.2382,
"step": 2600
},
{
"epoch": 0.401797311729672,
"grad_norm": 1.734375,
"learning_rate": 1.4945596683583754e-05,
"loss": 1.2135,
"step": 2610
},
{
"epoch": 0.40333676503131827,
"grad_norm": 1.796875,
"learning_rate": 1.4898810409920432e-05,
"loss": 1.2252,
"step": 2620
},
{
"epoch": 0.4048762183329645,
"grad_norm": 1.7109375,
"learning_rate": 1.485188261544864e-05,
"loss": 1.2436,
"step": 2630
},
{
"epoch": 0.40641567163461073,
"grad_norm": 1.8046875,
"learning_rate": 1.480481465585657e-05,
"loss": 1.2567,
"step": 2640
},
{
"epoch": 0.407955124936257,
"grad_norm": 1.7109375,
"learning_rate": 1.4757607890881615e-05,
"loss": 1.2577,
"step": 2650
},
{
"epoch": 0.40949457823790325,
"grad_norm": 1.796875,
"learning_rate": 1.4710263684271087e-05,
"loss": 1.2255,
"step": 2660
},
{
"epoch": 0.41103403153954954,
"grad_norm": 1.75,
"learning_rate": 1.4662783403742826e-05,
"loss": 1.2305,
"step": 2670
},
{
"epoch": 0.41257348484119577,
"grad_norm": 1.71875,
"learning_rate": 1.4615168420945689e-05,
"loss": 1.2841,
"step": 2680
},
{
"epoch": 0.414112938142842,
"grad_norm": 1.828125,
"learning_rate": 1.4567420111419919e-05,
"loss": 1.216,
"step": 2690
},
{
"epoch": 0.4156523914444883,
"grad_norm": 1.9609375,
"learning_rate": 1.4519539854557412e-05,
"loss": 1.1817,
"step": 2700
},
{
"epoch": 0.4171918447461345,
"grad_norm": 1.65625,
"learning_rate": 1.4471529033561856e-05,
"loss": 1.2346,
"step": 2710
},
{
"epoch": 0.4187312980477808,
"grad_norm": 1.640625,
"learning_rate": 1.442338903540879e-05,
"loss": 1.2352,
"step": 2720
},
{
"epoch": 0.42027075134942704,
"grad_norm": 1.8359375,
"learning_rate": 1.4375121250805532e-05,
"loss": 1.2227,
"step": 2730
},
{
"epoch": 0.4218102046510733,
"grad_norm": 1.671875,
"learning_rate": 1.432672707415099e-05,
"loss": 1.1777,
"step": 2740
},
{
"epoch": 0.42334965795271956,
"grad_norm": 1.8984375,
"learning_rate": 1.4278207903495388e-05,
"loss": 1.2389,
"step": 2750
},
{
"epoch": 0.4248891112543658,
"grad_norm": 1.796875,
"learning_rate": 1.4229565140499885e-05,
"loss": 1.1987,
"step": 2760
},
{
"epoch": 0.426428564556012,
"grad_norm": 1.703125,
"learning_rate": 1.418080019039607e-05,
"loss": 1.2211,
"step": 2770
},
{
"epoch": 0.4279680178576583,
"grad_norm": 1.765625,
"learning_rate": 1.4131914461945377e-05,
"loss": 1.2202,
"step": 2780
},
{
"epoch": 0.42950747115930454,
"grad_norm": 1.796875,
"learning_rate": 1.408290936739838e-05,
"loss": 1.2389,
"step": 2790
},
{
"epoch": 0.43104692446095083,
"grad_norm": 1.7578125,
"learning_rate": 1.4033786322453989e-05,
"loss": 1.2537,
"step": 2800
},
{
"epoch": 0.43258637776259706,
"grad_norm": 1.734375,
"learning_rate": 1.3984546746218576e-05,
"loss": 1.2682,
"step": 2810
},
{
"epoch": 0.4341258310642433,
"grad_norm": 1.859375,
"learning_rate": 1.3935192061164957e-05,
"loss": 1.2386,
"step": 2820
},
{
"epoch": 0.4356652843658896,
"grad_norm": 1.5625,
"learning_rate": 1.3885723693091304e-05,
"loss": 1.2681,
"step": 2830
},
{
"epoch": 0.4372047376675358,
"grad_norm": 1.7578125,
"learning_rate": 1.3836143071079954e-05,
"loss": 1.2176,
"step": 2840
},
{
"epoch": 0.43874419096918205,
"grad_norm": 1.8828125,
"learning_rate": 1.3786451627456134e-05,
"loss": 1.2285,
"step": 2850
},
{
"epoch": 0.44028364427082833,
"grad_norm": 1.6015625,
"learning_rate": 1.3736650797746568e-05,
"loss": 1.2128,
"step": 2860
},
{
"epoch": 0.44182309757247457,
"grad_norm": 1.9453125,
"learning_rate": 1.3686742020638023e-05,
"loss": 1.2559,
"step": 2870
},
{
"epoch": 0.44336255087412085,
"grad_norm": 1.6875,
"learning_rate": 1.3636726737935724e-05,
"loss": 1.2586,
"step": 2880
},
{
"epoch": 0.4449020041757671,
"grad_norm": 1.953125,
"learning_rate": 1.3586606394521729e-05,
"loss": 1.2085,
"step": 2890
},
{
"epoch": 0.4464414574774133,
"grad_norm": 1.5703125,
"learning_rate": 1.353638243831317e-05,
"loss": 1.244,
"step": 2900
},
{
"epoch": 0.4479809107790596,
"grad_norm": 1.8203125,
"learning_rate": 1.348605632022043e-05,
"loss": 1.2222,
"step": 2910
},
{
"epoch": 0.44952036408070584,
"grad_norm": 1.6953125,
"learning_rate": 1.3435629494105226e-05,
"loss": 1.218,
"step": 2920
},
{
"epoch": 0.45105981738235207,
"grad_norm": 1.7890625,
"learning_rate": 1.3385103416738614e-05,
"loss": 1.2392,
"step": 2930
},
{
"epoch": 0.45259927068399836,
"grad_norm": 1.7421875,
"learning_rate": 1.3334479547758894e-05,
"loss": 1.2327,
"step": 2940
},
{
"epoch": 0.4541387239856446,
"grad_norm": 1.71875,
"learning_rate": 1.3283759349629457e-05,
"loss": 1.2302,
"step": 2950
},
{
"epoch": 0.4556781772872909,
"grad_norm": 1.6484375,
"learning_rate": 1.323294428759652e-05,
"loss": 1.217,
"step": 2960
},
{
"epoch": 0.4572176305889371,
"grad_norm": 1.6953125,
"learning_rate": 1.3182035829646818e-05,
"loss": 1.2292,
"step": 2970
},
{
"epoch": 0.45875708389058334,
"grad_norm": 1.6328125,
"learning_rate": 1.3131035446465165e-05,
"loss": 1.2211,
"step": 2980
},
{
"epoch": 0.4602965371922296,
"grad_norm": 1.8671875,
"learning_rate": 1.3079944611392005e-05,
"loss": 1.2049,
"step": 2990
},
{
"epoch": 0.46183599049387586,
"grad_norm": 1.8046875,
"learning_rate": 1.3028764800380818e-05,
"loss": 1.2315,
"step": 3000
},
{
"epoch": 0.4633754437955221,
"grad_norm": 1.625,
"learning_rate": 1.2977497491955493e-05,
"loss": 1.177,
"step": 3010
},
{
"epoch": 0.4649148970971684,
"grad_norm": 1.7109375,
"learning_rate": 1.292614416716762e-05,
"loss": 1.2158,
"step": 3020
},
{
"epoch": 0.4664543503988146,
"grad_norm": 1.78125,
"learning_rate": 1.2874706309553697e-05,
"loss": 1.2513,
"step": 3030
},
{
"epoch": 0.4679938037004609,
"grad_norm": 1.78125,
"learning_rate": 1.2823185405092274e-05,
"loss": 1.2859,
"step": 3040
},
{
"epoch": 0.46953325700210713,
"grad_norm": 1.78125,
"learning_rate": 1.277158294216103e-05,
"loss": 1.2705,
"step": 3050
},
{
"epoch": 0.47107271030375336,
"grad_norm": 1.703125,
"learning_rate": 1.2719900411493764e-05,
"loss": 1.2111,
"step": 3060
},
{
"epoch": 0.47261216360539965,
"grad_norm": 1.890625,
"learning_rate": 1.2668139306137343e-05,
"loss": 1.2126,
"step": 3070
},
{
"epoch": 0.4741516169070459,
"grad_norm": 1.8125,
"learning_rate": 1.261630112140856e-05,
"loss": 1.2502,
"step": 3080
},
{
"epoch": 0.4756910702086921,
"grad_norm": 1.7734375,
"learning_rate": 1.2564387354850949e-05,
"loss": 1.2279,
"step": 3090
},
{
"epoch": 0.4772305235103384,
"grad_norm": 1.875,
"learning_rate": 1.251239950619149e-05,
"loss": 1.2369,
"step": 3100
},
{
"epoch": 0.47876997681198463,
"grad_norm": 1.8984375,
"learning_rate": 1.2460339077297335e-05,
"loss": 1.2727,
"step": 3110
},
{
"epoch": 0.4803094301136309,
"grad_norm": 1.8515625,
"learning_rate": 1.2408207572132367e-05,
"loss": 1.2004,
"step": 3120
},
{
"epoch": 0.48184888341527715,
"grad_norm": 1.6796875,
"learning_rate": 1.2356006496713798e-05,
"loss": 1.2237,
"step": 3130
},
{
"epoch": 0.4833883367169234,
"grad_norm": 1.7421875,
"learning_rate": 1.2303737359068632e-05,
"loss": 1.1884,
"step": 3140
},
{
"epoch": 0.48492779001856967,
"grad_norm": 1.765625,
"learning_rate": 1.2251401669190115e-05,
"loss": 1.2038,
"step": 3150
},
{
"epoch": 0.4864672433202159,
"grad_norm": 1.7265625,
"learning_rate": 1.2199000938994101e-05,
"loss": 1.2383,
"step": 3160
},
{
"epoch": 0.48800669662186213,
"grad_norm": 1.734375,
"learning_rate": 1.2146536682275388e-05,
"loss": 1.2389,
"step": 3170
},
{
"epoch": 0.4895461499235084,
"grad_norm": 1.6171875,
"learning_rate": 1.2094010414663972e-05,
"loss": 1.2236,
"step": 3180
},
{
"epoch": 0.49108560322515465,
"grad_norm": 1.78125,
"learning_rate": 1.2041423653581284e-05,
"loss": 1.2085,
"step": 3190
},
{
"epoch": 0.49262505652680094,
"grad_norm": 1.7734375,
"learning_rate": 1.1988777918196324e-05,
"loss": 1.241,
"step": 3200
},
{
"epoch": 0.4941645098284472,
"grad_norm": 1.796875,
"learning_rate": 1.1936074729381795e-05,
"loss": 1.2456,
"step": 3210
},
{
"epoch": 0.4957039631300934,
"grad_norm": 1.75,
"learning_rate": 1.1883315609670163e-05,
"loss": 1.2387,
"step": 3220
},
{
"epoch": 0.4972434164317397,
"grad_norm": 1.75,
"learning_rate": 1.1830502083209674e-05,
"loss": 1.244,
"step": 3230
},
{
"epoch": 0.4987828697333859,
"grad_norm": 1.8046875,
"learning_rate": 1.1777635675720313e-05,
"loss": 1.2172,
"step": 3240
},
{
"epoch": 0.5003223230350322,
"grad_norm": 1.9609375,
"learning_rate": 1.1724717914449744e-05,
"loss": 1.2698,
"step": 3250
},
{
"epoch": 0.5018617763366784,
"grad_norm": 1.6171875,
"learning_rate": 1.1671750328129168e-05,
"loss": 1.2265,
"step": 3260
},
{
"epoch": 0.5034012296383247,
"grad_norm": 1.78125,
"learning_rate": 1.161873444692918e-05,
"loss": 1.194,
"step": 3270
},
{
"epoch": 0.504940682939971,
"grad_norm": 1.6796875,
"learning_rate": 1.1565671802415564e-05,
"loss": 1.2437,
"step": 3280
},
{
"epoch": 0.5064801362416171,
"grad_norm": 1.6875,
"learning_rate": 1.1512563927505025e-05,
"loss": 1.1946,
"step": 3290
},
{
"epoch": 0.5080195895432634,
"grad_norm": 1.75,
"learning_rate": 1.1459412356420936e-05,
"loss": 1.2534,
"step": 3300
},
{
"epoch": 0.5095590428449097,
"grad_norm": 1.9140625,
"learning_rate": 1.1406218624648986e-05,
"loss": 1.2192,
"step": 3310
},
{
"epoch": 0.511098496146556,
"grad_norm": 1.515625,
"learning_rate": 1.1352984268892844e-05,
"loss": 1.1832,
"step": 3320
},
{
"epoch": 0.5126379494482022,
"grad_norm": 1.953125,
"learning_rate": 1.129971082702976e-05,
"loss": 1.2234,
"step": 3330
},
{
"epoch": 0.5141774027498485,
"grad_norm": 1.7109375,
"learning_rate": 1.124639983806613e-05,
"loss": 1.2296,
"step": 3340
},
{
"epoch": 0.5157168560514948,
"grad_norm": 1.953125,
"learning_rate": 1.1193052842093043e-05,
"loss": 1.2196,
"step": 3350
},
{
"epoch": 0.5172563093531409,
"grad_norm": 1.75,
"learning_rate": 1.1139671380241786e-05,
"loss": 1.2308,
"step": 3360
},
{
"epoch": 0.5187957626547872,
"grad_norm": 1.7890625,
"learning_rate": 1.1086256994639328e-05,
"loss": 1.217,
"step": 3370
},
{
"epoch": 0.5203352159564335,
"grad_norm": 1.8203125,
"learning_rate": 1.1032811228363766e-05,
"loss": 1.2209,
"step": 3380
},
{
"epoch": 0.5218746692580797,
"grad_norm": 1.71875,
"learning_rate": 1.0979335625399739e-05,
"loss": 1.2224,
"step": 3390
},
{
"epoch": 0.523414122559726,
"grad_norm": 1.6484375,
"learning_rate": 1.0925831730593843e-05,
"loss": 1.1895,
"step": 3400
},
{
"epoch": 0.5249535758613723,
"grad_norm": 1.8515625,
"learning_rate": 1.087230108960999e-05,
"loss": 1.246,
"step": 3410
},
{
"epoch": 0.5264930291630184,
"grad_norm": 1.703125,
"learning_rate": 1.0818745248884746e-05,
"loss": 1.2262,
"step": 3420
},
{
"epoch": 0.5280324824646647,
"grad_norm": 1.765625,
"learning_rate": 1.076516575558268e-05,
"loss": 1.2029,
"step": 3430
},
{
"epoch": 0.529571935766311,
"grad_norm": 1.640625,
"learning_rate": 1.0711564157551654e-05,
"loss": 1.1688,
"step": 3440
},
{
"epoch": 0.5311113890679573,
"grad_norm": 1.671875,
"learning_rate": 1.0657942003278107e-05,
"loss": 1.211,
"step": 3450
},
{
"epoch": 0.5326508423696035,
"grad_norm": 1.625,
"learning_rate": 1.0604300841842324e-05,
"loss": 1.2085,
"step": 3460
},
{
"epoch": 0.5341902956712498,
"grad_norm": 1.703125,
"learning_rate": 1.0550642222873681e-05,
"loss": 1.2051,
"step": 3470
},
{
"epoch": 0.535729748972896,
"grad_norm": 1.6875,
"learning_rate": 1.0496967696505888e-05,
"loss": 1.1968,
"step": 3480
},
{
"epoch": 0.5372692022745422,
"grad_norm": 1.71875,
"learning_rate": 1.0443278813332197e-05,
"loss": 1.2471,
"step": 3490
},
{
"epoch": 0.5388086555761885,
"grad_norm": 1.6484375,
"learning_rate": 1.038957712436061e-05,
"loss": 1.2299,
"step": 3500
},
{
"epoch": 0.5403481088778348,
"grad_norm": 1.859375,
"learning_rate": 1.0335864180969072e-05,
"loss": 1.2289,
"step": 3510
},
{
"epoch": 0.541887562179481,
"grad_norm": 1.8203125,
"learning_rate": 1.0282141534860662e-05,
"loss": 1.2811,
"step": 3520
},
{
"epoch": 0.5434270154811273,
"grad_norm": 1.765625,
"learning_rate": 1.022841073801875e-05,
"loss": 1.2566,
"step": 3530
},
{
"epoch": 0.5449664687827735,
"grad_norm": 1.78125,
"learning_rate": 1.0174673342662182e-05,
"loss": 1.2111,
"step": 3540
},
{
"epoch": 0.5465059220844197,
"grad_norm": 1.7109375,
"learning_rate": 1.0120930901200415e-05,
"loss": 1.1946,
"step": 3550
},
{
"epoch": 0.548045375386066,
"grad_norm": 1.6875,
"learning_rate": 1.0067184966188687e-05,
"loss": 1.209,
"step": 3560
},
{
"epoch": 0.5495848286877123,
"grad_norm": 1.7109375,
"learning_rate": 1.0013437090283164e-05,
"loss": 1.2233,
"step": 3570
},
{
"epoch": 0.5511242819893585,
"grad_norm": 1.8046875,
"learning_rate": 9.959688826196078e-06,
"loss": 1.2574,
"step": 3580
},
{
"epoch": 0.5526637352910048,
"grad_norm": 1.59375,
"learning_rate": 9.90594172665088e-06,
"loss": 1.2208,
"step": 3590
},
{
"epoch": 0.554203188592651,
"grad_norm": 1.765625,
"learning_rate": 9.852197344337366e-06,
"loss": 1.2289,
"step": 3600
},
{
"epoch": 0.5557426418942973,
"grad_norm": 1.6953125,
"learning_rate": 9.798457231866853e-06,
"loss": 1.2027,
"step": 3610
},
{
"epoch": 0.5572820951959435,
"grad_norm": 1.96875,
"learning_rate": 9.744722941727292e-06,
"loss": 1.2382,
"step": 3620
},
{
"epoch": 0.5588215484975898,
"grad_norm": 1.8203125,
"learning_rate": 9.690996026238443e-06,
"loss": 1.2074,
"step": 3630
},
{
"epoch": 0.5603610017992361,
"grad_norm": 1.6796875,
"learning_rate": 9.637278037507013e-06,
"loss": 1.2176,
"step": 3640
},
{
"epoch": 0.5619004551008823,
"grad_norm": 1.6015625,
"learning_rate": 9.583570527381838e-06,
"loss": 1.2196,
"step": 3650
},
{
"epoch": 0.5634399084025286,
"grad_norm": 1.6875,
"learning_rate": 9.529875047409027e-06,
"loss": 1.2159,
"step": 3660
},
{
"epoch": 0.5649793617041748,
"grad_norm": 1.7109375,
"learning_rate": 9.476193148787155e-06,
"loss": 1.2233,
"step": 3670
},
{
"epoch": 0.566518815005821,
"grad_norm": 1.7109375,
"learning_rate": 9.422526382322454e-06,
"loss": 1.2253,
"step": 3680
},
{
"epoch": 0.5680582683074673,
"grad_norm": 1.78125,
"learning_rate": 9.368876298383999e-06,
"loss": 1.1681,
"step": 3690
},
{
"epoch": 0.5695977216091136,
"grad_norm": 1.8203125,
"learning_rate": 9.315244446858932e-06,
"loss": 1.2346,
"step": 3700
},
{
"epoch": 0.5711371749107598,
"grad_norm": 1.6796875,
"learning_rate": 9.261632377107674e-06,
"loss": 1.2196,
"step": 3710
},
{
"epoch": 0.5726766282124061,
"grad_norm": 1.71875,
"learning_rate": 9.20804163791919e-06,
"loss": 1.2159,
"step": 3720
},
{
"epoch": 0.5742160815140523,
"grad_norm": 1.828125,
"learning_rate": 9.154473777466209e-06,
"loss": 1.2226,
"step": 3730
},
{
"epoch": 0.5757555348156985,
"grad_norm": 1.8203125,
"learning_rate": 9.100930343260544e-06,
"loss": 1.2466,
"step": 3740
},
{
"epoch": 0.5772949881173448,
"grad_norm": 1.5546875,
"learning_rate": 9.047412882108341e-06,
"loss": 1.1791,
"step": 3750
},
{
"epoch": 0.5788344414189911,
"grad_norm": 1.8359375,
"learning_rate": 8.993922940065426e-06,
"loss": 1.2507,
"step": 3760
},
{
"epoch": 0.5803738947206374,
"grad_norm": 1.6953125,
"learning_rate": 8.940462062392635e-06,
"loss": 1.2668,
"step": 3770
},
{
"epoch": 0.5819133480222836,
"grad_norm": 1.78125,
"learning_rate": 8.887031793511154e-06,
"loss": 1.2039,
"step": 3780
},
{
"epoch": 0.5834528013239298,
"grad_norm": 1.828125,
"learning_rate": 8.833633676957939e-06,
"loss": 1.2296,
"step": 3790
},
{
"epoch": 0.5849922546255761,
"grad_norm": 1.71875,
"learning_rate": 8.78026925534108e-06,
"loss": 1.205,
"step": 3800
},
{
"epoch": 0.5865317079272223,
"grad_norm": 1.828125,
"learning_rate": 8.726940070295282e-06,
"loss": 1.1951,
"step": 3810
},
{
"epoch": 0.5880711612288686,
"grad_norm": 1.5390625,
"learning_rate": 8.673647662437288e-06,
"loss": 1.1984,
"step": 3820
},
{
"epoch": 0.5896106145305149,
"grad_norm": 1.765625,
"learning_rate": 8.620393571321408e-06,
"loss": 1.2233,
"step": 3830
},
{
"epoch": 0.5911500678321611,
"grad_norm": 1.7890625,
"learning_rate": 8.567179335395012e-06,
"loss": 1.2112,
"step": 3840
},
{
"epoch": 0.5926895211338074,
"grad_norm": 1.6328125,
"learning_rate": 8.514006491954118e-06,
"loss": 1.2664,
"step": 3850
},
{
"epoch": 0.5942289744354536,
"grad_norm": 1.765625,
"learning_rate": 8.460876577098951e-06,
"loss": 1.1851,
"step": 3860
},
{
"epoch": 0.5957684277370998,
"grad_norm": 1.609375,
"learning_rate": 8.407791125689577e-06,
"loss": 1.1998,
"step": 3870
},
{
"epoch": 0.5973078810387461,
"grad_norm": 1.7265625,
"learning_rate": 8.354751671301584e-06,
"loss": 1.1966,
"step": 3880
},
{
"epoch": 0.5988473343403924,
"grad_norm": 1.7109375,
"learning_rate": 8.30175974618174e-06,
"loss": 1.2512,
"step": 3890
},
{
"epoch": 0.6003867876420386,
"grad_norm": 1.796875,
"learning_rate": 8.248816881203771e-06,
"loss": 1.2015,
"step": 3900
},
{
"epoch": 0.6019262409436849,
"grad_norm": 1.8046875,
"learning_rate": 8.195924605824088e-06,
"loss": 1.218,
"step": 3910
},
{
"epoch": 0.6034656942453311,
"grad_norm": 1.8203125,
"learning_rate": 8.143084448037654e-06,
"loss": 1.1875,
"step": 3920
},
{
"epoch": 0.6050051475469774,
"grad_norm": 1.734375,
"learning_rate": 8.090297934333795e-06,
"loss": 1.2273,
"step": 3930
},
{
"epoch": 0.6065446008486236,
"grad_norm": 1.7421875,
"learning_rate": 8.037566589652141e-06,
"loss": 1.1902,
"step": 3940
},
{
"epoch": 0.6080840541502699,
"grad_norm": 1.953125,
"learning_rate": 7.984891937338546e-06,
"loss": 1.2323,
"step": 3950
},
{
"epoch": 0.6096235074519162,
"grad_norm": 1.859375,
"learning_rate": 7.932275499101081e-06,
"loss": 1.2546,
"step": 3960
},
{
"epoch": 0.6111629607535624,
"grad_norm": 1.6796875,
"learning_rate": 7.8797187949661e-06,
"loss": 1.2519,
"step": 3970
},
{
"epoch": 0.6127024140552086,
"grad_norm": 1.7109375,
"learning_rate": 7.827223343234298e-06,
"loss": 1.184,
"step": 3980
},
{
"epoch": 0.6142418673568549,
"grad_norm": 1.6484375,
"learning_rate": 7.774790660436857e-06,
"loss": 1.2536,
"step": 3990
},
{
"epoch": 0.6157813206585011,
"grad_norm": 1.78125,
"learning_rate": 7.72242226129165e-06,
"loss": 1.2455,
"step": 4000
},
{
"epoch": 0.6173207739601474,
"grad_norm": 1.765625,
"learning_rate": 7.670119658659469e-06,
"loss": 1.2347,
"step": 4010
},
{
"epoch": 0.6188602272617937,
"grad_norm": 1.625,
"learning_rate": 7.617884363500313e-06,
"loss": 1.1972,
"step": 4020
},
{
"epoch": 0.6203996805634399,
"grad_norm": 1.9296875,
"learning_rate": 7.565717884829764e-06,
"loss": 1.2546,
"step": 4030
},
{
"epoch": 0.6219391338650861,
"grad_norm": 1.6640625,
"learning_rate": 7.513621729675362e-06,
"loss": 1.2472,
"step": 4040
},
{
"epoch": 0.6234785871667324,
"grad_norm": 1.8984375,
"learning_rate": 7.4615974030330985e-06,
"loss": 1.2416,
"step": 4050
},
{
"epoch": 0.6250180404683787,
"grad_norm": 1.78125,
"learning_rate": 7.409646407823914e-06,
"loss": 1.2288,
"step": 4060
},
{
"epoch": 0.6265574937700249,
"grad_norm": 1.6875,
"learning_rate": 7.357770244850291e-06,
"loss": 1.2328,
"step": 4070
},
{
"epoch": 0.6280969470716712,
"grad_norm": 1.6796875,
"learning_rate": 7.30597041275291e-06,
"loss": 1.2831,
"step": 4080
},
{
"epoch": 0.6296364003733175,
"grad_norm": 2.046875,
"learning_rate": 7.254248407967323e-06,
"loss": 1.2569,
"step": 4090
},
{
"epoch": 0.6311758536749636,
"grad_norm": 1.7109375,
"learning_rate": 7.202605724680769e-06,
"loss": 1.2089,
"step": 4100
},
{
"epoch": 0.6327153069766099,
"grad_norm": 1.8046875,
"learning_rate": 7.151043854788959e-06,
"loss": 1.2329,
"step": 4110
},
{
"epoch": 0.6342547602782562,
"grad_norm": 1.765625,
"learning_rate": 7.099564287853016e-06,
"loss": 1.2164,
"step": 4120
},
{
"epoch": 0.6357942135799024,
"grad_norm": 1.7890625,
"learning_rate": 7.048168511056421e-06,
"loss": 1.2881,
"step": 4130
},
{
"epoch": 0.6373336668815487,
"grad_norm": 1.8671875,
"learning_rate": 6.996858009162064e-06,
"loss": 1.2515,
"step": 4140
},
{
"epoch": 0.638873120183195,
"grad_norm": 1.7734375,
"learning_rate": 6.945634264469338e-06,
"loss": 1.2581,
"step": 4150
},
{
"epoch": 0.6404125734848412,
"grad_norm": 1.765625,
"learning_rate": 6.894498756771326e-06,
"loss": 1.2559,
"step": 4160
},
{
"epoch": 0.6419520267864874,
"grad_norm": 1.609375,
"learning_rate": 6.843452963312055e-06,
"loss": 1.2397,
"step": 4170
},
{
"epoch": 0.6434914800881337,
"grad_norm": 1.625,
"learning_rate": 6.7924983587438e-06,
"loss": 1.2234,
"step": 4180
},
{
"epoch": 0.6450309333897799,
"grad_norm": 1.90625,
"learning_rate": 6.741636415084514e-06,
"loss": 1.2609,
"step": 4190
},
{
"epoch": 0.6465703866914262,
"grad_norm": 1.578125,
"learning_rate": 6.690868601675273e-06,
"loss": 1.2129,
"step": 4200
},
{
"epoch": 0.6481098399930725,
"grad_norm": 1.8046875,
"learning_rate": 6.640196385137854e-06,
"loss": 1.2219,
"step": 4210
},
{
"epoch": 0.6496492932947188,
"grad_norm": 1.7890625,
"learning_rate": 6.58962122933234e-06,
"loss": 1.2781,
"step": 4220
},
{
"epoch": 0.6511887465963649,
"grad_norm": 1.796875,
"learning_rate": 6.5391445953148615e-06,
"loss": 1.2453,
"step": 4230
},
{
"epoch": 0.6527281998980112,
"grad_norm": 1.734375,
"learning_rate": 6.4887679412953546e-06,
"loss": 1.2456,
"step": 4240
},
{
"epoch": 0.6542676531996575,
"grad_norm": 1.890625,
"learning_rate": 6.438492722595467e-06,
"loss": 1.2498,
"step": 4250
},
{
"epoch": 0.6558071065013037,
"grad_norm": 1.8125,
"learning_rate": 6.38832039160649e-06,
"loss": 1.2614,
"step": 4260
},
{
"epoch": 0.65734655980295,
"grad_norm": 1.7109375,
"learning_rate": 6.338252397747417e-06,
"loss": 1.2455,
"step": 4270
},
{
"epoch": 0.6588860131045963,
"grad_norm": 1.828125,
"learning_rate": 6.2882901874230604e-06,
"loss": 1.2504,
"step": 4280
},
{
"epoch": 0.6604254664062424,
"grad_norm": 1.6953125,
"learning_rate": 6.238435203982278e-06,
"loss": 1.1987,
"step": 4290
},
{
"epoch": 0.6619649197078887,
"grad_norm": 1.7265625,
"learning_rate": 6.1886888876762725e-06,
"loss": 1.2427,
"step": 4300
},
{
"epoch": 0.663504373009535,
"grad_norm": 1.8046875,
"learning_rate": 6.1390526756169675e-06,
"loss": 1.2541,
"step": 4310
},
{
"epoch": 0.6650438263111812,
"grad_norm": 1.703125,
"learning_rate": 6.089528001735527e-06,
"loss": 1.1983,
"step": 4320
},
{
"epoch": 0.6665832796128275,
"grad_norm": 1.7734375,
"learning_rate": 6.04011629674089e-06,
"loss": 1.2211,
"step": 4330
},
{
"epoch": 0.6681227329144738,
"grad_norm": 1.71875,
"learning_rate": 5.990818988078475e-06,
"loss": 1.2224,
"step": 4340
},
{
"epoch": 0.66966218621612,
"grad_norm": 1.6953125,
"learning_rate": 5.941637499888915e-06,
"loss": 1.2194,
"step": 4350
},
{
"epoch": 0.6712016395177662,
"grad_norm": 1.84375,
"learning_rate": 5.892573252966926e-06,
"loss": 1.196,
"step": 4360
},
{
"epoch": 0.6727410928194125,
"grad_norm": 1.8203125,
"learning_rate": 5.843627664720279e-06,
"loss": 1.2119,
"step": 4370
},
{
"epoch": 0.6742805461210588,
"grad_norm": 1.7421875,
"learning_rate": 5.794802149128818e-06,
"loss": 1.2326,
"step": 4380
},
{
"epoch": 0.675819999422705,
"grad_norm": 1.59375,
"learning_rate": 5.746098116703647e-06,
"loss": 1.2473,
"step": 4390
},
{
"epoch": 0.6773594527243513,
"grad_norm": 1.8125,
"learning_rate": 5.697516974446344e-06,
"loss": 1.244,
"step": 4400
},
{
"epoch": 0.6788989060259976,
"grad_norm": 1.765625,
"learning_rate": 5.6490601258083675e-06,
"loss": 1.2076,
"step": 4410
},
{
"epoch": 0.6804383593276437,
"grad_norm": 1.578125,
"learning_rate": 5.600728970650446e-06,
"loss": 1.2208,
"step": 4420
},
{
"epoch": 0.68197781262929,
"grad_norm": 1.9765625,
"learning_rate": 5.552524905202208e-06,
"loss": 1.2155,
"step": 4430
},
{
"epoch": 0.6835172659309363,
"grad_norm": 1.6484375,
"learning_rate": 5.504449322021774e-06,
"loss": 1.2246,
"step": 4440
},
{
"epoch": 0.6850567192325825,
"grad_norm": 1.71875,
"learning_rate": 5.456503609955606e-06,
"loss": 1.1766,
"step": 4450
},
{
"epoch": 0.6865961725342288,
"grad_norm": 1.7265625,
"learning_rate": 5.408689154098306e-06,
"loss": 1.2345,
"step": 4460
},
{
"epoch": 0.6881356258358751,
"grad_norm": 1.7421875,
"learning_rate": 5.361007335752662e-06,
"loss": 1.2412,
"step": 4470
},
{
"epoch": 0.6896750791375212,
"grad_norm": 1.6328125,
"learning_rate": 5.313459532389713e-06,
"loss": 1.2205,
"step": 4480
},
{
"epoch": 0.6912145324391675,
"grad_norm": 1.7109375,
"learning_rate": 5.266047117608973e-06,
"loss": 1.171,
"step": 4490
},
{
"epoch": 0.6927539857408138,
"grad_norm": 1.7578125,
"learning_rate": 5.218771461098733e-06,
"loss": 1.1932,
"step": 4500
},
{
"epoch": 0.69429343904246,
"grad_norm": 1.78125,
"learning_rate": 5.1716339285965035e-06,
"loss": 1.2628,
"step": 4510
},
{
"epoch": 0.6958328923441063,
"grad_norm": 1.8359375,
"learning_rate": 5.124635881849558e-06,
"loss": 1.2283,
"step": 4520
},
{
"epoch": 0.6973723456457526,
"grad_norm": 1.6640625,
"learning_rate": 5.077778678575581e-06,
"loss": 1.2474,
"step": 4530
},
{
"epoch": 0.6989117989473989,
"grad_norm": 1.65625,
"learning_rate": 5.0310636724234796e-06,
"loss": 1.2207,
"step": 4540
},
{
"epoch": 0.700451252249045,
"grad_norm": 1.7109375,
"learning_rate": 4.984492212934231e-06,
"loss": 1.2369,
"step": 4550
},
{
"epoch": 0.7019907055506913,
"grad_norm": 1.6640625,
"learning_rate": 4.938065645501934e-06,
"loss": 1.231,
"step": 4560
},
{
"epoch": 0.7035301588523376,
"grad_norm": 1.59375,
"learning_rate": 4.891785311334924e-06,
"loss": 1.2238,
"step": 4570
},
{
"epoch": 0.7050696121539838,
"grad_norm": 1.9921875,
"learning_rate": 4.84565254741703e-06,
"loss": 1.2368,
"step": 4580
},
{
"epoch": 0.7066090654556301,
"grad_norm": 1.5625,
"learning_rate": 4.7996686864689565e-06,
"loss": 1.2171,
"step": 4590
},
{
"epoch": 0.7081485187572764,
"grad_norm": 1.84375,
"learning_rate": 4.753835056909774e-06,
"loss": 1.2543,
"step": 4600
},
{
"epoch": 0.7096879720589225,
"grad_norm": 1.8125,
"learning_rate": 4.708152982818548e-06,
"loss": 1.2192,
"step": 4610
},
{
"epoch": 0.7112274253605688,
"grad_norm": 1.7421875,
"learning_rate": 4.662623783896088e-06,
"loss": 1.2161,
"step": 4620
},
{
"epoch": 0.7127668786622151,
"grad_norm": 1.703125,
"learning_rate": 4.617248775426818e-06,
"loss": 1.2368,
"step": 4630
},
{
"epoch": 0.7143063319638613,
"grad_norm": 1.671875,
"learning_rate": 4.572029268240787e-06,
"loss": 1.2,
"step": 4640
},
{
"epoch": 0.7158457852655076,
"grad_norm": 1.828125,
"learning_rate": 4.526966568675802e-06,
"loss": 1.2892,
"step": 4650
},
{
"epoch": 0.7173852385671539,
"grad_norm": 1.6640625,
"learning_rate": 4.482061978539665e-06,
"loss": 1.1981,
"step": 4660
},
{
"epoch": 0.7189246918688001,
"grad_norm": 1.640625,
"learning_rate": 4.437316795072605e-06,
"loss": 1.217,
"step": 4670
},
{
"epoch": 0.7204641451704463,
"grad_norm": 1.859375,
"learning_rate": 4.392732310909771e-06,
"loss": 1.2843,
"step": 4680
},
{
"epoch": 0.7220035984720926,
"grad_norm": 1.7421875,
"learning_rate": 4.3483098140439066e-06,
"loss": 1.2294,
"step": 4690
},
{
"epoch": 0.7235430517737389,
"grad_norm": 1.6484375,
"learning_rate": 4.304050587788129e-06,
"loss": 1.2224,
"step": 4700
},
{
"epoch": 0.7250825050753851,
"grad_norm": 1.5546875,
"learning_rate": 4.2599559107388645e-06,
"loss": 1.2226,
"step": 4710
},
{
"epoch": 0.7266219583770314,
"grad_norm": 1.6796875,
"learning_rate": 4.216027056738909e-06,
"loss": 1.2431,
"step": 4720
},
{
"epoch": 0.7281614116786777,
"grad_norm": 1.8203125,
"learning_rate": 4.172265294840628e-06,
"loss": 1.2082,
"step": 4730
},
{
"epoch": 0.7297008649803238,
"grad_norm": 1.7734375,
"learning_rate": 4.128671889269292e-06,
"loss": 1.1767,
"step": 4740
},
{
"epoch": 0.7312403182819701,
"grad_norm": 1.6953125,
"learning_rate": 4.0852480993865625e-06,
"loss": 1.1983,
"step": 4750
},
{
"epoch": 0.7327797715836164,
"grad_norm": 1.625,
"learning_rate": 4.0419951796540915e-06,
"loss": 1.1887,
"step": 4760
},
{
"epoch": 0.7343192248852626,
"grad_norm": 1.640625,
"learning_rate": 3.9989143795973206e-06,
"loss": 1.2006,
"step": 4770
},
{
"epoch": 0.7358586781869089,
"grad_norm": 1.6640625,
"learning_rate": 3.956006943769331e-06,
"loss": 1.2182,
"step": 4780
},
{
"epoch": 0.7373981314885552,
"grad_norm": 1.6484375,
"learning_rate": 3.913274111714942e-06,
"loss": 1.1946,
"step": 4790
},
{
"epoch": 0.7389375847902013,
"grad_norm": 1.703125,
"learning_rate": 3.870717117934857e-06,
"loss": 1.2858,
"step": 4800
},
{
"epoch": 0.7404770380918476,
"grad_norm": 1.6953125,
"learning_rate": 3.8283371918500325e-06,
"loss": 1.2157,
"step": 4810
},
{
"epoch": 0.7420164913934939,
"grad_norm": 1.703125,
"learning_rate": 3.786135557766146e-06,
"loss": 1.2178,
"step": 4820
},
{
"epoch": 0.7435559446951402,
"grad_norm": 1.7578125,
"learning_rate": 3.7441134348382345e-06,
"loss": 1.2035,
"step": 4830
},
{
"epoch": 0.7450953979967864,
"grad_norm": 1.71875,
"learning_rate": 3.702272037035468e-06,
"loss": 1.2462,
"step": 4840
},
{
"epoch": 0.7466348512984327,
"grad_norm": 1.9140625,
"learning_rate": 3.6606125731060815e-06,
"loss": 1.1922,
"step": 4850
},
{
"epoch": 0.7481743046000789,
"grad_norm": 1.9453125,
"learning_rate": 3.6191362465424617e-06,
"loss": 1.2296,
"step": 4860
},
{
"epoch": 0.7497137579017251,
"grad_norm": 1.7421875,
"learning_rate": 3.5778442555463633e-06,
"loss": 1.2526,
"step": 4870
},
{
"epoch": 0.7512532112033714,
"grad_norm": 1.6875,
"learning_rate": 3.5367377929943246e-06,
"loss": 1.2143,
"step": 4880
},
{
"epoch": 0.7527926645050177,
"grad_norm": 1.71875,
"learning_rate": 3.4958180464031645e-06,
"loss": 1.2277,
"step": 4890
},
{
"epoch": 0.7543321178066639,
"grad_norm": 1.796875,
"learning_rate": 3.455086197895725e-06,
"loss": 1.2065,
"step": 4900
},
{
"epoch": 0.7558715711083102,
"grad_norm": 1.7265625,
"learning_rate": 3.41454342416667e-06,
"loss": 1.1921,
"step": 4910
},
{
"epoch": 0.7574110244099564,
"grad_norm": 1.6484375,
"learning_rate": 3.3741908964485415e-06,
"loss": 1.2295,
"step": 4920
},
{
"epoch": 0.7589504777116026,
"grad_norm": 1.8046875,
"learning_rate": 3.3340297804778822e-06,
"loss": 1.248,
"step": 4930
},
{
"epoch": 0.7604899310132489,
"grad_norm": 1.7265625,
"learning_rate": 3.2940612364615863e-06,
"loss": 1.2379,
"step": 4940
},
{
"epoch": 0.7620293843148952,
"grad_norm": 1.796875,
"learning_rate": 3.254286419043372e-06,
"loss": 1.246,
"step": 4950
},
{
"epoch": 0.7635688376165414,
"grad_norm": 1.671875,
"learning_rate": 3.214706477270424e-06,
"loss": 1.2417,
"step": 4960
},
{
"epoch": 0.7651082909181877,
"grad_norm": 1.7265625,
"learning_rate": 3.1753225545602083e-06,
"loss": 1.2571,
"step": 4970
},
{
"epoch": 0.766647744219834,
"grad_norm": 1.8515625,
"learning_rate": 3.1361357886674172e-06,
"loss": 1.2244,
"step": 4980
},
{
"epoch": 0.7681871975214802,
"grad_norm": 1.796875,
"learning_rate": 3.09714731165114e-06,
"loss": 1.219,
"step": 4990
},
{
"epoch": 0.7697266508231264,
"grad_norm": 1.640625,
"learning_rate": 3.0583582498421107e-06,
"loss": 1.197,
"step": 5000
},
{
"epoch": 0.7712661041247727,
"grad_norm": 1.734375,
"learning_rate": 3.019769723810222e-06,
"loss": 1.1917,
"step": 5010
},
{
"epoch": 0.772805557426419,
"grad_norm": 1.8359375,
"learning_rate": 2.9813828483320983e-06,
"loss": 1.2693,
"step": 5020
},
{
"epoch": 0.7743450107280652,
"grad_norm": 1.59375,
"learning_rate": 2.9431987323589473e-06,
"loss": 1.2338,
"step": 5030
},
{
"epoch": 0.7758844640297115,
"grad_norm": 1.7109375,
"learning_rate": 2.9052184789844696e-06,
"loss": 1.2399,
"step": 5040
},
{
"epoch": 0.7774239173313577,
"grad_norm": 1.7890625,
"learning_rate": 2.86744318541304e-06,
"loss": 1.2505,
"step": 5050
},
{
"epoch": 0.7789633706330039,
"grad_norm": 1.6875,
"learning_rate": 2.829873942927971e-06,
"loss": 1.2275,
"step": 5060
},
{
"epoch": 0.7805028239346502,
"grad_norm": 1.6328125,
"learning_rate": 2.7925118368600126e-06,
"loss": 1.1781,
"step": 5070
},
{
"epoch": 0.7820422772362965,
"grad_norm": 1.6328125,
"learning_rate": 2.7553579465559908e-06,
"loss": 1.1947,
"step": 5080
},
{
"epoch": 0.7835817305379427,
"grad_norm": 1.8359375,
"learning_rate": 2.7184133453476224e-06,
"loss": 1.1985,
"step": 5090
},
{
"epoch": 0.785121183839589,
"grad_norm": 1.578125,
"learning_rate": 2.6816791005205124e-06,
"loss": 1.2575,
"step": 5100
},
{
"epoch": 0.7866606371412352,
"grad_norm": 1.6875,
"learning_rate": 2.645156273283319e-06,
"loss": 1.2176,
"step": 5110
},
{
"epoch": 0.7882000904428814,
"grad_norm": 1.578125,
"learning_rate": 2.6088459187371027e-06,
"loss": 1.2582,
"step": 5120
},
{
"epoch": 0.7897395437445277,
"grad_norm": 1.640625,
"learning_rate": 2.572749085844829e-06,
"loss": 1.2022,
"step": 5130
},
{
"epoch": 0.791278997046174,
"grad_norm": 1.7578125,
"learning_rate": 2.5368668174010926e-06,
"loss": 1.2003,
"step": 5140
},
{
"epoch": 0.7928184503478203,
"grad_norm": 1.6953125,
"learning_rate": 2.501200150001961e-06,
"loss": 1.2139,
"step": 5150
},
{
"epoch": 0.7943579036494665,
"grad_norm": 1.6875,
"learning_rate": 2.465750114015053e-06,
"loss": 1.2381,
"step": 5160
},
{
"epoch": 0.7958973569511127,
"grad_norm": 1.7734375,
"learning_rate": 2.430517733549761e-06,
"loss": 1.2559,
"step": 5170
},
{
"epoch": 0.797436810252759,
"grad_norm": 1.703125,
"learning_rate": 2.3955040264276684e-06,
"loss": 1.2447,
"step": 5180
},
{
"epoch": 0.7989762635544052,
"grad_norm": 1.6328125,
"learning_rate": 2.3607100041531474e-06,
"loss": 1.2168,
"step": 5190
},
{
"epoch": 0.8005157168560515,
"grad_norm": 1.7265625,
"learning_rate": 2.3261366718841307e-06,
"loss": 1.1945,
"step": 5200
},
{
"epoch": 0.8020551701576978,
"grad_norm": 1.8359375,
"learning_rate": 2.291785028403086e-06,
"loss": 1.2354,
"step": 5210
},
{
"epoch": 0.803594623459344,
"grad_norm": 1.71875,
"learning_rate": 2.257656066088151e-06,
"loss": 1.2296,
"step": 5220
},
{
"epoch": 0.8051340767609902,
"grad_norm": 1.8515625,
"learning_rate": 2.2237507708844707e-06,
"loss": 1.1465,
"step": 5230
},
{
"epoch": 0.8066735300626365,
"grad_norm": 1.5859375,
"learning_rate": 2.1900701222757124e-06,
"loss": 1.2334,
"step": 5240
},
{
"epoch": 0.8082129833642827,
"grad_norm": 1.671875,
"learning_rate": 2.1566150932557737e-06,
"loss": 1.1959,
"step": 5250
},
{
"epoch": 0.809752436665929,
"grad_norm": 1.6796875,
"learning_rate": 2.123386650300664e-06,
"loss": 1.1912,
"step": 5260
},
{
"epoch": 0.8112918899675753,
"grad_norm": 1.6953125,
"learning_rate": 2.090385753340596e-06,
"loss": 1.19,
"step": 5270
},
{
"epoch": 0.8128313432692215,
"grad_norm": 1.8203125,
"learning_rate": 2.0576133557322463e-06,
"loss": 1.2264,
"step": 5280
},
{
"epoch": 0.8143707965708677,
"grad_norm": 1.78125,
"learning_rate": 2.0250704042312198e-06,
"loss": 1.2282,
"step": 5290
},
{
"epoch": 0.815910249872514,
"grad_norm": 1.6015625,
"learning_rate": 1.992757838964694e-06,
"loss": 1.2152,
"step": 5300
},
{
"epoch": 0.8174497031741603,
"grad_norm": 1.734375,
"learning_rate": 1.9606765934042637e-06,
"loss": 1.2547,
"step": 5310
},
{
"epoch": 0.8189891564758065,
"grad_norm": 1.5625,
"learning_rate": 1.928827594338969e-06,
"loss": 1.258,
"step": 5320
},
{
"epoch": 0.8205286097774528,
"grad_norm": 1.6953125,
"learning_rate": 1.8972117618485286e-06,
"loss": 1.2381,
"step": 5330
},
{
"epoch": 0.8220680630790991,
"grad_norm": 1.703125,
"learning_rate": 1.8658300092767546e-06,
"loss": 1.2604,
"step": 5340
},
{
"epoch": 0.8236075163807453,
"grad_norm": 1.703125,
"learning_rate": 1.834683243205171e-06,
"loss": 1.2129,
"step": 5350
},
{
"epoch": 0.8251469696823915,
"grad_norm": 1.71875,
"learning_rate": 1.80377236342681e-06,
"loss": 1.2199,
"step": 5360
},
{
"epoch": 0.8266864229840378,
"grad_norm": 1.6640625,
"learning_rate": 1.7730982629202497e-06,
"loss": 1.2336,
"step": 5370
},
{
"epoch": 0.828225876285684,
"grad_norm": 1.8359375,
"learning_rate": 1.7426618278237761e-06,
"loss": 1.2604,
"step": 5380
},
{
"epoch": 0.8297653295873303,
"grad_norm": 1.6640625,
"learning_rate": 1.7124639374098162e-06,
"loss": 1.2213,
"step": 5390
},
{
"epoch": 0.8313047828889766,
"grad_norm": 2.109375,
"learning_rate": 1.6825054640595196e-06,
"loss": 1.2854,
"step": 5400
},
{
"epoch": 0.8328442361906228,
"grad_norm": 1.671875,
"learning_rate": 1.652787273237565e-06,
"loss": 1.196,
"step": 5410
},
{
"epoch": 0.834383689492269,
"grad_norm": 1.625,
"learning_rate": 1.6233102234671528e-06,
"loss": 1.2436,
"step": 5420
},
{
"epoch": 0.8359231427939153,
"grad_norm": 1.6875,
"learning_rate": 1.5940751663052046e-06,
"loss": 1.2107,
"step": 5430
},
{
"epoch": 0.8374625960955616,
"grad_norm": 1.765625,
"learning_rate": 1.565082946317763e-06,
"loss": 1.2167,
"step": 5440
},
{
"epoch": 0.8390020493972078,
"grad_norm": 1.8203125,
"learning_rate": 1.5363344010555958e-06,
"loss": 1.1855,
"step": 5450
},
{
"epoch": 0.8405415026988541,
"grad_norm": 1.703125,
"learning_rate": 1.5078303610299972e-06,
"loss": 1.2,
"step": 5460
},
{
"epoch": 0.8420809560005004,
"grad_norm": 1.7265625,
"learning_rate": 1.4795716496887879e-06,
"loss": 1.2382,
"step": 5470
},
{
"epoch": 0.8436204093021465,
"grad_norm": 1.6328125,
"learning_rate": 1.4515590833925508e-06,
"loss": 1.2268,
"step": 5480
},
{
"epoch": 0.8451598626037928,
"grad_norm": 1.6640625,
"learning_rate": 1.4237934713910118e-06,
"loss": 1.1563,
"step": 5490
},
{
"epoch": 0.8466993159054391,
"grad_norm": 1.7109375,
"learning_rate": 1.3962756157997004e-06,
"loss": 1.236,
"step": 5500
},
{
"epoch": 0.8482387692070853,
"grad_norm": 1.7578125,
"learning_rate": 1.3690063115767393e-06,
"loss": 1.2667,
"step": 5510
},
{
"epoch": 0.8497782225087316,
"grad_norm": 1.8359375,
"learning_rate": 1.3419863464999138e-06,
"loss": 1.2505,
"step": 5520
},
{
"epoch": 0.8513176758103779,
"grad_norm": 1.8046875,
"learning_rate": 1.3152165011438867e-06,
"loss": 1.192,
"step": 5530
},
{
"epoch": 0.852857129112024,
"grad_norm": 2.109375,
"learning_rate": 1.2886975488576637e-06,
"loss": 1.2168,
"step": 5540
},
{
"epoch": 0.8543965824136703,
"grad_norm": 1.703125,
"learning_rate": 1.2624302557422475e-06,
"loss": 1.2354,
"step": 5550
},
{
"epoch": 0.8559360357153166,
"grad_norm": 1.6484375,
"learning_rate": 1.2364153806285074e-06,
"loss": 1.2418,
"step": 5560
},
{
"epoch": 0.8574754890169628,
"grad_norm": 1.71875,
"learning_rate": 1.2106536750552578e-06,
"loss": 1.2171,
"step": 5570
},
{
"epoch": 0.8590149423186091,
"grad_norm": 1.703125,
"learning_rate": 1.1851458832475371e-06,
"loss": 1.25,
"step": 5580
},
{
"epoch": 0.8605543956202554,
"grad_norm": 1.671875,
"learning_rate": 1.1598927420951333e-06,
"loss": 1.2345,
"step": 5590
},
{
"epoch": 0.8620938489219017,
"grad_norm": 1.6875,
"learning_rate": 1.1348949811312616e-06,
"loss": 1.2224,
"step": 5600
},
{
"epoch": 0.8636333022235478,
"grad_norm": 1.640625,
"learning_rate": 1.1101533225115247e-06,
"loss": 1.1752,
"step": 5610
},
{
"epoch": 0.8651727555251941,
"grad_norm": 1.8125,
"learning_rate": 1.085668480993015e-06,
"loss": 1.2384,
"step": 5620
},
{
"epoch": 0.8667122088268404,
"grad_norm": 1.65625,
"learning_rate": 1.0614411639137034e-06,
"loss": 1.2213,
"step": 5630
},
{
"epoch": 0.8682516621284866,
"grad_norm": 1.765625,
"learning_rate": 1.0374720711719676e-06,
"loss": 1.2478,
"step": 5640
},
{
"epoch": 0.8697911154301329,
"grad_norm": 1.6640625,
"learning_rate": 1.0137618952064066e-06,
"loss": 1.2059,
"step": 5650
},
{
"epoch": 0.8713305687317792,
"grad_norm": 1.6484375,
"learning_rate": 9.903113209758098e-07,
"loss": 1.1992,
"step": 5660
},
{
"epoch": 0.8728700220334253,
"grad_norm": 1.6875,
"learning_rate": 9.67121025939387e-07,
"loss": 1.2318,
"step": 5670
},
{
"epoch": 0.8744094753350716,
"grad_norm": 1.796875,
"learning_rate": 9.441916800371909e-07,
"loss": 1.2161,
"step": 5680
},
{
"epoch": 0.8759489286367179,
"grad_norm": 1.6953125,
"learning_rate": 9.215239456707636e-07,
"loss": 1.2029,
"step": 5690
},
{
"epoch": 0.8774883819383641,
"grad_norm": 1.6015625,
"learning_rate": 8.991184776840034e-07,
"loss": 1.1941,
"step": 5700
},
{
"epoch": 0.8790278352400104,
"grad_norm": 2.078125,
"learning_rate": 8.769759233442366e-07,
"loss": 1.221,
"step": 5710
},
{
"epoch": 0.8805672885416567,
"grad_norm": 1.7578125,
"learning_rate": 8.550969223235383e-07,
"loss": 1.2539,
"step": 5720
},
{
"epoch": 0.8821067418433028,
"grad_norm": 1.7578125,
"learning_rate": 8.334821066802301e-07,
"loss": 1.1944,
"step": 5730
},
{
"epoch": 0.8836461951449491,
"grad_norm": 1.8359375,
"learning_rate": 8.121321008406436e-07,
"loss": 1.2218,
"step": 5740
},
{
"epoch": 0.8851856484465954,
"grad_norm": 1.703125,
"learning_rate": 7.910475215810598e-07,
"loss": 1.2036,
"step": 5750
},
{
"epoch": 0.8867251017482417,
"grad_norm": 1.8046875,
"learning_rate": 7.70228978009907e-07,
"loss": 1.2703,
"step": 5760
},
{
"epoch": 0.8882645550498879,
"grad_norm": 1.7265625,
"learning_rate": 7.496770715501566e-07,
"loss": 1.2755,
"step": 5770
},
{
"epoch": 0.8898040083515342,
"grad_norm": 1.796875,
"learning_rate": 7.29392395921954e-07,
"loss": 1.2312,
"step": 5780
},
{
"epoch": 0.8913434616531805,
"grad_norm": 1.828125,
"learning_rate": 7.093755371254596e-07,
"loss": 1.26,
"step": 5790
},
{
"epoch": 0.8928829149548266,
"grad_norm": 1.5546875,
"learning_rate": 6.896270734239263e-07,
"loss": 1.2239,
"step": 5800
},
{
"epoch": 0.8944223682564729,
"grad_norm": 1.734375,
"learning_rate": 6.701475753269915e-07,
"loss": 1.2245,
"step": 5810
},
{
"epoch": 0.8959618215581192,
"grad_norm": 1.7578125,
"learning_rate": 6.509376055741944e-07,
"loss": 1.1909,
"step": 5820
},
{
"epoch": 0.8975012748597654,
"grad_norm": 1.6640625,
"learning_rate": 6.319977191187232e-07,
"loss": 1.1744,
"step": 5830
},
{
"epoch": 0.8990407281614117,
"grad_norm": 1.765625,
"learning_rate": 6.133284631113789e-07,
"loss": 1.2049,
"step": 5840
},
{
"epoch": 0.900580181463058,
"grad_norm": 1.5,
"learning_rate": 5.949303768847736e-07,
"loss": 1.1935,
"step": 5850
},
{
"epoch": 0.9021196347647041,
"grad_norm": 1.71875,
"learning_rate": 5.768039919377421e-07,
"loss": 1.2084,
"step": 5860
},
{
"epoch": 0.9036590880663504,
"grad_norm": 1.7578125,
"learning_rate": 5.589498319199949e-07,
"loss": 1.1963,
"step": 5870
},
{
"epoch": 0.9051985413679967,
"grad_norm": 1.640625,
"learning_rate": 5.413684126169882e-07,
"loss": 1.2019,
"step": 5880
},
{
"epoch": 0.9067379946696429,
"grad_norm": 1.859375,
"learning_rate": 5.240602419350238e-07,
"loss": 1.2063,
"step": 5890
},
{
"epoch": 0.9082774479712892,
"grad_norm": 1.640625,
"learning_rate": 5.070258198865741e-07,
"loss": 1.2011,
"step": 5900
},
{
"epoch": 0.9098169012729355,
"grad_norm": 1.8515625,
"learning_rate": 4.902656385758397e-07,
"loss": 1.2129,
"step": 5910
},
{
"epoch": 0.9113563545745818,
"grad_norm": 1.671875,
"learning_rate": 4.7378018218453113e-07,
"loss": 1.2446,
"step": 5920
},
{
"epoch": 0.9128958078762279,
"grad_norm": 1.6796875,
"learning_rate": 4.575699269578848e-07,
"loss": 1.2285,
"step": 5930
},
{
"epoch": 0.9144352611778742,
"grad_norm": 1.796875,
"learning_rate": 4.4163534119090245e-07,
"loss": 1.2069,
"step": 5940
},
{
"epoch": 0.9159747144795205,
"grad_norm": 1.671875,
"learning_rate": 4.259768852148216e-07,
"loss": 1.2399,
"step": 5950
},
{
"epoch": 0.9175141677811667,
"grad_norm": 1.6171875,
"learning_rate": 4.105950113838153e-07,
"loss": 1.2366,
"step": 5960
},
{
"epoch": 0.919053621082813,
"grad_norm": 1.765625,
"learning_rate": 3.9549016406193686e-07,
"loss": 1.2248,
"step": 5970
},
{
"epoch": 0.9205930743844593,
"grad_norm": 1.796875,
"learning_rate": 3.8066277961026134e-07,
"loss": 1.2546,
"step": 5980
},
{
"epoch": 0.9221325276861054,
"grad_norm": 1.6015625,
"learning_rate": 3.661132863743011e-07,
"loss": 1.2198,
"step": 5990
},
{
"epoch": 0.9236719809877517,
"grad_norm": 1.6796875,
"learning_rate": 3.518421046716147e-07,
"loss": 1.186,
"step": 6000
},
{
"epoch": 0.925211434289398,
"grad_norm": 1.671875,
"learning_rate": 3.378496467796766e-07,
"loss": 1.2156,
"step": 6010
},
{
"epoch": 0.9267508875910442,
"grad_norm": 1.59375,
"learning_rate": 3.2413631692395753e-07,
"loss": 1.1959,
"step": 6020
},
{
"epoch": 0.9282903408926905,
"grad_norm": 1.859375,
"learning_rate": 3.1070251126625427e-07,
"loss": 1.2426,
"step": 6030
},
{
"epoch": 0.9298297941943368,
"grad_norm": 1.640625,
"learning_rate": 2.9754861789324074e-07,
"loss": 1.21,
"step": 6040
},
{
"epoch": 0.931369247495983,
"grad_norm": 1.765625,
"learning_rate": 2.84675016805257e-07,
"loss": 1.2328,
"step": 6050
},
{
"epoch": 0.9329087007976292,
"grad_norm": 1.734375,
"learning_rate": 2.7208207990533365e-07,
"loss": 1.2377,
"step": 6060
},
{
"epoch": 0.9344481540992755,
"grad_norm": 1.6953125,
"learning_rate": 2.597701709884426e-07,
"loss": 1.2589,
"step": 6070
},
{
"epoch": 0.9359876074009218,
"grad_norm": 1.671875,
"learning_rate": 2.4773964573099997e-07,
"loss": 1.2219,
"step": 6080
},
{
"epoch": 0.937527060702568,
"grad_norm": 1.7734375,
"learning_rate": 2.359908516805731e-07,
"loss": 1.2027,
"step": 6090
},
{
"epoch": 0.9390665140042143,
"grad_norm": 1.640625,
"learning_rate": 2.2452412824585546e-07,
"loss": 1.1985,
"step": 6100
},
{
"epoch": 0.9406059673058605,
"grad_norm": 1.6875,
"learning_rate": 2.1333980668685416e-07,
"loss": 1.2069,
"step": 6110
},
{
"epoch": 0.9421454206075067,
"grad_norm": 1.6484375,
"learning_rate": 2.0243821010532015e-07,
"loss": 1.2327,
"step": 6120
},
{
"epoch": 0.943684873909153,
"grad_norm": 1.609375,
"learning_rate": 1.9181965343541887e-07,
"loss": 1.2065,
"step": 6130
},
{
"epoch": 0.9452243272107993,
"grad_norm": 1.71875,
"learning_rate": 1.8148444343462857e-07,
"loss": 1.2099,
"step": 6140
},
{
"epoch": 0.9467637805124455,
"grad_norm": 1.796875,
"learning_rate": 1.7143287867487756e-07,
"loss": 1.2252,
"step": 6150
},
{
"epoch": 0.9483032338140918,
"grad_norm": 1.734375,
"learning_rate": 1.6166524953392104e-07,
"loss": 1.2313,
"step": 6160
},
{
"epoch": 0.949842687115738,
"grad_norm": 1.6328125,
"learning_rate": 1.521818381869544e-07,
"loss": 1.2127,
"step": 6170
},
{
"epoch": 0.9513821404173842,
"grad_norm": 1.6875,
"learning_rate": 1.4298291859845215e-07,
"loss": 1.195,
"step": 6180
},
{
"epoch": 0.9529215937190305,
"grad_norm": 1.7734375,
"learning_rate": 1.3406875651426843e-07,
"loss": 1.2191,
"step": 6190
},
{
"epoch": 0.9544610470206768,
"grad_norm": 1.7265625,
"learning_rate": 1.2543960945394452e-07,
"loss": 1.2605,
"step": 6200
},
{
"epoch": 0.9560005003223231,
"grad_norm": 1.6875,
"learning_rate": 1.170957267032824e-07,
"loss": 1.2346,
"step": 6210
},
{
"epoch": 0.9575399536239693,
"grad_norm": 1.7734375,
"learning_rate": 1.0903734930713283e-07,
"loss": 1.2341,
"step": 6220
},
{
"epoch": 0.9590794069256156,
"grad_norm": 1.71875,
"learning_rate": 1.0126471006243866e-07,
"loss": 1.1935,
"step": 6230
},
{
"epoch": 0.9606188602272618,
"grad_norm": 1.7265625,
"learning_rate": 9.377803351150683e-08,
"loss": 1.2718,
"step": 6240
},
{
"epoch": 0.962158313528908,
"grad_norm": 1.703125,
"learning_rate": 8.657753593552142e-08,
"loss": 1.2041,
"step": 6250
},
{
"epoch": 0.9636977668305543,
"grad_norm": 1.7421875,
"learning_rate": 7.966342534829752e-08,
"loss": 1.222,
"step": 6260
},
{
"epoch": 0.9652372201322006,
"grad_norm": 1.734375,
"learning_rate": 7.303590149026818e-08,
"loss": 1.2267,
"step": 6270
},
{
"epoch": 0.9667766734338468,
"grad_norm": 1.609375,
"learning_rate": 6.669515582271913e-08,
"loss": 1.2352,
"step": 6280
},
{
"epoch": 0.968316126735493,
"grad_norm": 1.90625,
"learning_rate": 6.064137152225425e-08,
"loss": 1.2402,
"step": 6290
},
{
"epoch": 0.9698555800371393,
"grad_norm": 1.734375,
"learning_rate": 5.4874723475504267e-08,
"loss": 1.2357,
"step": 6300
},
{
"epoch": 0.9713950333387855,
"grad_norm": 1.6171875,
"learning_rate": 4.939537827407415e-08,
"loss": 1.1789,
"step": 6310
},
{
"epoch": 0.9729344866404318,
"grad_norm": 1.625,
"learning_rate": 4.420349420973358e-08,
"loss": 1.2433,
"step": 6320
},
{
"epoch": 0.9744739399420781,
"grad_norm": 2.125,
"learning_rate": 3.929922126983954e-08,
"loss": 1.1677,
"step": 6330
},
{
"epoch": 0.9760133932437243,
"grad_norm": 1.625,
"learning_rate": 3.4682701133008645e-08,
"loss": 1.2134,
"step": 6340
},
{
"epoch": 0.9775528465453706,
"grad_norm": 1.7265625,
"learning_rate": 3.035406716501599e-08,
"loss": 1.2336,
"step": 6350
},
{
"epoch": 0.9790922998470168,
"grad_norm": 1.71875,
"learning_rate": 2.6313444414952648e-08,
"loss": 1.2341,
"step": 6360
},
{
"epoch": 0.9806317531486631,
"grad_norm": 1.7578125,
"learning_rate": 2.2560949611605264e-08,
"loss": 1.1928,
"step": 6370
},
{
"epoch": 0.9821712064503093,
"grad_norm": 1.6171875,
"learning_rate": 1.90966911600865e-08,
"loss": 1.2076,
"step": 6380
},
{
"epoch": 0.9837106597519556,
"grad_norm": 1.7890625,
"learning_rate": 1.592076913870644e-08,
"loss": 1.2274,
"step": 6390
},
{
"epoch": 0.9852501130536019,
"grad_norm": 1.828125,
"learning_rate": 1.3033275296073789e-08,
"loss": 1.2503,
"step": 6400
},
{
"epoch": 0.9867895663552481,
"grad_norm": 1.65625,
"learning_rate": 1.0434293048453559e-08,
"loss": 1.2615,
"step": 6410
},
{
"epoch": 0.9883290196568943,
"grad_norm": 1.6015625,
"learning_rate": 8.123897477354537e-09,
"loss": 1.2132,
"step": 6420
},
{
"epoch": 0.9898684729585406,
"grad_norm": 1.8125,
"learning_rate": 6.102155327355475e-09,
"loss": 1.2083,
"step": 6430
},
{
"epoch": 0.9914079262601868,
"grad_norm": 1.640625,
"learning_rate": 4.369125004185515e-09,
"loss": 1.2451,
"step": 6440
},
{
"epoch": 0.9929473795618331,
"grad_norm": 1.75,
"learning_rate": 2.924856573028878e-09,
"loss": 1.2202,
"step": 6450
},
{
"epoch": 0.9944868328634794,
"grad_norm": 1.65625,
"learning_rate": 1.7693917570837938e-09,
"loss": 1.2657,
"step": 6460
},
{
"epoch": 0.9960262861651256,
"grad_norm": 1.734375,
"learning_rate": 9.027639363545782e-10,
"loss": 1.2212,
"step": 6470
},
{
"epoch": 0.9975657394667719,
"grad_norm": 1.859375,
"learning_rate": 3.249981466901808e-10,
"loss": 1.2315,
"step": 6480
},
{
"epoch": 0.9991051927684181,
"grad_norm": 1.671875,
"learning_rate": 3.6111079055878294e-11,
"loss": 1.22,
"step": 6490
}
],
"logging_steps": 10,
"max_steps": 6495,
"num_input_tokens_seen": 0,
"num_train_epochs": 1,
"save_steps": 500,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 1.756027118157693e+18,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}