{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 9.6,
  "eval_steps": 500,
  "global_step": 6000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.016,
      "grad_norm": 8.851228713989258,
      "learning_rate": 3e-06,
      "loss": 0.7188,
      "step": 10
    },
    {
      "epoch": 0.032,
      "grad_norm": 2.412295341491699,
      "learning_rate": 6.333333333333334e-06,
      "loss": 0.5143,
      "step": 20
    },
    {
      "epoch": 0.048,
      "grad_norm": 3.2334506511688232,
      "learning_rate": 9.666666666666667e-06,
      "loss": 0.3371,
      "step": 30
    },
    {
      "epoch": 0.064,
      "grad_norm": 1.3187706470489502,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 0.2471,
      "step": 40
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.5272440910339355,
      "learning_rate": 1.6333333333333335e-05,
      "loss": 0.2275,
      "step": 50
    },
    {
      "epoch": 0.096,
      "grad_norm": 1.3218804597854614,
      "learning_rate": 1.9666666666666666e-05,
      "loss": 0.187,
      "step": 60
    },
    {
      "epoch": 0.112,
      "grad_norm": 1.6101323366165161,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 0.1352,
      "step": 70
    },
    {
      "epoch": 0.128,
      "grad_norm": 0.6902962327003479,
      "learning_rate": 2.633333333333333e-05,
      "loss": 0.1325,
      "step": 80
    },
    {
      "epoch": 0.144,
      "grad_norm": 0.6235231757164001,
      "learning_rate": 2.9666666666666672e-05,
      "loss": 0.1,
      "step": 90
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.2747114896774292,
      "learning_rate": 3.3e-05,
      "loss": 0.1163,
      "step": 100
    },
    {
      "epoch": 0.176,
      "grad_norm": 1.5847718715667725,
      "learning_rate": 3.633333333333333e-05,
      "loss": 0.103,
      "step": 110
    },
    {
      "epoch": 0.192,
      "grad_norm": 1.1942260265350342,
      "learning_rate": 3.966666666666667e-05,
      "loss": 0.0901,
      "step": 120
    },
    {
      "epoch": 0.208,
      "grad_norm": 0.7288371920585632,
      "learning_rate": 4.3e-05,
      "loss": 0.0896,
      "step": 130
    },
    {
      "epoch": 0.224,
      "grad_norm": 1.4482511281967163,
      "learning_rate": 4.633333333333333e-05,
      "loss": 0.0832,
      "step": 140
    },
    {
      "epoch": 0.24,
      "grad_norm": 0.8507036566734314,
      "learning_rate": 4.966666666666667e-05,
      "loss": 0.0743,
      "step": 150
    },
    {
      "epoch": 0.256,
      "grad_norm": 0.9521995782852173,
      "learning_rate": 5.300000000000001e-05,
      "loss": 0.0772,
      "step": 160
    },
    {
      "epoch": 0.272,
      "grad_norm": 0.7656130790710449,
      "learning_rate": 5.633333333333334e-05,
      "loss": 0.0779,
      "step": 170
    },
    {
      "epoch": 0.288,
      "grad_norm": 0.9457734227180481,
      "learning_rate": 5.966666666666667e-05,
      "loss": 0.0743,
      "step": 180
    },
    {
      "epoch": 0.304,
      "grad_norm": 1.1752697229385376,
      "learning_rate": 6.3e-05,
      "loss": 0.0628,
      "step": 190
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.0764414072036743,
      "learning_rate": 6.633333333333334e-05,
      "loss": 0.0744,
      "step": 200
    },
    {
      "epoch": 0.336,
      "grad_norm": 1.4322071075439453,
      "learning_rate": 6.966666666666668e-05,
      "loss": 0.0723,
      "step": 210
    },
    {
      "epoch": 0.352,
      "grad_norm": 0.4898614287376404,
      "learning_rate": 7.3e-05,
      "loss": 0.0675,
      "step": 220
    },
    {
      "epoch": 0.368,
      "grad_norm": 0.7180267572402954,
      "learning_rate": 7.633333333333334e-05,
      "loss": 0.0629,
      "step": 230
    },
    {
      "epoch": 0.384,
      "grad_norm": 0.7256841063499451,
      "learning_rate": 7.966666666666666e-05,
      "loss": 0.0607,
      "step": 240
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.1692602634429932,
      "learning_rate": 8.3e-05,
      "loss": 0.0622,
      "step": 250
    },
    {
      "epoch": 0.416,
      "grad_norm": 0.7514578104019165,
      "learning_rate": 8.633333333333334e-05,
      "loss": 0.0617,
      "step": 260
    },
    {
      "epoch": 0.432,
      "grad_norm": 0.6715297102928162,
      "learning_rate": 8.966666666666666e-05,
      "loss": 0.0693,
      "step": 270
    },
    {
      "epoch": 0.448,
      "grad_norm": 0.7234903573989868,
      "learning_rate": 9.300000000000001e-05,
      "loss": 0.0571,
      "step": 280
    },
    {
      "epoch": 0.464,
      "grad_norm": 0.7658254504203796,
      "learning_rate": 9.633333333333335e-05,
      "loss": 0.07,
      "step": 290
    },
    {
      "epoch": 0.48,
      "grad_norm": 0.5826389789581299,
      "learning_rate": 9.966666666666667e-05,
      "loss": 0.0589,
      "step": 300
    },
    {
      "epoch": 0.496,
      "grad_norm": 0.7807655930519104,
      "learning_rate": 9.999938485971279e-05,
      "loss": 0.0685,
      "step": 310
    },
    {
      "epoch": 0.512,
      "grad_norm": 0.8024008870124817,
      "learning_rate": 9.999725846827562e-05,
      "loss": 0.0618,
      "step": 320
    },
    {
      "epoch": 0.528,
      "grad_norm": 0.8496400117874146,
      "learning_rate": 9.999361329594254e-05,
      "loss": 0.0585,
      "step": 330
    },
    {
      "epoch": 0.544,
      "grad_norm": 0.6503838896751404,
      "learning_rate": 9.998844945344405e-05,
      "loss": 0.0553,
      "step": 340
    },
    {
      "epoch": 0.56,
      "grad_norm": 0.5574076175689697,
      "learning_rate": 9.99817670976436e-05,
      "loss": 0.0549,
      "step": 350
    },
    {
      "epoch": 0.576,
      "grad_norm": 0.662759006023407,
      "learning_rate": 9.997356643153303e-05,
      "loss": 0.055,
      "step": 360
    },
    {
      "epoch": 0.592,
      "grad_norm": 0.49278223514556885,
      "learning_rate": 9.996384770422629e-05,
      "loss": 0.0598,
      "step": 370
    },
    {
      "epoch": 0.608,
      "grad_norm": 0.4331170916557312,
      "learning_rate": 9.995261121095194e-05,
      "loss": 0.0472,
      "step": 380
    },
    {
      "epoch": 0.624,
      "grad_norm": 0.4225797653198242,
      "learning_rate": 9.993985729304408e-05,
      "loss": 0.0545,
      "step": 390
    },
    {
      "epoch": 0.64,
      "grad_norm": 0.6946377754211426,
      "learning_rate": 9.992558633793212e-05,
      "loss": 0.0512,
      "step": 400
    },
    {
      "epoch": 0.656,
      "grad_norm": 0.7852202653884888,
      "learning_rate": 9.990979877912891e-05,
      "loss": 0.0551,
      "step": 410
    },
    {
      "epoch": 0.672,
      "grad_norm": 0.5462689399719238,
      "learning_rate": 9.989249509621759e-05,
      "loss": 0.05,
      "step": 420
    },
    {
      "epoch": 0.688,
      "grad_norm": 0.5613320469856262,
      "learning_rate": 9.987367581483705e-05,
      "loss": 0.0489,
      "step": 430
    },
    {
      "epoch": 0.704,
      "grad_norm": 0.4775191843509674,
      "learning_rate": 9.985334150666592e-05,
      "loss": 0.0508,
      "step": 440
    },
    {
      "epoch": 0.72,
      "grad_norm": 0.5137596130371094,
      "learning_rate": 9.983149278940526e-05,
      "loss": 0.0435,
      "step": 450
    },
    {
      "epoch": 0.736,
      "grad_norm": 0.4369506537914276,
      "learning_rate": 9.980813032675974e-05,
      "loss": 0.0509,
      "step": 460
    },
    {
      "epoch": 0.752,
      "grad_norm": 0.2921866774559021,
      "learning_rate": 9.978325482841753e-05,
      "loss": 0.0474,
      "step": 470
    },
    {
      "epoch": 0.768,
      "grad_norm": 0.45863571763038635,
      "learning_rate": 9.975686705002867e-05,
      "loss": 0.0477,
      "step": 480
    },
    {
      "epoch": 0.784,
      "grad_norm": 0.4813050925731659,
      "learning_rate": 9.972896779318219e-05,
      "loss": 0.0496,
      "step": 490
    },
    {
      "epoch": 0.8,
      "grad_norm": 0.4225307106971741,
      "learning_rate": 9.969955790538175e-05,
      "loss": 0.0473,
      "step": 500
    },
    {
      "epoch": 0.816,
      "grad_norm": 0.4511106014251709,
      "learning_rate": 9.966863828001982e-05,
      "loss": 0.0528,
      "step": 510
    },
    {
      "epoch": 0.832,
      "grad_norm": 0.5999813675880432,
      "learning_rate": 9.963620985635065e-05,
      "loss": 0.0471,
      "step": 520
    },
    {
      "epoch": 0.848,
      "grad_norm": 0.5479815602302551,
      "learning_rate": 9.960227361946164e-05,
      "loss": 0.0545,
      "step": 530
    },
    {
      "epoch": 0.864,
      "grad_norm": 0.6936839818954468,
      "learning_rate": 9.95668306002435e-05,
      "loss": 0.0477,
      "step": 540
    },
    {
      "epoch": 0.88,
      "grad_norm": 0.7541530132293701,
      "learning_rate": 9.952988187535886e-05,
      "loss": 0.0456,
      "step": 550
    },
    {
      "epoch": 0.896,
      "grad_norm": 0.8502527475357056,
      "learning_rate": 9.949142856720961e-05,
      "loss": 0.0426,
      "step": 560
    },
    {
      "epoch": 0.912,
      "grad_norm": 0.48964205384254456,
      "learning_rate": 9.945147184390278e-05,
      "loss": 0.0415,
      "step": 570
    },
    {
      "epoch": 0.928,
      "grad_norm": 0.5725044012069702,
      "learning_rate": 9.941001291921512e-05,
      "loss": 0.0431,
      "step": 580
    },
    {
      "epoch": 0.944,
      "grad_norm": 0.47582870721817017,
      "learning_rate": 9.936705305255612e-05,
      "loss": 0.0448,
      "step": 590
    },
    {
      "epoch": 0.96,
      "grad_norm": 0.6699385046958923,
      "learning_rate": 9.932259354892984e-05,
      "loss": 0.0463,
      "step": 600
    },
    {
      "epoch": 0.976,
      "grad_norm": 0.8922427296638489,
      "learning_rate": 9.927663575889521e-05,
      "loss": 0.0497,
      "step": 610
    },
    {
      "epoch": 0.992,
      "grad_norm": 0.5667390823364258,
      "learning_rate": 9.922918107852504e-05,
      "loss": 0.0417,
      "step": 620
    },
    {
      "epoch": 1.008,
      "grad_norm": 0.37358856201171875,
      "learning_rate": 9.918023094936363e-05,
      "loss": 0.0337,
      "step": 630
    },
    {
      "epoch": 1.024,
      "grad_norm": 0.6175826787948608,
      "learning_rate": 9.912978685838294e-05,
      "loss": 0.0373,
      "step": 640
    },
    {
      "epoch": 1.04,
      "grad_norm": 0.6547279357910156,
      "learning_rate": 9.90778503379374e-05,
      "loss": 0.0433,
      "step": 650
    },
    {
      "epoch": 1.056,
      "grad_norm": 0.4020252525806427,
      "learning_rate": 9.902442296571743e-05,
      "loss": 0.0381,
      "step": 660
    },
    {
      "epoch": 1.072,
      "grad_norm": 0.5817873477935791,
      "learning_rate": 9.896950636470147e-05,
      "loss": 0.05,
      "step": 670
    },
    {
      "epoch": 1.088,
      "grad_norm": 0.8197876811027527,
      "learning_rate": 9.891310220310666e-05,
      "loss": 0.0443,
      "step": 680
    },
    {
      "epoch": 1.104,
      "grad_norm": 0.9050861597061157,
      "learning_rate": 9.885521219433823e-05,
      "loss": 0.0457,
      "step": 690
    },
    {
      "epoch": 1.12,
      "grad_norm": 0.5616909265518188,
      "learning_rate": 9.879583809693738e-05,
      "loss": 0.0438,
      "step": 700
    },
    {
      "epoch": 1.1360000000000001,
      "grad_norm": 0.5354202389717102,
      "learning_rate": 9.873498171452789e-05,
      "loss": 0.0474,
      "step": 710
    },
    {
      "epoch": 1.152,
      "grad_norm": 0.4463129937648773,
      "learning_rate": 9.867264489576135e-05,
      "loss": 0.0385,
      "step": 720
    },
    {
      "epoch": 1.168,
      "grad_norm": 0.3294244408607483,
      "learning_rate": 9.860882953426099e-05,
      "loss": 0.0411,
      "step": 730
    },
    {
      "epoch": 1.184,
      "grad_norm": 0.5170273780822754,
      "learning_rate": 9.854353756856412e-05,
      "loss": 0.0346,
      "step": 740
    },
    {
      "epoch": 1.2,
      "grad_norm": 0.4675655961036682,
      "learning_rate": 9.847677098206332e-05,
      "loss": 0.04,
      "step": 750
    },
    {
      "epoch": 1.216,
      "grad_norm": 0.4460495114326477,
      "learning_rate": 9.840853180294608e-05,
      "loss": 0.0355,
      "step": 760
    },
    {
      "epoch": 1.232,
      "grad_norm": 0.5752704739570618,
      "learning_rate": 9.833882210413332e-05,
      "loss": 0.0365,
      "step": 770
    },
    {
      "epoch": 1.248,
      "grad_norm": 0.4172804653644562,
      "learning_rate": 9.826764400321633e-05,
      "loss": 0.038,
      "step": 780
    },
    {
      "epoch": 1.264,
      "grad_norm": 0.35003262758255005,
      "learning_rate": 9.819499966239243e-05,
      "loss": 0.0339,
      "step": 790
    },
    {
      "epoch": 1.28,
      "grad_norm": 0.27016302943229675,
      "learning_rate": 9.812089128839938e-05,
      "loss": 0.041,
      "step": 800
    },
    {
      "epoch": 1.296,
      "grad_norm": 0.4664139449596405,
      "learning_rate": 9.804532113244828e-05,
      "loss": 0.0393,
      "step": 810
    },
    {
      "epoch": 1.312,
      "grad_norm": 0.6527088284492493,
      "learning_rate": 9.796829149015517e-05,
      "loss": 0.0365,
      "step": 820
    },
    {
      "epoch": 1.328,
      "grad_norm": 0.5943476557731628,
      "learning_rate": 9.788980470147132e-05,
      "loss": 0.0399,
      "step": 830
    },
    {
      "epoch": 1.3439999999999999,
      "grad_norm": 0.4523983299732208,
      "learning_rate": 9.780986315061218e-05,
      "loss": 0.0338,
      "step": 840
    },
    {
      "epoch": 1.3599999999999999,
      "grad_norm": 0.44878169894218445,
      "learning_rate": 9.772846926598491e-05,
      "loss": 0.0417,
      "step": 850
    },
    {
      "epoch": 1.376,
      "grad_norm": 0.47023284435272217,
      "learning_rate": 9.76456255201146e-05,
      "loss": 0.038,
      "step": 860
    },
    {
      "epoch": 1.392,
      "grad_norm": 0.5424619317054749,
      "learning_rate": 9.756133442956923e-05,
      "loss": 0.0389,
      "step": 870
    },
    {
      "epoch": 1.408,
      "grad_norm": 0.6425168514251709,
      "learning_rate": 9.747559855488313e-05,
      "loss": 0.0332,
      "step": 880
    },
    {
      "epoch": 1.424,
      "grad_norm": 0.3850903809070587,
      "learning_rate": 9.73884205004793e-05,
      "loss": 0.0335,
      "step": 890
    },
    {
      "epoch": 1.44,
      "grad_norm": 0.47856953740119934,
      "learning_rate": 9.729980291459019e-05,
      "loss": 0.0386,
      "step": 900
    },
    {
      "epoch": 1.456,
      "grad_norm": 0.5677933096885681,
      "learning_rate": 9.720974848917735e-05,
      "loss": 0.0327,
      "step": 910
    },
    {
      "epoch": 1.472,
      "grad_norm": 0.47432729601860046,
      "learning_rate": 9.711825995984957e-05,
      "loss": 0.0399,
      "step": 920
    },
    {
      "epoch": 1.488,
      "grad_norm": 0.3429190516471863,
      "learning_rate": 9.702534010577991e-05,
      "loss": 0.0358,
      "step": 930
    },
    {
      "epoch": 1.504,
      "grad_norm": 0.48413529992103577,
      "learning_rate": 9.693099174962103e-05,
      "loss": 0.032,
      "step": 940
    },
    {
      "epoch": 1.52,
      "grad_norm": 0.4788775146007538,
      "learning_rate": 9.683521775741977e-05,
      "loss": 0.0422,
      "step": 950
    },
    {
      "epoch": 1.536,
      "grad_norm": 0.4298361837863922,
      "learning_rate": 9.673802103852979e-05,
      "loss": 0.0308,
      "step": 960
    },
    {
      "epoch": 1.552,
      "grad_norm": 0.6861462593078613,
      "learning_rate": 9.663940454552342e-05,
      "loss": 0.0328,
      "step": 970
    },
    {
      "epoch": 1.568,
      "grad_norm": 0.41925859451293945,
      "learning_rate": 9.65393712741018e-05,
      "loss": 0.0316,
      "step": 980
    },
    {
      "epoch": 1.584,
      "grad_norm": 0.3705498278141022,
      "learning_rate": 9.6437924263004e-05,
      "loss": 0.0393,
      "step": 990
    },
    {
      "epoch": 1.6,
      "grad_norm": 0.25880423188209534,
      "learning_rate": 9.63350665939146e-05,
      "loss": 0.0358,
      "step": 1000
    },
    {
      "epoch": 1.616,
      "grad_norm": 0.3926333785057068,
      "learning_rate": 9.623080139137023e-05,
      "loss": 0.0315,
      "step": 1010
    },
    {
      "epoch": 1.6320000000000001,
      "grad_norm": 0.23299971222877502,
      "learning_rate": 9.612513182266447e-05,
      "loss": 0.0318,
      "step": 1020
    },
    {
      "epoch": 1.6480000000000001,
      "grad_norm": 0.35742828249931335,
      "learning_rate": 9.601806109775179e-05,
      "loss": 0.0309,
      "step": 1030
    },
    {
      "epoch": 1.6640000000000001,
      "grad_norm": 0.2977737486362457,
      "learning_rate": 9.590959246914995e-05,
      "loss": 0.0356,
      "step": 1040
    },
    {
      "epoch": 1.6800000000000002,
      "grad_norm": 0.4727614223957062,
      "learning_rate": 9.579972923184122e-05,
      "loss": 0.0307,
      "step": 1050
    },
    {
      "epoch": 1.696,
      "grad_norm": 0.6114331483840942,
      "learning_rate": 9.568847472317232e-05,
      "loss": 0.0358,
      "step": 1060
    },
    {
      "epoch": 1.712,
      "grad_norm": 0.4858382046222687,
      "learning_rate": 9.557583232275303e-05,
      "loss": 0.0352,
      "step": 1070
    },
    {
      "epoch": 1.728,
      "grad_norm": 0.5206499099731445,
      "learning_rate": 9.546180545235344e-05,
      "loss": 0.0319,
      "step": 1080
    },
    {
      "epoch": 1.744,
      "grad_norm": 0.24364350736141205,
      "learning_rate": 9.534639757580013e-05,
      "loss": 0.0285,
      "step": 1090
    },
    {
      "epoch": 1.76,
      "grad_norm": 0.4177013337612152,
      "learning_rate": 9.522961219887092e-05,
      "loss": 0.0322,
      "step": 1100
    },
    {
      "epoch": 1.776,
      "grad_norm": 0.5254295468330383,
      "learning_rate": 9.511145286918828e-05,
      "loss": 0.0369,
      "step": 1110
    },
    {
      "epoch": 1.792,
      "grad_norm": 0.3894183933734894,
      "learning_rate": 9.499192317611167e-05,
      "loss": 0.0317,
      "step": 1120
    },
    {
      "epoch": 1.808,
      "grad_norm": 0.49535396695137024,
      "learning_rate": 9.487102675062851e-05,
      "loss": 0.0388,
      "step": 1130
    },
    {
      "epoch": 1.8239999999999998,
      "grad_norm": 0.404508113861084,
      "learning_rate": 9.474876726524374e-05,
      "loss": 0.0333,
      "step": 1140
    },
    {
      "epoch": 1.8399999999999999,
      "grad_norm": 0.42432475090026855,
      "learning_rate": 9.462514843386845e-05,
      "loss": 0.0367,
      "step": 1150
    },
    {
      "epoch": 1.8559999999999999,
      "grad_norm": 0.4332296848297119,
      "learning_rate": 9.450017401170689e-05,
      "loss": 0.0352,
      "step": 1160
    },
    {
      "epoch": 1.8719999999999999,
      "grad_norm": 0.3122684359550476,
      "learning_rate": 9.437384779514256e-05,
      "loss": 0.0349,
      "step": 1170
    },
    {
      "epoch": 1.888,
      "grad_norm": 0.4381789267063141,
      "learning_rate": 9.424617362162271e-05,
      "loss": 0.0319,
      "step": 1180
    },
    {
      "epoch": 1.904,
      "grad_norm": 0.4371034502983093,
      "learning_rate": 9.411715536954196e-05,
      "loss": 0.0307,
      "step": 1190
    },
    {
      "epoch": 1.92,
      "grad_norm": 0.4959498941898346,
      "learning_rate": 9.39867969581243e-05,
      "loss": 0.0342,
      "step": 1200
    },
    {
      "epoch": 1.936,
      "grad_norm": 0.361444890499115,
      "learning_rate": 9.385510234730415e-05,
      "loss": 0.0378,
      "step": 1210
    },
    {
      "epoch": 1.952,
      "grad_norm": 0.4075774848461151,
      "learning_rate": 9.372207553760603e-05,
      "loss": 0.0263,
      "step": 1220
    },
    {
      "epoch": 1.968,
      "grad_norm": 0.3635106086730957,
      "learning_rate": 9.358772057002312e-05,
      "loss": 0.0358,
      "step": 1230
    },
    {
      "epoch": 1.984,
      "grad_norm": 0.40886828303337097,
      "learning_rate": 9.345204152589428e-05,
      "loss": 0.0305,
      "step": 1240
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.5389621257781982,
      "learning_rate": 9.331504252678037e-05,
      "loss": 0.0305,
      "step": 1250
    },
    {
      "epoch": 2.016,
      "grad_norm": 0.27906057238578796,
      "learning_rate": 9.317672773433876e-05,
      "loss": 0.0332,
      "step": 1260
    },
    {
      "epoch": 2.032,
      "grad_norm": 0.5719487071037292,
      "learning_rate": 9.30371013501972e-05,
      "loss": 0.0372,
      "step": 1270
    },
    {
      "epoch": 2.048,
      "grad_norm": 0.3138829171657562,
      "learning_rate": 9.289616761582587e-05,
      "loss": 0.0293,
      "step": 1280
    },
    {
      "epoch": 2.064,
      "grad_norm": 0.580001711845398,
      "learning_rate": 9.275393081240882e-05,
      "loss": 0.0357,
      "step": 1290
    },
    {
      "epoch": 2.08,
      "grad_norm": 0.3736608028411865,
      "learning_rate": 9.261039526071374e-05,
      "loss": 0.0315,
      "step": 1300
    },
    {
      "epoch": 2.096,
      "grad_norm": 0.5086607336997986,
      "learning_rate": 9.246556532096078e-05,
      "loss": 0.0304,
      "step": 1310
    },
    {
      "epoch": 2.112,
      "grad_norm": 0.40150344371795654,
      "learning_rate": 9.231944539269009e-05,
      "loss": 0.0333,
      "step": 1320
    },
    {
      "epoch": 2.128,
      "grad_norm": 0.3828193247318268,
      "learning_rate": 9.217203991462815e-05,
      "loss": 0.0288,
      "step": 1330
    },
    {
      "epoch": 2.144,
      "grad_norm": 0.4235898554325104,
      "learning_rate": 9.202335336455296e-05,
      "loss": 0.0308,
      "step": 1340
    },
    {
      "epoch": 2.16,
      "grad_norm": 0.4063931405544281,
      "learning_rate": 9.187339025915802e-05,
      "loss": 0.0303,
      "step": 1350
    },
    {
      "epoch": 2.176,
      "grad_norm": 0.3451571464538574,
      "learning_rate": 9.17221551539151e-05,
      "loss": 0.0295,
      "step": 1360
    },
    {
      "epoch": 2.192,
      "grad_norm": 0.40614256262779236,
      "learning_rate": 9.156965264293586e-05,
      "loss": 0.0236,
      "step": 1370
    },
    {
      "epoch": 2.208,
      "grad_norm": 0.3754469156265259,
      "learning_rate": 9.141588735883232e-05,
      "loss": 0.032,
      "step": 1380
    },
    {
      "epoch": 2.224,
      "grad_norm": 0.3802752196788788,
      "learning_rate": 9.126086397257612e-05,
      "loss": 0.0275,
      "step": 1390
    },
    {
      "epoch": 2.24,
      "grad_norm": 0.469360888004303,
      "learning_rate": 9.110458719335659e-05,
      "loss": 0.0298,
      "step": 1400
    },
    {
      "epoch": 2.2560000000000002,
      "grad_norm": 0.3433290421962738,
      "learning_rate": 9.094706176843777e-05,
      "loss": 0.0278,
      "step": 1410
    },
    {
      "epoch": 2.2720000000000002,
      "grad_norm": 0.3529592454433441,
      "learning_rate": 9.078829248301417e-05,
      "loss": 0.0312,
      "step": 1420
    },
    {
      "epoch": 2.288,
      "grad_norm": 0.5080022215843201,
      "learning_rate": 9.062828416006539e-05,
      "loss": 0.0326,
      "step": 1430
    },
    {
      "epoch": 2.304,
      "grad_norm": 0.6208224892616272,
      "learning_rate": 9.046704166020961e-05,
      "loss": 0.0337,
      "step": 1440
    },
    {
      "epoch": 2.32,
      "grad_norm": 0.6062721610069275,
      "learning_rate": 9.030456988155596e-05,
      "loss": 0.036,
      "step": 1450
    },
    {
      "epoch": 2.336,
      "grad_norm": 0.44274309277534485,
      "learning_rate": 9.014087375955573e-05,
      "loss": 0.0376,
      "step": 1460
    },
    {
      "epoch": 2.352,
      "grad_norm": 0.5820019245147705,
      "learning_rate": 8.997595826685243e-05,
      "loss": 0.0339,
      "step": 1470
    },
    {
      "epoch": 2.368,
      "grad_norm": 0.43849194049835205,
      "learning_rate": 8.980982841313074e-05,
      "loss": 0.0362,
      "step": 1480
    },
    {
      "epoch": 2.384,
      "grad_norm": 0.4618302881717682,
      "learning_rate": 8.964248924496435e-05,
      "loss": 0.0322,
      "step": 1490
    },
    {
      "epoch": 2.4,
      "grad_norm": 0.26285621523857117,
      "learning_rate": 8.947394584566258e-05,
      "loss": 0.0327,
      "step": 1500
    },
    {
      "epoch": 2.416,
      "grad_norm": 0.4021395146846771,
      "learning_rate": 8.930420333511606e-05,
      "loss": 0.0325,
      "step": 1510
    },
    {
      "epoch": 2.432,
      "grad_norm": 0.3816533386707306,
      "learning_rate": 8.913326686964117e-05,
      "loss": 0.0318,
      "step": 1520
    },
    {
      "epoch": 2.448,
      "grad_norm": 0.393133282661438,
      "learning_rate": 8.89611416418234e-05,
      "loss": 0.0288,
      "step": 1530
    },
    {
      "epoch": 2.464,
      "grad_norm": 0.45759648084640503,
      "learning_rate": 8.878783288035957e-05,
      "loss": 0.033,
      "step": 1540
    },
    {
      "epoch": 2.48,
      "grad_norm": 0.4825354516506195,
      "learning_rate": 8.86133458498991e-05,
      "loss": 0.031,
      "step": 1550
    },
    {
      "epoch": 2.496,
      "grad_norm": 0.300304114818573,
      "learning_rate": 8.843768585088393e-05,
      "loss": 0.0286,
      "step": 1560
    },
    {
      "epoch": 2.512,
      "grad_norm": 0.3737391233444214,
      "learning_rate": 8.82608582193877e-05,
      "loss": 0.0275,
      "step": 1570
    },
    {
      "epoch": 2.528,
      "grad_norm": 0.3656758666038513,
      "learning_rate": 8.80828683269535e-05,
      "loss": 0.029,
      "step": 1580
    },
    {
      "epoch": 2.544,
      "grad_norm": 0.37524253129959106,
      "learning_rate": 8.790372158043074e-05,
      "loss": 0.0291,
      "step": 1590
    },
    {
      "epoch": 2.56,
      "grad_norm": 0.4036988317966461,
      "learning_rate": 8.772342342181095e-05,
      "loss": 0.0279,
      "step": 1600
    },
    {
      "epoch": 2.576,
      "grad_norm": 0.26159197092056274,
      "learning_rate": 8.75419793280624e-05,
      "loss": 0.0268,
      "step": 1610
    },
    {
      "epoch": 2.592,
      "grad_norm": 0.3767406940460205,
      "learning_rate": 8.735939481096378e-05,
      "loss": 0.0258,
      "step": 1620
    },
    {
      "epoch": 2.608,
      "grad_norm": 0.3658304512500763,
      "learning_rate": 8.717567541693673e-05,
      "loss": 0.0235,
      "step": 1630
    },
    {
      "epoch": 2.624,
      "grad_norm": 0.4060811400413513,
      "learning_rate": 8.699082672687734e-05,
      "loss": 0.0272,
      "step": 1640
    },
    {
      "epoch": 2.64,
      "grad_norm": 0.47172242403030396,
      "learning_rate": 8.680485435598673e-05,
      "loss": 0.0337,
      "step": 1650
    },
    {
      "epoch": 2.656,
      "grad_norm": 0.5268769264221191,
      "learning_rate": 8.661776395360029e-05,
      "loss": 0.0278,
      "step": 1660
    },
    {
      "epoch": 2.672,
      "grad_norm": 0.25358352065086365,
      "learning_rate": 8.642956120301626e-05,
      "loss": 0.0243,
      "step": 1670
    },
    {
      "epoch": 2.6879999999999997,
      "grad_norm": 0.30180642008781433,
      "learning_rate": 8.624025182132292e-05,
      "loss": 0.0286,
      "step": 1680
    },
    {
      "epoch": 2.7039999999999997,
      "grad_norm": 0.3005179166793823,
      "learning_rate": 8.604984155922506e-05,
      "loss": 0.0309,
      "step": 1690
    },
    {
      "epoch": 2.7199999999999998,
      "grad_norm": 0.348069965839386,
      "learning_rate": 8.585833620086918e-05,
      "loss": 0.0243,
      "step": 1700
    },
    {
      "epoch": 2.7359999999999998,
      "grad_norm": 0.26462775468826294,
      "learning_rate": 8.566574156366784e-05,
      "loss": 0.0281,
      "step": 1710
    },
    {
      "epoch": 2.752,
      "grad_norm": 0.18482527136802673,
      "learning_rate": 8.547206349812298e-05,
      "loss": 0.0279,
      "step": 1720
    },
    {
      "epoch": 2.768,
      "grad_norm": 0.30108001828193665,
      "learning_rate": 8.527730788764805e-05,
      "loss": 0.0321,
      "step": 1730
    },
    {
      "epoch": 2.784,
      "grad_norm": 0.37178272008895874,
      "learning_rate": 8.508148064838948e-05,
      "loss": 0.0299,
      "step": 1740
    },
    {
      "epoch": 2.8,
      "grad_norm": 0.21368908882141113,
      "learning_rate": 8.488458772904684e-05,
      "loss": 0.0299,
      "step": 1750
    },
    {
      "epoch": 2.816,
      "grad_norm": 0.3662227690219879,
      "learning_rate": 8.468663511069217e-05,
      "loss": 0.0298,
      "step": 1760
    },
    {
      "epoch": 2.832,
      "grad_norm": 0.4634190499782562,
      "learning_rate": 8.448762880658825e-05,
      "loss": 0.0291,
      "step": 1770
    },
    {
      "epoch": 2.848,
      "grad_norm": 0.4424201548099518,
      "learning_rate": 8.428757486200603e-05,
      "loss": 0.0302,
      "step": 1780
    },
    {
      "epoch": 2.864,
      "grad_norm": 0.44733360409736633,
      "learning_rate": 8.40864793540409e-05,
      "loss": 0.0322,
      "step": 1790
    },
    {
      "epoch": 2.88,
      "grad_norm": 0.39951372146606445,
      "learning_rate": 8.388434839142813e-05,
      "loss": 0.0299,
      "step": 1800
    },
    {
      "epoch": 2.896,
      "grad_norm": 0.4851149618625641,
      "learning_rate": 8.368118811435726e-05,
      "loss": 0.0277,
      "step": 1810
    },
    {
      "epoch": 2.912,
      "grad_norm": 0.3297540843486786,
      "learning_rate": 8.347700469428564e-05,
      "loss": 0.0282,
      "step": 1820
    },
    {
      "epoch": 2.928,
      "grad_norm": 0.3784344792366028,
      "learning_rate": 8.327180433375091e-05,
      "loss": 0.0298,
      "step": 1830
    },
    {
      "epoch": 2.944,
      "grad_norm": 0.4221504032611847,
      "learning_rate": 8.306559326618259e-05,
      "loss": 0.0272,
      "step": 1840
    },
    {
      "epoch": 2.96,
      "grad_norm": 0.3750646710395813,
      "learning_rate": 8.285837775571276e-05,
      "loss": 0.0257,
      "step": 1850
    },
    {
      "epoch": 2.976,
      "grad_norm": 0.362826406955719,
      "learning_rate": 8.265016409698573e-05,
      "loss": 0.0324,
      "step": 1860
    },
    {
      "epoch": 2.992,
      "grad_norm": 0.38823533058166504,
      "learning_rate": 8.244095861496686e-05,
      "loss": 0.0302,
      "step": 1870
    },
    {
      "epoch": 3.008,
      "grad_norm": 0.26094621419906616,
      "learning_rate": 8.223076766475035e-05,
      "loss": 0.0275,
      "step": 1880
    },
    {
      "epoch": 3.024,
      "grad_norm": 0.311184823513031,
      "learning_rate": 8.201959763136633e-05,
      "loss": 0.0269,
      "step": 1890
    },
    {
      "epoch": 3.04,
      "grad_norm": 0.6415846943855286,
      "learning_rate": 8.180745492958674e-05,
      "loss": 0.0317,
      "step": 1900
    },
    {
      "epoch": 3.056,
      "grad_norm": 0.5095533728599548,
      "learning_rate": 8.159434600373061e-05,
      "loss": 0.0298,
      "step": 1910
    },
    {
      "epoch": 3.072,
      "grad_norm": 0.44364774227142334,
      "learning_rate": 8.138027732746818e-05,
      "loss": 0.0295,
      "step": 1920
    },
    {
      "epoch": 3.088,
      "grad_norm": 0.4945635497570038,
      "learning_rate": 8.116525540362434e-05,
      "loss": 0.0288,
      "step": 1930
    },
    {
      "epoch": 3.104,
      "grad_norm": 0.43596047163009644,
      "learning_rate": 8.094928676398101e-05,
      "loss": 0.027,
      "step": 1940
    },
    {
      "epoch": 3.12,
      "grad_norm": 0.37635689973831177,
      "learning_rate": 8.073237796907882e-05,
      "loss": 0.0322,
      "step": 1950
    },
    {
      "epoch": 3.136,
      "grad_norm": 0.4278242588043213,
      "learning_rate": 8.051453560801772e-05,
      "loss": 0.0217,
      "step": 1960
    },
    {
      "epoch": 3.152,
      "grad_norm": 0.28399866819381714,
      "learning_rate": 8.029576629825687e-05,
      "loss": 0.0277,
      "step": 1970
    },
    {
      "epoch": 3.168,
      "grad_norm": 0.5590260624885559,
      "learning_rate": 8.007607668541362e-05,
      "loss": 0.0309,
      "step": 1980
    },
    {
      "epoch": 3.184,
      "grad_norm": 0.3806508481502533,
      "learning_rate": 7.985547344306161e-05,
      "loss": 0.0262,
      "step": 1990
    },
    {
      "epoch": 3.2,
      "grad_norm": 0.33946818113327026,
      "learning_rate": 7.963396327252812e-05,
      "loss": 0.0258,
      "step": 2000
    },
    {
      "epoch": 3.216,
      "grad_norm": 0.3746076226234436,
      "learning_rate": 7.941155290269038e-05,
      "loss": 0.0246,
      "step": 2010
    },
    {
      "epoch": 3.232,
      "grad_norm": 0.33296412229537964,
      "learning_rate": 7.918824908977123e-05,
      "loss": 0.027,
      "step": 2020
    },
    {
      "epoch": 3.248,
      "grad_norm": 0.46303591132164,
      "learning_rate": 7.896405861713394e-05,
      "loss": 0.0278,
      "step": 2030
    },
    {
      "epoch": 3.2640000000000002,
      "grad_norm": 0.40460407733917236,
      "learning_rate": 7.873898829507606e-05,
      "loss": 0.0308,
      "step": 2040
    },
    {
      "epoch": 3.2800000000000002,
      "grad_norm": 0.26011455059051514,
      "learning_rate": 7.851304496062254e-05,
      "loss": 0.0283,
      "step": 2050
    },
    {
      "epoch": 3.296,
      "grad_norm": 0.5932962894439697,
      "learning_rate": 7.828623547731818e-05,
      "loss": 0.0252,
      "step": 2060
    },
    {
      "epoch": 3.312,
      "grad_norm": 0.47543007135391235,
      "learning_rate": 7.80585667350189e-05,
      "loss": 0.023,
      "step": 2070
    },
    {
      "epoch": 3.328,
      "grad_norm": 0.7409189343452454,
      "learning_rate": 7.783004564968263e-05,
      "loss": 0.0266,
      "step": 2080
    },
    {
      "epoch": 3.344,
      "grad_norm": 0.23538251221179962,
      "learning_rate": 7.760067916315921e-05,
      "loss": 0.0229,
      "step": 2090
    },
    {
      "epoch": 3.36,
      "grad_norm": 0.25726088881492615,
      "learning_rate": 7.737047424297941e-05,
      "loss": 0.0259,
      "step": 2100
    },
    {
      "epoch": 3.376,
      "grad_norm": 0.2773289382457733,
      "learning_rate": 7.713943788214337e-05,
      "loss": 0.0236,
      "step": 2110
    },
    {
      "epoch": 3.392,
      "grad_norm": 0.3606955409049988,
      "learning_rate": 7.690757709890812e-05,
      "loss": 0.0232,
      "step": 2120
    },
    {
      "epoch": 3.408,
      "grad_norm": 0.42424535751342773,
      "learning_rate": 7.66748989365744e-05,
      "loss": 0.0253,
      "step": 2130
    },
    {
      "epoch": 3.424,
      "grad_norm": 0.3485054671764374,
      "learning_rate": 7.644141046327271e-05,
      "loss": 0.0247,
      "step": 2140
    },
    {
      "epoch": 3.44,
      "grad_norm": 0.2407926321029663,
      "learning_rate": 7.620711877174866e-05,
      "loss": 0.0311,
      "step": 2150
    },
    {
      "epoch": 3.456,
      "grad_norm": 0.44037678837776184,
      "learning_rate": 7.597203097914732e-05,
      "loss": 0.0218,
      "step": 2160
    },
    {
      "epoch": 3.472,
      "grad_norm": 0.19860859215259552,
      "learning_rate": 7.573615422679726e-05,
      "loss": 0.0225,
      "step": 2170
    },
    {
      "epoch": 3.488,
      "grad_norm": 0.28846150636672974,
      "learning_rate": 7.549949567999345e-05,
      "loss": 0.0213,
      "step": 2180
    },
    {
      "epoch": 3.504,
      "grad_norm": 0.4492376446723938,
      "learning_rate": 7.526206252777968e-05,
      "loss": 0.0227,
      "step": 2190
    },
    {
      "epoch": 3.52,
      "grad_norm": 0.2822663486003876,
      "learning_rate": 7.50238619827301e-05,
      "loss": 0.025,
      "step": 2200
    },
    {
      "epoch": 3.536,
      "grad_norm": 0.15630508959293365,
      "learning_rate": 7.478490128073022e-05,
      "loss": 0.0223,
      "step": 2210
    },
    {
      "epoch": 3.552,
      "grad_norm": 0.4379076659679413,
      "learning_rate": 7.454518768075704e-05,
      "loss": 0.0243,
      "step": 2220
    },
    {
      "epoch": 3.568,
      "grad_norm": 0.21169213950634003,
      "learning_rate": 7.430472846465856e-05,
      "loss": 0.0236,
      "step": 2230
    },
    {
      "epoch": 3.584,
      "grad_norm": 0.291696161031723,
      "learning_rate": 7.406353093693253e-05,
      "loss": 0.0217,
      "step": 2240
    },
    {
      "epoch": 3.6,
      "grad_norm": 0.3981015682220459,
      "learning_rate": 7.382160242450469e-05,
      "loss": 0.0232,
      "step": 2250
    },
    {
      "epoch": 3.616,
      "grad_norm": 0.447848379611969,
      "learning_rate": 7.357895027650598e-05,
      "loss": 0.026,
      "step": 2260
    },
    {
      "epoch": 3.632,
      "grad_norm": 0.34005206823349,
      "learning_rate": 7.333558186404958e-05,
      "loss": 0.0237,
      "step": 2270
    },
    {
      "epoch": 3.648,
      "grad_norm": 0.2622120976448059,
      "learning_rate": 7.309150458000668e-05,
      "loss": 0.0266,
      "step": 2280
    },
    {
      "epoch": 3.664,
      "grad_norm": 0.18652892112731934,
      "learning_rate": 7.284672583878219e-05,
      "loss": 0.0262,
      "step": 2290
    },
    {
      "epoch": 3.68,
      "grad_norm": 0.25321805477142334,
      "learning_rate": 7.260125307608929e-05,
      "loss": 0.0175,
      "step": 2300
    },
    {
      "epoch": 3.6959999999999997,
      "grad_norm": 0.23243199288845062,
      "learning_rate": 7.235509374872373e-05,
      "loss": 0.026,
      "step": 2310
    },
    {
      "epoch": 3.7119999999999997,
      "grad_norm": 0.30440691113471985,
      "learning_rate": 7.210825533433719e-05,
      "loss": 0.0214,
      "step": 2320
    },
    {
      "epoch": 3.7279999999999998,
      "grad_norm": 0.27811628580093384,
      "learning_rate": 7.186074533121013e-05,
      "loss": 0.0258,
      "step": 2330
    },
    {
      "epoch": 3.7439999999999998,
      "grad_norm": 0.31081423163414,
      "learning_rate": 7.161257125802413e-05,
      "loss": 0.0249,
      "step": 2340
    },
    {
      "epoch": 3.76,
      "grad_norm": 0.3148125112056732,
      "learning_rate": 7.136374065363334e-05,
      "loss": 0.0279,
      "step": 2350
    },
    {
      "epoch": 3.776,
      "grad_norm": 0.4866085946559906,
      "learning_rate": 7.11142610768356e-05,
      "loss": 0.0294,
      "step": 2360
    },
    {
      "epoch": 3.792,
      "grad_norm": 0.4648098349571228,
      "learning_rate": 7.086414010614276e-05,
      "loss": 0.0236,
      "step": 2370
    },
    {
      "epoch": 3.808,
      "grad_norm": 0.3126159608364105,
      "learning_rate": 7.061338533955043e-05,
      "loss": 0.0264,
      "step": 2380
    },
    {
      "epoch": 3.824,
      "grad_norm": 0.2963082194328308,
      "learning_rate": 7.036200439430725e-05,
      "loss": 0.0283,
      "step": 2390
    },
    {
      "epoch": 3.84,
      "grad_norm": 0.28914204239845276,
      "learning_rate": 7.01100049066835e-05,
      "loss": 0.0277,
      "step": 2400
    },
    {
      "epoch": 3.856,
      "grad_norm": 0.2665204405784607,
      "learning_rate": 6.985739453173903e-05,
      "loss": 0.022,
      "step": 2410
    },
    {
      "epoch": 3.872,
      "grad_norm": 0.24629130959510803,
      "learning_rate": 6.960418094309085e-05,
      "loss": 0.0245,
      "step": 2420
    },
    {
      "epoch": 3.888,
      "grad_norm": 0.3073159456253052,
      "learning_rate": 6.93503718326799e-05,
      "loss": 0.0199,
      "step": 2430
    },
    {
      "epoch": 3.904,
      "grad_norm": 0.30077695846557617,
      "learning_rate": 6.909597491053751e-05,
      "loss": 0.0272,
      "step": 2440
    },
    {
      "epoch": 3.92,
      "grad_norm": 0.2868819832801819,
      "learning_rate": 6.884099790455113e-05,
      "loss": 0.0233,
      "step": 2450
    },
    {
      "epoch": 3.936,
      "grad_norm": 0.3010862171649933,
      "learning_rate": 6.858544856022952e-05,
      "loss": 0.0265,
      "step": 2460
    },
    {
      "epoch": 3.952,
      "grad_norm": 0.4242578148841858,
      "learning_rate": 6.83293346404676e-05,
      "loss": 0.0238,
      "step": 2470
    },
    {
      "epoch": 3.968,
      "grad_norm": 0.2540842294692993,
      "learning_rate": 6.80726639253105e-05,
      "loss": 0.0198,
      "step": 2480
    },
    {
      "epoch": 3.984,
      "grad_norm": 0.4174778461456299,
      "learning_rate": 6.781544421171732e-05,
      "loss": 0.0251,
      "step": 2490
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.40387827157974243,
      "learning_rate": 6.755768331332424e-05,
      "loss": 0.0225,
      "step": 2500
    },
    {
      "epoch": 4.016,
      "grad_norm": 0.2764705717563629,
      "learning_rate": 6.729938906020713e-05,
      "loss": 0.0235,
      "step": 2510
    },
    {
      "epoch": 4.032,
      "grad_norm": 0.27031779289245605,
      "learning_rate": 6.704056929864376e-05,
      "loss": 0.0234,
      "step": 2520
    },
    {
      "epoch": 4.048,
      "grad_norm": 0.28792449831962585,
      "learning_rate": 6.67812318908754e-05,
      "loss": 0.0191,
      "step": 2530
    },
    {
      "epoch": 4.064,
      "grad_norm": 0.4237053692340851,
      "learning_rate": 6.6521384714868e-05,
      "loss": 0.0225,
      "step": 2540
    },
    {
      "epoch": 4.08,
      "grad_norm": 0.35232630372047424,
      "learning_rate": 6.626103566407295e-05,
      "loss": 0.0257,
      "step": 2550
    },
    {
      "epoch": 4.096,
      "grad_norm": 0.4114013910293579,
      "learning_rate": 6.600019264718713e-05,
      "loss": 0.0254,
      "step": 2560
    },
    {
      "epoch": 4.112,
      "grad_norm": 0.28505682945251465,
      "learning_rate": 6.573886358791285e-05,
      "loss": 0.021,
      "step": 2570
    },
    {
      "epoch": 4.128,
      "grad_norm": 0.3418331742286682,
      "learning_rate": 6.547705642471703e-05,
      "loss": 0.0244,
      "step": 2580
    },
    {
      "epoch": 4.144,
      "grad_norm": 0.34200969338417053,
      "learning_rate": 6.521477911059008e-05,
      "loss": 0.0262,
      "step": 2590
    },
    {
      "epoch": 4.16,
      "grad_norm": 0.29658079147338867,
      "learning_rate": 6.495203961280434e-05,
      "loss": 0.0213,
      "step": 2600
    },
    {
      "epoch": 4.176,
      "grad_norm": 0.4683709442615509,
      "learning_rate": 6.468884591267204e-05,
      "loss": 0.0212,
      "step": 2610
    },
    {
      "epoch": 4.192,
      "grad_norm": 0.35053446888923645,
      "learning_rate": 6.44252060053028e-05,
      "loss": 0.0195,
      "step": 2620
    },
    {
      "epoch": 4.208,
      "grad_norm": 0.33383941650390625,
      "learning_rate": 6.416112789936086e-05,
      "loss": 0.0216,
      "step": 2630
    },
    {
      "epoch": 4.224,
      "grad_norm": 0.445821613073349,
      "learning_rate": 6.389661961682173e-05,
      "loss": 0.0192,
      "step": 2640
    },
    {
      "epoch": 4.24,
      "grad_norm": 0.28353312611579895,
      "learning_rate": 6.363168919272846e-05,
      "loss": 0.0194,
      "step": 2650
    },
    {
      "epoch": 4.256,
      "grad_norm": 0.2120702862739563,
      "learning_rate": 6.336634467494768e-05,
      "loss": 0.0288,
      "step": 2660
    },
    {
      "epoch": 4.272,
      "grad_norm": 0.35858669877052307,
      "learning_rate": 6.310059412392505e-05,
      "loss": 0.0272,
      "step": 2670
    },
    {
      "epoch": 4.288,
      "grad_norm": 0.3167365491390228,
      "learning_rate": 6.283444561244042e-05,
      "loss": 0.0228,
      "step": 2680
    },
    {
      "epoch": 4.304,
      "grad_norm": 0.3228168487548828,
      "learning_rate": 6.256790722536251e-05,
      "loss": 0.0186,
      "step": 2690
    },
    {
      "epoch": 4.32,
      "grad_norm": 0.2647988498210907,
      "learning_rate": 6.230098705940354e-05,
      "loss": 0.0234,
      "step": 2700
    },
    {
      "epoch": 4.336,
      "grad_norm": 0.35195767879486084,
      "learning_rate": 6.203369322287306e-05,
      "loss": 0.0203,
      "step": 2710
    },
    {
      "epoch": 4.352,
      "grad_norm": 0.45781210064888,
      "learning_rate": 6.17660338354317e-05,
      "loss": 0.0237,
      "step": 2720
    },
    {
      "epoch": 4.368,
      "grad_norm": 0.2607864439487457,
      "learning_rate": 6.149801702784456e-05,
      "loss": 0.0242,
      "step": 2730
    },
    {
      "epoch": 4.384,
      "grad_norm": 0.3549015522003174,
      "learning_rate": 6.122965094173424e-05,
      "loss": 0.0217,
      "step": 2740
    },
    {
      "epoch": 4.4,
      "grad_norm": 0.3188204765319824,
      "learning_rate": 6.0960943729333374e-05,
      "loss": 0.0177,
      "step": 2750
    },
    {
      "epoch": 4.416,
      "grad_norm": 0.3218193054199219,
      "learning_rate": 6.069190355323717e-05,
      "loss": 0.0204,
      "step": 2760
    },
    {
      "epoch": 4.432,
      "grad_norm": 0.19881951808929443,
      "learning_rate": 6.042253858615532e-05,
      "loss": 0.0179,
      "step": 2770
    },
    {
      "epoch": 4.448,
      "grad_norm": 0.27880778908729553,
      "learning_rate": 6.015285701066382e-05,
      "loss": 0.0211,
      "step": 2780
    },
    {
      "epoch": 4.464,
      "grad_norm": 0.3260197341442108,
      "learning_rate": 5.988286701895631e-05,
      "loss": 0.0175,
      "step": 2790
    },
    {
      "epoch": 4.48,
      "grad_norm": 0.3047335147857666,
      "learning_rate": 5.961257681259535e-05,
      "loss": 0.019,
      "step": 2800
    },
    {
      "epoch": 4.496,
      "grad_norm": 0.17397429049015045,
      "learning_rate": 5.934199460226317e-05,
      "loss": 0.0222,
      "step": 2810
    },
    {
      "epoch": 4.5120000000000005,
      "grad_norm": 0.2983797490596771,
      "learning_rate": 5.9071128607512285e-05,
      "loss": 0.0159,
      "step": 2820
    },
    {
      "epoch": 4.5280000000000005,
      "grad_norm": 0.1442362517118454,
      "learning_rate": 5.8799987056515804e-05,
      "loss": 0.0205,
      "step": 2830
    },
    {
      "epoch": 4.5440000000000005,
      "grad_norm": 0.3567107319831848,
      "learning_rate": 5.8528578185817514e-05,
      "loss": 0.0204,
      "step": 2840
    },
    {
      "epoch": 4.5600000000000005,
      "grad_norm": 0.19634947180747986,
      "learning_rate": 5.825691024008162e-05,
      "loss": 0.0196,
      "step": 2850
    },
    {
      "epoch": 4.576,
      "grad_norm": 0.26159587502479553,
      "learning_rate": 5.798499147184233e-05,
      "loss": 0.0201,
      "step": 2860
    },
    {
      "epoch": 4.592,
      "grad_norm": 0.4290419816970825,
      "learning_rate": 5.771283014125317e-05,
      "loss": 0.0202,
      "step": 2870
    },
    {
      "epoch": 4.608,
      "grad_norm": 0.3767322897911072,
      "learning_rate": 5.7440434515836064e-05,
      "loss": 0.0187,
      "step": 2880
    },
    {
      "epoch": 4.624,
      "grad_norm": 0.5074912905693054,
      "learning_rate": 5.7167812870230094e-05,
      "loss": 0.0205,
      "step": 2890
    },
    {
      "epoch": 4.64,
      "grad_norm": 0.16044597327709198,
      "learning_rate": 5.689497348594035e-05,
      "loss": 0.0226,
      "step": 2900
    },
    {
      "epoch": 4.656,
      "grad_norm": 0.3979272246360779,
      "learning_rate": 5.662192465108613e-05,
      "loss": 0.0286,
      "step": 2910
    },
    {
      "epoch": 4.672,
      "grad_norm": 0.2906138002872467,
      "learning_rate": 5.634867466014932e-05,
      "loss": 0.021,
      "step": 2920
    },
    {
      "epoch": 4.688,
      "grad_norm": 0.36478230357170105,
      "learning_rate": 5.607523181372234e-05,
      "loss": 0.0197,
      "step": 2930
    },
    {
      "epoch": 4.704,
      "grad_norm": 0.39902612566947937,
      "learning_rate": 5.5801604418256117e-05,
      "loss": 0.0216,
      "step": 2940
    },
    {
      "epoch": 4.72,
      "grad_norm": 0.3256303668022156,
      "learning_rate": 5.552780078580756e-05,
      "loss": 0.0186,
      "step": 2950
    },
    {
      "epoch": 4.736,
      "grad_norm": 0.350939005613327,
      "learning_rate": 5.525382923378728e-05,
      "loss": 0.0225,
      "step": 2960
    },
    {
      "epoch": 4.752,
      "grad_norm": 0.3618016242980957,
      "learning_rate": 5.49796980847068e-05,
      "loss": 0.0193,
      "step": 2970
    },
    {
      "epoch": 4.768,
      "grad_norm": 0.26276373863220215,
      "learning_rate": 5.470541566592573e-05,
      "loss": 0.0201,
      "step": 2980
    },
    {
      "epoch": 4.784,
      "grad_norm": 0.1853194385766983,
      "learning_rate": 5.443099030939887e-05,
      "loss": 0.0175,
      "step": 2990
    },
    {
      "epoch": 4.8,
      "grad_norm": 0.24054738879203796,
      "learning_rate": 5.415643035142309e-05,
      "loss": 0.019,
      "step": 3000
    },
    {
      "epoch": 4.816,
      "grad_norm": 0.31713417172431946,
      "learning_rate": 5.3881744132384104e-05,
      "loss": 0.0209,
      "step": 3010
    },
    {
      "epoch": 4.832,
      "grad_norm": 0.3177143931388855,
      "learning_rate": 5.360693999650303e-05,
      "loss": 0.0232,
      "step": 3020
    },
    {
      "epoch": 4.848,
      "grad_norm": 0.32203736901283264,
      "learning_rate": 5.3332026291583016e-05,
      "loss": 0.0207,
      "step": 3030
    },
    {
      "epoch": 4.864,
      "grad_norm": 0.21070876717567444,
      "learning_rate": 5.305701136875566e-05,
      "loss": 0.0163,
      "step": 3040
    },
    {
      "epoch": 4.88,
      "grad_norm": 0.3464716672897339,
      "learning_rate": 5.278190358222721e-05,
      "loss": 0.0174,
      "step": 3050
    },
    {
      "epoch": 4.896,
      "grad_norm": 0.27028194069862366,
      "learning_rate": 5.25067112890249e-05,
      "loss": 0.0224,
      "step": 3060
    },
    {
      "epoch": 4.912,
      "grad_norm": 0.36943814158439636,
      "learning_rate": 5.2231442848743064e-05,
      "loss": 0.0236,
      "step": 3070
    },
    {
      "epoch": 4.928,
      "grad_norm": 0.2328462153673172,
      "learning_rate": 5.1956106623289145e-05,
      "loss": 0.0207,
      "step": 3080
    },
    {
      "epoch": 4.944,
      "grad_norm": 0.19769224524497986,
      "learning_rate": 5.168071097662972e-05,
      "loss": 0.0184,
      "step": 3090
    },
    {
      "epoch": 4.96,
      "grad_norm": 0.2721625864505768,
      "learning_rate": 5.1405264274536445e-05,
      "loss": 0.0222,
      "step": 3100
    },
    {
      "epoch": 4.976,
      "grad_norm": 0.2759113907814026,
      "learning_rate": 5.112977488433188e-05,
      "loss": 0.024,
      "step": 3110
    },
    {
      "epoch": 4.992,
      "grad_norm": 0.3259311616420746,
      "learning_rate": 5.085425117463533e-05,
      "loss": 0.0223,
      "step": 3120
    },
    {
      "epoch": 5.008,
      "grad_norm": 0.30562376976013184,
      "learning_rate": 5.057870151510864e-05,
      "loss": 0.017,
      "step": 3130
    },
    {
      "epoch": 5.024,
      "grad_norm": 0.2627156674861908,
      "learning_rate": 5.030313427620197e-05,
      "loss": 0.0268,
      "step": 3140
    },
    {
      "epoch": 5.04,
      "grad_norm": 0.245286762714386,
      "learning_rate": 5.0027557828899426e-05,
      "loss": 0.0189,
      "step": 3150
    },
    {
      "epoch": 5.056,
      "grad_norm": 0.22621245682239532,
      "learning_rate": 4.975198054446492e-05,
      "loss": 0.0164,
      "step": 3160
    },
    {
      "epoch": 5.072,
      "grad_norm": 0.3449943959712982,
      "learning_rate": 4.947641079418773e-05,
      "loss": 0.0211,
      "step": 3170
    },
    {
      "epoch": 5.088,
      "grad_norm": 0.1819818764925003,
      "learning_rate": 4.920085694912828e-05,
      "loss": 0.019,
      "step": 3180
    },
    {
      "epoch": 5.104,
      "grad_norm": 0.24473543465137482,
      "learning_rate": 4.892532737986387e-05,
      "loss": 0.0161,
      "step": 3190
    },
    {
      "epoch": 5.12,
      "grad_norm": 0.2644115388393402,
      "learning_rate": 4.864983045623434e-05,
      "loss": 0.0219,
      "step": 3200
    },
    {
      "epoch": 5.136,
      "grad_norm": 0.28054749965667725,
      "learning_rate": 4.837437454708784e-05,
      "loss": 0.0193,
      "step": 3210
    },
    {
      "epoch": 5.152,
      "grad_norm": 0.3118099272251129,
      "learning_rate": 4.809896802002662e-05,
      "loss": 0.0187,
      "step": 3220
    },
    {
      "epoch": 5.168,
      "grad_norm": 0.37181559205055237,
      "learning_rate": 4.7823619241152854e-05,
      "loss": 0.021,
      "step": 3230
    },
    {
      "epoch": 5.184,
      "grad_norm": 0.3354870676994324,
      "learning_rate": 4.754833657481445e-05,
      "loss": 0.0155,
      "step": 3240
    },
    {
      "epoch": 5.2,
      "grad_norm": 0.31353050470352173,
      "learning_rate": 4.7273128383351015e-05,
      "loss": 0.0166,
      "step": 3250
    },
    {
      "epoch": 5.216,
      "grad_norm": 0.24215592443943024,
      "learning_rate": 4.699800302683981e-05,
      "loss": 0.0266,
      "step": 3260
    },
    {
      "epoch": 5.232,
      "grad_norm": 0.22024405002593994,
      "learning_rate": 4.6722968862841806e-05,
      "loss": 0.0147,
      "step": 3270
    },
    {
      "epoch": 5.248,
      "grad_norm": 0.16653631627559662,
      "learning_rate": 4.6448034246147754e-05,
      "loss": 0.019,
      "step": 3280
    },
    {
      "epoch": 5.264,
      "grad_norm": 0.25161346793174744,
      "learning_rate": 4.6173207528524476e-05,
      "loss": 0.0157,
      "step": 3290
    },
    {
      "epoch": 5.28,
      "grad_norm": 0.19032499194145203,
      "learning_rate": 4.58984970584611e-05,
      "loss": 0.0147,
      "step": 3300
    },
    {
      "epoch": 5.296,
      "grad_norm": 0.28522875905036926,
      "learning_rate": 4.562391118091544e-05,
      "loss": 0.0169,
      "step": 3310
    },
    {
      "epoch": 5.312,
      "grad_norm": 0.16369828581809998,
      "learning_rate": 4.534945823706056e-05,
      "loss": 0.0183,
      "step": 3320
    },
    {
      "epoch": 5.328,
      "grad_norm": 0.33436375856399536,
      "learning_rate": 4.507514656403137e-05,
      "loss": 0.0182,
      "step": 3330
    },
    {
      "epoch": 5.344,
      "grad_norm": 0.1621371954679489,
      "learning_rate": 4.480098449467132e-05,
      "loss": 0.0196,
      "step": 3340
    },
    {
      "epoch": 5.36,
      "grad_norm": 0.3182526230812073,
      "learning_rate": 4.452698035727929e-05,
      "loss": 0.022,
      "step": 3350
    },
    {
      "epoch": 5.376,
      "grad_norm": 0.257004052400589,
      "learning_rate": 4.425314247535668e-05,
      "loss": 0.0179,
      "step": 3360
    },
    {
      "epoch": 5.392,
      "grad_norm": 0.31039920449256897,
      "learning_rate": 4.3979479167354477e-05,
      "loss": 0.0239,
      "step": 3370
    },
    {
      "epoch": 5.408,
      "grad_norm": 0.29256734251976013,
      "learning_rate": 4.370599874642055e-05,
      "loss": 0.0184,
      "step": 3380
    },
    {
      "epoch": 5.424,
      "grad_norm": 0.23054800927639008,
      "learning_rate": 4.3432709520147205e-05,
      "loss": 0.0215,
      "step": 3390
    },
    {
      "epoch": 5.44,
      "grad_norm": 0.21826224029064178,
      "learning_rate": 4.315961979031875e-05,
      "loss": 0.0176,
      "step": 3400
    },
    {
      "epoch": 5.456,
      "grad_norm": 0.2913340926170349,
      "learning_rate": 4.2886737852659325e-05,
      "loss": 0.0205,
      "step": 3410
    },
    {
      "epoch": 5.4719999999999995,
      "grad_norm": 0.301816463470459,
      "learning_rate": 4.261407199658093e-05,
      "loss": 0.018,
      "step": 3420
    },
    {
      "epoch": 5.4879999999999995,
      "grad_norm": 0.40480971336364746,
      "learning_rate": 4.234163050493158e-05,
      "loss": 0.0173,
      "step": 3430
    },
    {
      "epoch": 5.504,
      "grad_norm": 0.24364496767520905,
      "learning_rate": 4.2069421653743706e-05,
      "loss": 0.016,
      "step": 3440
    },
    {
      "epoch": 5.52,
      "grad_norm": 0.273919016122818,
      "learning_rate": 4.179745371198276e-05,
      "loss": 0.0204,
      "step": 3450
    },
    {
      "epoch": 5.536,
      "grad_norm": 0.32271808385849,
      "learning_rate": 4.1525734941296026e-05,
      "loss": 0.0209,
      "step": 3460
    },
    {
      "epoch": 5.552,
      "grad_norm": 0.19895771145820618,
      "learning_rate": 4.125427359576162e-05,
      "loss": 0.0178,
      "step": 3470
    },
    {
      "epoch": 5.568,
      "grad_norm": 0.2813350558280945,
      "learning_rate": 4.0983077921637815e-05,
      "loss": 0.0147,
      "step": 3480
    },
    {
      "epoch": 5.584,
      "grad_norm": 0.26414069533348083,
      "learning_rate": 4.07121561571125e-05,
      "loss": 0.0194,
      "step": 3490
    },
    {
      "epoch": 5.6,
      "grad_norm": 0.2959575951099396,
      "learning_rate": 4.044151653205292e-05,
      "loss": 0.0189,
      "step": 3500
    },
    {
      "epoch": 5.616,
      "grad_norm": 0.3964207172393799,
      "learning_rate": 4.0171167267755696e-05,
      "loss": 0.0182,
      "step": 3510
    },
    {
      "epoch": 5.632,
      "grad_norm": 0.2166132777929306,
      "learning_rate": 3.9901116576697083e-05,
      "loss": 0.0169,
      "step": 3520
    },
    {
      "epoch": 5.648,
      "grad_norm": 0.36999547481536865,
      "learning_rate": 3.963137266228349e-05,
      "loss": 0.0159,
      "step": 3530
    },
    {
      "epoch": 5.664,
      "grad_norm": 0.29925259947776794,
      "learning_rate": 3.93619437186023e-05,
      "loss": 0.0181,
      "step": 3540
    },
    {
      "epoch": 5.68,
      "grad_norm": 0.2723861038684845,
      "learning_rate": 3.9092837930172884e-05,
      "loss": 0.0211,
      "step": 3550
    },
    {
      "epoch": 5.696,
      "grad_norm": 0.29387396574020386,
      "learning_rate": 3.8824063471698105e-05,
      "loss": 0.0199,
      "step": 3560
    },
    {
      "epoch": 5.712,
      "grad_norm": 0.362053245306015,
      "learning_rate": 3.855562850781589e-05,
      "loss": 0.0211,
      "step": 3570
    },
    {
      "epoch": 5.728,
      "grad_norm": 0.19860872626304626,
      "learning_rate": 3.828754119285123e-05,
      "loss": 0.0167,
      "step": 3580
    },
    {
      "epoch": 5.744,
      "grad_norm": 0.12621618807315826,
      "learning_rate": 3.801980967056851e-05,
      "loss": 0.0186,
      "step": 3590
    },
    {
      "epoch": 5.76,
      "grad_norm": 0.2625875473022461,
      "learning_rate": 3.77524420739241e-05,
      "loss": 0.0182,
      "step": 3600
    },
    {
      "epoch": 5.776,
      "grad_norm": 0.19731885194778442,
      "learning_rate": 3.748544652481927e-05,
      "loss": 0.0151,
      "step": 3610
    },
    {
      "epoch": 5.792,
      "grad_norm": 0.2267191857099533,
      "learning_rate": 3.721883113385353e-05,
      "loss": 0.0167,
      "step": 3620
    },
    {
      "epoch": 5.808,
      "grad_norm": 0.32113417983055115,
      "learning_rate": 3.695260400007819e-05,
      "loss": 0.0171,
      "step": 3630
    },
    {
      "epoch": 5.824,
      "grad_norm": 0.25330817699432373,
      "learning_rate": 3.6686773210750385e-05,
| "loss": 0.0196, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 5.84, | |
| "grad_norm": 0.3203773498535156, | |
| "learning_rate": 3.642134684108737e-05, | |
| "loss": 0.0165, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 5.856, | |
| "grad_norm": 0.31569764018058777, | |
| "learning_rate": 3.615633295402123e-05, | |
| "loss": 0.0137, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 5.872, | |
| "grad_norm": 0.22229187190532684, | |
| "learning_rate": 3.5891739599953945e-05, | |
| "loss": 0.0169, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 5.888, | |
| "grad_norm": 0.21588951349258423, | |
| "learning_rate": 3.5627574816512846e-05, | |
| "loss": 0.0167, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 5.904, | |
| "grad_norm": 0.2931743264198303, | |
| "learning_rate": 3.536384662830648e-05, | |
| "loss": 0.0183, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 5.92, | |
| "grad_norm": 0.19359277188777924, | |
| "learning_rate": 3.5100563046680764e-05, | |
| "loss": 0.0179, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 5.936, | |
| "grad_norm": 0.22053644061088562, | |
| "learning_rate": 3.483773206947572e-05, | |
| "loss": 0.016, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 5.952, | |
| "grad_norm": 0.14229431748390198, | |
| "learning_rate": 3.457536168078247e-05, | |
| "loss": 0.0194, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 5.968, | |
| "grad_norm": 0.12497511506080627, | |
| "learning_rate": 3.431345985070067e-05, | |
| "loss": 0.015, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 5.984, | |
| "grad_norm": 0.2512384057044983, | |
| "learning_rate": 3.40520345350965e-05, | |
| "loss": 0.0171, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 6.0, | |
| "grad_norm": 0.3151179552078247, | |
| "learning_rate": 3.379109367536089e-05, | |
| "loss": 0.0176, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 6.016, | |
| "grad_norm": 0.2615306079387665, | |
| "learning_rate": 3.3530645198168295e-05, | |
| "loss": 0.0163, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 6.032, | |
| "grad_norm": 0.19693830609321594, | |
| "learning_rate": 3.327069701523595e-05, | |
| "loss": 0.0175, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 6.048, | |
| "grad_norm": 0.3001508414745331, | |
| "learning_rate": 3.301125702308353e-05, | |
| "loss": 0.0172, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 6.064, | |
| "grad_norm": 0.2507281005382538, | |
| "learning_rate": 3.275233310279321e-05, | |
| "loss": 0.0127, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 6.08, | |
| "grad_norm": 0.22329473495483398, | |
| "learning_rate": 3.249393311977037e-05, | |
| "loss": 0.0166, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 6.096, | |
| "grad_norm": 0.21224729716777802, | |
| "learning_rate": 3.223606492350451e-05, | |
| "loss": 0.0146, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 6.112, | |
| "grad_norm": 0.2867546081542969, | |
| "learning_rate": 3.197873634733096e-05, | |
| "loss": 0.0204, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 6.128, | |
| "grad_norm": 0.38894349336624146, | |
| "learning_rate": 3.172195520819285e-05, | |
| "loss": 0.0213, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 6.144, | |
| "grad_norm": 0.2423303872346878, | |
| "learning_rate": 3.146572930640362e-05, | |
| "loss": 0.0146, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 6.16, | |
| "grad_norm": 0.10929609835147858, | |
| "learning_rate": 3.121006642541014e-05, | |
| "loss": 0.0161, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 6.176, | |
| "grad_norm": 0.22880026698112488, | |
| "learning_rate": 3.095497433155626e-05, | |
| "loss": 0.0148, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 6.192, | |
| "grad_norm": 0.29095223546028137, | |
| "learning_rate": 3.070046077384682e-05, | |
| "loss": 0.0188, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 6.208, | |
| "grad_norm": 0.14561089873313904, | |
| "learning_rate": 3.0446533483712304e-05, | |
| "loss": 0.015, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 6.224, | |
| "grad_norm": 0.15440180897712708, | |
| "learning_rate": 3.0193200174774038e-05, | |
| "loss": 0.0125, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 6.24, | |
| "grad_norm": 0.14782895147800446, | |
| "learning_rate": 2.994046854260974e-05, | |
| "loss": 0.0144, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 6.256, | |
| "grad_norm": 0.24117444455623627, | |
| "learning_rate": 2.9688346264519866e-05, | |
| "loss": 0.0153, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 6.272, | |
| "grad_norm": 0.1934320479631424, | |
| "learning_rate": 2.943684099929436e-05, | |
| "loss": 0.0146, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 6.288, | |
| "grad_norm": 0.1928635537624359, | |
| "learning_rate": 2.918596038697995e-05, | |
| "loss": 0.0147, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 6.304, | |
| "grad_norm": 0.1921893060207367, | |
| "learning_rate": 2.8935712048648112e-05, | |
| "loss": 0.0152, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 6.32, | |
| "grad_norm": 0.23190532624721527, | |
| "learning_rate": 2.8686103586163626e-05, | |
| "loss": 0.0184, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 6.336, | |
| "grad_norm": 0.21398049592971802, | |
| "learning_rate": 2.843714258195346e-05, | |
| "loss": 0.0159, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 6.352, | |
| "grad_norm": 0.20892053842544556, | |
| "learning_rate": 2.8188836598776662e-05, | |
| "loss": 0.0157, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 6.368, | |
| "grad_norm": 0.2554166615009308, | |
| "learning_rate": 2.7941193179494484e-05, | |
| "loss": 0.0145, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 6.384, | |
| "grad_norm": 0.23498912155628204, | |
| "learning_rate": 2.7694219846841262e-05, | |
| "loss": 0.0151, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 6.4, | |
| "grad_norm": 0.21472951769828796, | |
| "learning_rate": 2.7447924103195976e-05, | |
| "loss": 0.0149, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 6.416, | |
| "grad_norm": 0.3393695652484894, | |
| "learning_rate": 2.7202313430354253e-05, | |
| "loss": 0.0146, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 6.432, | |
| "grad_norm": 0.34454214572906494, | |
| "learning_rate": 2.695739528930111e-05, | |
| "loss": 0.0162, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 6.448, | |
| "grad_norm": 0.2963325083255768, | |
| "learning_rate": 2.67131771199844e-05, | |
| "loss": 0.0158, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 6.464, | |
| "grad_norm": 0.2864843010902405, | |
| "learning_rate": 2.6469666341088677e-05, | |
| "loss": 0.0173, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 6.48, | |
| "grad_norm": 0.2502191960811615, | |
| "learning_rate": 2.6226870349809885e-05, | |
| "loss": 0.0152, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 6.496, | |
| "grad_norm": 0.3564821481704712, | |
| "learning_rate": 2.5984796521630737e-05, | |
| "loss": 0.0152, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 6.5120000000000005, | |
| "grad_norm": 0.25787878036499023, | |
| "learning_rate": 2.574345221009653e-05, | |
| "loss": 0.02, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 6.5280000000000005, | |
| "grad_norm": 0.31801271438598633, | |
| "learning_rate": 2.5502844746591804e-05, | |
| "loss": 0.0144, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 6.5440000000000005, | |
| "grad_norm": 0.219718337059021, | |
| "learning_rate": 2.526298144011775e-05, | |
| "loss": 0.0152, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 6.5600000000000005, | |
| "grad_norm": 0.21489684283733368, | |
| "learning_rate": 2.5023869577070013e-05, | |
| "loss": 0.0154, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 6.576, | |
| "grad_norm": 0.22638309001922607, | |
| "learning_rate": 2.478551642101743e-05, | |
| "loss": 0.0159, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 6.592, | |
| "grad_norm": 0.21957838535308838, | |
| "learning_rate": 2.4547929212481435e-05, | |
| "loss": 0.0133, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 6.608, | |
| "grad_norm": 0.25034192204475403, | |
| "learning_rate": 2.4311115168716013e-05, | |
| "loss": 0.0144, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 6.624, | |
| "grad_norm": 0.250244677066803, | |
| "learning_rate": 2.4075081483488494e-05, | |
| "loss": 0.0156, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 6.64, | |
| "grad_norm": 0.20421521365642548, | |
| "learning_rate": 2.3839835326861104e-05, | |
| "loss": 0.0153, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 6.656, | |
| "grad_norm": 0.2354811578989029, | |
| "learning_rate": 2.3605383844972966e-05, | |
| "loss": 0.0117, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 6.672, | |
| "grad_norm": 0.2745189070701599, | |
| "learning_rate": 2.3371734159823284e-05, | |
| "loss": 0.0138, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 6.688, | |
| "grad_norm": 0.13854728639125824, | |
| "learning_rate": 2.3138893369054766e-05, | |
| "loss": 0.0182, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 6.704, | |
| "grad_norm": 0.16858164966106415, | |
| "learning_rate": 2.2906868545738102e-05, | |
| "loss": 0.0188, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 6.72, | |
| "grad_norm": 0.18250826001167297, | |
| "learning_rate": 2.2675666738157186e-05, | |
| "loss": 0.0148, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 6.736, | |
| "grad_norm": 0.23907795548439026, | |
| "learning_rate": 2.2445294969594844e-05, | |
| "loss": 0.0127, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 6.752, | |
| "grad_norm": 0.25471317768096924, | |
| "learning_rate": 2.22157602381196e-05, | |
| "loss": 0.0126, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 6.768, | |
| "grad_norm": 0.35891488194465637, | |
| "learning_rate": 2.1987069516373098e-05, | |
| "loss": 0.0123, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 6.784, | |
| "grad_norm": 0.22966614365577698, | |
| "learning_rate": 2.1759229751358217e-05, | |
| "loss": 0.0147, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 6.8, | |
| "grad_norm": 0.24113744497299194, | |
| "learning_rate": 2.1532247864228084e-05, | |
| "loss": 0.016, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 6.816, | |
| "grad_norm": 0.3109719753265381, | |
| "learning_rate": 2.1306130750075865e-05, | |
| "loss": 0.0184, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 6.832, | |
| "grad_norm": 0.3147088289260864, | |
| "learning_rate": 2.1080885277725236e-05, | |
| "loss": 0.0137, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 6.848, | |
| "grad_norm": 0.21128635108470917, | |
| "learning_rate": 2.085651828952175e-05, | |
| "loss": 0.0122, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 6.864, | |
| "grad_norm": 0.2620614767074585, | |
| "learning_rate": 2.063303660112506e-05, | |
| "loss": 0.0126, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 6.88, | |
| "grad_norm": 0.25589150190353394, | |
| "learning_rate": 2.0410447001301753e-05, | |
| "loss": 0.0136, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 6.896, | |
| "grad_norm": 0.16606681048870087, | |
| "learning_rate": 2.0188756251719203e-05, | |
| "loss": 0.0126, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 6.912, | |
| "grad_norm": 0.2236870974302292, | |
| "learning_rate": 1.9967971086740195e-05, | |
| "loss": 0.0138, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 6.928, | |
| "grad_norm": 0.13562509417533875, | |
| "learning_rate": 1.974809821321827e-05, | |
| "loss": 0.0122, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 6.944, | |
| "grad_norm": 0.2511296570301056, | |
| "learning_rate": 1.9529144310294023e-05, | |
| "loss": 0.0116, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 6.96, | |
| "grad_norm": 0.2233523279428482, | |
| "learning_rate": 1.9311116029192278e-05, | |
| "loss": 0.0128, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 6.976, | |
| "grad_norm": 0.2191087007522583, | |
| "learning_rate": 1.909401999301993e-05, | |
| "loss": 0.0131, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 6.992, | |
| "grad_norm": 0.22106347978115082, | |
| "learning_rate": 1.887786279656482e-05, | |
| "loss": 0.0133, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 7.008, | |
| "grad_norm": 0.18437567353248596, | |
| "learning_rate": 1.8662651006095387e-05, | |
| "loss": 0.0096, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 7.024, | |
| "grad_norm": 0.24440501630306244, | |
| "learning_rate": 1.8448391159161204e-05, | |
| "loss": 0.0134, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 7.04, | |
| "grad_norm": 0.16453684866428375, | |
| "learning_rate": 1.8235089764394408e-05, | |
| "loss": 0.0163, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 7.056, | |
| "grad_norm": 0.15823127329349518, | |
| "learning_rate": 1.8022753301311935e-05, | |
| "loss": 0.0162, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 7.072, | |
| "grad_norm": 0.21095283329486847, | |
| "learning_rate": 1.7811388220118707e-05, | |
| "loss": 0.0149, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 7.088, | |
| "grad_norm": 0.2918134033679962, | |
| "learning_rate": 1.7601000941511757e-05, | |
| "loss": 0.0133, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 7.104, | |
| "grad_norm": 0.21150650084018707, | |
| "learning_rate": 1.7391597856485083e-05, | |
| "loss": 0.0128, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 7.12, | |
| "grad_norm": 0.15610255300998688, | |
| "learning_rate": 1.7183185326135543e-05, | |
| "loss": 0.0149, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 7.136, | |
| "grad_norm": 0.23003989458084106, | |
| "learning_rate": 1.6975769681469705e-05, | |
| "loss": 0.0128, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 7.152, | |
| "grad_norm": 0.1882975697517395, | |
| "learning_rate": 1.676935722321139e-05, | |
| "loss": 0.0118, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 7.168, | |
| "grad_norm": 0.14895379543304443, | |
| "learning_rate": 1.6563954221610355e-05, | |
| "loss": 0.0127, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 7.184, | |
| "grad_norm": 0.25033193826675415, | |
| "learning_rate": 1.6359566916251845e-05, | |
| "loss": 0.0141, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 7.2, | |
| "grad_norm": 0.22429972887039185, | |
| "learning_rate": 1.615620151586697e-05, | |
| "loss": 0.012, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 7.216, | |
| "grad_norm": 0.20187127590179443, | |
| "learning_rate": 1.5953864198144135e-05, | |
| "loss": 0.015, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 7.232, | |
| "grad_norm": 0.19396039843559265, | |
| "learning_rate": 1.5752561109541447e-05, | |
| "loss": 0.0142, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 7.248, | |
| "grad_norm": 0.13741613924503326, | |
| "learning_rate": 1.5552298365099882e-05, | |
| "loss": 0.0124, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 7.264, | |
| "grad_norm": 0.1726469099521637, | |
| "learning_rate": 1.5353082048257596e-05, | |
| "loss": 0.0118, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 7.28, | |
| "grad_norm": 0.16924871504306793, | |
| "learning_rate": 1.5154918210665148e-05, | |
| "loss": 0.0103, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 7.296, | |
| "grad_norm": 0.27352505922317505, | |
| "learning_rate": 1.4957812872001614e-05, | |
| "loss": 0.0124, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 7.312, | |
| "grad_norm": 0.5956441164016724, | |
| "learning_rate": 1.4761772019791748e-05, | |
| "loss": 0.014, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 7.328, | |
| "grad_norm": 0.22458574175834656, | |
| "learning_rate": 1.4566801609224096e-05, | |
| "loss": 0.0121, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 7.344, | |
| "grad_norm": 0.23270508646965027, | |
| "learning_rate": 1.4372907562970079e-05, | |
| "loss": 0.013, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 7.36, | |
| "grad_norm": 0.16435019671916962, | |
| "learning_rate": 1.4180095771004154e-05, | |
| "loss": 0.0108, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 7.376, | |
| "grad_norm": 0.23128677904605865, | |
| "learning_rate": 1.3988372090424773e-05, | |
| "loss": 0.0149, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 7.392, | |
| "grad_norm": 0.29726120829582214, | |
| "learning_rate": 1.3797742345276521e-05, | |
| "loss": 0.0119, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 7.408, | |
| "grad_norm": 0.23969723284244537, | |
| "learning_rate": 1.3608212326373249e-05, | |
| "loss": 0.0136, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 7.424, | |
| "grad_norm": 0.1867367923259735, | |
| "learning_rate": 1.3419787791122062e-05, | |
| "loss": 0.0108, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 7.44, | |
| "grad_norm": 0.2569027841091156, | |
| "learning_rate": 1.323247446334847e-05, | |
| "loss": 0.0117, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 7.456, | |
| "grad_norm": 0.24020028114318848, | |
| "learning_rate": 1.3046278033122577e-05, | |
| "loss": 0.0165, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 7.4719999999999995, | |
| "grad_norm": 0.17290015518665314, | |
| "learning_rate": 1.286120415658611e-05, | |
| "loss": 0.0151, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 7.4879999999999995, | |
| "grad_norm": 0.15135730803012848, | |
| "learning_rate": 1.2677258455780683e-05, | |
| "loss": 0.0123, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 7.504, | |
| "grad_norm": 0.139713317155838, | |
| "learning_rate": 1.2494446518477022e-05, | |
| "loss": 0.0146, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 7.52, | |
| "grad_norm": 0.2007954716682434, | |
| "learning_rate": 1.2312773898005175e-05, | |
| "loss": 0.0169, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 7.536, | |
| "grad_norm": 0.2670764923095703, | |
| "learning_rate": 1.2132246113085822e-05, | |
| "loss": 0.0128, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 7.552, | |
| "grad_norm": 0.17625878751277924, | |
| "learning_rate": 1.1952868647662696e-05, | |
| "loss": 0.0117, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 7.568, | |
| "grad_norm": 0.2070666253566742, | |
| "learning_rate": 1.1774646950735913e-05, | |
| "loss": 0.0147, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 7.584, | |
| "grad_norm": 0.14905652403831482, | |
| "learning_rate": 1.1597586436196473e-05, | |
| "loss": 0.01, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 7.6, | |
| "grad_norm": 0.23670288920402527, | |
| "learning_rate": 1.1421692482661856e-05, | |
| "loss": 0.012, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 7.616, | |
| "grad_norm": 0.2268713414669037, | |
| "learning_rate": 1.124697043331256e-05, | |
| "loss": 0.0108, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 7.632, | |
| "grad_norm": 0.2760028541088104, | |
| "learning_rate": 1.107342559572977e-05, | |
| "loss": 0.0135, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 7.648, | |
| "grad_norm": 0.10061554610729218, | |
| "learning_rate": 1.090106324173426e-05, | |
| "loss": 0.0142, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 7.664, | |
| "grad_norm": 0.1480797976255417, | |
| "learning_rate": 1.0729888607226113e-05, | |
| "loss": 0.0135, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 7.68, | |
| "grad_norm": 0.1497049182653427, | |
| "learning_rate": 1.0559906892025745e-05, | |
| "loss": 0.0106, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 7.696, | |
| "grad_norm": 0.20382311940193176, | |
| "learning_rate": 1.0391123259715906e-05, | |
| "loss": 0.0129, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 7.712, | |
| "grad_norm": 0.1691877841949463, | |
| "learning_rate": 1.0223542837484839e-05, | |
| "loss": 0.0119, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 7.728, | |
| "grad_norm": 0.18750256299972534, | |
| "learning_rate": 1.0057170715970559e-05, | |
| "loss": 0.0113, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 7.744, | |
| "grad_norm": 0.19457529485225677, | |
| "learning_rate": 9.892011949106172e-06, | |
| "loss": 0.0224, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 7.76, | |
| "grad_norm": 0.1513475924730301, | |
| "learning_rate": 9.728071553966339e-06, | |
| "loss": 0.0144, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 7.776, | |
| "grad_norm": 0.22417619824409485, | |
| "learning_rate": 9.56535451061496e-06, | |
| "loss": 0.0097, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 7.792, | |
| "grad_norm": 0.16716934740543365, | |
| "learning_rate": 9.403865761953779e-06, | |
| "loss": 0.0138, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 7.808, | |
| "grad_norm": 0.1531108170747757, | |
| "learning_rate": 9.243610213572285e-06, | |
| "loss": 0.0139, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 7.824, | |
| "grad_norm": 0.13675959408283234, | |
| "learning_rate": 9.084592733598735e-06, | |
| "loss": 0.0106, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 7.84, | |
| "grad_norm": 0.1930346041917801, | |
| "learning_rate": 8.92681815255219e-06, | |
| "loss": 0.0121, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 7.856, | |
| "grad_norm": 0.2675817310810089, | |
| "learning_rate": 8.770291263195819e-06, | |
| "loss": 0.0114, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 7.872, | |
| "grad_norm": 0.19790367782115936, | |
| "learning_rate": 8.615016820391342e-06, | |
| "loss": 0.0102, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 7.888, | |
| "grad_norm": 0.21043841540813446, | |
| "learning_rate": 8.460999540954517e-06, | |
| "loss": 0.0107, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 7.904, | |
| "grad_norm": 0.1498277336359024, | |
| "learning_rate": 8.308244103511909e-06, | |
| "loss": 0.0117, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 7.92, | |
| "grad_norm": 0.1649218648672104, | |
| "learning_rate": 8.156755148358764e-06, | |
| "loss": 0.012, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 7.936, | |
| "grad_norm": 0.1961694359779358, | |
| "learning_rate": 8.00653727731801e-06, | |
| "loss": 0.0122, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 7.952, | |
| "grad_norm": 0.22549936175346375, | |
| "learning_rate": 7.857595053600513e-06, | |
| "loss": 0.0132, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 7.968, | |
| "grad_norm": 0.2856388986110687, | |
| "learning_rate": 7.709933001666431e-06, | |
| "loss": 0.0157, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 7.984, | |
| "grad_norm": 0.14908449351787567, | |
| "learning_rate": 7.56355560708778e-06, | |
| "loss": 0.0091, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 8.0, | |
| "grad_norm": 0.521977961063385, | |
| "learning_rate": 7.418467316412158e-06, | |
| "loss": 0.0143, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 8.016, | |
| "grad_norm": 0.17278841137886047, | |
| "learning_rate": 7.2746725370277435e-06, | |
| "loss": 0.0109, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 8.032, | |
| "grad_norm": 0.12947969138622284, | |
| "learning_rate": 7.132175637029293e-06, | |
| "loss": 0.0126, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 8.048, | |
| "grad_norm": 0.1735750287771225, | |
| "learning_rate": 6.9909809450855345e-06, | |
| "loss": 0.0129, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 8.064, | |
| "grad_norm": 0.20186170935630798, | |
| "learning_rate": 6.851092750307686e-06, | |
| "loss": 0.0107, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 8.08, | |
| "grad_norm": 0.32423511147499084, | |
| "learning_rate": 6.712515302119077e-06, | |
| "loss": 0.0135, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 8.096, | |
| "grad_norm": 0.17486923933029175, | |
| "learning_rate": 6.575252810126143e-06, | |
| "loss": 0.0103, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 8.112, | |
| "grad_norm": 0.2205979824066162, | |
| "learning_rate": 6.439309443990532e-06, | |
| "loss": 0.0105, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 8.128, | |
| "grad_norm": 0.11239255964756012, | |
| "learning_rate": 6.304689333302416e-06, | |
| "loss": 0.011, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 8.144, | |
| "grad_norm": 0.13867992162704468, | |
| "learning_rate": 6.171396567455051e-06, | |
| "loss": 0.0115, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 8.16, | |
| "grad_norm": 0.23485629260540009, | |
| "learning_rate": 6.039435195520604e-06, | |
| "loss": 0.0153, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 8.176, | |
| "grad_norm": 0.15345227718353271, | |
| "learning_rate": 5.908809226127054e-06, | |
| "loss": 0.009, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 8.192, | |
| "grad_norm": 0.24234718084335327, | |
| "learning_rate": 5.779522627336537e-06, | |
| "loss": 0.0137, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 8.208, | |
| "grad_norm": 0.1632985919713974, | |
| "learning_rate": 5.651579326524709e-06, | |
| "loss": 0.0103, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 8.224, | |
| "grad_norm": 0.1566225290298462, | |
| "learning_rate": 5.524983210261481e-06, | |
| "loss": 0.0098, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 8.24, | |
| "grad_norm": 0.16245003044605255, | |
| "learning_rate": 5.399738124192988e-06, | |
| "loss": 0.0108, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 8.256, | |
| "grad_norm": 0.11000703275203705, | |
| "learning_rate": 5.2758478729247164e-06, | |
| "loss": 0.012, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 8.272, | |
| "grad_norm": 0.12327095866203308, | |
| "learning_rate": 5.153316219905946e-06, | |
| "loss": 0.0083, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 8.288, | |
| "grad_norm": 0.15688049793243408, | |
| "learning_rate": 5.032146887315448e-06, | |
| "loss": 0.0101, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 8.304, | |
| "grad_norm": 0.10358070582151413, | |
| "learning_rate": 4.91234355594839e-06, | |
| "loss": 0.0122, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 8.32, | |
| "grad_norm": 0.18736572563648224, | |
| "learning_rate": 4.7939098651045235e-06, | |
| "loss": 0.0147, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 8.336, | |
| "grad_norm": 0.22404837608337402, | |
| "learning_rate": 4.67684941247768e-06, | |
| "loss": 0.012, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 8.352, | |
| "grad_norm": 0.22107815742492676, | |
| "learning_rate": 4.5611657540464036e-06, | |
| "loss": 0.0121, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 8.368, | |
| "grad_norm": 0.16686831414699554, | |
| "learning_rate": 4.446862403965984e-06, | |
| "loss": 0.0131, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 8.384, | |
| "grad_norm": 0.1687074899673462, | |
| "learning_rate": 4.333942834461702e-06, | |
| "loss": 0.0114, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 8.4, | |
| "grad_norm": 0.1413266956806183, | |
| "learning_rate": 4.222410475723326e-06, | |
| "loss": 0.0089, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 8.416, | |
| "grad_norm": 0.14043653011322021, | |
| "learning_rate": 4.112268715800943e-06, | |
| "loss": 0.0113, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 8.432, | |
| "grad_norm": 0.2124572992324829, | |
| "learning_rate": 4.003520900502028e-06, | |
| "loss": 0.0133, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 8.448, | |
| "grad_norm": 0.1962769776582718, | |
| "learning_rate": 3.8961703332898e-06, | |
| "loss": 0.0124, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 8.464, | |
| "grad_norm": 0.22625181078910828, | |
| "learning_rate": 3.790220275182854e-06, | |
| "loss": 0.0107, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 8.48, | |
| "grad_norm": 0.14229370653629303, | |
| "learning_rate": 3.685673944656176e-06, | |
| "loss": 0.0091, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 8.496, | |
| "grad_norm": 0.165771484375, | |
| "learning_rate": 3.582534517543268e-06, | |
| "loss": 0.0107, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 8.512, | |
| "grad_norm": 0.2058086097240448, | |
| "learning_rate": 3.4808051269397512e-06, | |
| "loss": 0.0094, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 8.528, | |
| "grad_norm": 0.16586236655712128, | |
| "learning_rate": 3.380488863108183e-06, | |
| "loss": 0.0091, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 8.544, | |
| "grad_norm": 0.0816253200173378, | |
| "learning_rate": 3.2815887733841365e-06, | |
| "loss": 0.0101, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 8.56, | |
| "grad_norm": 0.15894082188606262, | |
| "learning_rate": 3.1841078620836683e-06, | |
| "loss": 0.011, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 8.576, | |
| "grad_norm": 0.22336991131305695, | |
| "learning_rate": 3.0880490904120874e-06, | |
| "loss": 0.0115, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 8.592, | |
| "grad_norm": 0.18074090778827667, | |
| "learning_rate": 2.9934153763739205e-06, | |
| "loss": 0.0108, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 8.608, | |
| "grad_norm": 0.2321094572544098, | |
| "learning_rate": 2.9002095946843277e-06, | |
| "loss": 0.0131, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 8.624, | |
| "grad_norm": 0.16386102139949799, | |
| "learning_rate": 2.8084345766817676e-06, | |
| "loss": 0.0114, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 8.64, | |
| "grad_norm": 0.14855244755744934, | |
| "learning_rate": 2.718093110241976e-06, | |
| "loss": 0.0104, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 8.656, | |
| "grad_norm": 0.14371752738952637, | |
| "learning_rate": 2.6291879396933004e-06, | |
| "loss": 0.0132, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 8.672, | |
| "grad_norm": 0.10209663212299347, | |
| "learning_rate": 2.541721765733318e-06, | |
| "loss": 0.0106, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 8.688, | |
| "grad_norm": 0.1142280250787735, | |
| "learning_rate": 2.455697245346783e-06, | |
| "loss": 0.0082, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 8.704, | |
| "grad_norm": 0.16117703914642334, | |
| "learning_rate": 2.371116991724953e-06, | |
| "loss": 0.0109, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 8.72, | |
| "grad_norm": 0.2877863347530365, | |
| "learning_rate": 2.2879835741861586e-06, | |
| "loss": 0.0139, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 8.736, | |
| "grad_norm": 0.1805974543094635, | |
| "learning_rate": 2.206299518097804e-06, | |
| "loss": 0.0088, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 8.752, | |
| "grad_norm": 0.20757748186588287, | |
| "learning_rate": 2.1260673047996227e-06, | |
| "loss": 0.012, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 8.768, | |
| "grad_norm": 0.22510504722595215, | |
| "learning_rate": 2.047289371528299e-06, | |
| "loss": 0.0095, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 8.784, | |
| "grad_norm": 0.3317309617996216, | |
| "learning_rate": 1.96996811134344e-06, | |
| "loss": 0.011, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 8.8, | |
| "grad_norm": 0.2029017060995102, | |
| "learning_rate": 1.8941058730549132e-06, | |
| "loss": 0.0087, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 8.816, | |
| "grad_norm": 0.2369084507226944, | |
| "learning_rate": 1.8197049611514194e-06, | |
| "loss": 0.0095, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 8.832, | |
| "grad_norm": 0.10593786090612411, | |
| "learning_rate": 1.7467676357305561e-06, | |
| "loss": 0.009, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 8.848, | |
| "grad_norm": 0.1802545189857483, | |
| "learning_rate": 1.6752961124301415e-06, | |
| "loss": 0.0102, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 8.864, | |
| "grad_norm": 0.169978067278862, | |
| "learning_rate": 1.6052925623609049e-06, | |
| "loss": 0.0147, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 8.88, | |
| "grad_norm": 0.13325443863868713, | |
| "learning_rate": 1.5367591120405256e-06, | |
| "loss": 0.0085, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 8.896, | |
| "grad_norm": 0.12190234661102295, | |
| "learning_rate": 1.4696978433290653e-06, | |
| "loss": 0.0095, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 8.912, | |
| "grad_norm": 0.18234238028526306, | |
| "learning_rate": 1.4041107933656928e-06, | |
| "loss": 0.0084, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 8.928, | |
| "grad_norm": 0.13647478818893433, | |
| "learning_rate": 1.339999954506821e-06, | |
| "loss": 0.0088, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 8.943999999999999, | |
| "grad_norm": 0.15723392367362976, | |
| "learning_rate": 1.2773672742655784e-06, | |
| "loss": 0.0111, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 8.96, | |
| "grad_norm": 0.13917504251003265, | |
| "learning_rate": 1.2162146552526399e-06, | |
| "loss": 0.0137, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 8.975999999999999, | |
| "grad_norm": 0.29914140701293945, | |
| "learning_rate": 1.1565439551184664e-06, | |
| "loss": 0.0136, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 8.992, | |
| "grad_norm": 0.18706418573856354, | |
| "learning_rate": 1.0983569864968346e-06, | |
| "loss": 0.01, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 9.008, | |
| "grad_norm": 0.2464447021484375, | |
| "learning_rate": 1.0416555169497688e-06, | |
| "loss": 0.0095, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 9.024, | |
| "grad_norm": 0.08482413738965988, | |
| "learning_rate": 9.864412689139123e-07, | |
| "loss": 0.0099, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 9.04, | |
| "grad_norm": 0.10894379019737244, | |
| "learning_rate": 9.327159196481138e-07, | |
| "loss": 0.0153, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 9.056, | |
| "grad_norm": 0.2444671392440796, | |
| "learning_rate": 8.804811011825398e-07, | |
| "loss": 0.0089, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 9.072, | |
| "grad_norm": 0.11199894547462463, | |
| "learning_rate": 8.297384002690866e-07, | |
| "loss": 0.0118, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 9.088, | |
| "grad_norm": 0.1470123827457428, | |
| "learning_rate": 7.804893583331696e-07, | |
| "loss": 0.0095, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 9.104, | |
| "grad_norm": 0.148183673620224, | |
| "learning_rate": 7.32735471426893e-07, | |
| "loss": 0.0123, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 9.12, | |
| "grad_norm": 0.15440213680267334, | |
| "learning_rate": 6.864781901836259e-07, | |
| "loss": 0.0138, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 9.136, | |
| "grad_norm": 0.1314764767885208, | |
| "learning_rate": 6.417189197739093e-07, | |
| "loss": 0.0083, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 9.152, | |
| "grad_norm": 0.16717897355556488, | |
| "learning_rate": 5.984590198627849e-07, | |
| "loss": 0.0152, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 9.168, | |
| "grad_norm": 0.12494492530822754, | |
| "learning_rate": 5.566998045685112e-07, | |
| "loss": 0.0127, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 9.184, | |
| "grad_norm": 0.12919697165489197, | |
| "learning_rate": 5.164425424226016e-07, | |
| "loss": 0.0125, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 9.2, | |
| "grad_norm": 0.16202719509601593, | |
| "learning_rate": 4.776884563313266e-07, | |
| "loss": 0.0098, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 9.216, | |
| "grad_norm": 0.1486562341451645, | |
| "learning_rate": 4.404387235385443e-07, | |
| "loss": 0.0097, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 9.232, | |
| "grad_norm": 0.11309578269720078, | |
| "learning_rate": 4.0469447558995065e-07, | |
| "loss": 0.0126, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 9.248, | |
| "grad_norm": 0.1437896192073822, | |
| "learning_rate": 3.7045679829870175e-07, | |
| "loss": 0.0099, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 9.264, | |
| "grad_norm": 0.13565130531787872, | |
| "learning_rate": 3.377267317124233e-07, | |
| "loss": 0.0101, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 9.28, | |
| "grad_norm": 0.2143915295600891, | |
| "learning_rate": 3.0650527008162513e-07, | |
| "loss": 0.0116, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 9.296, | |
| "grad_norm": 0.11689619719982147, | |
| "learning_rate": 2.767933618295082e-07, | |
| "loss": 0.0095, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 9.312, | |
| "grad_norm": 0.1398339420557022, | |
| "learning_rate": 2.485919095231326e-07, | |
| "loss": 0.0098, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 9.328, | |
| "grad_norm": 0.18729491531848907, | |
| "learning_rate": 2.219017698460002e-07, | |
| "loss": 0.0127, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 9.344, | |
| "grad_norm": 0.16763921082019806, | |
| "learning_rate": 1.9672375357206452e-07, | |
| "loss": 0.0119, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 9.36, | |
| "grad_norm": 0.2026500701904297, | |
| "learning_rate": 1.73058625541056e-07, | |
| "loss": 0.0127, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 9.376, | |
| "grad_norm": 0.14854931831359863, | |
| "learning_rate": 1.5090710463527836e-07, | |
| "loss": 0.0124, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 9.392, | |
| "grad_norm": 0.1597309410572052, | |
| "learning_rate": 1.3026986375776485e-07, | |
| "loss": 0.0099, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 9.408, | |
| "grad_norm": 0.23859287798404694, | |
| "learning_rate": 1.1114752981183917e-07, | |
| "loss": 0.0129, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 9.424, | |
| "grad_norm": 0.15764664113521576, | |
| "learning_rate": 9.354068368204739e-08, | |
| "loss": 0.0145, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 9.44, | |
| "grad_norm": 0.1235203966498375, | |
| "learning_rate": 7.744986021656076e-08, | |
| "loss": 0.0085, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 9.456, | |
| "grad_norm": 0.13421794772148132, | |
| "learning_rate": 6.287554821087783e-08, | |
| "loss": 0.011, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 9.472, | |
| "grad_norm": 0.1450832039117813, | |
| "learning_rate": 4.981819039300284e-08, | |
| "loss": 0.0117, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 9.488, | |
| "grad_norm": 0.12155359238386154, | |
| "learning_rate": 3.827818341000655e-08, | |
| "loss": 0.0122, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 9.504, | |
| "grad_norm": 0.1258874088525772, | |
| "learning_rate": 2.8255877815946963e-08, | |
| "loss": 0.009, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 9.52, | |
| "grad_norm": 0.17948633432388306, | |
| "learning_rate": 1.9751578061244504e-08, | |
| "loss": 0.0083, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 9.536, | |
| "grad_norm": 0.08846985548734665, | |
| "learning_rate": 1.2765542483417214e-08, | |
| "loss": 0.0104, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 9.552, | |
| "grad_norm": 0.22791078686714172, | |
| "learning_rate": 7.2979832992592365e-09, | |
| "loss": 0.0091, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 9.568, | |
| "grad_norm": 0.14047648012638092, | |
| "learning_rate": 3.349066598362649e-09, | |
| "loss": 0.0083, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 9.584, | |
| "grad_norm": 0.07709770649671555, | |
| "learning_rate": 9.189123380826114e-10, | |
| "loss": 0.0093, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 9.6, | |
| "grad_norm": 0.1003396213054657, | |
| "learning_rate": 7.594339912486703e-12, | |
| "loss": 0.0104, | |
| "step": 6000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 6000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 10, | |
| "save_steps": 2000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |