{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 269,
  "global_step": 1073,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0009319664492078285,
      "grad_norm": 23.36980628967285,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 9.2544,
      "step": 1
    },
    {
      "epoch": 0.0009319664492078285,
      "eval_loss": 9.81535816192627,
      "eval_runtime": 126.0986,
      "eval_samples_per_second": 14.33,
      "eval_steps_per_second": 3.584,
      "step": 1
    },
    {
      "epoch": 0.001863932898415657,
      "grad_norm": 18.088346481323242,
      "learning_rate": 4.000000000000001e-06,
      "loss": 9.099,
      "step": 2
    },
    {
      "epoch": 0.0027958993476234857,
      "grad_norm": 18.9443359375,
      "learning_rate": 6e-06,
      "loss": 8.8085,
      "step": 3
    },
    {
      "epoch": 0.003727865796831314,
      "grad_norm": 19.623462677001953,
      "learning_rate": 8.000000000000001e-06,
      "loss": 8.7971,
      "step": 4
    },
    {
      "epoch": 0.004659832246039142,
      "grad_norm": 20.289655685424805,
      "learning_rate": 1e-05,
      "loss": 8.7141,
      "step": 5
    },
    {
      "epoch": 0.005591798695246971,
      "grad_norm": 24.600669860839844,
      "learning_rate": 1.2e-05,
      "loss": 9.3088,
      "step": 6
    },
    {
      "epoch": 0.0065237651444548,
      "grad_norm": 19.402236938476562,
      "learning_rate": 1.4000000000000001e-05,
      "loss": 9.213,
      "step": 7
    },
    {
      "epoch": 0.007455731593662628,
      "grad_norm": 17.378143310546875,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 8.2946,
      "step": 8
    },
    {
      "epoch": 0.008387698042870456,
      "grad_norm": 20.91484832763672,
      "learning_rate": 1.8e-05,
      "loss": 8.6097,
      "step": 9
    },
    {
      "epoch": 0.009319664492078284,
      "grad_norm": 20.42852020263672,
      "learning_rate": 2e-05,
      "loss": 8.1296,
      "step": 10
    },
    {
      "epoch": 0.010251630941286114,
      "grad_norm": 17.04209327697754,
      "learning_rate": 2.2000000000000003e-05,
      "loss": 8.5266,
      "step": 11
    },
    {
      "epoch": 0.011183597390493943,
      "grad_norm": 21.660465240478516,
      "learning_rate": 2.4e-05,
      "loss": 9.4731,
      "step": 12
    },
    {
      "epoch": 0.012115563839701771,
      "grad_norm": 18.74623680114746,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 7.6045,
      "step": 13
    },
    {
      "epoch": 0.0130475302889096,
      "grad_norm": 19.178571701049805,
      "learning_rate": 2.8000000000000003e-05,
      "loss": 7.9309,
      "step": 14
    },
    {
      "epoch": 0.013979496738117428,
      "grad_norm": 19.65428924560547,
      "learning_rate": 3e-05,
      "loss": 8.9349,
      "step": 15
    },
    {
      "epoch": 0.014911463187325256,
      "grad_norm": 19.72906494140625,
      "learning_rate": 3.2000000000000005e-05,
      "loss": 8.135,
      "step": 16
    },
    {
      "epoch": 0.015843429636533086,
      "grad_norm": 17.341209411621094,
      "learning_rate": 3.4000000000000007e-05,
      "loss": 7.803,
      "step": 17
    },
    {
      "epoch": 0.016775396085740912,
      "grad_norm": 16.997432708740234,
      "learning_rate": 3.6e-05,
      "loss": 7.5903,
      "step": 18
    },
    {
      "epoch": 0.017707362534948742,
      "grad_norm": 18.027070999145508,
      "learning_rate": 3.8e-05,
      "loss": 7.8597,
      "step": 19
    },
    {
      "epoch": 0.01863932898415657,
      "grad_norm": 18.275178909301758,
      "learning_rate": 4e-05,
      "loss": 8.2936,
      "step": 20
    },
    {
      "epoch": 0.0195712954333644,
      "grad_norm": 19.839303970336914,
      "learning_rate": 4.2e-05,
      "loss": 8.1574,
      "step": 21
    },
    {
      "epoch": 0.02050326188257223,
      "grad_norm": 19.413339614868164,
      "learning_rate": 4.4000000000000006e-05,
      "loss": 8.2292,
      "step": 22
    },
    {
      "epoch": 0.021435228331780055,
      "grad_norm": 25.84449577331543,
      "learning_rate": 4.600000000000001e-05,
      "loss": 6.6394,
      "step": 23
    },
    {
      "epoch": 0.022367194780987885,
      "grad_norm": 29.13704490661621,
      "learning_rate": 4.8e-05,
      "loss": 6.0639,
      "step": 24
    },
    {
      "epoch": 0.023299161230195712,
      "grad_norm": 106.7900619506836,
      "learning_rate": 5e-05,
      "loss": 6.7721,
      "step": 25
    },
    {
      "epoch": 0.024231127679403542,
      "grad_norm": 17.748001098632812,
      "learning_rate": 5.2000000000000004e-05,
      "loss": 5.3139,
      "step": 26
    },
    {
      "epoch": 0.02516309412861137,
      "grad_norm": 19.872982025146484,
      "learning_rate": 5.4000000000000005e-05,
      "loss": 4.7766,
      "step": 27
    },
    {
      "epoch": 0.0260950605778192,
      "grad_norm": 19.577491760253906,
      "learning_rate": 5.6000000000000006e-05,
      "loss": 4.7364,
      "step": 28
    },
    {
      "epoch": 0.02702702702702703,
      "grad_norm": 20.095836639404297,
      "learning_rate": 5.8e-05,
      "loss": 3.182,
      "step": 29
    },
    {
      "epoch": 0.027958993476234855,
      "grad_norm": 18.048362731933594,
      "learning_rate": 6e-05,
      "loss": 2.9527,
      "step": 30
    },
    {
      "epoch": 0.028890959925442685,
      "grad_norm": 18.98998260498047,
      "learning_rate": 6.2e-05,
      "loss": 2.4878,
      "step": 31
    },
    {
      "epoch": 0.02982292637465051,
      "grad_norm": 17.988750457763672,
      "learning_rate": 6.400000000000001e-05,
      "loss": 2.4282,
      "step": 32
    },
    {
      "epoch": 0.03075489282385834,
      "grad_norm": 15.404643058776855,
      "learning_rate": 6.6e-05,
      "loss": 2.4879,
      "step": 33
    },
    {
      "epoch": 0.03168685927306617,
      "grad_norm": 16.794700622558594,
      "learning_rate": 6.800000000000001e-05,
      "loss": 1.8821,
      "step": 34
    },
    {
      "epoch": 0.032618825722273995,
      "grad_norm": 12.591143608093262,
      "learning_rate": 7e-05,
      "loss": 2.0967,
      "step": 35
    },
    {
      "epoch": 0.033550792171481825,
      "grad_norm": 12.896982192993164,
      "learning_rate": 7.2e-05,
      "loss": 1.4124,
      "step": 36
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 13.917445182800293,
      "learning_rate": 7.4e-05,
      "loss": 1.6743,
      "step": 37
    },
    {
      "epoch": 0.035414725069897485,
      "grad_norm": 11.748924255371094,
      "learning_rate": 7.6e-05,
      "loss": 1.7939,
      "step": 38
    },
    {
      "epoch": 0.036346691519105315,
      "grad_norm": 12.96652603149414,
      "learning_rate": 7.800000000000001e-05,
      "loss": 1.1786,
      "step": 39
    },
    {
      "epoch": 0.03727865796831314,
      "grad_norm": 20.441051483154297,
      "learning_rate": 8e-05,
      "loss": 1.1189,
      "step": 40
    },
    {
      "epoch": 0.03821062441752097,
      "grad_norm": 15.231740951538086,
      "learning_rate": 8.2e-05,
      "loss": 1.3477,
      "step": 41
    },
    {
      "epoch": 0.0391425908667288,
      "grad_norm": 11.24478530883789,
      "learning_rate": 8.4e-05,
      "loss": 1.0997,
      "step": 42
    },
    {
      "epoch": 0.04007455731593663,
      "grad_norm": 18.6605224609375,
      "learning_rate": 8.6e-05,
      "loss": 1.2903,
      "step": 43
    },
    {
      "epoch": 0.04100652376514446,
      "grad_norm": 13.082992553710938,
      "learning_rate": 8.800000000000001e-05,
      "loss": 0.7943,
      "step": 44
    },
    {
      "epoch": 0.04193849021435228,
      "grad_norm": 12.835314750671387,
      "learning_rate": 9e-05,
      "loss": 1.2972,
      "step": 45
    },
    {
      "epoch": 0.04287045666356011,
      "grad_norm": 12.402058601379395,
      "learning_rate": 9.200000000000001e-05,
      "loss": 1.7079,
      "step": 46
    },
    {
      "epoch": 0.04380242311276794,
      "grad_norm": 12.079973220825195,
      "learning_rate": 9.4e-05,
      "loss": 2.0969,
      "step": 47
    },
    {
      "epoch": 0.04473438956197577,
      "grad_norm": 11.028013229370117,
      "learning_rate": 9.6e-05,
      "loss": 1.0664,
      "step": 48
    },
    {
      "epoch": 0.045666356011183594,
      "grad_norm": 24.05512809753418,
      "learning_rate": 9.8e-05,
      "loss": 1.1056,
      "step": 49
    },
    {
      "epoch": 0.046598322460391424,
      "grad_norm": 89.79100036621094,
      "learning_rate": 0.0001,
      "loss": 1.385,
      "step": 50
    },
    {
      "epoch": 0.047530288909599254,
      "grad_norm": 9.370144844055176,
      "learning_rate": 9.999976423021617e-05,
      "loss": 0.3987,
      "step": 51
    },
    {
      "epoch": 0.048462255358807084,
      "grad_norm": 9.368326187133789,
      "learning_rate": 9.999905692308813e-05,
      "loss": 0.5446,
      "step": 52
    },
    {
      "epoch": 0.049394221808014914,
      "grad_norm": 8.342625617980957,
      "learning_rate": 9.999787808528638e-05,
      "loss": 0.5475,
      "step": 53
    },
    {
      "epoch": 0.05032618825722274,
      "grad_norm": 3.533601999282837,
      "learning_rate": 9.999622772792828e-05,
      "loss": 0.2045,
      "step": 54
    },
    {
      "epoch": 0.05125815470643057,
      "grad_norm": 5.094264507293701,
      "learning_rate": 9.9994105866578e-05,
      "loss": 0.4675,
      "step": 55
    },
    {
      "epoch": 0.0521901211556384,
      "grad_norm": 8.645957946777344,
      "learning_rate": 9.999151252124639e-05,
      "loss": 0.2873,
      "step": 56
    },
    {
      "epoch": 0.05312208760484623,
      "grad_norm": 10.678857803344727,
      "learning_rate": 9.998844771639073e-05,
      "loss": 0.182,
      "step": 57
    },
    {
      "epoch": 0.05405405405405406,
      "grad_norm": 9.783062934875488,
      "learning_rate": 9.998491148091457e-05,
      "loss": 0.5509,
      "step": 58
    },
    {
      "epoch": 0.05498602050326188,
      "grad_norm": 4.751331329345703,
      "learning_rate": 9.99809038481674e-05,
      "loss": 0.1962,
      "step": 59
    },
    {
      "epoch": 0.05591798695246971,
      "grad_norm": 7.471037864685059,
      "learning_rate": 9.997642485594436e-05,
      "loss": 0.4997,
      "step": 60
    },
    {
      "epoch": 0.05684995340167754,
      "grad_norm": 3.7392351627349854,
      "learning_rate": 9.99714745464859e-05,
      "loss": 0.2825,
      "step": 61
    },
    {
      "epoch": 0.05778191985088537,
      "grad_norm": 3.4164586067199707,
      "learning_rate": 9.996605296647737e-05,
      "loss": 0.3177,
      "step": 62
    },
    {
      "epoch": 0.05871388630009319,
      "grad_norm": 7.206740856170654,
      "learning_rate": 9.996016016704854e-05,
      "loss": 0.2987,
      "step": 63
    },
    {
      "epoch": 0.05964585274930102,
      "grad_norm": 3.8595774173736572,
      "learning_rate": 9.995379620377318e-05,
      "loss": 0.3315,
      "step": 64
    },
    {
      "epoch": 0.06057781919850885,
      "grad_norm": 2.3879973888397217,
      "learning_rate": 9.99469611366685e-05,
      "loss": 0.2275,
      "step": 65
    },
    {
      "epoch": 0.06150978564771668,
      "grad_norm": 3.690859794616699,
      "learning_rate": 9.993965503019457e-05,
      "loss": 0.25,
      "step": 66
    },
    {
      "epoch": 0.06244175209692451,
      "grad_norm": 5.447560787200928,
      "learning_rate": 9.99318779532538e-05,
      "loss": 0.5862,
      "step": 67
    },
    {
      "epoch": 0.06337371854613234,
      "grad_norm": 3.505098819732666,
      "learning_rate": 9.992362997919016e-05,
      "loss": 0.4662,
      "step": 68
    },
    {
      "epoch": 0.06430568499534017,
      "grad_norm": 5.845356464385986,
      "learning_rate": 9.991491118578856e-05,
      "loss": 0.393,
      "step": 69
    },
    {
      "epoch": 0.06523765144454799,
      "grad_norm": 5.72480583190918,
      "learning_rate": 9.990572165527413e-05,
      "loss": 0.5041,
      "step": 70
    },
    {
      "epoch": 0.06616961789375582,
      "grad_norm": 5.941817283630371,
      "learning_rate": 9.98960614743114e-05,
      "loss": 0.4596,
      "step": 71
    },
    {
      "epoch": 0.06710158434296365,
      "grad_norm": 9.465463638305664,
      "learning_rate": 9.988593073400354e-05,
      "loss": 0.6977,
      "step": 72
    },
    {
      "epoch": 0.06803355079217148,
      "grad_norm": 8.731477737426758,
      "learning_rate": 9.987532952989145e-05,
      "loss": 0.4233,
      "step": 73
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 15.845611572265625,
      "learning_rate": 9.986425796195287e-05,
      "loss": 0.6312,
      "step": 74
    },
    {
      "epoch": 0.06989748369058714,
      "grad_norm": 37.47452926635742,
      "learning_rate": 9.985271613460144e-05,
      "loss": 1.1223,
      "step": 75
    },
    {
      "epoch": 0.07082945013979497,
      "grad_norm": 10.461603164672852,
      "learning_rate": 9.984070415668574e-05,
      "loss": 0.3865,
      "step": 76
    },
    {
      "epoch": 0.0717614165890028,
      "grad_norm": 4.933948993682861,
      "learning_rate": 9.982822214148821e-05,
      "loss": 0.2947,
      "step": 77
    },
    {
      "epoch": 0.07269338303821063,
      "grad_norm": 5.168891906738281,
      "learning_rate": 9.981527020672413e-05,
      "loss": 0.1991,
      "step": 78
    },
    {
      "epoch": 0.07362534948741846,
      "grad_norm": 8.993535995483398,
      "learning_rate": 9.980184847454052e-05,
      "loss": 0.474,
      "step": 79
    },
    {
      "epoch": 0.07455731593662628,
      "grad_norm": 10.690693855285645,
      "learning_rate": 9.978795707151492e-05,
      "loss": 0.5728,
      "step": 80
    },
    {
      "epoch": 0.0754892823858341,
      "grad_norm": 7.1764044761657715,
      "learning_rate": 9.977359612865423e-05,
      "loss": 0.324,
      "step": 81
    },
    {
      "epoch": 0.07642124883504194,
      "grad_norm": 3.059382438659668,
      "learning_rate": 9.975876578139355e-05,
      "loss": 0.2248,
      "step": 82
    },
    {
      "epoch": 0.07735321528424977,
      "grad_norm": 1.5979907512664795,
      "learning_rate": 9.974346616959476e-05,
      "loss": 0.1305,
      "step": 83
    },
    {
      "epoch": 0.0782851817334576,
      "grad_norm": 4.272276878356934,
      "learning_rate": 9.972769743754531e-05,
      "loss": 0.288,
      "step": 84
    },
    {
      "epoch": 0.07921714818266543,
      "grad_norm": 7.77036190032959,
      "learning_rate": 9.971145973395684e-05,
      "loss": 0.3033,
      "step": 85
    },
    {
      "epoch": 0.08014911463187326,
      "grad_norm": 6.204690933227539,
      "learning_rate": 9.969475321196373e-05,
      "loss": 0.347,
      "step": 86
    },
    {
      "epoch": 0.08108108108108109,
      "grad_norm": 8.32636833190918,
      "learning_rate": 9.96775780291217e-05,
      "loss": 0.2946,
      "step": 87
    },
    {
      "epoch": 0.08201304753028892,
      "grad_norm": 4.755763530731201,
      "learning_rate": 9.965993434740633e-05,
      "loss": 0.3311,
      "step": 88
    },
    {
      "epoch": 0.08294501397949673,
      "grad_norm": 1.5085945129394531,
      "learning_rate": 9.96418223332115e-05,
      "loss": 0.1361,
      "step": 89
    },
    {
      "epoch": 0.08387698042870456,
      "grad_norm": 5.299545764923096,
      "learning_rate": 9.962324215734782e-05,
      "loss": 0.3351,
      "step": 90
    },
    {
      "epoch": 0.08480894687791239,
      "grad_norm": 4.837111949920654,
      "learning_rate": 9.960419399504106e-05,
      "loss": 0.3379,
      "step": 91
    },
    {
      "epoch": 0.08574091332712022,
      "grad_norm": 5.275478839874268,
      "learning_rate": 9.958467802593046e-05,
      "loss": 0.2991,
      "step": 92
    },
    {
      "epoch": 0.08667287977632805,
      "grad_norm": 6.431980609893799,
      "learning_rate": 9.956469443406706e-05,
      "loss": 0.441,
      "step": 93
    },
    {
      "epoch": 0.08760484622553588,
      "grad_norm": 5.4865264892578125,
      "learning_rate": 9.954424340791196e-05,
      "loss": 0.3432,
      "step": 94
    },
    {
      "epoch": 0.08853681267474371,
      "grad_norm": 5.279883861541748,
      "learning_rate": 9.952332514033447e-05,
      "loss": 0.3338,
      "step": 95
    },
    {
      "epoch": 0.08946877912395154,
      "grad_norm": 2.8239941596984863,
      "learning_rate": 9.950193982861047e-05,
      "loss": 0.1205,
      "step": 96
    },
    {
      "epoch": 0.09040074557315937,
      "grad_norm": 10.564650535583496,
      "learning_rate": 9.948008767442034e-05,
      "loss": 0.1239,
      "step": 97
    },
    {
      "epoch": 0.09133271202236719,
      "grad_norm": 5.720789909362793,
      "learning_rate": 9.94577688838472e-05,
      "loss": 0.2613,
      "step": 98
    },
    {
      "epoch": 0.09226467847157502,
      "grad_norm": 28.29906463623047,
      "learning_rate": 9.943498366737487e-05,
      "loss": 0.6763,
      "step": 99
    },
    {
      "epoch": 0.09319664492078285,
      "grad_norm": 85.7387924194336,
      "learning_rate": 9.941173223988602e-05,
      "loss": 1.8956,
      "step": 100
    },
    {
      "epoch": 0.09412861136999068,
      "grad_norm": 2.3096601963043213,
      "learning_rate": 9.938801482065998e-05,
      "loss": 0.2865,
      "step": 101
    },
    {
      "epoch": 0.09506057781919851,
      "grad_norm": 3.718655586242676,
      "learning_rate": 9.93638316333708e-05,
      "loss": 0.3133,
      "step": 102
    },
    {
      "epoch": 0.09599254426840634,
      "grad_norm": 3.8024020195007324,
      "learning_rate": 9.933918290608509e-05,
      "loss": 0.183,
      "step": 103
    },
    {
      "epoch": 0.09692451071761417,
      "grad_norm": 2.2328927516937256,
      "learning_rate": 9.93140688712598e-05,
      "loss": 0.2483,
      "step": 104
    },
    {
      "epoch": 0.097856477166822,
      "grad_norm": 3.0202109813690186,
      "learning_rate": 9.928848976574019e-05,
      "loss": 0.2782,
      "step": 105
    },
    {
      "epoch": 0.09878844361602983,
      "grad_norm": 1.373205304145813,
      "learning_rate": 9.926244583075748e-05,
      "loss": 0.0962,
      "step": 106
    },
    {
      "epoch": 0.09972041006523766,
      "grad_norm": 7.907721519470215,
      "learning_rate": 9.923593731192655e-05,
      "loss": 0.3763,
      "step": 107
    },
    {
      "epoch": 0.10065237651444547,
      "grad_norm": 4.7565598487854,
      "learning_rate": 9.920896445924373e-05,
      "loss": 0.2433,
      "step": 108
    },
    {
      "epoch": 0.1015843429636533,
      "grad_norm": 3.6429123878479004,
      "learning_rate": 9.918152752708438e-05,
      "loss": 0.1288,
      "step": 109
    },
    {
      "epoch": 0.10251630941286113,
      "grad_norm": 3.3588905334472656,
      "learning_rate": 9.915362677420044e-05,
      "loss": 0.2031,
      "step": 110
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 2.161036491394043,
      "learning_rate": 9.912526246371814e-05,
      "loss": 0.0812,
      "step": 111
    },
    {
      "epoch": 0.1043802423112768,
      "grad_norm": 2.162220001220703,
      "learning_rate": 9.909643486313533e-05,
      "loss": 0.1037,
      "step": 112
    },
    {
      "epoch": 0.10531220876048462,
      "grad_norm": 3.823596239089966,
      "learning_rate": 9.906714424431913e-05,
      "loss": 0.1591,
      "step": 113
    },
    {
      "epoch": 0.10624417520969245,
      "grad_norm": 3.6801645755767822,
      "learning_rate": 9.903739088350325e-05,
      "loss": 0.147,
      "step": 114
    },
    {
      "epoch": 0.10717614165890028,
      "grad_norm": 6.397568702697754,
      "learning_rate": 9.900717506128541e-05,
      "loss": 0.3958,
      "step": 115
    },
    {
      "epoch": 0.10810810810810811,
      "grad_norm": 4.3636794090271,
      "learning_rate": 9.897649706262473e-05,
      "loss": 0.2412,
      "step": 116
    },
    {
      "epoch": 0.10904007455731593,
      "grad_norm": 2.604064702987671,
      "learning_rate": 9.894535717683902e-05,
      "loss": 0.2321,
      "step": 117
    },
    {
      "epoch": 0.10997204100652376,
      "grad_norm": 3.3178787231445312,
      "learning_rate": 9.891375569760204e-05,
      "loss": 0.2164,
      "step": 118
    },
    {
      "epoch": 0.11090400745573159,
      "grad_norm": 1.7841252088546753,
      "learning_rate": 9.888169292294076e-05,
      "loss": 0.1164,
      "step": 119
    },
    {
      "epoch": 0.11183597390493942,
      "grad_norm": 3.5098044872283936,
      "learning_rate": 9.884916915523251e-05,
      "loss": 0.2846,
      "step": 120
    },
    {
      "epoch": 0.11276794035414725,
      "grad_norm": 2.65773606300354,
      "learning_rate": 9.881618470120215e-05,
      "loss": 0.2409,
      "step": 121
    },
    {
      "epoch": 0.11369990680335508,
      "grad_norm": 5.54008674621582,
      "learning_rate": 9.87827398719192e-05,
      "loss": 0.2004,
      "step": 122
    },
    {
      "epoch": 0.11463187325256291,
      "grad_norm": 5.397515773773193,
      "learning_rate": 9.874883498279484e-05,
      "loss": 0.2435,
      "step": 123
    },
    {
      "epoch": 0.11556383970177074,
      "grad_norm": 7.1401777267456055,
      "learning_rate": 9.871447035357903e-05,
      "loss": 0.2561,
      "step": 124
    },
    {
      "epoch": 0.11649580615097857,
      "grad_norm": 23.130657196044922,
      "learning_rate": 9.867964630835743e-05,
      "loss": 0.6442,
      "step": 125
    },
    {
      "epoch": 0.11742777260018639,
      "grad_norm": 2.3777284622192383,
      "learning_rate": 9.864436317554831e-05,
      "loss": 0.1827,
      "step": 126
    },
    {
      "epoch": 0.11835973904939422,
      "grad_norm": 2.1437063217163086,
      "learning_rate": 9.860862128789953e-05,
      "loss": 0.2189,
      "step": 127
    },
    {
      "epoch": 0.11929170549860205,
      "grad_norm": 3.7361228466033936,
      "learning_rate": 9.857242098248542e-05,
      "loss": 0.3052,
      "step": 128
    },
    {
      "epoch": 0.12022367194780988,
      "grad_norm": 1.8870540857315063,
      "learning_rate": 9.853576260070347e-05,
      "loss": 0.2004,
      "step": 129
    },
    {
      "epoch": 0.1211556383970177,
      "grad_norm": 4.224307537078857,
      "learning_rate": 9.849864648827125e-05,
      "loss": 0.2409,
      "step": 130
    },
    {
      "epoch": 0.12208760484622554,
      "grad_norm": 2.4363510608673096,
      "learning_rate": 9.846107299522304e-05,
      "loss": 0.077,
      "step": 131
    },
    {
      "epoch": 0.12301957129543337,
      "grad_norm": 1.9710980653762817,
      "learning_rate": 9.842304247590667e-05,
      "loss": 0.1215,
      "step": 132
    },
    {
      "epoch": 0.1239515377446412,
      "grad_norm": 1.8237957954406738,
      "learning_rate": 9.838455528897998e-05,
      "loss": 0.1306,
      "step": 133
    },
    {
      "epoch": 0.12488350419384903,
      "grad_norm": 1.345870018005371,
      "learning_rate": 9.834561179740762e-05,
      "loss": 0.0764,
      "step": 134
    },
    {
      "epoch": 0.12581547064305684,
      "grad_norm": 1.505752682685852,
      "learning_rate": 9.830621236845754e-05,
      "loss": 0.1416,
      "step": 135
    },
    {
      "epoch": 0.1267474370922647,
      "grad_norm": 1.9039655923843384,
      "learning_rate": 9.826635737369751e-05,
      "loss": 0.1965,
      "step": 136
    },
    {
      "epoch": 0.1276794035414725,
      "grad_norm": 1.7544796466827393,
      "learning_rate": 9.82260471889917e-05,
      "loss": 0.122,
      "step": 137
    },
    {
      "epoch": 0.12861136999068035,
      "grad_norm": 3.835097312927246,
      "learning_rate": 9.818528219449705e-05,
      "loss": 0.2722,
      "step": 138
    },
    {
      "epoch": 0.12954333643988816,
      "grad_norm": 2.6565046310424805,
      "learning_rate": 9.814406277465968e-05,
      "loss": 0.1866,
      "step": 139
    },
    {
      "epoch": 0.13047530288909598,
      "grad_norm": 1.8572436571121216,
      "learning_rate": 9.810238931821138e-05,
      "loss": 0.1344,
      "step": 140
    },
    {
      "epoch": 0.13140726933830382,
      "grad_norm": 1.8303385972976685,
      "learning_rate": 9.806026221816581e-05,
      "loss": 0.1429,
      "step": 141
    },
    {
      "epoch": 0.13233923578751164,
      "grad_norm": 2.8735477924346924,
      "learning_rate": 9.801768187181486e-05,
      "loss": 0.1621,
      "step": 142
    },
    {
      "epoch": 0.13327120223671948,
      "grad_norm": 2.632936477661133,
      "learning_rate": 9.797464868072488e-05,
      "loss": 0.2389,
      "step": 143
    },
    {
      "epoch": 0.1342031686859273,
      "grad_norm": 4.749107360839844,
      "learning_rate": 9.793116305073292e-05,
      "loss": 0.3371,
      "step": 144
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 3.6311898231506348,
      "learning_rate": 9.788722539194291e-05,
      "loss": 0.2557,
      "step": 145
    },
    {
      "epoch": 0.13606710158434296,
      "grad_norm": 3.9647810459136963,
      "learning_rate": 9.784283611872169e-05,
      "loss": 0.0985,
      "step": 146
    },
    {
      "epoch": 0.1369990680335508,
      "grad_norm": 7.041486740112305,
      "learning_rate": 9.77979956496953e-05,
      "loss": 0.3009,
      "step": 147
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 2.975712299346924,
      "learning_rate": 9.77527044077448e-05,
      "loss": 0.1418,
      "step": 148
    },
    {
      "epoch": 0.13886300093196646,
      "grad_norm": 6.61073637008667,
      "learning_rate": 9.770696282000244e-05,
      "loss": 0.2365,
      "step": 149
    },
    {
      "epoch": 0.13979496738117428,
      "grad_norm": 8.437379837036133,
      "learning_rate": 9.766077131784763e-05,
      "loss": 0.2798,
      "step": 150
    },
    {
      "epoch": 0.1407269338303821,
      "grad_norm": 3.151240587234497,
      "learning_rate": 9.761413033690276e-05,
      "loss": 0.1862,
      "step": 151
    },
    {
      "epoch": 0.14165890027958994,
      "grad_norm": 4.774233818054199,
      "learning_rate": 9.756704031702918e-05,
      "loss": 0.4372,
      "step": 152
    },
    {
      "epoch": 0.14259086672879775,
      "grad_norm": 5.088889122009277,
      "learning_rate": 9.751950170232309e-05,
      "loss": 0.3417,
      "step": 153
    },
    {
      "epoch": 0.1435228331780056,
      "grad_norm": 4.445777893066406,
      "learning_rate": 9.74715149411112e-05,
      "loss": 0.2874,
      "step": 154
    },
    {
      "epoch": 0.14445479962721341,
      "grad_norm": 3.9014739990234375,
      "learning_rate": 9.742308048594665e-05,
      "loss": 0.3016,
      "step": 155
    },
    {
      "epoch": 0.14538676607642126,
      "grad_norm": 2.5364391803741455,
      "learning_rate": 9.73741987936047e-05,
      "loss": 0.1844,
      "step": 156
    },
    {
      "epoch": 0.14631873252562907,
      "grad_norm": 1.6550700664520264,
      "learning_rate": 9.732487032507837e-05,
      "loss": 0.1792,
      "step": 157
    },
    {
      "epoch": 0.14725069897483692,
      "grad_norm": 1.4614055156707764,
      "learning_rate": 9.727509554557417e-05,
      "loss": 0.1203,
      "step": 158
    },
    {
      "epoch": 0.14818266542404473,
      "grad_norm": 1.4385958909988403,
      "learning_rate": 9.722487492450764e-05,
      "loss": 0.1653,
      "step": 159
    },
    {
      "epoch": 0.14911463187325255,
      "grad_norm": 3.6554222106933594,
      "learning_rate": 9.717420893549902e-05,
      "loss": 0.1831,
      "step": 160
    },
    {
      "epoch": 0.1500465983224604,
      "grad_norm": 2.7897613048553467,
      "learning_rate": 9.712309805636863e-05,
      "loss": 0.155,
      "step": 161
    },
    {
      "epoch": 0.1509785647716682,
      "grad_norm": 7.741319179534912,
      "learning_rate": 9.707154276913256e-05,
      "loss": 0.3681,
      "step": 162
    },
    {
      "epoch": 0.15191053122087605,
      "grad_norm": 6.076152801513672,
      "learning_rate": 9.701954355999792e-05,
      "loss": 0.3269,
      "step": 163
    },
    {
      "epoch": 0.15284249767008387,
      "grad_norm": 5.800349712371826,
      "learning_rate": 9.696710091935841e-05,
      "loss": 0.4059,
      "step": 164
    },
    {
      "epoch": 0.15377446411929171,
      "grad_norm": 3.289402484893799,
      "learning_rate": 9.691421534178966e-05,
      "loss": 0.1641,
      "step": 165
    },
    {
      "epoch": 0.15470643056849953,
      "grad_norm": 1.5645993947982788,
      "learning_rate": 9.686088732604449e-05,
      "loss": 0.1461,
      "step": 166
    },
    {
      "epoch": 0.15563839701770738,
      "grad_norm": 1.1692672967910767,
      "learning_rate": 9.68071173750483e-05,
      "loss": 0.0978,
      "step": 167
    },
    {
      "epoch": 0.1565703634669152,
      "grad_norm": 0.9249667525291443,
      "learning_rate": 9.675290599589429e-05,
      "loss": 0.1108,
      "step": 168
    },
    {
      "epoch": 0.157502329916123,
      "grad_norm": 2.5620081424713135,
      "learning_rate": 9.669825369983865e-05,
      "loss": 0.2324,
      "step": 169
    },
    {
      "epoch": 0.15843429636533085,
      "grad_norm": 2.108088254928589,
      "learning_rate": 9.664316100229578e-05,
      "loss": 0.143,
      "step": 170
    },
    {
      "epoch": 0.15936626281453867,
      "grad_norm": 2.3042526245117188,
      "learning_rate": 9.658762842283343e-05,
      "loss": 0.1438,
      "step": 171
    },
    {
      "epoch": 0.1602982292637465,
      "grad_norm": 3.5743720531463623,
      "learning_rate": 9.653165648516776e-05,
      "loss": 0.0584,
      "step": 172
    },
    {
      "epoch": 0.16123019571295433,
      "grad_norm": 5.048624515533447,
      "learning_rate": 9.647524571715843e-05,
      "loss": 0.2915,
      "step": 173
    },
    {
      "epoch": 0.16216216216216217,
      "grad_norm": 2.8932971954345703,
      "learning_rate": 9.641839665080363e-05,
      "loss": 0.1311,
      "step": 174
    },
    {
      "epoch": 0.16309412861137,
      "grad_norm": 3.3463146686553955,
      "learning_rate": 9.636110982223505e-05,
      "loss": 0.1041,
      "step": 175
    },
    {
      "epoch": 0.16402609506057783,
      "grad_norm": 0.9479485154151917,
      "learning_rate": 9.630338577171282e-05,
      "loss": 0.0865,
      "step": 176
    },
    {
      "epoch": 0.16495806150978565,
      "grad_norm": 2.0499370098114014,
      "learning_rate": 9.624522504362039e-05,
      "loss": 0.1282,
      "step": 177
    },
    {
      "epoch": 0.16589002795899346,
      "grad_norm": 2.943985939025879,
      "learning_rate": 9.618662818645948e-05,
      "loss": 0.274,
      "step": 178
    },
    {
      "epoch": 0.1668219944082013,
      "grad_norm": 1.6030714511871338,
      "learning_rate": 9.612759575284483e-05,
      "loss": 0.1708,
      "step": 179
    },
    {
      "epoch": 0.16775396085740912,
      "grad_norm": 1.8890039920806885,
      "learning_rate": 9.606812829949896e-05,
      "loss": 0.2742,
      "step": 180
    },
    {
      "epoch": 0.16868592730661697,
      "grad_norm": 2.0088531970977783,
      "learning_rate": 9.600822638724705e-05,
      "loss": 0.3307,
      "step": 181
    },
    {
      "epoch": 0.16961789375582478,
      "grad_norm": 1.0239542722702026,
      "learning_rate": 9.594789058101153e-05,
      "loss": 0.171,
      "step": 182
    },
    {
      "epoch": 0.17054986020503263,
      "grad_norm": 1.3413492441177368,
      "learning_rate": 9.588712144980681e-05,
      "loss": 0.111,
      "step": 183
    },
    {
      "epoch": 0.17148182665424044,
      "grad_norm": 1.7198429107666016,
      "learning_rate": 9.582591956673387e-05,
      "loss": 0.1318,
      "step": 184
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 2.8803393840789795,
      "learning_rate": 9.576428550897489e-05,
      "loss": 0.1807,
      "step": 185
    },
    {
      "epoch": 0.1733457595526561,
      "grad_norm": 1.4469658136367798,
      "learning_rate": 9.570221985778785e-05,
      "loss": 0.1252,
      "step": 186
    },
    {
      "epoch": 0.17427772600186392,
      "grad_norm": 3.2417330741882324,
      "learning_rate": 9.563972319850092e-05,
      "loss": 0.2001,
      "step": 187
    },
    {
      "epoch": 0.17520969245107176,
      "grad_norm": 1.7459640502929688,
      "learning_rate": 9.557679612050708e-05,
      "loss": 0.1286,
      "step": 188
    },
    {
      "epoch": 0.17614165890027958,
      "grad_norm": 2.3768630027770996,
      "learning_rate": 9.551343921725843e-05,
      "loss": 0.1013,
      "step": 189
    },
    {
      "epoch": 0.17707362534948742,
      "grad_norm": 1.7927972078323364,
      "learning_rate": 9.544965308626074e-05,
      "loss": 0.1465,
      "step": 190
    },
    {
      "epoch": 0.17800559179869524,
      "grad_norm": 1.5043256282806396,
      "learning_rate": 9.538543832906773e-05,
      "loss": 0.0714,
      "step": 191
    },
    {
      "epoch": 0.17893755824790308,
      "grad_norm": 2.0259816646575928,
      "learning_rate": 9.53207955512753e-05,
      "loss": 0.1627,
      "step": 192
    },
    {
      "epoch": 0.1798695246971109,
      "grad_norm": 3.1285605430603027,
      "learning_rate": 9.525572536251607e-05,
      "loss": 0.1725,
      "step": 193
    },
    {
      "epoch": 0.18080149114631874,
      "grad_norm": 2.060331344604492,
      "learning_rate": 9.519022837645337e-05,
      "loss": 0.1713,
      "step": 194
    },
    {
      "epoch": 0.18173345759552656,
      "grad_norm": 2.1062660217285156,
      "learning_rate": 9.512430521077565e-05,
      "loss": 0.123,
      "step": 195
    },
    {
      "epoch": 0.18266542404473438,
      "grad_norm": 2.594912052154541,
      "learning_rate": 9.505795648719048e-05,
      "loss": 0.1772,
      "step": 196
    },
    {
      "epoch": 0.18359739049394222,
      "grad_norm": 5.78408670425415,
      "learning_rate": 9.499118283141887e-05,
      "loss": 0.272,
      "step": 197
    },
    {
      "epoch": 0.18452935694315004,
      "grad_norm": 3.905864715576172,
      "learning_rate": 9.492398487318921e-05,
      "loss": 0.1512,
      "step": 198
    },
    {
      "epoch": 0.18546132339235788,
      "grad_norm": 6.496005058288574,
      "learning_rate": 9.485636324623146e-05,
      "loss": 0.1588,
      "step": 199
    },
    {
      "epoch": 0.1863932898415657,
      "grad_norm": 28.41652488708496,
      "learning_rate": 9.478831858827104e-05,
      "loss": 1.0845,
      "step": 200
    },
    {
      "epoch": 0.18732525629077354,
      "grad_norm": 1.8884375095367432,
      "learning_rate": 9.471985154102292e-05,
      "loss": 0.1585,
      "step": 201
    },
    {
      "epoch": 0.18825722273998136,
      "grad_norm": 3.1585066318511963,
      "learning_rate": 9.465096275018556e-05,
      "loss": 0.2897,
      "step": 202
    },
    {
      "epoch": 0.1891891891891892,
      "grad_norm": 5.4817423820495605,
      "learning_rate": 9.458165286543476e-05,
      "loss": 0.4149,
      "step": 203
    },
    {
      "epoch": 0.19012115563839702,
      "grad_norm": 3.001276969909668,
      "learning_rate": 9.451192254041758e-05,
      "loss": 0.1301,
      "step": 204
    },
    {
      "epoch": 0.19105312208760486,
      "grad_norm": 4.782146453857422,
      "learning_rate": 9.444177243274618e-05,
      "loss": 0.2397,
      "step": 205
    },
    {
      "epoch": 0.19198508853681268,
      "grad_norm": 2.695045232772827,
      "learning_rate": 9.437120320399158e-05,
      "loss": 0.1019,
      "step": 206
    },
    {
      "epoch": 0.1929170549860205,
      "grad_norm": 0.7935613393783569,
      "learning_rate": 9.430021551967744e-05,
      "loss": 0.0681,
      "step": 207
    },
    {
      "epoch": 0.19384902143522834,
      "grad_norm": 1.3984178304672241,
      "learning_rate": 9.422881004927381e-05,
      "loss": 0.1025,
      "step": 208
    },
    {
      "epoch": 0.19478098788443615,
      "grad_norm": 1.7235757112503052,
      "learning_rate": 9.415698746619079e-05,
      "loss": 0.1831,
      "step": 209
    },
    {
      "epoch": 0.195712954333644,
      "grad_norm": 0.9581148028373718,
      "learning_rate": 9.408474844777217e-05,
      "loss": 0.0724,
      "step": 210
    },
    {
      "epoch": 0.1966449207828518,
      "grad_norm": 1.098017692565918,
      "learning_rate": 9.401209367528908e-05,
      "loss": 0.0322,
      "step": 211
    },
    {
      "epoch": 0.19757688723205966,
      "grad_norm": 1.6468275785446167,
      "learning_rate": 9.393902383393347e-05,
      "loss": 0.2123,
      "step": 212
    },
    {
      "epoch": 0.19850885368126747,
      "grad_norm": 2.875089645385742,
      "learning_rate": 9.386553961281179e-05,
      "loss": 0.3043,
      "step": 213
    },
    {
      "epoch": 0.19944082013047532,
      "grad_norm": 1.3007657527923584,
      "learning_rate": 9.379164170493844e-05,
      "loss": 0.0726,
      "step": 214
    },
    {
      "epoch": 0.20037278657968313,
      "grad_norm": 1.3006619215011597,
      "learning_rate": 9.371733080722911e-05,
      "loss": 0.0819,
      "step": 215
    },
    {
      "epoch": 0.20130475302889095,
      "grad_norm": 1.5371683835983276,
      "learning_rate": 9.36426076204944e-05,
      "loss": 0.1184,
      "step": 216
    },
    {
      "epoch": 0.2022367194780988,
      "grad_norm": 2.0229947566986084,
      "learning_rate": 9.35674728494331e-05,
      "loss": 0.1667,
      "step": 217
    },
    {
      "epoch": 0.2031686859273066,
      "grad_norm": 1.4043385982513428,
      "learning_rate": 9.349192720262555e-05,
      "loss": 0.1001,
      "step": 218
    },
    {
      "epoch": 0.20410065237651445,
      "grad_norm": 1.893733263015747,
      "learning_rate": 9.341597139252698e-05,
      "loss": 0.1334,
      "step": 219
    },
    {
      "epoch": 0.20503261882572227,
      "grad_norm": 2.8895277976989746,
      "learning_rate": 9.333960613546079e-05,
      "loss": 0.1791,
      "step": 220
    },
    {
      "epoch": 0.2059645852749301,
      "grad_norm": 6.10243034362793,
      "learning_rate": 9.326283215161178e-05,
      "loss": 0.1588,
      "step": 221
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 1.887194037437439,
      "learning_rate": 9.31856501650194e-05,
      "loss": 0.1906,
      "step": 222
    },
    {
      "epoch": 0.20782851817334577,
      "grad_norm": 3.0247702598571777,
      "learning_rate": 9.310806090357082e-05,
      "loss": 0.2346,
      "step": 223
    },
    {
      "epoch": 0.2087604846225536,
      "grad_norm": 4.188446998596191,
      "learning_rate": 9.303006509899418e-05,
      "loss": 0.1994,
      "step": 224
    },
    {
      "epoch": 0.2096924510717614,
      "grad_norm": 9.785820960998535,
      "learning_rate": 9.29516634868517e-05,
      "loss": 0.2581,
      "step": 225
    },
    {
      "epoch": 0.21062441752096925,
      "grad_norm": 1.3063938617706299,
      "learning_rate": 9.287285680653254e-05,
      "loss": 0.0835,
      "step": 226
    },
    {
      "epoch": 0.21155638397017706,
      "grad_norm": 2.1981046199798584,
      "learning_rate": 9.279364580124614e-05,
      "loss": 0.1189,
      "step": 227
    },
    {
      "epoch": 0.2124883504193849,
      "grad_norm": 3.547961473464966,
      "learning_rate": 9.27140312180149e-05,
      "loss": 0.1961,
      "step": 228
    },
    {
      "epoch": 0.21342031686859272,
      "grad_norm": 6.077332973480225,
      "learning_rate": 9.263401380766739e-05,
      "loss": 0.4945,
      "step": 229
    },
    {
      "epoch": 0.21435228331780057,
      "grad_norm": 4.83152961730957,
      "learning_rate": 9.255359432483105e-05,
      "loss": 0.3202,
      "step": 230
    },
    {
      "epoch": 0.21528424976700838,
      "grad_norm": 4.037288665771484,
      "learning_rate": 9.247277352792533e-05,
      "loss": 0.2901,
      "step": 231
    },
    {
      "epoch": 0.21621621621621623,
      "grad_norm": 1.8266874551773071,
      "learning_rate": 9.239155217915422e-05,
      "loss": 0.0553,
      "step": 232
    },
    {
      "epoch": 0.21714818266542404,
      "grad_norm": 1.9196511507034302,
      "learning_rate": 9.230993104449939e-05,
      "loss": 0.1628,
      "step": 233
    },
    {
      "epoch": 0.21808014911463186,
      "grad_norm": 2.83617901802063,
      "learning_rate": 9.222791089371265e-05,
      "loss": 0.1828,
      "step": 234
    },
    {
      "epoch": 0.2190121155638397,
      "grad_norm": 1.9007632732391357,
      "learning_rate": 9.214549250030899e-05,
      "loss": 0.0901,
      "step": 235
    },
    {
      "epoch": 0.21994408201304752,
      "grad_norm": 5.230377674102783,
      "learning_rate": 9.206267664155907e-05,
      "loss": 0.2777,
      "step": 236
    },
    {
      "epoch": 0.22087604846225536,
      "grad_norm": 2.382657289505005,
      "learning_rate": 9.197946409848194e-05,
      "loss": 0.1518,
      "step": 237
    },
    {
      "epoch": 0.22180801491146318,
      "grad_norm": 3.907731771469116,
      "learning_rate": 9.189585565583778e-05,
      "loss": 0.2514,
      "step": 238
    },
    {
      "epoch": 0.22273998136067102,
      "grad_norm": 6.802032947540283,
      "learning_rate": 9.181185210212034e-05,
      "loss": 0.2172,
      "step": 239
    },
    {
      "epoch": 0.22367194780987884,
      "grad_norm": 2.1859257221221924,
      "learning_rate": 9.172745422954961e-05,
      "loss": 0.1372,
      "step": 240
    },
    {
      "epoch": 0.22460391425908668,
      "grad_norm": 2.6559228897094727,
      "learning_rate": 9.164266283406433e-05,
      "loss": 0.1124,
      "step": 241
    },
    {
      "epoch": 0.2255358807082945,
      "grad_norm": 1.4599460363388062,
      "learning_rate": 9.155747871531444e-05,
      "loss": 0.0548,
      "step": 242
    },
    {
      "epoch": 0.22646784715750232,
      "grad_norm": 2.3699145317077637,
      "learning_rate": 9.14719026766536e-05,
      "loss": 0.1298,
      "step": 243
    },
    {
      "epoch": 0.22739981360671016,
      "grad_norm": 1.8961679935455322,
      "learning_rate": 9.13859355251316e-05,
      "loss": 0.1816,
      "step": 244
    },
    {
      "epoch": 0.22833178005591798,
      "grad_norm": 1.3365075588226318,
      "learning_rate": 9.129957807148666e-05,
      "loss": 0.0788,
      "step": 245
    },
    {
      "epoch": 0.22926374650512582,
      "grad_norm": 2.557935953140259,
      "learning_rate": 9.121283113013795e-05,
      "loss": 0.0993,
      "step": 246
    },
    {
      "epoch": 0.23019571295433364,
      "grad_norm": 2.207437038421631,
      "learning_rate": 9.112569551917774e-05,
      "loss": 0.1826,
      "step": 247
    },
    {
      "epoch": 0.23112767940354148,
      "grad_norm": 1.3832250833511353,
      "learning_rate": 9.103817206036382e-05,
      "loss": 0.0532,
      "step": 248
    },
    {
      "epoch": 0.2320596458527493,
      "grad_norm": 0.8267796039581299,
      "learning_rate": 9.095026157911166e-05,
      "loss": 0.0325,
      "step": 249
    },
    {
      "epoch": 0.23299161230195714,
      "grad_norm": 22.60721206665039,
      "learning_rate": 9.086196490448668e-05,
      "loss": 0.3952,
      "step": 250
    },
    {
      "epoch": 0.23392357875116496,
      "grad_norm": 1.598435640335083,
      "learning_rate": 9.077328286919638e-05,
      "loss": 0.2231,
      "step": 251
    },
    {
      "epoch": 0.23485554520037277,
      "grad_norm": 2.5959858894348145,
      "learning_rate": 9.068421630958254e-05,
      "loss": 0.2467,
      "step": 252
    },
    {
      "epoch": 0.23578751164958062,
      "grad_norm": 1.6885298490524292,
      "learning_rate": 9.059476606561328e-05,
      "loss": 0.1445,
      "step": 253
    },
    {
      "epoch": 0.23671947809878843,
      "grad_norm": 2.016430616378784,
      "learning_rate": 9.050493298087523e-05,
      "loss": 0.2079,
      "step": 254
    },
    {
      "epoch": 0.23765144454799628,
      "grad_norm": 1.5078867673873901,
      "learning_rate": 9.041471790256542e-05,
      "loss": 0.1513,
      "step": 255
    },
    {
      "epoch": 0.2385834109972041,
      "grad_norm": 1.6564357280731201,
      "learning_rate": 9.032412168148345e-05,
      "loss": 0.1056,
      "step": 256
    },
    {
      "epoch": 0.23951537744641194,
      "grad_norm": 1.5224450826644897,
      "learning_rate": 9.02331451720234e-05,
      "loss": 0.1486,
      "step": 257
    },
    {
      "epoch": 0.24044734389561975,
      "grad_norm": 3.1949379444122314,
      "learning_rate": 9.014178923216572e-05,
      "loss": 0.1631,
      "step": 258
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 1.828347086906433,
      "learning_rate": 9.005005472346924e-05,
      "loss": 0.1613,
      "step": 259
    },
    {
      "epoch": 0.2423112767940354,
      "grad_norm": 2.3112988471984863,
      "learning_rate": 8.995794251106295e-05,
      "loss": 0.1969,
      "step": 260
    },
    {
      "epoch": 0.24324324324324326,
      "grad_norm": 2.2977964878082275,
      "learning_rate": 8.986545346363792e-05,
      "loss": 0.1895,
      "step": 261
    },
    {
      "epoch": 0.24417520969245107,
      "grad_norm": 2.2505693435668945,
      "learning_rate": 8.977258845343905e-05,
      "loss": 0.0995,
      "step": 262
    },
    {
      "epoch": 0.2451071761416589,
      "grad_norm": 1.345773458480835,
      "learning_rate": 8.967934835625689e-05,
      "loss": 0.0899,
      "step": 263
    },
    {
      "epoch": 0.24603914259086673,
      "grad_norm": 0.8952029943466187,
      "learning_rate": 8.958573405141932e-05,
      "loss": 0.059,
      "step": 264
    },
    {
      "epoch": 0.24697110904007455,
      "grad_norm": 1.5607550144195557,
      "learning_rate": 8.949174642178332e-05,
      "loss": 0.1651,
      "step": 265
    },
    {
      "epoch": 0.2479030754892824,
      "grad_norm": 0.8190938234329224,
      "learning_rate": 8.939738635372664e-05,
      "loss": 0.0636,
      "step": 266
    },
    {
      "epoch": 0.2488350419384902,
      "grad_norm": 1.721642255783081,
      "learning_rate": 8.930265473713938e-05,
      "loss": 0.0876,
      "step": 267
    },
    {
      "epoch": 0.24976700838769805,
      "grad_norm": 0.5578976273536682,
      "learning_rate": 8.920755246541563e-05,
      "loss": 0.0285,
      "step": 268
    },
    {
      "epoch": 0.2506989748369059,
      "grad_norm": 1.9145925045013428,
      "learning_rate": 8.911208043544513e-05,
      "loss": 0.1488,
      "step": 269
    },
    {
      "epoch": 0.2506989748369059,
      "eval_loss": 0.107957623898983,
      "eval_runtime": 128.7777,
      "eval_samples_per_second": 14.032,
      "eval_steps_per_second": 3.51,
      "step": 269
    },
    {
      "epoch": 0.2516309412861137,
      "grad_norm": 3.0175607204437256,
      "learning_rate": 8.90162395476046e-05,
      "loss": 0.1851,
      "step": 270
    },
    {
      "epoch": 0.25256290773532153,
      "grad_norm": 0.9700628519058228,
      "learning_rate": 8.89200307057495e-05,
      "loss": 0.0514,
      "step": 271
    },
    {
      "epoch": 0.2534948741845294,
      "grad_norm": 1.3989373445510864,
      "learning_rate": 8.882345481720533e-05,
      "loss": 0.0319,
      "step": 272
    },
    {
      "epoch": 0.25442684063373716,
      "grad_norm": 1.5159484148025513,
      "learning_rate": 8.872651279275916e-05,
      "loss": 0.0655,
      "step": 273
    },
    {
      "epoch": 0.255358807082945,
      "grad_norm": 4.176044940948486,
      "learning_rate": 8.862920554665098e-05,
      "loss": 0.3077,
      "step": 274
    },
    {
      "epoch": 0.25629077353215285,
      "grad_norm": 44.685333251953125,
      "learning_rate": 8.853153399656513e-05,
      "loss": 0.4734,
      "step": 275
    },
    {
      "epoch": 0.2572227399813607,
      "grad_norm": 1.0462782382965088,
      "learning_rate": 8.843349906362163e-05,
      "loss": 0.0498,
      "step": 276
    },
    {
      "epoch": 0.2581547064305685,
      "grad_norm": 2.0390098094940186,
      "learning_rate": 8.833510167236747e-05,
      "loss": 0.0617,
      "step": 277
    },
    {
      "epoch": 0.2590866728797763,
      "grad_norm": 2.5294477939605713,
      "learning_rate": 8.823634275076791e-05,
      "loss": 0.1067,
      "step": 278
    },
    {
      "epoch": 0.26001863932898417,
      "grad_norm": 4.861917972564697,
      "learning_rate": 8.813722323019773e-05,
      "loss": 0.2199,
      "step": 279
    },
    {
      "epoch": 0.26095060577819196,
      "grad_norm": 3.0162718296051025,
      "learning_rate": 8.803774404543247e-05,
      "loss": 0.2016,
      "step": 280
    },
    {
      "epoch": 0.2618825722273998,
      "grad_norm": 2.129127264022827,
      "learning_rate": 8.793790613463955e-05,
      "loss": 0.1283,
      "step": 281
    },
    {
      "epoch": 0.26281453867660765,
      "grad_norm": 0.9576233625411987,
      "learning_rate": 8.783771043936948e-05,
      "loss": 0.0752,
      "step": 282
    },
    {
      "epoch": 0.2637465051258155,
      "grad_norm": 2.868595600128174,
      "learning_rate": 8.773715790454695e-05,
      "loss": 0.1673,
      "step": 283
    },
    {
      "epoch": 0.2646784715750233,
      "grad_norm": 1.5952013731002808,
      "learning_rate": 8.763624947846195e-05,
      "loss": 0.114,
      "step": 284
    },
    {
      "epoch": 0.2656104380242311,
      "grad_norm": 2.122833251953125,
      "learning_rate": 8.753498611276079e-05,
      "loss": 0.137,
      "step": 285
    },
    {
      "epoch": 0.26654240447343897,
      "grad_norm": 1.3185533285140991,
      "learning_rate": 8.743336876243712e-05,
      "loss": 0.0877,
      "step": 286
    },
    {
      "epoch": 0.2674743709226468,
      "grad_norm": 1.3917877674102783,
      "learning_rate": 8.7331398385823e-05,
      "loss": 0.1248,
      "step": 287
    },
    {
      "epoch": 0.2684063373718546,
      "grad_norm": 1.9148253202438354,
      "learning_rate": 8.722907594457974e-05,
      "loss": 0.179,
      "step": 288
    },
    {
      "epoch": 0.26933830382106244,
      "grad_norm": 1.8025591373443604,
      "learning_rate": 8.712640240368899e-05,
      "loss": 0.1714,
      "step": 289
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 2.925537109375,
      "learning_rate": 8.702337873144343e-05,
      "loss": 0.2086,
      "step": 290
    },
    {
      "epoch": 0.2712022367194781,
      "grad_norm": 3.2652766704559326,
      "learning_rate": 8.692000589943785e-05,
      "loss": 0.1623,
      "step": 291
    },
    {
      "epoch": 0.2721342031686859,
      "grad_norm": 2.3668527603149414,
      "learning_rate": 8.681628488255986e-05,
      "loss": 0.1309,
      "step": 292
    },
    {
      "epoch": 0.27306616961789376,
      "grad_norm": 3.6899619102478027,
      "learning_rate": 8.671221665898073e-05,
      "loss": 0.2109,
      "step": 293
    },
    {
      "epoch": 0.2739981360671016,
      "grad_norm": 5.232030868530273,
      "learning_rate": 8.660780221014616e-05,
      "loss": 0.1588,
      "step": 294
    },
    {
      "epoch": 0.2749301025163094,
      "grad_norm": 3.900122880935669,
      "learning_rate": 8.650304252076703e-05,
      "loss": 0.1674,
      "step": 295
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 4.121029853820801,
      "learning_rate": 8.639793857881011e-05,
      "loss": 0.1738,
      "step": 296
    },
    {
      "epoch": 0.2767940354147251,
      "grad_norm": 4.410455703735352,
      "learning_rate": 8.629249137548874e-05,
      "loss": 0.1614,
      "step": 297
    },
    {
      "epoch": 0.2777260018639329,
      "grad_norm": 3.216705083847046,
      "learning_rate": 8.618670190525352e-05,
      "loss": 0.147,
      "step": 298
    },
    {
      "epoch": 0.2786579683131407,
      "grad_norm": 3.378570318222046,
      "learning_rate": 8.608057116578283e-05,
      "loss": 0.0914,
      "step": 299
    },
    {
      "epoch": 0.27958993476234856,
      "grad_norm": 7.987005710601807,
      "learning_rate": 8.597410015797358e-05,
      "loss": 0.3069,
      "step": 300
    },
    {
      "epoch": 0.2805219012115564,
      "grad_norm": 0.6978614926338196,
      "learning_rate": 8.586728988593158e-05,
      "loss": 0.0613,
      "step": 301
    },
    {
      "epoch": 0.2814538676607642,
      "grad_norm": 2.7040798664093018,
      "learning_rate": 8.576014135696226e-05,
      "loss": 0.2154,
      "step": 302
    },
    {
      "epoch": 0.28238583410997203,
      "grad_norm": 1.6410375833511353,
      "learning_rate": 8.565265558156101e-05,
      "loss": 0.14,
      "step": 303
    },
    {
      "epoch": 0.2833178005591799,
      "grad_norm": 1.9833245277404785,
      "learning_rate": 8.554483357340379e-05,
      "loss": 0.1751,
      "step": 304
    },
    {
      "epoch": 0.2842497670083877,
      "grad_norm": 1.208056926727295,
      "learning_rate": 8.543667634933742e-05,
      "loss": 0.1013,
      "step": 305
    },
    {
      "epoch": 0.2851817334575955,
      "grad_norm": 1.0126078128814697,
      "learning_rate": 8.532818492937014e-05,
      "loss": 0.0376,
      "step": 306
    },
    {
      "epoch": 0.28611369990680335,
      "grad_norm": 1.7472506761550903,
      "learning_rate": 8.521936033666187e-05,
      "loss": 0.1245,
      "step": 307
    },
    {
      "epoch": 0.2870456663560112,
      "grad_norm": 0.9348568916320801,
      "learning_rate": 8.511020359751466e-05,
      "loss": 0.1106,
      "step": 308
    },
    {
      "epoch": 0.287977632805219,
      "grad_norm": 5.611238479614258,
      "learning_rate": 8.500071574136295e-05,
      "loss": 0.1801,
      "step": 309
    },
    {
      "epoch": 0.28890959925442683,
      "grad_norm": 1.2382830381393433,
      "learning_rate": 8.489089780076386e-05,
      "loss": 0.0415,
      "step": 310
    },
    {
      "epoch": 0.2898415657036347,
      "grad_norm": 0.8619039058685303,
      "learning_rate": 8.478075081138745e-05,
      "loss": 0.0524,
      "step": 311
    },
    {
      "epoch": 0.2907735321528425,
      "grad_norm": 3.3732051849365234,
      "learning_rate": 8.467027581200702e-05,
      "loss": 0.1547,
      "step": 312
    },
    {
      "epoch": 0.2917054986020503,
      "grad_norm": 2.319859027862549,
      "learning_rate": 8.455947384448924e-05,
      "loss": 0.1353,
      "step": 313
    },
    {
      "epoch": 0.29263746505125815,
      "grad_norm": 3.4164938926696777,
      "learning_rate": 8.444834595378434e-05,
      "loss": 0.2439,
      "step": 314
    },
    {
      "epoch": 0.293569431500466,
      "grad_norm": 1.6757917404174805,
      "learning_rate": 8.433689318791626e-05,
      "loss": 0.2057,
      "step": 315
    },
    {
      "epoch": 0.29450139794967384,
      "grad_norm": 2.328280210494995,
      "learning_rate": 8.422511659797279e-05,
      "loss": 0.1578,
      "step": 316
    },
    {
      "epoch": 0.2954333643988816,
      "grad_norm": 2.0828404426574707,
      "learning_rate": 8.411301723809563e-05,
      "loss": 0.12,
      "step": 317
    },
    {
      "epoch": 0.29636533084808947,
      "grad_norm": 1.3504449129104614,
      "learning_rate": 8.400059616547047e-05,
      "loss": 0.0851,
      "step": 318
    },
    {
      "epoch": 0.2972972972972973,
      "grad_norm": 1.7693885564804077,
      "learning_rate": 8.388785444031695e-05,
      "loss": 0.175,
      "step": 319
    },
    {
      "epoch": 0.2982292637465051,
      "grad_norm": 2.22578501701355,
      "learning_rate": 8.377479312587879e-05,
      "loss": 0.1396,
      "step": 320
    },
    {
      "epoch": 0.29916123019571295,
      "grad_norm": 2.099825382232666,
      "learning_rate": 8.366141328841366e-05,
      "loss": 0.148,
      "step": 321
    },
    {
      "epoch": 0.3000931966449208,
      "grad_norm": 2.9881668090820312,
      "learning_rate": 8.354771599718312e-05,
      "loss": 0.197,
      "step": 322
    },
    {
      "epoch": 0.30102516309412863,
      "grad_norm": 4.4058966636657715,
      "learning_rate": 8.343370232444261e-05,
      "loss": 0.1629,
      "step": 323
    },
    {
      "epoch": 0.3019571295433364,
      "grad_norm": 2.186525344848633,
      "learning_rate": 8.331937334543132e-05,
      "loss": 0.062,
      "step": 324
    },
    {
      "epoch": 0.30288909599254427,
      "grad_norm": 20.07304573059082,
      "learning_rate": 8.320473013836196e-05,
      "loss": 0.2938,
      "step": 325
    },
    {
      "epoch": 0.3038210624417521,
      "grad_norm": 0.5368430614471436,
      "learning_rate": 8.308977378441072e-05,
      "loss": 0.046,
      "step": 326
    },
    {
      "epoch": 0.3047530288909599,
      "grad_norm": 1.0717397928237915,
      "learning_rate": 8.297450536770697e-05,
      "loss": 0.0932,
      "step": 327
    },
    {
      "epoch": 0.30568499534016774,
      "grad_norm": 1.006714940071106,
      "learning_rate": 8.28589259753231e-05,
      "loss": 0.0887,
      "step": 328
    },
    {
      "epoch": 0.3066169617893756,
      "grad_norm": 2.4907236099243164,
      "learning_rate": 8.274303669726426e-05,
      "loss": 0.1883,
      "step": 329
    },
    {
      "epoch": 0.30754892823858343,
      "grad_norm": 1.739026665687561,
      "learning_rate": 8.262683862645804e-05,
      "loss": 0.1322,
      "step": 330
    },
    {
      "epoch": 0.3084808946877912,
      "grad_norm": 0.5353955030441284,
      "learning_rate": 8.251033285874421e-05,
      "loss": 0.0189,
      "step": 331
    },
    {
      "epoch": 0.30941286113699906,
      "grad_norm": 1.517392635345459,
      "learning_rate": 8.239352049286435e-05,
      "loss": 0.0856,
      "step": 332
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 1.9752682447433472,
      "learning_rate": 8.22764026304515e-05,
      "loss": 0.1543,
      "step": 333
    },
    {
      "epoch": 0.31127679403541475,
      "grad_norm": 1.9139810800552368,
      "learning_rate": 8.21589803760198e-05,
      "loss": 0.1547,
      "step": 334
    },
    {
      "epoch": 0.31220876048462254,
      "grad_norm": 2.1578001976013184,
      "learning_rate": 8.204125483695403e-05,
      "loss": 0.1611,
      "step": 335
    },
    {
      "epoch": 0.3131407269338304,
      "grad_norm": 1.5856783390045166,
      "learning_rate": 8.192322712349917e-05,
      "loss": 0.0656,
      "step": 336
    },
    {
      "epoch": 0.3140726933830382,
      "grad_norm": 2.2837724685668945,
      "learning_rate": 8.180489834875e-05,
      "loss": 0.1479,
      "step": 337
    },
    {
      "epoch": 0.315004659832246,
      "grad_norm": 2.434117555618286,
      "learning_rate": 8.168626962864045e-05,
      "loss": 0.2402,
      "step": 338
    },
    {
      "epoch": 0.31593662628145386,
      "grad_norm": 1.2002735137939453,
      "learning_rate": 8.156734208193327e-05,
      "loss": 0.0429,
      "step": 339
    },
    {
      "epoch": 0.3168685927306617,
      "grad_norm": 1.260638952255249,
      "learning_rate": 8.144811683020932e-05,
      "loss": 0.0626,
      "step": 340
    },
    {
      "epoch": 0.31780055917986955,
      "grad_norm": 1.155396580696106,
      "learning_rate": 8.132859499785707e-05,
      "loss": 0.0704,
      "step": 341
    },
    {
      "epoch": 0.31873252562907733,
      "grad_norm": 1.6018532514572144,
      "learning_rate": 8.120877771206201e-05,
      "loss": 0.0605,
      "step": 342
    },
    {
      "epoch": 0.3196644920782852,
      "grad_norm": 1.430546522140503,
      "learning_rate": 8.108866610279593e-05,
      "loss": 0.1085,
      "step": 343
    },
    {
      "epoch": 0.320596458527493,
      "grad_norm": 1.5464401245117188,
      "learning_rate": 8.096826130280639e-05,
      "loss": 0.097,
      "step": 344
    },
    {
      "epoch": 0.32152842497670087,
      "grad_norm": 1.4850672483444214,
      "learning_rate": 8.08475644476059e-05,
      "loss": 0.0894,
      "step": 345
    },
    {
      "epoch": 0.32246039142590865,
      "grad_norm": 2.387909412384033,
      "learning_rate": 8.072657667546136e-05,
      "loss": 0.0806,
      "step": 346
    },
    {
      "epoch": 0.3233923578751165,
      "grad_norm": 1.3122079372406006,
      "learning_rate": 8.060529912738315e-05,
      "loss": 0.0819,
      "step": 347
    },
    {
      "epoch": 0.32432432432432434,
      "grad_norm": 1.7305047512054443,
      "learning_rate": 8.048373294711455e-05,
      "loss": 0.0461,
      "step": 348
    },
    {
      "epoch": 0.32525629077353213,
      "grad_norm": 2.7955398559570312,
      "learning_rate": 8.036187928112087e-05,
      "loss": 0.2149,
      "step": 349
    },
    {
      "epoch": 0.32618825722274,
      "grad_norm": 5.184839725494385,
      "learning_rate": 8.023973927857857e-05,
      "loss": 0.1819,
      "step": 350
    },
    {
      "epoch": 0.3271202236719478,
      "grad_norm": 0.8407028317451477,
      "learning_rate": 8.011731409136454e-05,
      "loss": 0.0468,
      "step": 351
    },
    {
      "epoch": 0.32805219012115566,
      "grad_norm": 1.0992075204849243,
      "learning_rate": 7.99946048740452e-05,
      "loss": 0.0529,
      "step": 352
    },
    {
      "epoch": 0.32898415657036345,
      "grad_norm": 1.9874184131622314,
      "learning_rate": 7.987161278386554e-05,
      "loss": 0.1567,
      "step": 353
    },
    {
      "epoch": 0.3299161230195713,
      "grad_norm": 1.0550768375396729,
      "learning_rate": 7.974833898073832e-05,
      "loss": 0.0591,
      "step": 354
    },
    {
      "epoch": 0.33084808946877914,
      "grad_norm": 2.2478857040405273,
      "learning_rate": 7.962478462723307e-05,
      "loss": 0.1689,
      "step": 355
    },
    {
      "epoch": 0.3317800559179869,
      "grad_norm": 0.4727104604244232,
      "learning_rate": 7.950095088856508e-05,
      "loss": 0.0178,
      "step": 356
    },
    {
      "epoch": 0.33271202236719477,
      "grad_norm": 0.5580483078956604,
      "learning_rate": 7.937683893258454e-05,
      "loss": 0.0216,
      "step": 357
    },
    {
      "epoch": 0.3336439888164026,
      "grad_norm": 2.7151288986206055,
      "learning_rate": 7.925244992976538e-05,
      "loss": 0.2424,
      "step": 358
    },
    {
      "epoch": 0.33457595526561046,
      "grad_norm": 1.1772783994674683,
      "learning_rate": 7.912778505319436e-05,
      "loss": 0.0902,
      "step": 359
    },
    {
      "epoch": 0.33550792171481825,
      "grad_norm": 1.3066917657852173,
      "learning_rate": 7.900284547855991e-05,
      "loss": 0.0794,
      "step": 360
    },
    {
      "epoch": 0.3364398881640261,
      "grad_norm": 1.881451964378357,
      "learning_rate": 7.88776323841411e-05,
      "loss": 0.1948,
      "step": 361
    },
    {
      "epoch": 0.33737185461323393,
      "grad_norm": 0.8755617141723633,
      "learning_rate": 7.875214695079647e-05,
      "loss": 0.0362,
      "step": 362
    },
    {
      "epoch": 0.3383038210624418,
      "grad_norm": 1.7075937986373901,
      "learning_rate": 7.862639036195298e-05,
      "loss": 0.0519,
      "step": 363
    },
    {
      "epoch": 0.33923578751164957,
      "grad_norm": 0.9367966651916504,
      "learning_rate": 7.850036380359478e-05,
      "loss": 0.0451,
      "step": 364
    },
    {
      "epoch": 0.3401677539608574,
      "grad_norm": 7.803852558135986,
      "learning_rate": 7.837406846425204e-05,
      "loss": 0.1543,
      "step": 365
    },
    {
      "epoch": 0.34109972041006525,
      "grad_norm": 8.1025972366333,
      "learning_rate": 7.824750553498976e-05,
      "loss": 0.2252,
      "step": 366
    },
    {
      "epoch": 0.34203168685927304,
      "grad_norm": 1.4972351789474487,
      "learning_rate": 7.812067620939652e-05,
      "loss": 0.1005,
      "step": 367
    },
    {
      "epoch": 0.3429636533084809,
      "grad_norm": 3.528541088104248,
      "learning_rate": 7.799358168357323e-05,
      "loss": 0.192,
      "step": 368
    },
    {
      "epoch": 0.34389561975768873,
      "grad_norm": 2.0039918422698975,
      "learning_rate": 7.786622315612183e-05,
      "loss": 0.1503,
      "step": 369
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 1.0715596675872803,
      "learning_rate": 7.773860182813403e-05,
      "loss": 0.041,
      "step": 370
    },
    {
      "epoch": 0.34575955265610436,
|
"grad_norm": 2.713217258453369, |
|
"learning_rate": 7.761071890317994e-05, |
|
"loss": 0.1438, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.3466915191053122, |
|
"grad_norm": 1.925072431564331, |
|
"learning_rate": 7.748257558729677e-05, |
|
"loss": 0.0374, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.34762348555452005, |
|
"grad_norm": 2.379302501678467, |
|
"learning_rate": 7.735417308897736e-05, |
|
"loss": 0.0951, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.34855545200372784, |
|
"grad_norm": 0.7751803398132324, |
|
"learning_rate": 7.72255126191589e-05, |
|
"loss": 0.0113, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.3494874184529357, |
|
"grad_norm": 14.013057708740234, |
|
"learning_rate": 7.709659539121144e-05, |
|
"loss": 0.2808, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.3504193849021435, |
|
"grad_norm": 0.7865239977836609, |
|
"learning_rate": 7.696742262092644e-05, |
|
"loss": 0.0394, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.35135135135135137, |
|
"grad_norm": 1.1983258724212646, |
|
"learning_rate": 7.683799552650534e-05, |
|
"loss": 0.0412, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.35228331780055916, |
|
"grad_norm": 0.8813638091087341, |
|
"learning_rate": 7.67083153285481e-05, |
|
"loss": 0.0221, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.353215284249767, |
|
"grad_norm": 1.4917454719543457, |
|
"learning_rate": 7.657838325004159e-05, |
|
"loss": 0.068, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.35414725069897485, |
|
"grad_norm": 3.252878189086914, |
|
"learning_rate": 7.644820051634812e-05, |
|
"loss": 0.1152, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.3550792171481827, |
|
"grad_norm": 1.7754201889038086, |
|
"learning_rate": 7.63177683551939e-05, |
|
"loss": 0.0733, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.3560111835973905, |
|
"grad_norm": 0.9449477791786194, |
|
"learning_rate": 7.618708799665744e-05, |
|
"loss": 0.0237, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.3569431500465983, |
|
"grad_norm": 1.0870510339736938, |
|
"learning_rate": 7.605616067315793e-05, |
|
"loss": 0.0547, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.35787511649580617, |
|
"grad_norm": 1.218623399734497, |
|
"learning_rate": 7.592498761944362e-05, |
|
"loss": 0.0661, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.35880708294501396, |
|
"grad_norm": 1.4543731212615967, |
|
"learning_rate": 7.579357007258022e-05, |
|
"loss": 0.1162, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.3597390493942218, |
|
"grad_norm": 1.6948089599609375, |
|
"learning_rate": 7.56619092719392e-05, |
|
"loss": 0.1639, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.36067101584342964, |
|
"grad_norm": 2.109131336212158, |
|
"learning_rate": 7.553000645918611e-05, |
|
"loss": 0.1001, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.3616029822926375, |
|
"grad_norm": 3.4576566219329834, |
|
"learning_rate": 7.539786287826885e-05, |
|
"loss": 0.1819, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.3625349487418453, |
|
"grad_norm": 2.4241981506347656, |
|
"learning_rate": 7.526547977540592e-05, |
|
"loss": 0.0459, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.3634669151910531, |
|
"grad_norm": 2.08624005317688, |
|
"learning_rate": 7.51328583990748e-05, |
|
"loss": 0.1933, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.36439888164026096, |
|
"grad_norm": 1.3993889093399048, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 0.0555, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.36533084808946875, |
|
"grad_norm": 0.9735451340675354, |
|
"learning_rate": 7.486690583114136e-05, |
|
"loss": 0.0553, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.3662628145386766, |
|
"grad_norm": 1.6305480003356934, |
|
"learning_rate": 7.473357714768222e-05, |
|
"loss": 0.0586, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.36719478098788444, |
|
"grad_norm": 4.290209770202637, |
|
"learning_rate": 7.460001520701755e-05, |
|
"loss": 0.2194, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.3681267474370923, |
|
"grad_norm": 2.2773101329803467, |
|
"learning_rate": 7.446622126874218e-05, |
|
"loss": 0.1027, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.36905871388630007, |
|
"grad_norm": 0.5855029821395874, |
|
"learning_rate": 7.433219659463881e-05, |
|
"loss": 0.0179, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.3699906803355079, |
|
"grad_norm": 2.2826790809631348, |
|
"learning_rate": 7.41979424486662e-05, |
|
"loss": 0.1378, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.37092264678471576, |
|
"grad_norm": 1.142947793006897, |
|
"learning_rate": 7.406346009694713e-05, |
|
"loss": 0.0291, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.3718546132339236, |
|
"grad_norm": 3.178868532180786, |
|
"learning_rate": 7.392875080775665e-05, |
|
"loss": 0.1048, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.3727865796831314, |
|
"grad_norm": 9.8319091796875, |
|
"learning_rate": 7.379381585150996e-05, |
|
"loss": 0.3356, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.37371854613233924, |
|
"grad_norm": 1.062192440032959, |
|
"learning_rate": 7.365865650075046e-05, |
|
"loss": 0.0727, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.3746505125815471, |
|
"grad_norm": 1.5438944101333618, |
|
"learning_rate": 7.35232740301378e-05, |
|
"loss": 0.1193, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.37558247903075487, |
|
"grad_norm": 1.8247103691101074, |
|
"learning_rate": 7.338766971643579e-05, |
|
"loss": 0.1114, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.3765144454799627, |
|
"grad_norm": 3.635122060775757, |
|
"learning_rate": 7.325184483850042e-05, |
|
"loss": 0.1463, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.37744641192917056, |
|
"grad_norm": 0.604207456111908, |
|
"learning_rate": 7.311580067726783e-05, |
|
"loss": 0.0194, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.3783783783783784, |
|
"grad_norm": 1.354001760482788, |
|
"learning_rate": 7.297953851574206e-05, |
|
"loss": 0.0441, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.3793103448275862, |
|
"grad_norm": 1.9031493663787842, |
|
"learning_rate": 7.284305963898314e-05, |
|
"loss": 0.1449, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.38024231127679403, |
|
"grad_norm": 1.7530853748321533, |
|
"learning_rate": 7.27063653340949e-05, |
|
"loss": 0.131, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.3811742777260019, |
|
"grad_norm": 1.8988293409347534, |
|
"learning_rate": 7.25694568902128e-05, |
|
"loss": 0.1702, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.3821062441752097, |
|
"grad_norm": 1.3123613595962524, |
|
"learning_rate": 7.243233559849179e-05, |
|
"loss": 0.0723, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.3830382106244175, |
|
"grad_norm": 1.0989536046981812, |
|
"learning_rate": 7.229500275209418e-05, |
|
"loss": 0.0745, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.38397017707362535, |
|
"grad_norm": 1.5770695209503174, |
|
"learning_rate": 7.215745964617737e-05, |
|
"loss": 0.0333, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.3849021435228332, |
|
"grad_norm": 1.0897724628448486, |
|
"learning_rate": 7.201970757788172e-05, |
|
"loss": 0.038, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.385834109972041, |
|
"grad_norm": 0.8779085874557495, |
|
"learning_rate": 7.188174784631824e-05, |
|
"loss": 0.0531, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.38676607642124883, |
|
"grad_norm": 1.3717111349105835, |
|
"learning_rate": 7.174358175255636e-05, |
|
"loss": 0.1259, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.38769804287045667, |
|
"grad_norm": 2.16155743598938, |
|
"learning_rate": 7.160521059961169e-05, |
|
"loss": 0.0633, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.3886300093196645, |
|
"grad_norm": 2.235919952392578, |
|
"learning_rate": 7.146663569243371e-05, |
|
"loss": 0.1122, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.3895619757688723, |
|
"grad_norm": 1.4645483493804932, |
|
"learning_rate": 7.132785833789344e-05, |
|
"loss": 0.0639, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.39049394221808015, |
|
"grad_norm": 1.0879848003387451, |
|
"learning_rate": 7.118887984477116e-05, |
|
"loss": 0.0231, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.391425908667288, |
|
"grad_norm": 1.1921757459640503, |
|
"learning_rate": 7.104970152374405e-05, |
|
"loss": 0.041, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.3923578751164958, |
|
"grad_norm": 0.8043162822723389, |
|
"learning_rate": 7.091032468737381e-05, |
|
"loss": 0.0211, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.3932898415657036, |
|
"grad_norm": 1.0248724222183228, |
|
"learning_rate": 7.077075065009433e-05, |
|
"loss": 0.0221, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.39422180801491147, |
|
"grad_norm": 1.9075223207473755, |
|
"learning_rate": 7.063098072819919e-05, |
|
"loss": 0.1292, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.3951537744641193, |
|
"grad_norm": 2.7208824157714844, |
|
"learning_rate": 7.049101623982937e-05, |
|
"loss": 0.0734, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.3960857409133271, |
|
"grad_norm": 101.83853149414062, |
|
"learning_rate": 7.03508585049608e-05, |
|
"loss": 1.1532, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.39701770736253494, |
|
"grad_norm": 0.779060423374176, |
|
"learning_rate": 7.021050884539177e-05, |
|
"loss": 0.0463, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.3979496738117428, |
|
"grad_norm": 1.509827971458435, |
|
"learning_rate": 7.006996858473069e-05, |
|
"loss": 0.0837, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.39888164026095063, |
|
"grad_norm": 1.621672511100769, |
|
"learning_rate": 6.99292390483834e-05, |
|
"loss": 0.0827, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.3998136067101584, |
|
"grad_norm": 0.5434191823005676, |
|
"learning_rate": 6.97883215635408e-05, |
|
"loss": 0.0147, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.40074557315936626, |
|
"grad_norm": 1.5291364192962646, |
|
"learning_rate": 6.964721745916631e-05, |
|
"loss": 0.0406, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.4016775396085741, |
|
"grad_norm": 0.9197437167167664, |
|
"learning_rate": 6.950592806598327e-05, |
|
"loss": 0.0326, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.4026095060577819, |
|
"grad_norm": 0.8559898138046265, |
|
"learning_rate": 6.936445471646249e-05, |
|
"loss": 0.0433, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.40354147250698974, |
|
"grad_norm": 1.2630029916763306, |
|
"learning_rate": 6.922279874480959e-05, |
|
"loss": 0.0317, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.4044734389561976, |
|
"grad_norm": 1.6737178564071655, |
|
"learning_rate": 6.908096148695251e-05, |
|
"loss": 0.0879, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.40540540540540543, |
|
"grad_norm": 1.7928637266159058, |
|
"learning_rate": 6.89389442805288e-05, |
|
"loss": 0.1492, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.4063373718546132, |
|
"grad_norm": 2.602510452270508, |
|
"learning_rate": 6.879674846487314e-05, |
|
"loss": 0.1599, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.40726933830382106, |
|
"grad_norm": 1.5879359245300293, |
|
"learning_rate": 6.865437538100457e-05, |
|
"loss": 0.1823, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.4082013047530289, |
|
"grad_norm": 0.5362139940261841, |
|
"learning_rate": 6.851182637161396e-05, |
|
"loss": 0.0136, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.4091332712022367, |
|
"grad_norm": 1.3532445430755615, |
|
"learning_rate": 6.836910278105124e-05, |
|
"loss": 0.0812, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.41006523765144454, |
|
"grad_norm": 1.3916680812835693, |
|
"learning_rate": 6.822620595531286e-05, |
|
"loss": 0.033, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.4109972041006524, |
|
"grad_norm": 1.6064777374267578, |
|
"learning_rate": 6.808313724202894e-05, |
|
"loss": 0.1176, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.4119291705498602, |
|
"grad_norm": 1.1576989889144897, |
|
"learning_rate": 6.793989799045066e-05, |
|
"loss": 0.0485, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.412861136999068, |
|
"grad_norm": 1.1500450372695923, |
|
"learning_rate": 6.779648955143754e-05, |
|
"loss": 0.0453, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.41379310344827586, |
|
"grad_norm": 1.5548428297042847, |
|
"learning_rate": 6.765291327744463e-05, |
|
"loss": 0.0538, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.4147250698974837, |
|
"grad_norm": 1.0851874351501465, |
|
"learning_rate": 6.750917052250982e-05, |
|
"loss": 0.0226, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.41565703634669154, |
|
"grad_norm": 0.5055509209632874, |
|
"learning_rate": 6.736526264224101e-05, |
|
"loss": 0.0136, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.41658900279589933, |
|
"grad_norm": 3.6784214973449707, |
|
"learning_rate": 6.722119099380345e-05, |
|
"loss": 0.216, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.4175209692451072, |
|
"grad_norm": 2.3906421661376953, |
|
"learning_rate": 6.707695693590676e-05, |
|
"loss": 0.1041, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.418452935694315, |
|
"grad_norm": 2.9953436851501465, |
|
"learning_rate": 6.693256182879225e-05, |
|
"loss": 0.1169, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.4193849021435228, |
|
"grad_norm": 8.506790161132812, |
|
"learning_rate": 6.678800703422003e-05, |
|
"loss": 0.2528, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.42031686859273065, |
|
"grad_norm": 1.6189558506011963, |
|
"learning_rate": 6.664329391545626e-05, |
|
"loss": 0.0498, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.4212488350419385, |
|
"grad_norm": 0.789339005947113, |
|
"learning_rate": 6.64984238372601e-05, |
|
"loss": 0.0302, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.42218080149114634, |
|
"grad_norm": 2.2830944061279297, |
|
"learning_rate": 6.635339816587109e-05, |
|
"loss": 0.0388, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.42311276794035413, |
|
"grad_norm": 1.3299132585525513, |
|
"learning_rate": 6.620821826899606e-05, |
|
"loss": 0.0892, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.424044734389562, |
|
"grad_norm": 1.234998345375061, |
|
"learning_rate": 6.606288551579629e-05, |
|
"loss": 0.0744, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.4249767008387698, |
|
"grad_norm": 1.1684775352478027, |
|
"learning_rate": 6.59174012768747e-05, |
|
"loss": 0.0461, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.42590866728797766, |
|
"grad_norm": 0.12061901390552521, |
|
"learning_rate": 6.577176692426279e-05, |
|
"loss": 0.0041, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.42684063373718545, |
|
"grad_norm": 1.531051754951477, |
|
"learning_rate": 6.562598383140772e-05, |
|
"loss": 0.093, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.4277726001863933, |
|
"grad_norm": 0.8624515533447266, |
|
"learning_rate": 6.548005337315944e-05, |
|
"loss": 0.0424, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.42870456663560114, |
|
"grad_norm": 0.7969793081283569, |
|
"learning_rate": 6.533397692575766e-05, |
|
"loss": 0.0549, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.4296365330848089, |
|
"grad_norm": 0.7530978918075562, |
|
"learning_rate": 6.518775586681887e-05, |
|
"loss": 0.0508, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.43056849953401677, |
|
"grad_norm": 0.8213361501693726, |
|
"learning_rate": 6.504139157532337e-05, |
|
"loss": 0.0552, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.4315004659832246, |
|
"grad_norm": 1.5611070394515991, |
|
"learning_rate": 6.489488543160224e-05, |
|
"loss": 0.0992, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.43243243243243246, |
|
"grad_norm": 0.9949514269828796, |
|
"learning_rate": 6.47482388173244e-05, |
|
"loss": 0.0819, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.43336439888164024, |
|
"grad_norm": 1.4731316566467285, |
|
"learning_rate": 6.460145311548341e-05, |
|
"loss": 0.0341, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.4342963653308481, |
|
"grad_norm": 1.581841230392456, |
|
"learning_rate": 6.445452971038464e-05, |
|
"loss": 0.0797, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.43522833178005593, |
|
"grad_norm": 0.46201473474502563, |
|
"learning_rate": 6.430746998763204e-05, |
|
"loss": 0.0174, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.4361602982292637, |
|
"grad_norm": 0.8862415552139282, |
|
"learning_rate": 6.416027533411519e-05, |
|
"loss": 0.0413, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.43709226467847156, |
|
"grad_norm": 1.4375804662704468, |
|
"learning_rate": 6.401294713799618e-05, |
|
"loss": 0.0863, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.4380242311276794, |
|
"grad_norm": 2.5881032943725586, |
|
"learning_rate": 6.386548678869644e-05, |
|
"loss": 0.0682, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.43895619757688725, |
|
"grad_norm": 1.366281509399414, |
|
"learning_rate": 6.37178956768838e-05, |
|
"loss": 0.0385, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.43988816402609504, |
|
"grad_norm": 2.332908868789673, |
|
"learning_rate": 6.35701751944592e-05, |
|
"loss": 0.0655, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.4408201304753029, |
|
"grad_norm": 0.8943026661872864, |
|
"learning_rate": 6.342232673454371e-05, |
|
"loss": 0.0217, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.44175209692451073, |
|
"grad_norm": 0.4061681628227234, |
|
"learning_rate": 6.32743516914653e-05, |
|
"loss": 0.0063, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.4426840633737186, |
|
"grad_norm": 2.605149984359741, |
|
"learning_rate": 6.312625146074575e-05, |
|
"loss": 0.0873, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.44361602982292636, |
|
"grad_norm": 0.26587679982185364, |
|
"learning_rate": 6.297802743908741e-05, |
|
"loss": 0.0062, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.4445479962721342, |
|
"grad_norm": 1.5406039953231812, |
|
"learning_rate": 6.282968102436009e-05, |
|
"loss": 0.0739, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.44547996272134205, |
|
"grad_norm": 1.777808666229248, |
|
"learning_rate": 6.268121361558792e-05, |
|
"loss": 0.0769, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.44641192917054984, |
|
"grad_norm": 0.9425985217094421, |
|
"learning_rate": 6.253262661293604e-05, |
|
"loss": 0.0383, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.4473438956197577, |
|
"grad_norm": 1.4720977544784546, |
|
"learning_rate": 6.238392141769743e-05, |
|
"loss": 0.0769, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.4482758620689655, |
|
"grad_norm": 0.7975260019302368, |
|
"learning_rate": 6.22350994322798e-05, |
|
"loss": 0.0518, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.44920782851817337, |
|
"grad_norm": 1.155370831489563, |
|
"learning_rate": 6.208616206019224e-05, |
|
"loss": 0.0326, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.45013979496738116, |
|
"grad_norm": 1.9402527809143066, |
|
"learning_rate": 6.193711070603202e-05, |
|
"loss": 0.1378, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.451071761416589, |
|
"grad_norm": 0.9330565929412842, |
|
"learning_rate": 6.178794677547137e-05, |
|
"loss": 0.0259, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.45200372786579684, |
|
"grad_norm": 6.712258338928223, |
|
"learning_rate": 6.163867167524419e-05, |
|
"loss": 0.0925, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.45293569431500463, |
|
"grad_norm": 0.7303646206855774, |
|
"learning_rate": 6.14892868131328e-05, |
|
"loss": 0.0296, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.4538676607642125, |
|
"grad_norm": 1.3826953172683716, |
|
"learning_rate": 6.133979359795468e-05, |
|
"loss": 0.1587, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.4547996272134203, |
|
"grad_norm": 1.1449270248413086, |
|
"learning_rate": 6.119019343954914e-05, |
|
"loss": 0.0431, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.45573159366262816, |
|
"grad_norm": 1.846737027168274, |
|
"learning_rate": 6.104048774876406e-05, |
|
"loss": 0.0455, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.45666356011183595, |
|
"grad_norm": 1.589819073677063, |
|
"learning_rate": 6.0890677937442574e-05, |
|
"loss": 0.066, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.4575955265610438, |
|
"grad_norm": 2.8043324947357178, |
|
"learning_rate": 6.074076541840977e-05, |
|
"loss": 0.1297, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.45852749301025164, |
|
"grad_norm": 1.2121779918670654, |
|
"learning_rate": 6.059075160545933e-05, |
|
"loss": 0.0915, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.4594594594594595, |
|
"grad_norm": 1.4770679473876953, |
|
"learning_rate": 6.0440637913340224e-05, |
|
"loss": 0.0892, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.4603914259086673, |
|
"grad_norm": 2.293391227722168, |
|
"learning_rate": 6.029042575774334e-05, |
|
"loss": 0.0915, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.4613233923578751, |
|
"grad_norm": 2.9511337280273438, |
|
"learning_rate": 6.0140116555288204e-05, |
|
"loss": 0.0979, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.46225535880708296, |
|
"grad_norm": 1.325278639793396, |
|
"learning_rate": 5.998971172350953e-05, |
|
"loss": 0.0623, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.46318732525629075, |
|
"grad_norm": 1.2071905136108398, |
|
"learning_rate": 5.9839212680843925e-05, |
|
"loss": 0.0436, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.4641192917054986, |
|
"grad_norm": 1.2752379179000854, |
|
"learning_rate": 5.968862084661643e-05, |
|
"loss": 0.0375, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.46505125815470644, |
|
"grad_norm": 2.3906185626983643, |
|
"learning_rate": 5.953793764102722e-05, |
|
"loss": 0.0396, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.4659832246039143, |
|
"grad_norm": 2.577181339263916, |
|
"learning_rate": 5.938716448513818e-05, |
|
"loss": 0.1317, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.46691519105312207, |
|
"grad_norm": 0.3229764699935913, |
|
"learning_rate": 5.923630280085948e-05, |
|
"loss": 0.0078, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.4678471575023299, |
|
"grad_norm": 1.5205317735671997, |
|
"learning_rate": 5.908535401093618e-05, |
|
"loss": 0.0789, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.46877912395153776, |
|
"grad_norm": 0.8096385598182678, |
|
"learning_rate": 5.8934319538934824e-05, |
|
"loss": 0.0274, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.46971109040074555, |
|
"grad_norm": 1.7681595087051392, |
|
"learning_rate": 5.878320080923001e-05, |
|
"loss": 0.0511, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.4706430568499534, |
|
"grad_norm": 2.489217519760132, |
|
"learning_rate": 5.863199924699095e-05, |
|
"loss": 0.176, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.47157502329916123, |
|
"grad_norm": 1.064134120941162, |
|
"learning_rate": 5.848071627816803e-05, |
|
"loss": 0.0863, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.4725069897483691, |
|
"grad_norm": 1.0623358488082886, |
|
"learning_rate": 5.8329353329479366e-05, |
|
"loss": 0.0437, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.47343895619757687, |
|
"grad_norm": 0.8172505497932434, |
|
"learning_rate": 5.8177911828397336e-05, |
|
"loss": 0.0331, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.4743709226467847, |
|
"grad_norm": 0.7944416999816895, |
|
"learning_rate": 5.802639320313514e-05, |
|
"loss": 0.0435, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.47530288909599255, |
|
"grad_norm": 0.8773410320281982, |
|
"learning_rate": 5.787479888263333e-05, |
|
"loss": 0.0378, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.4762348555452004, |
|
"grad_norm": 0.8762723207473755, |
|
"learning_rate": 5.77231302965463e-05, |
|
"loss": 0.0607, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.4771668219944082, |
|
"grad_norm": 0.24209994077682495, |
|
"learning_rate": 5.757138887522884e-05, |
|
"loss": 0.0059, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.47809878844361603, |
|
"grad_norm": 4.610219478607178, |
|
"learning_rate": 5.7419576049722646e-05, |
|
"loss": 0.0349, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.4790307548928239, |
|
"grad_norm": 0.8906161785125732, |
|
"learning_rate": 5.7267693251742794e-05, |
|
"loss": 0.0262, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.47996272134203166, |
|
"grad_norm": 1.6799854040145874, |
|
"learning_rate": 5.7115741913664264e-05, |
|
"loss": 0.107, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.4808946877912395, |
|
"grad_norm": 2.382206678390503, |
|
"learning_rate": 5.696372346850841e-05, |
|
"loss": 0.0567, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.48182665424044735, |
|
"grad_norm": 1.7199441194534302, |
|
"learning_rate": 5.68116393499295e-05, |
|
"loss": 0.0622, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.4827586206896552, |
|
"grad_norm": 0.9822327494621277, |
|
"learning_rate": 5.6659490992201094e-05, |
|
"loss": 0.025, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.483690587138863, |
|
"grad_norm": 1.5693819522857666, |
|
"learning_rate": 5.650727983020262e-05, |
|
"loss": 0.2062, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.4846225535880708, |
|
"grad_norm": 1.9711823463439941, |
|
"learning_rate": 5.635500729940577e-05, |
|
"loss": 0.0682, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.48555452003727867, |
|
"grad_norm": 1.031491994857788, |
|
"learning_rate": 5.620267483586105e-05, |
|
"loss": 0.0203, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.4864864864864865, |
|
"grad_norm": 1.9086527824401855, |
|
"learning_rate": 5.6050283876184116e-05, |
|
"loss": 0.0701, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.4874184529356943, |
|
"grad_norm": 0.8771370053291321, |
|
"learning_rate": 5.5897835857542317e-05, |
|
"loss": 0.0241, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.48835041938490215, |
|
"grad_norm": 2.242940664291382, |
|
"learning_rate": 5.574533221764109e-05, |
|
"loss": 0.0484, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.48928238583411, |
|
"grad_norm": 23.281734466552734, |
|
"learning_rate": 5.5592774394710465e-05, |
|
"loss": 0.23, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.4902143522833178, |
|
"grad_norm": 1.172943115234375, |
|
"learning_rate": 5.5440163827491454e-05, |
|
"loss": 0.0466, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.4911463187325256, |
|
"grad_norm": 0.3696756958961487, |
|
"learning_rate": 5.5287501955222444e-05, |
|
"loss": 0.011, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.49207828518173347, |
|
"grad_norm": 1.8396764993667603, |
|
"learning_rate": 5.513479021762572e-05, |
|
"loss": 0.1616, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.4930102516309413, |
|
"grad_norm": 1.1849040985107422, |
|
"learning_rate": 5.4982030054893785e-05, |
|
"loss": 0.0695, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.4939422180801491, |
|
"grad_norm": 1.8092740774154663, |
|
"learning_rate": 5.482922290767589e-05, |
|
"loss": 0.0366, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.49487418452935694, |
|
"grad_norm": 0.546469509601593, |
|
"learning_rate": 5.467637021706438e-05, |
|
"loss": 0.0171, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.4958061509785648, |
|
"grad_norm": 1.3984041213989258, |
|
"learning_rate": 5.452347342458104e-05, |
|
"loss": 0.0303, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.4967381174277726, |
|
"grad_norm": 0.39972129464149475, |
|
"learning_rate": 5.437053397216364e-05, |
|
"loss": 0.0101, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.4976700838769804, |
|
"grad_norm": 0.5799165964126587, |
|
"learning_rate": 5.4217553302152237e-05, |
|
"loss": 0.0173, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.49860205032618826, |
|
"grad_norm": 1.4319212436676025, |
|
"learning_rate": 5.406453285727564e-05, |
|
"loss": 0.0405, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.4995340167753961, |
|
"grad_norm": 0.3638681173324585, |
|
"learning_rate": 5.39114740806377e-05, |
|
"loss": 0.0115, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.5004659832246039, |
|
"grad_norm": 0.8622692227363586, |
|
"learning_rate": 5.3758378415703825e-05, |
|
"loss": 0.0348, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.5013979496738118, |
|
"grad_norm": 0.7463175654411316, |
|
"learning_rate": 5.360524730628728e-05, |
|
"loss": 0.0238, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.5013979496738118, |
|
"eval_loss": 0.0520755872130394, |
|
"eval_runtime": 126.0622, |
|
"eval_samples_per_second": 14.334, |
|
"eval_steps_per_second": 3.586, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.5023299161230196, |
|
"grad_norm": 0.33791813254356384, |
|
"learning_rate": 5.345208219653561e-05, |
|
"loss": 0.0073, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.5032618825722274, |
|
"grad_norm": 0.7025349140167236, |
|
"learning_rate": 5.3298884530917014e-05, |
|
"loss": 0.0179, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.5041938490214353, |
|
"grad_norm": 6.270991325378418, |
|
"learning_rate": 5.3145655754206714e-05, |
|
"loss": 0.1438, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.5051258154706431, |
|
"grad_norm": 0.582237184047699, |
|
"learning_rate": 5.2992397311473316e-05, |
|
"loss": 0.0119, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.5060577819198508, |
|
"grad_norm": 0.25732892751693726, |
|
"learning_rate": 5.2839110648065214e-05, |
|
"loss": 0.0065, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.5069897483690587, |
|
"grad_norm": 4.037124156951904, |
|
"learning_rate": 5.2685797209596976e-05, |
|
"loss": 0.0951, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.5079217148182665, |
|
"grad_norm": 1.8424044847488403, |
|
"learning_rate": 5.2532458441935636e-05, |
|
"loss": 0.0928, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.5088536812674743, |
|
"grad_norm": 2.209555149078369, |
|
"learning_rate": 5.2379095791187124e-05, |
|
"loss": 0.1429, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.5097856477166822, |
|
"grad_norm": 0.6223617792129517, |
|
"learning_rate": 5.222571070368257e-05, |
|
"loss": 0.0104, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.51071761416589, |
|
"grad_norm": 1.578021764755249, |
|
"learning_rate": 5.2072304625964785e-05, |
|
"loss": 0.0166, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.5116495806150979, |
|
"grad_norm": 7.880814552307129, |
|
"learning_rate": 5.191887900477443e-05, |
|
"loss": 0.1465, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.5125815470643057, |
|
"grad_norm": 13.242683410644531, |
|
"learning_rate": 5.176543528703657e-05, |
|
"loss": 0.3455, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.5135135135135135, |
|
"grad_norm": 0.05634134262800217, |
|
"learning_rate": 5.161197491984684e-05, |
|
"loss": 0.002, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.5144454799627214, |
|
"grad_norm": 0.34363609552383423, |
|
"learning_rate": 5.1458499350458e-05, |
|
"loss": 0.0064, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.5153774464119292, |
|
"grad_norm": 0.25330761075019836, |
|
"learning_rate": 5.130501002626609e-05, |
|
"loss": 0.0046, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.516309412861137, |
|
"grad_norm": 1.2197694778442383, |
|
"learning_rate": 5.11515083947969e-05, |
|
"loss": 0.0342, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.5172413793103449, |
|
"grad_norm": 3.949093818664551, |
|
"learning_rate": 5.099799590369231e-05, |
|
"loss": 0.0867, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.5181733457595527, |
|
"grad_norm": 0.16666558384895325, |
|
"learning_rate": 5.084447400069655e-05, |
|
"loss": 0.0043, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.5191053122087604, |
|
"grad_norm": 1.1512700319290161, |
|
"learning_rate": 5.069094413364272e-05, |
|
"loss": 0.0468, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.5200372786579683, |
|
"grad_norm": 0.9841926097869873, |
|
"learning_rate": 5.053740775043892e-05, |
|
"loss": 0.0752, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.5209692451071761, |
|
"grad_norm": 1.8312132358551025, |
|
"learning_rate": 5.038386629905475e-05, |
|
"loss": 0.0422, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.5219012115563839, |
|
"grad_norm": 2.9533884525299072, |
|
"learning_rate": 5.023032122750759e-05, |
|
"loss": 0.1588, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.5228331780055918, |
|
"grad_norm": 0.0878317654132843, |
|
"learning_rate": 5.007677398384901e-05, |
|
"loss": 0.002, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.5237651444547996, |
|
"grad_norm": 0.921676516532898, |
|
"learning_rate": 4.9923226016151e-05, |
|
"loss": 0.0214, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.5246971109040075, |
|
"grad_norm": 5.561086177825928, |
|
"learning_rate": 4.976967877249242e-05, |
|
"loss": 0.1318, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.5256290773532153, |
|
"grad_norm": 0.7021181583404541, |
|
"learning_rate": 4.961613370094527e-05, |
|
"loss": 0.0727, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.5265610438024231, |
|
"grad_norm": 1.6535977125167847, |
|
"learning_rate": 4.9462592249561095e-05, |
|
"loss": 0.0668, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.527493010251631, |
|
"grad_norm": 1.1922616958618164, |
|
"learning_rate": 4.9309055866357295e-05, |
|
"loss": 0.0224, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.5284249767008388, |
|
"grad_norm": 0.9673576354980469, |
|
"learning_rate": 4.915552599930345e-05, |
|
"loss": 0.0291, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.5293569431500466, |
|
"grad_norm": 1.8743202686309814, |
|
"learning_rate": 4.900200409630771e-05, |
|
"loss": 0.0289, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.5302889095992545, |
|
"grad_norm": 1.7686008214950562, |
|
"learning_rate": 4.884849160520311e-05, |
|
"loss": 0.0814, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.5312208760484622, |
|
"grad_norm": 0.39702948927879333, |
|
"learning_rate": 4.869498997373393e-05, |
|
"loss": 0.0115, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.53215284249767, |
|
"grad_norm": 0.8344688415527344, |
|
"learning_rate": 4.8541500649542014e-05, |
|
"loss": 0.0138, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.5330848089468779, |
|
"grad_norm": 1.8823225498199463, |
|
"learning_rate": 4.838802508015316e-05, |
|
"loss": 0.0605, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.5340167753960857, |
|
"grad_norm": 1.7391656637191772, |
|
"learning_rate": 4.823456471296344e-05, |
|
"loss": 0.0357, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.5349487418452936, |
|
"grad_norm": 2.7030084133148193, |
|
"learning_rate": 4.8081120995225574e-05, |
|
"loss": 0.0893, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.5358807082945014, |
|
"grad_norm": 5.969890594482422, |
|
"learning_rate": 4.792769537403523e-05, |
|
"loss": 0.347, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.5368126747437092, |
|
"grad_norm": 0.7905582785606384, |
|
"learning_rate": 4.777428929631743e-05, |
|
"loss": 0.0267, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.5377446411929171, |
|
"grad_norm": 1.111538290977478, |
|
"learning_rate": 4.762090420881289e-05, |
|
"loss": 0.0523, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.5386766076421249, |
|
"grad_norm": 1.5851444005966187, |
|
"learning_rate": 4.746754155806437e-05, |
|
"loss": 0.1028, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.5396085740913327, |
|
"grad_norm": 1.3687413930892944, |
|
"learning_rate": 4.731420279040303e-05, |
|
"loss": 0.0795, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.5405405405405406, |
|
"grad_norm": 2.08109712600708, |
|
"learning_rate": 4.716088935193479e-05, |
|
"loss": 0.0377, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.5414725069897484, |
|
"grad_norm": 1.7671325206756592, |
|
"learning_rate": 4.7007602688526695e-05, |
|
"loss": 0.0431, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.5424044734389561, |
|
"grad_norm": 2.354390859603882, |
|
"learning_rate": 4.6854344245793305e-05, |
|
"loss": 0.1034, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.543336439888164, |
|
"grad_norm": 1.8093174695968628, |
|
"learning_rate": 4.6701115469083e-05, |
|
"loss": 0.1356, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.5442684063373718, |
|
"grad_norm": 3.422104597091675, |
|
"learning_rate": 4.65479178034644e-05, |
|
"loss": 0.228, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.5452003727865797, |
|
"grad_norm": 0.0745796486735344, |
|
"learning_rate": 4.6394752693712734e-05, |
|
"loss": 0.0021, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.5461323392357875, |
|
"grad_norm": 0.6440649032592773, |
|
"learning_rate": 4.624162158429619e-05, |
|
"loss": 0.0187, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.5470643056849953, |
|
"grad_norm": 1.015874981880188, |
|
"learning_rate": 4.608852591936231e-05, |
|
"loss": 0.0256, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.5479962721342032, |
|
"grad_norm": 0.12743471562862396, |
|
"learning_rate": 4.593546714272438e-05, |
|
"loss": 0.0025, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.548928238583411, |
|
"grad_norm": 1.2057770490646362, |
|
"learning_rate": 4.578244669784777e-05, |
|
"loss": 0.0317, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.5498602050326188, |
|
"grad_norm": 2.5640909671783447, |
|
"learning_rate": 4.562946602783636e-05, |
|
"loss": 0.1007, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.5507921714818267, |
|
"grad_norm": 1.9403150081634521, |
|
"learning_rate": 4.547652657541897e-05, |
|
"loss": 0.0554, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.5517241379310345, |
|
"grad_norm": 1.5157524347305298, |
|
"learning_rate": 4.532362978293564e-05, |
|
"loss": 0.0366, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.5526561043802423, |
|
"grad_norm": 1.6860204935073853, |
|
"learning_rate": 4.517077709232411e-05, |
|
"loss": 0.0322, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.5535880708294502, |
|
"grad_norm": 0.4395802617073059, |
|
"learning_rate": 4.501796994510622e-05, |
|
"loss": 0.0131, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.554520037278658, |
|
"grad_norm": 1.106860637664795, |
|
"learning_rate": 4.4865209782374304e-05, |
|
"loss": 0.04, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.5554520037278659, |
|
"grad_norm": 0.5238240361213684, |
|
"learning_rate": 4.471249804477758e-05, |
|
"loss": 0.0125, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.5563839701770736, |
|
"grad_norm": 3.6945576667785645, |
|
"learning_rate": 4.4559836172508565e-05, |
|
"loss": 0.1404, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.5573159366262814, |
|
"grad_norm": 1.1280156373977661, |
|
"learning_rate": 4.4407225605289547e-05, |
|
"loss": 0.0245, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.5582479030754893, |
|
"grad_norm": 1.4675111770629883, |
|
"learning_rate": 4.4254667782358924e-05, |
|
"loss": 0.0228, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.5591798695246971, |
|
"grad_norm": 2.4232566356658936, |
|
"learning_rate": 4.410216414245771e-05, |
|
"loss": 0.0157, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.5601118359739049, |
|
"grad_norm": 0.8545390367507935, |
|
"learning_rate": 4.394971612381591e-05, |
|
"loss": 0.029, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.5610438024231128, |
|
"grad_norm": 0.5777714848518372, |
|
"learning_rate": 4.379732516413897e-05, |
|
"loss": 0.0127, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.5619757688723206, |
|
"grad_norm": 0.8869187831878662, |
|
"learning_rate": 4.364499270059424e-05, |
|
"loss": 0.0227, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.5629077353215284, |
|
"grad_norm": 0.9142225980758667, |
|
"learning_rate": 4.34927201697974e-05, |
|
"loss": 0.0552, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.5638397017707363, |
|
"grad_norm": 2.659519910812378, |
|
"learning_rate": 4.334050900779893e-05, |
|
"loss": 0.1076, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.5647716682199441, |
|
"grad_norm": 0.21417352557182312, |
|
"learning_rate": 4.3188360650070525e-05, |
|
"loss": 0.0068, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.5657036346691519, |
|
"grad_norm": 0.3624404966831207, |
|
"learning_rate": 4.303627653149159e-05, |
|
"loss": 0.0087, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.5666356011183598, |
|
"grad_norm": 1.3973774909973145, |
|
"learning_rate": 4.288425808633575e-05, |
|
"loss": 0.0599, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.5675675675675675, |
|
"grad_norm": 1.1630985736846924, |
|
"learning_rate": 4.2732306748257224e-05, |
|
"loss": 0.0857, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.5684995340167754, |
|
"grad_norm": 0.263249933719635, |
|
"learning_rate": 4.258042395027738e-05, |
|
"loss": 0.0084, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.5694315004659832, |
|
"grad_norm": 2.8128879070281982, |
|
"learning_rate": 4.2428611124771184e-05, |
|
"loss": 0.1316, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.570363466915191, |
|
"grad_norm": 1.825242042541504, |
|
"learning_rate": 4.2276869703453724e-05, |
|
"loss": 0.102, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.5712954333643989, |
|
"grad_norm": 0.9243812561035156, |
|
"learning_rate": 4.21252011173667e-05, |
|
"loss": 0.0403, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.5722273998136067, |
|
"grad_norm": 1.2102307081222534, |
|
"learning_rate": 4.1973606796864884e-05, |
|
"loss": 0.0637, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.5731593662628145, |
|
"grad_norm": 1.0278326272964478, |
|
"learning_rate": 4.182208817160269e-05, |
|
"loss": 0.0872, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.5740913327120224, |
|
"grad_norm": 0.973726212978363, |
|
"learning_rate": 4.167064667052065e-05, |
|
"loss": 0.0462, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.5750232991612302, |
|
"grad_norm": 0.3728512227535248, |
|
"learning_rate": 4.151928372183198e-05, |
|
"loss": 0.0064, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.575955265610438, |
|
"grad_norm": 2.0337462425231934, |
|
"learning_rate": 4.136800075300906e-05, |
|
"loss": 0.067, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.5768872320596459, |
|
"grad_norm": 2.5226025581359863, |
|
"learning_rate": 4.1216799190770006e-05, |
|
"loss": 0.1414, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.5778191985088537, |
|
"grad_norm": 0.20914335548877716, |
|
"learning_rate": 4.1065680461065194e-05, |
|
"loss": 0.0033, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.5787511649580616, |
|
"grad_norm": 2.150099754333496, |
|
"learning_rate": 4.0914645989063845e-05, |
|
"loss": 0.0502, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.5796831314072693, |
|
"grad_norm": 3.2852182388305664, |
|
"learning_rate": 4.076369719914055e-05, |
|
"loss": 0.0877, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.5806150978564771, |
|
"grad_norm": 2.739978790283203, |
|
"learning_rate": 4.061283551486185e-05, |
|
"loss": 0.2081, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.581547064305685, |
|
"grad_norm": 3.39498233795166, |
|
"learning_rate": 4.0462062358972796e-05, |
|
"loss": 0.1141, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.5824790307548928, |
|
"grad_norm": 3.0169754028320312, |
|
"learning_rate": 4.03113791533836e-05, |
|
"loss": 0.0791, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.5834109972041006, |
|
"grad_norm": 0.4348333477973938, |
|
"learning_rate": 4.016078731915608e-05, |
|
"loss": 0.0113, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.5843429636533085, |
|
"grad_norm": 0.17418645322322845, |
|
"learning_rate": 4.001028827649046e-05, |
|
"loss": 0.0049, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.5852749301025163, |
|
"grad_norm": 2.0803823471069336, |
|
"learning_rate": 3.9859883444711794e-05, |
|
"loss": 0.0276, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.5862068965517241, |
|
"grad_norm": 0.5942701697349548, |
|
"learning_rate": 3.970957424225666e-05, |
|
"loss": 0.0176, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.587138863000932, |
|
"grad_norm": 1.0880169868469238, |
|
"learning_rate": 3.955936208665979e-05, |
|
"loss": 0.0319, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.5880708294501398, |
|
"grad_norm": 1.0171687602996826, |
|
"learning_rate": 3.940924839454066e-05, |
|
"loss": 0.0592, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.5890027958993477, |
|
"grad_norm": 0.8191186189651489, |
|
"learning_rate": 3.925923458159023e-05, |
|
"loss": 0.0173, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.5899347623485555, |
|
"grad_norm": 0.34623149037361145, |
|
"learning_rate": 3.9109322062557424e-05, |
|
"loss": 0.0099, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.5908667287977633, |
|
"grad_norm": 0.3688358962535858, |
|
"learning_rate": 3.895951225123595e-05, |
|
"loss": 0.0094, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.5917986952469712, |
|
"grad_norm": 1.6616538763046265, |
|
"learning_rate": 3.880980656045087e-05, |
|
"loss": 0.0241, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.5927306616961789, |
|
"grad_norm": 0.9035270810127258, |
|
"learning_rate": 3.866020640204533e-05, |
|
"loss": 0.0169, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.5936626281453867, |
|
"grad_norm": 0.6977134346961975, |
|
"learning_rate": 3.851071318686721e-05, |
|
"loss": 0.0251, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.5945945945945946, |
|
"grad_norm": 1.0212866067886353, |
|
"learning_rate": 3.8361328324755825e-05, |
|
"loss": 0.0717, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.5955265610438024, |
|
"grad_norm": 1.4821285009384155, |
|
"learning_rate": 3.821205322452863e-05, |
|
"loss": 0.0705, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.5964585274930102, |
|
"grad_norm": 0.7820432186126709, |
|
"learning_rate": 3.8062889293967976e-05, |
|
"loss": 0.0187, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.5973904939422181, |
|
"grad_norm": 1.4119162559509277, |
|
"learning_rate": 3.791383793980776e-05, |
|
"loss": 0.0756, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.5983224603914259, |
|
"grad_norm": 0.652569591999054, |
|
"learning_rate": 3.7764900567720196e-05, |
|
"loss": 0.0153, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.5992544268406338, |
|
"grad_norm": 0.5894368290901184, |
|
"learning_rate": 3.761607858230257e-05, |
|
"loss": 0.0099, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.6001863932898416, |
|
"grad_norm": 1.44651198387146, |
|
"learning_rate": 3.746737338706397e-05, |
|
"loss": 0.0758, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.6011183597390494, |
|
"grad_norm": 1.0577771663665771, |
|
"learning_rate": 3.7318786384412074e-05, |
|
"loss": 0.0427, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.6020503261882573, |
|
"grad_norm": 1.540482997894287, |
|
"learning_rate": 3.71703189756399e-05, |
|
"loss": 0.0339, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.6029822926374651, |
|
"grad_norm": 2.182873249053955, |
|
"learning_rate": 3.70219725609126e-05, |
|
"loss": 0.075, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.6039142590866728, |
|
"grad_norm": 1.5290659666061401, |
|
"learning_rate": 3.6873748539254246e-05, |
|
"loss": 0.0569, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.6048462255358807, |
|
"grad_norm": 3.3621015548706055, |
|
"learning_rate": 3.6725648308534696e-05, |
|
"loss": 0.1976, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.6057781919850885, |
|
"grad_norm": 0.9337851405143738, |
|
"learning_rate": 3.65776732654563e-05, |
|
"loss": 0.054, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.6067101584342963, |
|
"grad_norm": 0.25393131375312805, |
|
"learning_rate": 3.6429824805540815e-05, |
|
"loss": 0.0052, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.6076421248835042, |
|
"grad_norm": 1.53010892868042, |
|
"learning_rate": 3.628210432311621e-05, |
|
"loss": 0.0433, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.608574091332712, |
|
"grad_norm": 0.31782206892967224, |
|
"learning_rate": 3.6134513211303556e-05, |
|
"loss": 0.0039, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.6095060577819198, |
|
"grad_norm": 0.7145633101463318, |
|
"learning_rate": 3.598705286200382e-05, |
|
"loss": 0.0182, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.6104380242311277, |
|
"grad_norm": 0.5081955790519714, |
|
"learning_rate": 3.58397246658848e-05, |
|
"loss": 0.0138, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.6113699906803355, |
|
"grad_norm": 1.1797877550125122, |
|
"learning_rate": 3.5692530012367955e-05, |
|
"loss": 0.0096, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.6123019571295434, |
|
"grad_norm": 0.0682310089468956, |
|
"learning_rate": 3.554547028961537e-05, |
|
"loss": 0.0023, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.6132339235787512, |
|
"grad_norm": 1.760521650314331, |
|
"learning_rate": 3.5398546884516604e-05, |
|
"loss": 0.0529, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.614165890027959, |
|
"grad_norm": 0.818846583366394, |
|
"learning_rate": 3.5251761182675625e-05, |
|
"loss": 0.0297, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.6150978564771669, |
|
"grad_norm": 0.3822430372238159, |
|
"learning_rate": 3.510511456839777e-05, |
|
"loss": 0.0111, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.6160298229263746, |
|
"grad_norm": 1.5274235010147095, |
|
"learning_rate": 3.495860842467664e-05, |
|
"loss": 0.0688, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.6169617893755824, |
|
"grad_norm": 2.4907660484313965, |
|
"learning_rate": 3.481224413318114e-05, |
|
"loss": 0.0601, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.6178937558247903, |
|
"grad_norm": 1.6190844774246216, |
|
"learning_rate": 3.4666023074242356e-05, |
|
"loss": 0.126, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.6188257222739981, |
|
"grad_norm": 1.7249010801315308, |
|
"learning_rate": 3.4519946626840573e-05, |
|
"loss": 0.0391, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.6197576887232059, |
|
"grad_norm": 1.1270893812179565, |
|
"learning_rate": 3.4374016168592296e-05, |
|
"loss": 0.0391, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.6206896551724138, |
|
"grad_norm": 2.65583872795105, |
|
"learning_rate": 3.422823307573722e-05, |
|
"loss": 0.0979, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 0.6216216216216216, |
|
"grad_norm": 0.3144218921661377, |
|
"learning_rate": 3.40825987231253e-05, |
|
"loss": 0.0056, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 0.6225535880708295, |
|
"grad_norm": 0.4311087727546692, |
|
"learning_rate": 3.393711448420372e-05, |
|
"loss": 0.0116, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 0.6234855545200373, |
|
"grad_norm": 1.4498568773269653, |
|
"learning_rate": 3.379178173100396e-05, |
|
"loss": 0.0887, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 0.6244175209692451, |
|
"grad_norm": 0.8447300791740417, |
|
"learning_rate": 3.364660183412892e-05, |
|
"loss": 0.0198, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 0.625349487418453, |
|
"grad_norm": 0.34523147344589233, |
|
"learning_rate": 3.3501576162739904e-05, |
|
"loss": 0.0081, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 0.6262814538676608, |
|
"grad_norm": 0.3423440456390381, |
|
"learning_rate": 3.335670608454376e-05, |
|
"loss": 0.0096, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 0.6272134203168686, |
|
"grad_norm": 0.6186404228210449, |
|
"learning_rate": 3.321199296577998e-05, |
|
"loss": 0.0128, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 0.6281453867660765, |
|
"grad_norm": 1.3062037229537964, |
|
"learning_rate": 3.3067438171207766e-05, |
|
"loss": 0.0293, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 0.6290773532152842, |
|
"grad_norm": 8.898473739624023, |
|
"learning_rate": 3.292304306409325e-05, |
|
"loss": 0.2124, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 0.630009319664492, |
|
"grad_norm": 2.829618453979492, |
|
"learning_rate": 3.2778809006196564e-05, |
|
"loss": 0.0664, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 0.6309412861136999, |
|
"grad_norm": 1.4280197620391846, |
|
"learning_rate": 3.263473735775899e-05, |
|
"loss": 0.0601, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 0.6318732525629077, |
|
"grad_norm": 1.7249200344085693, |
|
"learning_rate": 3.249082947749019e-05, |
|
"loss": 0.0946, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 0.6328052190121156, |
|
"grad_norm": 0.622667670249939, |
|
"learning_rate": 3.234708672255538e-05, |
|
"loss": 0.0068, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 0.6337371854613234, |
|
"grad_norm": 2.2010300159454346, |
|
"learning_rate": 3.220351044856247e-05, |
|
"loss": 0.0679, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 0.6346691519105312, |
|
"grad_norm": 1.1353260278701782, |
|
"learning_rate": 3.206010200954935e-05, |
|
"loss": 0.0427, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 0.6356011183597391, |
|
"grad_norm": 0.8460433483123779, |
|
"learning_rate": 3.191686275797107e-05, |
|
"loss": 0.012, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 0.6365330848089469, |
|
"grad_norm": 1.015140414237976, |
|
"learning_rate": 3.177379404468715e-05, |
|
"loss": 0.0281, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 0.6374650512581547, |
|
"grad_norm": 1.5313421487808228, |
|
"learning_rate": 3.163089721894876e-05, |
|
"loss": 0.0582, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 0.6383970177073626, |
|
"grad_norm": 0.6894136071205139, |
|
"learning_rate": 3.148817362838606e-05, |
|
"loss": 0.0102, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 0.6393289841565704, |
|
"grad_norm": 0.2898493707180023, |
|
"learning_rate": 3.134562461899545e-05, |
|
"loss": 0.0054, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 0.6402609506057781, |
|
"grad_norm": 0.8155750036239624, |
|
"learning_rate": 3.120325153512687e-05, |
|
"loss": 0.0197, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 0.641192917054986, |
|
"grad_norm": 0.26222237944602966, |
|
"learning_rate": 3.10610557194712e-05, |
|
"loss": 0.0075, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 0.6421248835041938, |
|
"grad_norm": 0.5798500776290894, |
|
"learning_rate": 3.091903851304751e-05, |
|
"loss": 0.013, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 0.6430568499534017, |
|
"grad_norm": 1.631866693496704, |
|
"learning_rate": 3.077720125519042e-05, |
|
"loss": 0.0798, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 0.6439888164026095, |
|
"grad_norm": 0.4146805703639984, |
|
"learning_rate": 3.063554528353752e-05, |
|
"loss": 0.0091, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 0.6449207828518173, |
|
"grad_norm": 1.7114466428756714, |
|
"learning_rate": 3.0494071934016737e-05, |
|
"loss": 0.0486, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 0.6458527493010252, |
|
"grad_norm": 1.099481225013733, |
|
"learning_rate": 3.03527825408337e-05, |
|
"loss": 0.0241, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 0.646784715750233, |
|
"grad_norm": 1.2806260585784912, |
|
"learning_rate": 3.0211678436459212e-05, |
|
"loss": 0.0481, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 0.6477166821994408, |
|
"grad_norm": 0.46307051181793213, |
|
"learning_rate": 3.0070760951616618e-05, |
|
"loss": 0.011, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 0.6486486486486487, |
|
"grad_norm": 2.406785488128662, |
|
"learning_rate": 2.993003141526933e-05, |
|
"loss": 0.0789, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 0.6495806150978565, |
|
"grad_norm": 1.8010510206222534, |
|
"learning_rate": 2.978949115460824e-05, |
|
"loss": 0.0789, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 0.6505125815470643, |
|
"grad_norm": 0.39275264739990234, |
|
"learning_rate": 2.9649141495039223e-05, |
|
"loss": 0.0066, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 0.6514445479962722, |
|
"grad_norm": 2.9994301795959473, |
|
"learning_rate": 2.950898376017064e-05, |
|
"loss": 0.056, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 0.65237651444548, |
|
"grad_norm": 6.136080741882324, |
|
"learning_rate": 2.9369019271800823e-05, |
|
"loss": 0.1862, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 0.6533084808946877, |
|
"grad_norm": 0.296806275844574, |
|
"learning_rate": 2.9229249349905684e-05, |
|
"loss": 0.0072, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 0.6542404473438956, |
|
"grad_norm": 0.6903855204582214, |
|
"learning_rate": 2.908967531262618e-05, |
|
"loss": 0.0312, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 0.6551724137931034, |
|
"grad_norm": 1.400633692741394, |
|
"learning_rate": 2.895029847625595e-05, |
|
"loss": 0.0153, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 0.6561043802423113, |
|
"grad_norm": 0.370590478181839, |
|
"learning_rate": 2.8811120155228844e-05, |
|
"loss": 0.0085, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 0.6570363466915191, |
|
"grad_norm": 0.47119617462158203, |
|
"learning_rate": 2.8672141662106578e-05, |
|
"loss": 0.0088, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 0.6579683131407269, |
|
"grad_norm": 0.07778705656528473, |
|
"learning_rate": 2.853336430756631e-05, |
|
"loss": 0.0024, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 0.6589002795899348, |
|
"grad_norm": 1.7751870155334473, |
|
"learning_rate": 2.8394789400388328e-05, |
|
"loss": 0.0729, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 0.6598322460391426, |
|
"grad_norm": 1.4420006275177002, |
|
"learning_rate": 2.8256418247443662e-05, |
|
"loss": 0.0563, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 0.6607642124883504, |
|
"grad_norm": 0.22819720208644867, |
|
"learning_rate": 2.811825215368179e-05, |
|
"loss": 0.0068, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 0.6616961789375583, |
|
"grad_norm": 0.3278886377811432, |
|
"learning_rate": 2.798029242211828e-05, |
|
"loss": 0.0067, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 0.6626281453867661, |
|
"grad_norm": 0.25643807649612427, |
|
"learning_rate": 2.7842540353822632e-05, |
|
"loss": 0.005, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 0.6635601118359739, |
|
"grad_norm": 0.47565677762031555, |
|
"learning_rate": 2.770499724790584e-05, |
|
"loss": 0.012, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 0.6644920782851818, |
|
"grad_norm": 0.05990108847618103, |
|
"learning_rate": 2.7567664401508225e-05, |
|
"loss": 0.0012, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 0.6654240447343895, |
|
"grad_norm": 0.6037838459014893, |
|
"learning_rate": 2.7430543109787222e-05, |
|
"loss": 0.0086, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 0.6663560111835974, |
|
"grad_norm": 2.216022491455078, |
|
"learning_rate": 2.729363466590511e-05, |
|
"loss": 0.0832, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 0.6672879776328052, |
|
"grad_norm": 1.0071760416030884, |
|
"learning_rate": 2.7156940361016864e-05, |
|
"loss": 0.0313, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 0.668219944082013, |
|
"grad_norm": 0.5068988800048828, |
|
"learning_rate": 2.702046148425795e-05, |
|
"loss": 0.0127, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 0.6691519105312209, |
|
"grad_norm": 0.16356121003627777, |
|
"learning_rate": 2.688419932273219e-05, |
|
"loss": 0.0029, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 0.6700838769804287, |
|
"grad_norm": 0.5813694596290588, |
|
"learning_rate": 2.6748155161499567e-05, |
|
"loss": 0.0086, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 0.6710158434296365, |
|
"grad_norm": 0.29364094138145447, |
|
"learning_rate": 2.6612330283564223e-05, |
|
"loss": 0.0052, |
|
"step": 720 |
|
}, |
|
{
"epoch": 0.6719478098788444,
"grad_norm": 1.4780179262161255,
"learning_rate": 2.6476725969862227e-05,
"loss": 0.0357,
"step": 721
},
{
"epoch": 0.6728797763280522,
"grad_norm": 0.11548914760351181,
"learning_rate": 2.634134349924956e-05,
"loss": 0.002,
"step": 722
},
{
"epoch": 0.67381174277726,
"grad_norm": 0.39383208751678467,
"learning_rate": 2.6206184148490066e-05,
"loss": 0.0091,
"step": 723
},
{
"epoch": 0.6747437092264679,
"grad_norm": 0.3109658658504486,
"learning_rate": 2.6071249192243363e-05,
"loss": 0.0018,
"step": 724
},
{
"epoch": 0.6756756756756757,
"grad_norm": 3.5623598098754883,
"learning_rate": 2.5936539903052892e-05,
"loss": 0.1177,
"step": 725
},
{
"epoch": 0.6766076421248836,
"grad_norm": 0.1712770015001297,
"learning_rate": 2.580205755133384e-05,
"loss": 0.0013,
"step": 726
},
{
"epoch": 0.6775396085740913,
"grad_norm": 0.7612726092338562,
"learning_rate": 2.5667803405361213e-05,
"loss": 0.0122,
"step": 727
},
{
"epoch": 0.6784715750232991,
"grad_norm": 0.7174275517463684,
"learning_rate": 2.5533778731257824e-05,
"loss": 0.0259,
"step": 728
},
{
"epoch": 0.679403541472507,
"grad_norm": 1.2396039962768555,
"learning_rate": 2.5399984792982457e-05,
"loss": 0.0468,
"step": 729
},
{
"epoch": 0.6803355079217148,
"grad_norm": 0.49372637271881104,
"learning_rate": 2.5266422852317795e-05,
"loss": 0.0081,
"step": 730
},
{
"epoch": 0.6812674743709226,
"grad_norm": 0.16633592545986176,
"learning_rate": 2.513309416885865e-05,
"loss": 0.0042,
"step": 731
},
{
"epoch": 0.6821994408201305,
"grad_norm": 1.141757607460022,
"learning_rate": 2.500000000000001e-05,
"loss": 0.0391,
"step": 732
},
{
"epoch": 0.6831314072693383,
"grad_norm": 0.2698551118373871,
"learning_rate": 2.4867141600925214e-05,
"loss": 0.005,
"step": 733
},
{
"epoch": 0.6840633737185461,
"grad_norm": 0.20871886610984802,
"learning_rate": 2.4734520224594093e-05,
"loss": 0.0061,
"step": 734
},
{
"epoch": 0.684995340167754,
"grad_norm": 0.3639092445373535,
"learning_rate": 2.4602137121731195e-05,
"loss": 0.0122,
"step": 735
},
{
"epoch": 0.6859273066169618,
"grad_norm": 1.5544925928115845,
"learning_rate": 2.44699935408139e-05,
"loss": 0.0348,
"step": 736
},
{
"epoch": 0.6868592730661697,
"grad_norm": 0.7567540407180786,
"learning_rate": 2.433809072806081e-05,
"loss": 0.0063,
"step": 737
},
{
"epoch": 0.6877912395153775,
"grad_norm": 1.6622443199157715,
"learning_rate": 2.4206429927419794e-05,
"loss": 0.0182,
"step": 738
},
{
"epoch": 0.6887232059645852,
"grad_norm": 0.32770344614982605,
"learning_rate": 2.40750123805564e-05,
"loss": 0.0039,
"step": 739
},
{
"epoch": 0.6896551724137931,
"grad_norm": 1.0293552875518799,
"learning_rate": 2.3943839326842092e-05,
"loss": 0.0309,
"step": 740
},
{
"epoch": 0.6905871388630009,
"grad_norm": 1.511672854423523,
"learning_rate": 2.3812912003342568e-05,
"loss": 0.0387,
"step": 741
},
{
"epoch": 0.6915191053122087,
"grad_norm": 1.2512294054031372,
"learning_rate": 2.3682231644806108e-05,
"loss": 0.0276,
"step": 742
},
{
"epoch": 0.6924510717614166,
"grad_norm": 0.2696451246738434,
"learning_rate": 2.3551799483651894e-05,
"loss": 0.0027,
"step": 743
},
{
"epoch": 0.6933830382106244,
"grad_norm": 0.7463300228118896,
"learning_rate": 2.342161674995843e-05,
"loss": 0.0067,
"step": 744
},
{
"epoch": 0.6943150046598322,
"grad_norm": 0.102654367685318,
"learning_rate": 2.3291684671451903e-05,
"loss": 0.0025,
"step": 745
},
{
"epoch": 0.6952469711090401,
"grad_norm": 0.2291325330734253,
"learning_rate": 2.3162004473494657e-05,
"loss": 0.004,
"step": 746
},
{
"epoch": 0.6961789375582479,
"grad_norm": 2.3645198345184326,
"learning_rate": 2.3032577379073577e-05,
"loss": 0.0629,
"step": 747
},
{
"epoch": 0.6971109040074557,
"grad_norm": 0.990889847278595,
"learning_rate": 2.2903404608788582e-05,
"loss": 0.024,
"step": 748
},
{
"epoch": 0.6980428704566636,
"grad_norm": 2.9361162185668945,
"learning_rate": 2.2774487380841115e-05,
"loss": 0.0389,
"step": 749
},
{
"epoch": 0.6989748369058714,
"grad_norm": 1.2799005508422852,
"learning_rate": 2.2645826911022655e-05,
"loss": 0.0631,
"step": 750
},
{
"epoch": 0.6999068033550793,
"grad_norm": 0.12018714100122452,
"learning_rate": 2.2517424412703252e-05,
"loss": 0.0021,
"step": 751
},
{
"epoch": 0.700838769804287,
"grad_norm": 0.8238220810890198,
"learning_rate": 2.2389281096820075e-05,
"loss": 0.0259,
"step": 752
},
{
"epoch": 0.7017707362534948,
"grad_norm": 0.07809086889028549,
"learning_rate": 2.2261398171865978e-05,
"loss": 0.0017,
"step": 753
},
{
"epoch": 0.7027027027027027,
"grad_norm": 2.065542221069336,
"learning_rate": 2.2133776843878186e-05,
"loss": 0.0705,
"step": 754
},
{
"epoch": 0.7036346691519105,
"grad_norm": 0.10505253821611404,
"learning_rate": 2.2006418316426775e-05,
"loss": 0.0023,
"step": 755
},
{
"epoch": 0.7045666356011183,
"grad_norm": 0.19952963292598724,
"learning_rate": 2.187932379060348e-05,
"loss": 0.0045,
"step": 756
},
{
"epoch": 0.7054986020503262,
"grad_norm": 0.06244141235947609,
"learning_rate": 2.1752494465010242e-05,
"loss": 0.0016,
"step": 757
},
{
"epoch": 0.706430568499534,
"grad_norm": 1.953023910522461,
"learning_rate": 2.1625931535747964e-05,
"loss": 0.0261,
"step": 758
},
{
"epoch": 0.7073625349487418,
"grad_norm": 0.7066685557365417,
"learning_rate": 2.1499636196405228e-05,
"loss": 0.0232,
"step": 759
},
{
"epoch": 0.7082945013979497,
"grad_norm": 0.7350996136665344,
"learning_rate": 2.1373609638047032e-05,
"loss": 0.0185,
"step": 760
},
{
"epoch": 0.7092264678471575,
"grad_norm": 0.9684416651725769,
"learning_rate": 2.1247853049203543e-05,
"loss": 0.0263,
"step": 761
},
{
"epoch": 0.7101584342963654,
"grad_norm": 1.195703148841858,
"learning_rate": 2.112236761585892e-05,
"loss": 0.0458,
"step": 762
},
{
"epoch": 0.7110904007455732,
"grad_norm": 1.525630235671997,
"learning_rate": 2.09971545214401e-05,
"loss": 0.0274,
"step": 763
},
{
"epoch": 0.712022367194781,
"grad_norm": 0.7897143363952637,
"learning_rate": 2.087221494680563e-05,
"loss": 0.0145,
"step": 764
},
{
"epoch": 0.7129543336439889,
"grad_norm": 0.4849226772785187,
"learning_rate": 2.074755007023461e-05,
"loss": 0.0095,
"step": 765
},
{
"epoch": 0.7138863000931966,
"grad_norm": 2.5897316932678223,
"learning_rate": 2.0623161067415463e-05,
"loss": 0.0353,
"step": 766
},
{
"epoch": 0.7148182665424044,
"grad_norm": 0.49661892652511597,
"learning_rate": 2.049904911143492e-05,
"loss": 0.014,
"step": 767
},
{
"epoch": 0.7157502329916123,
"grad_norm": 0.16137608885765076,
"learning_rate": 2.0375215372766944e-05,
"loss": 0.0023,
"step": 768
},
{
"epoch": 0.7166821994408201,
"grad_norm": 1.290913462638855,
"learning_rate": 2.025166101926168e-05,
"loss": 0.034,
"step": 769
},
{
"epoch": 0.7176141658900279,
"grad_norm": 0.3757019639015198,
"learning_rate": 2.012838721613447e-05,
"loss": 0.0048,
"step": 770
},
{
"epoch": 0.7185461323392358,
"grad_norm": 0.8790585398674011,
"learning_rate": 2.0005395125954812e-05,
"loss": 0.0211,
"step": 771
},
{
"epoch": 0.7194780987884436,
"grad_norm": 0.06951785832643509,
"learning_rate": 1.9882685908635462e-05,
"loss": 0.001,
"step": 772
},
{
"epoch": 0.7204100652376515,
"grad_norm": 0.6821100115776062,
"learning_rate": 1.9760260721421426e-05,
"loss": 0.0127,
"step": 773
},
{
"epoch": 0.7213420316868593,
"grad_norm": 1.6195906400680542,
"learning_rate": 1.963812071887913e-05,
"loss": 0.0168,
"step": 774
},
{
"epoch": 0.7222739981360671,
"grad_norm": 6.112545013427734,
"learning_rate": 1.951626705288544e-05,
"loss": 0.1486,
"step": 775
},
{
"epoch": 0.723205964585275,
"grad_norm": 0.20122942328453064,
"learning_rate": 1.9394700872616855e-05,
"loss": 0.0041,
"step": 776
},
{
"epoch": 0.7241379310344828,
"grad_norm": 0.0628390684723854,
"learning_rate": 1.9273423324538658e-05,
"loss": 0.0012,
"step": 777
},
{
"epoch": 0.7250698974836906,
"grad_norm": 0.9709482192993164,
"learning_rate": 1.9152435552394104e-05,
"loss": 0.0271,
"step": 778
},
{
"epoch": 0.7260018639328985,
"grad_norm": 1.464906096458435,
"learning_rate": 1.903173869719362e-05,
"loss": 0.0446,
"step": 779
},
{
"epoch": 0.7269338303821062,
"grad_norm": 4.5541791915893555,
"learning_rate": 1.891133389720407e-05,
"loss": 0.0495,
"step": 780
},
{
"epoch": 0.727865796831314,
"grad_norm": 0.476789265871048,
"learning_rate": 1.8791222287937982e-05,
"loss": 0.0031,
"step": 781
},
{
"epoch": 0.7287977632805219,
"grad_norm": 0.5186454653739929,
"learning_rate": 1.8671405002142918e-05,
"loss": 0.0167,
"step": 782
},
{
"epoch": 0.7297297297297297,
"grad_norm": 2.046664237976074,
"learning_rate": 1.855188316979068e-05,
"loss": 0.0679,
"step": 783
},
{
"epoch": 0.7306616961789375,
"grad_norm": 0.5090547204017639,
"learning_rate": 1.8432657918066732e-05,
"loss": 0.0127,
"step": 784
},
{
"epoch": 0.7315936626281454,
"grad_norm": 0.25921696424484253,
"learning_rate": 1.831373037135955e-05,
"loss": 0.0038,
"step": 785
},
{
"epoch": 0.7325256290773532,
"grad_norm": 1.4198919534683228,
"learning_rate": 1.8195101651250017e-05,
"loss": 0.0456,
"step": 786
},
{
"epoch": 0.7334575955265611,
"grad_norm": 0.12709754705429077,
"learning_rate": 1.807677287650083e-05,
"loss": 0.0022,
"step": 787
},
{
"epoch": 0.7343895619757689,
"grad_norm": 0.19620268046855927,
"learning_rate": 1.7958745163045986e-05,
"loss": 0.0049,
"step": 788
},
{
"epoch": 0.7353215284249767,
"grad_norm": 1.1757110357284546,
"learning_rate": 1.7841019623980217e-05,
"loss": 0.061,
"step": 789
},
{
"epoch": 0.7362534948741846,
"grad_norm": 0.24634391069412231,
"learning_rate": 1.77235973695485e-05,
"loss": 0.0045,
"step": 790
},
{
"epoch": 0.7371854613233924,
"grad_norm": 1.143908143043518,
"learning_rate": 1.760647950713566e-05,
"loss": 0.0276,
"step": 791
},
{
"epoch": 0.7381174277726001,
"grad_norm": 0.6017893552780151,
"learning_rate": 1.74896671412558e-05,
"loss": 0.0078,
"step": 792
},
{
"epoch": 0.739049394221808,
"grad_norm": 0.18127556145191193,
"learning_rate": 1.737316137354197e-05,
"loss": 0.0029,
"step": 793
},
{
"epoch": 0.7399813606710158,
"grad_norm": 1.0178278684616089,
"learning_rate": 1.725696330273575e-05,
"loss": 0.0244,
"step": 794
},
{
"epoch": 0.7409133271202236,
"grad_norm": 0.9616022706031799,
"learning_rate": 1.7141074024676912e-05,
"loss": 0.0233,
"step": 795
},
{
"epoch": 0.7418452935694315,
"grad_norm": 1.721096396446228,
"learning_rate": 1.7025494632293048e-05,
"loss": 0.055,
"step": 796
},
{
"epoch": 0.7427772600186393,
"grad_norm": 1.3252190351486206,
"learning_rate": 1.6910226215589303e-05,
"loss": 0.024,
"step": 797
},
{
"epoch": 0.7437092264678472,
"grad_norm": 0.27513352036476135,
"learning_rate": 1.6795269861638042e-05,
"loss": 0.0041,
"step": 798
},
{
"epoch": 0.744641192917055,
"grad_norm": 0.22185392677783966,
"learning_rate": 1.6680626654568686e-05,
"loss": 0.0031,
"step": 799
},
{
"epoch": 0.7455731593662628,
"grad_norm": 4.715059757232666,
"learning_rate": 1.656629767555739e-05,
"loss": 0.1378,
"step": 800
},
{
"epoch": 0.7465051258154707,
"grad_norm": 0.09731733053922653,
"learning_rate": 1.645228400281689e-05,
"loss": 0.0021,
"step": 801
},
{
"epoch": 0.7474370922646785,
"grad_norm": 0.505806565284729,
"learning_rate": 1.6338586711586356e-05,
"loss": 0.0118,
"step": 802
},
{
"epoch": 0.7483690587138863,
"grad_norm": 0.15126951038837433,
"learning_rate": 1.6225206874121218e-05,
"loss": 0.0027,
"step": 803
},
{
"epoch": 0.7493010251630942,
"grad_norm": 1.0030357837677002,
"learning_rate": 1.6112145559683055e-05,
"loss": 0.0262,
"step": 804
},
{
"epoch": 0.750232991612302,
"grad_norm": 2.0372867584228516,
"learning_rate": 1.5999403834529548e-05,
"loss": 0.0985,
"step": 805
},
{
"epoch": 0.7511649580615097,
"grad_norm": 0.2379598617553711,
"learning_rate": 1.5886982761904377e-05,
"loss": 0.0056,
"step": 806
},
{
"epoch": 0.7520969245107176,
"grad_norm": 0.8561238050460815,
"learning_rate": 1.5774883402027208e-05,
"loss": 0.0183,
"step": 807
},
{
"epoch": 0.7520969245107176,
"eval_loss": 0.029690111055970192,
"eval_runtime": 127.9702,
"eval_samples_per_second": 14.12,
"eval_steps_per_second": 3.532,
"step": 807
},
{
"epoch": 0.7530288909599254,
"grad_norm": 0.1286248415708542,
"learning_rate": 1.5663106812083743e-05,
"loss": 0.0035,
"step": 808
},
{
"epoch": 0.7539608574091333,
"grad_norm": 0.29819682240486145,
"learning_rate": 1.555165404621567e-05,
"loss": 0.0043,
"step": 809
},
{
"epoch": 0.7548928238583411,
"grad_norm": 1.4682197570800781,
"learning_rate": 1.5440526155510763e-05,
"loss": 0.0349,
"step": 810
},
{
"epoch": 0.7558247903075489,
"grad_norm": 2.5306453704833984,
"learning_rate": 1.532972418799298e-05,
"loss": 0.1089,
"step": 811
},
{
"epoch": 0.7567567567567568,
"grad_norm": 0.7194310426712036,
"learning_rate": 1.5219249188612556e-05,
"loss": 0.0092,
"step": 812
},
{
"epoch": 0.7576887232059646,
"grad_norm": 0.7676751613616943,
"learning_rate": 1.5109102199236152e-05,
"loss": 0.0195,
"step": 813
},
{
"epoch": 0.7586206896551724,
"grad_norm": 0.3930986225605011,
"learning_rate": 1.4999284258637053e-05,
"loss": 0.0055,
"step": 814
},
{
"epoch": 0.7595526561043803,
"grad_norm": 1.3155858516693115,
"learning_rate": 1.488979640248534e-05,
"loss": 0.0408,
"step": 815
},
{
"epoch": 0.7604846225535881,
"grad_norm": 0.8961852788925171,
"learning_rate": 1.4780639663338125e-05,
"loss": 0.014,
"step": 816
},
{
"epoch": 0.7614165890027959,
"grad_norm": 0.16851025819778442,
"learning_rate": 1.4671815070629868e-05,
"loss": 0.0031,
"step": 817
},
{
"epoch": 0.7623485554520038,
"grad_norm": 0.2377876490354538,
"learning_rate": 1.4563323650662586e-05,
"loss": 0.0036,
"step": 818
},
{
"epoch": 0.7632805219012115,
"grad_norm": 0.26792046427726746,
"learning_rate": 1.4455166426596223e-05,
"loss": 0.0053,
"step": 819
},
{
"epoch": 0.7642124883504194,
"grad_norm": 0.1533597856760025,
"learning_rate": 1.434734441843899e-05,
"loss": 0.0032,
"step": 820
},
{
"epoch": 0.7651444547996272,
"grad_norm": 0.13230851292610168,
"learning_rate": 1.4239858643037751e-05,
"loss": 0.0025,
"step": 821
},
{
"epoch": 0.766076421248835,
"grad_norm": 0.42541301250457764,
"learning_rate": 1.4132710114068426e-05,
"loss": 0.0078,
"step": 822
},
{
"epoch": 0.7670083876980429,
"grad_norm": 1.640199899673462,
"learning_rate": 1.4025899842026442e-05,
"loss": 0.0358,
"step": 823
},
{
"epoch": 0.7679403541472507,
"grad_norm": 3.3744680881500244,
"learning_rate": 1.3919428834217163e-05,
"loss": 0.066,
"step": 824
},
{
"epoch": 0.7688723205964585,
"grad_norm": 4.397242069244385,
"learning_rate": 1.3813298094746491e-05,
"loss": 0.1446,
"step": 825
},
{
"epoch": 0.7698042870456664,
"grad_norm": 1.2552555799484253,
"learning_rate": 1.3707508624511262e-05,
"loss": 0.0247,
"step": 826
},
{
"epoch": 0.7707362534948742,
"grad_norm": 0.9916906356811523,
"learning_rate": 1.36020614211899e-05,
"loss": 0.033,
"step": 827
},
{
"epoch": 0.771668219944082,
"grad_norm": 1.0547466278076172,
"learning_rate": 1.349695747923298e-05,
"loss": 0.0706,
"step": 828
},
{
"epoch": 0.7726001863932899,
"grad_norm": 0.8943658471107483,
"learning_rate": 1.339219778985385e-05,
"loss": 0.0239,
"step": 829
},
{
"epoch": 0.7735321528424977,
"grad_norm": 1.5261614322662354,
"learning_rate": 1.3287783341019278e-05,
"loss": 0.0195,
"step": 830
},
{
"epoch": 0.7744641192917054,
"grad_norm": 0.11581642925739288,
"learning_rate": 1.3183715117440142e-05,
"loss": 0.0019,
"step": 831
},
{
"epoch": 0.7753960857409133,
"grad_norm": 0.190779909491539,
"learning_rate": 1.3079994100562159e-05,
"loss": 0.0051,
"step": 832
},
{
"epoch": 0.7763280521901211,
"grad_norm": 0.7171970009803772,
"learning_rate": 1.2976621268556571e-05,
"loss": 0.0258,
"step": 833
},
{
"epoch": 0.777260018639329,
"grad_norm": 1.5177792310714722,
"learning_rate": 1.2873597596311027e-05,
"loss": 0.0589,
"step": 834
},
{
"epoch": 0.7781919850885368,
"grad_norm": 0.1316295564174652,
"learning_rate": 1.2770924055420258e-05,
"loss": 0.0026,
"step": 835
},
{
"epoch": 0.7791239515377446,
"grad_norm": 0.5268474221229553,
"learning_rate": 1.2668601614177017e-05,
"loss": 0.0101,
"step": 836
},
{
"epoch": 0.7800559179869525,
"grad_norm": 0.8330385684967041,
"learning_rate": 1.2566631237562893e-05,
"loss": 0.0351,
"step": 837
},
{
"epoch": 0.7809878844361603,
"grad_norm": 0.4103114902973175,
"learning_rate": 1.246501388723923e-05,
"loss": 0.009,
"step": 838
},
{
"epoch": 0.7819198508853681,
"grad_norm": 0.7705252766609192,
"learning_rate": 1.2363750521538064e-05,
"loss": 0.0211,
"step": 839
},
{
"epoch": 0.782851817334576,
"grad_norm": 1.3898861408233643,
"learning_rate": 1.2262842095453065e-05,
"loss": 0.0678,
"step": 840
},
{
"epoch": 0.7837837837837838,
"grad_norm": 2.382798910140991,
"learning_rate": 1.2162289560630524e-05,
"loss": 0.0201,
"step": 841
},
{
"epoch": 0.7847157502329916,
"grad_norm": 0.8590038418769836,
"learning_rate": 1.2062093865360458e-05,
"loss": 0.0196,
"step": 842
},
{
"epoch": 0.7856477166821995,
"grad_norm": 0.4806217849254608,
"learning_rate": 1.1962255954567537e-05,
"loss": 0.0081,
"step": 843
},
{
"epoch": 0.7865796831314072,
"grad_norm": 0.6536293625831604,
"learning_rate": 1.1862776769802276e-05,
"loss": 0.01,
"step": 844
},
{
"epoch": 0.7875116495806151,
"grad_norm": 1.8112365007400513,
"learning_rate": 1.1763657249232107e-05,
"loss": 0.0299,
"step": 845
},
{
"epoch": 0.7884436160298229,
"grad_norm": 0.0851447582244873,
"learning_rate": 1.1664898327632551e-05,
"loss": 0.0015,
"step": 846
},
{
"epoch": 0.7893755824790307,
"grad_norm": 3.3141989707946777,
"learning_rate": 1.1566500936378388e-05,
"loss": 0.0935,
"step": 847
},
{
"epoch": 0.7903075489282386,
"grad_norm": 0.655958354473114,
"learning_rate": 1.146846600343488e-05,
"loss": 0.0092,
"step": 848
},
{
"epoch": 0.7912395153774464,
"grad_norm": 2.1537182331085205,
"learning_rate": 1.1370794453349037e-05,
"loss": 0.0292,
"step": 849
},
{
"epoch": 0.7921714818266542,
"grad_norm": 1.5979948043823242,
"learning_rate": 1.1273487207240847e-05,
"loss": 0.0682,
"step": 850
},
{
"epoch": 0.7931034482758621,
"grad_norm": 0.32921919226646423,
"learning_rate": 1.1176545182794674e-05,
"loss": 0.0051,
"step": 851
},
{
"epoch": 0.7940354147250699,
"grad_norm": 0.3104652762413025,
"learning_rate": 1.1079969294250514e-05,
"loss": 0.0053,
"step": 852
},
{
"epoch": 0.7949673811742777,
"grad_norm": 0.23145949840545654,
"learning_rate": 1.0983760452395413e-05,
"loss": 0.0033,
"step": 853
},
{
"epoch": 0.7958993476234856,
"grad_norm": 0.44747766852378845,
"learning_rate": 1.0887919564554894e-05,
"loss": 0.0089,
"step": 854
},
{
"epoch": 0.7968313140726934,
"grad_norm": 2.918243169784546,
"learning_rate": 1.079244753458437e-05,
"loss": 0.0767,
"step": 855
},
{
"epoch": 0.7977632805219013,
"grad_norm": 0.6843606233596802,
"learning_rate": 1.0697345262860636e-05,
"loss": 0.0144,
"step": 856
},
{
"epoch": 0.798695246971109,
"grad_norm": 0.1016877144575119,
"learning_rate": 1.0602613646273374e-05,
"loss": 0.0033,
"step": 857
},
{
"epoch": 0.7996272134203168,
"grad_norm": 0.7905264496803284,
"learning_rate": 1.0508253578216693e-05,
"loss": 0.0268,
"step": 858
},
{
"epoch": 0.8005591798695247,
"grad_norm": 1.3822872638702393,
"learning_rate": 1.0414265948580692e-05,
"loss": 0.0685,
"step": 859
},
{
"epoch": 0.8014911463187325,
"grad_norm": 0.5625484585762024,
"learning_rate": 1.032065164374313e-05,
"loss": 0.0123,
"step": 860
},
{
"epoch": 0.8024231127679403,
"grad_norm": 0.05632966756820679,
"learning_rate": 1.0227411546560962e-05,
"loss": 0.001,
"step": 861
},
{
"epoch": 0.8033550792171482,
"grad_norm": 0.7446116805076599,
"learning_rate": 1.01345465363621e-05,
"loss": 0.0202,
"step": 862
},
{
"epoch": 0.804287045666356,
"grad_norm": 0.09283380210399628,
"learning_rate": 1.0042057488937067e-05,
"loss": 0.0016,
"step": 863
},
{
"epoch": 0.8052190121155638,
"grad_norm": 0.27069157361984253,
"learning_rate": 9.949945276530781e-06,
"loss": 0.0053,
"step": 864
},
{
"epoch": 0.8061509785647717,
"grad_norm": 0.8382067680358887,
"learning_rate": 9.858210767834292e-06,
"loss": 0.0226,
"step": 865
},
{
"epoch": 0.8070829450139795,
"grad_norm": 1.0016965866088867,
"learning_rate": 9.766854827976617e-06,
"loss": 0.041,
"step": 866
},
{
"epoch": 0.8080149114631874,
"grad_norm": 1.245935320854187,
"learning_rate": 9.675878318516546e-06,
"loss": 0.0404,
"step": 867
},
{
"epoch": 0.8089468779123952,
"grad_norm": 0.5361421704292297,
"learning_rate": 9.58528209743459e-06,
"loss": 0.0076,
"step": 868
},
{
"epoch": 0.809878844361603,
"grad_norm": 0.09631752967834473,
"learning_rate": 9.495067019124792e-06,
"loss": 0.0028,
"step": 869
},
{
"epoch": 0.8108108108108109,
"grad_norm": 0.6793972253799438,
"learning_rate": 9.405233934386726e-06,
"loss": 0.0057,
"step": 870
},
{
"epoch": 0.8117427772600186,
"grad_norm": 3.4450275897979736,
"learning_rate": 9.315783690417479e-06,
"loss": 0.053,
"step": 871
},
{
"epoch": 0.8126747437092264,
"grad_norm": 1.280646800994873,
"learning_rate": 9.226717130803636e-06,
"loss": 0.042,
"step": 872
},
{
"epoch": 0.8136067101584343,
"grad_norm": 1.587612509727478,
"learning_rate": 9.138035095513336e-06,
"loss": 0.0671,
"step": 873
},
{
"epoch": 0.8145386766076421,
"grad_norm": 1.265060305595398,
"learning_rate": 9.049738420888349e-06,
"loss": 0.014,
"step": 874
},
{
"epoch": 0.8154706430568499,
"grad_norm": 1.8084266185760498,
"learning_rate": 8.961827939636196e-06,
"loss": 0.0891,
"step": 875
},
{
"epoch": 0.8164026095060578,
"grad_norm": 0.39547011256217957,
"learning_rate": 8.87430448082227e-06,
"loss": 0.0022,
"step": 876
},
{
"epoch": 0.8173345759552656,
"grad_norm": 0.3031284213066101,
"learning_rate": 8.787168869862066e-06,
"loss": 0.0066,
"step": 877
},
{
"epoch": 0.8182665424044734,
"grad_norm": 0.15621918439865112,
"learning_rate": 8.700421928513352e-06,
"loss": 0.0032,
"step": 878
},
{
"epoch": 0.8191985088536813,
"grad_norm": 0.22168925404548645,
"learning_rate": 8.614064474868421e-06,
"loss": 0.0042,
"step": 879
},
{
"epoch": 0.8201304753028891,
"grad_norm": 1.5389249324798584,
"learning_rate": 8.528097323346407e-06,
"loss": 0.0427,
"step": 880
},
{
"epoch": 0.821062441752097,
"grad_norm": 1.5170341730117798,
"learning_rate": 8.442521284685573e-06,
"loss": 0.0335,
"step": 881
},
{
"epoch": 0.8219944082013048,
"grad_norm": 0.08587615191936493,
"learning_rate": 8.357337165935675e-06,
"loss": 0.0023,
"step": 882
},
{
"epoch": 0.8229263746505125,
"grad_norm": 0.17892524600028992,
"learning_rate": 8.27254577045039e-06,
"loss": 0.0027,
"step": 883
},
{
"epoch": 0.8238583410997204,
"grad_norm": 0.17656829953193665,
"learning_rate": 8.188147897879667e-06,
"loss": 0.0046,
"step": 884
},
{
"epoch": 0.8247903075489282,
"grad_norm": 0.5518335700035095,
"learning_rate": 8.104144344162229e-06,
"loss": 0.0124,
"step": 885
},
{
"epoch": 0.825722273998136,
"grad_norm": 0.30466359853744507,
"learning_rate": 8.02053590151805e-06,
"loss": 0.0077,
"step": 886
},
{
"epoch": 0.8266542404473439,
"grad_norm": 0.7674888372421265,
"learning_rate": 7.937323358440935e-06,
"loss": 0.0248,
"step": 887
},
{
"epoch": 0.8275862068965517,
"grad_norm": 0.055480506271123886,
"learning_rate": 7.854507499691005e-06,
"loss": 0.0011,
"step": 888
},
{
"epoch": 0.8285181733457595,
"grad_norm": 0.22888953983783722,
"learning_rate": 7.772089106287344e-06,
"loss": 0.0029,
"step": 889
},
{
"epoch": 0.8294501397949674,
"grad_norm": 0.0735834464430809,
"learning_rate": 7.690068955500624e-06,
"loss": 0.0015,
"step": 890
},
{
"epoch": 0.8303821062441752,
"grad_norm": 1.0018421411514282,
"learning_rate": 7.608447820845771e-06,
"loss": 0.0309,
"step": 891
},
{
"epoch": 0.8313140726933831,
"grad_norm": 1.2352806329727173,
"learning_rate": 7.527226472074678e-06,
"loss": 0.0362,
"step": 892
},
{
"epoch": 0.8322460391425909,
"grad_norm": 0.9229680299758911,
"learning_rate": 7.446405675168938e-06,
"loss": 0.0236,
"step": 893
},
{
"epoch": 0.8331780055917987,
"grad_norm": 0.12361752986907959,
"learning_rate": 7.365986192332625e-06,
"loss": 0.0015,
"step": 894
},
{
"epoch": 0.8341099720410066,
"grad_norm": 0.18358244001865387,
"learning_rate": 7.285968781985092e-06,
"loss": 0.0041,
"step": 895
},
{
"epoch": 0.8350419384902144,
"grad_norm": 1.7125695943832397,
"learning_rate": 7.206354198753862e-06,
"loss": 0.0712,
"step": 896
},
{
"epoch": 0.8359739049394221,
"grad_norm": 2.4467711448669434,
"learning_rate": 7.127143193467445e-06,
"loss": 0.0988,
"step": 897
},
{
"epoch": 0.83690587138863,
"grad_norm": 2.2507357597351074,
"learning_rate": 7.048336513148307e-06,
"loss": 0.0513,
"step": 898
},
{
"epoch": 0.8378378378378378,
"grad_norm": 2.246809720993042,
"learning_rate": 6.969934901005809e-06,
"loss": 0.0265,
"step": 899
},
{
"epoch": 0.8387698042870456,
"grad_norm": 1.8378955125808716,
"learning_rate": 6.8919390964291895e-06,
"loss": 0.1061,
"step": 900
},
{
"epoch": 0.8397017707362535,
"grad_norm": 0.47382405400276184,
"learning_rate": 6.814349834980621e-06,
"loss": 0.011,
"step": 901
},
{
"epoch": 0.8406337371854613,
"grad_norm": 1.1276845932006836,
"learning_rate": 6.7371678483882264e-06,
"loss": 0.025,
"step": 902
},
{
"epoch": 0.8415657036346692,
"grad_norm": 0.5048341155052185,
"learning_rate": 6.660393864539222e-06,
"loss": 0.0053,
"step": 903
},
{
"epoch": 0.842497670083877,
"grad_norm": 0.9290698766708374,
"learning_rate": 6.58402860747302e-06,
"loss": 0.0501,
"step": 904
},
{
"epoch": 0.8434296365330848,
"grad_norm": 1.0195822715759277,
"learning_rate": 6.508072797374454e-06,
"loss": 0.0182,
"step": 905
},
{
"epoch": 0.8443616029822927,
"grad_norm": 0.4818008244037628,
"learning_rate": 6.4325271505669025e-06,
"loss": 0.0074,
"step": 906
},
{
"epoch": 0.8452935694315005,
"grad_norm": 0.051283132284879684,
"learning_rate": 6.3573923795056e-06,
"loss": 0.0011,
"step": 907
},
{
"epoch": 0.8462255358807083,
"grad_norm": 1.3800734281539917,
"learning_rate": 6.282669192770896e-06,
"loss": 0.0442,
"step": 908
},
{
"epoch": 0.8471575023299162,
"grad_norm": 1.0327776670455933,
"learning_rate": 6.208358295061572e-06,
"loss": 0.037,
"step": 909
},
{
"epoch": 0.848089468779124,
"grad_norm": 0.1635047346353531,
"learning_rate": 6.134460387188206e-06,
"loss": 0.003,
"step": 910
},
{
"epoch": 0.8490214352283317,
"grad_norm": 0.17982259392738342,
"learning_rate": 6.060976166066546e-06,
"loss": 0.0032,
"step": 911
},
{
"epoch": 0.8499534016775396,
"grad_norm": 0.2328854650259018,
"learning_rate": 5.9879063247109405e-06,
"loss": 0.0049,
"step": 912
},
{
"epoch": 0.8508853681267474,
"grad_norm": 1.2415688037872314,
"learning_rate": 5.915251552227829e-06,
"loss": 0.0307,
"step": 913
},
{
"epoch": 0.8518173345759553,
"grad_norm": 0.3968764841556549,
"learning_rate": 5.8430125338092115e-06,
"loss": 0.0087,
"step": 914
},
{
"epoch": 0.8527493010251631,
"grad_norm": 1.2752315998077393,
"learning_rate": 5.771189950726191e-06,
"loss": 0.0312,
"step": 915
},
{
"epoch": 0.8536812674743709,
"grad_norm": 0.7192822098731995,
"learning_rate": 5.699784480322568e-06,
"loss": 0.0108,
"step": 916
},
{
"epoch": 0.8546132339235788,
"grad_norm": 0.34972473978996277,
"learning_rate": 5.628796796008434e-06,
"loss": 0.0029,
"step": 917
},
{
"epoch": 0.8555452003727866,
"grad_norm": 0.3486124277114868,
"learning_rate": 5.558227567253832e-06,
"loss": 0.0051,
"step": 918
},
{
"epoch": 0.8564771668219944,
"grad_norm": 0.3966856300830841,
"learning_rate": 5.488077459582425e-06,
"loss": 0.0074,
"step": 919
},
{
"epoch": 0.8574091332712023,
"grad_norm": 0.9853683114051819,
"learning_rate": 5.418347134565249e-06,
"loss": 0.0151,
"step": 920
},
{
"epoch": 0.8583410997204101,
"grad_norm": 0.6727381348609924,
"learning_rate": 5.349037249814442e-06,
"loss": 0.0114,
"step": 921
},
{
"epoch": 0.8592730661696178,
"grad_norm": 1.942173719406128,
"learning_rate": 5.28014845897708e-06,
"loss": 0.0432,
"step": 922
},
{
"epoch": 0.8602050326188257,
"grad_norm": 0.726831316947937,
"learning_rate": 5.211681411728969e-06,
"loss": 0.0217,
"step": 923
},
{
"epoch": 0.8611369990680335,
"grad_norm": 1.3569886684417725,
"learning_rate": 5.143636753768549e-06,
"loss": 0.0124,
"step": 924
},
{
"epoch": 0.8620689655172413,
"grad_norm": 1.4848052263259888,
"learning_rate": 5.076015126810784e-06,
"loss": 0.0913,
"step": 925
},
{
"epoch": 0.8630009319664492,
"grad_norm": 0.08492793142795563,
"learning_rate": 5.008817168581137e-06,
"loss": 0.0021,
"step": 926
},
{
"epoch": 0.863932898415657,
"grad_norm": 0.3844028115272522,
"learning_rate": 4.94204351280953e-06,
"loss": 0.0082,
"step": 927
},
{
"epoch": 0.8648648648648649,
"grad_norm": 0.3236229717731476,
"learning_rate": 4.8756947892243725e-06,
"loss": 0.0026,
"step": 928
},
{
"epoch": 0.8657968313140727,
"grad_norm": 0.13389159739017487,
"learning_rate": 4.809771623546627e-06,
"loss": 0.0024,
"step": 929
},
{
"epoch": 0.8667287977632805,
"grad_norm": 0.4836329519748688,
"learning_rate": 4.744274637483936e-06,
"loss": 0.0089,
"step": 930
},
{
"epoch": 0.8676607642124884,
"grad_norm": 0.7628198862075806,
"learning_rate": 4.6792044487247e-06,
"loss": 0.0091,
"step": 931
},
{
"epoch": 0.8685927306616962,
"grad_norm": 0.9907669425010681,
"learning_rate": 4.614561670932288e-06,
"loss": 0.0322,
"step": 932
},
{
"epoch": 0.869524697110904,
"grad_norm": 0.8017989993095398,
"learning_rate": 4.550346913739256e-06,
"loss": 0.0208,
"step": 933
},
{
"epoch": 0.8704566635601119,
"grad_norm": 0.680810272693634,
"learning_rate": 4.486560782741578e-06,
"loss": 0.0122,
"step": 934
},
{
"epoch": 0.8713886300093197,
"grad_norm": 2.3900279998779297,
"learning_rate": 4.423203879492943e-06,
"loss": 0.0444,
"step": 935
},
{
"epoch": 0.8723205964585274,
"grad_norm": 0.30623510479927063,
"learning_rate": 4.360276801499086e-06,
"loss": 0.006,
"step": 936
},
{
"epoch": 0.8732525629077353,
"grad_norm": 0.19743725657463074,
"learning_rate": 4.29778014221216e-06,
"loss": 0.004,
"step": 937
},
{
"epoch": 0.8741845293569431,
"grad_norm": 0.06322675198316574,
"learning_rate": 4.2357144910251e-06,
"loss": 0.0012,
"step": 938
},
{
"epoch": 0.875116495806151,
"grad_norm": 0.14598406851291656,
"learning_rate": 4.174080433266136e-06,
"loss": 0.0035,
"step": 939
},
{
"epoch": 0.8760484622553588,
"grad_norm": 0.20462970435619354,
"learning_rate": 4.112878550193194e-06,
"loss": 0.0028,
"step": 940
},
{
"epoch": 0.8769804287045666,
"grad_norm": 1.6550384759902954,
"learning_rate": 4.05210941898847e-06,
"loss": 0.0479,
"step": 941
},
{
"epoch": 0.8779123951537745,
"grad_norm": 0.5954486131668091,
"learning_rate": 3.991773612752952e-06,
"loss": 0.0106,
"step": 942
},
{
"epoch": 0.8788443616029823,
"grad_norm": 0.2953099310398102,
"learning_rate": 3.9318717005010495e-06,
"loss": 0.0049,
"step": 943
},
{
"epoch": 0.8797763280521901,
"grad_norm": 0.3062129020690918,
"learning_rate": 3.872404247155193e-06,
"loss": 0.0024,
"step": 944
},
{
"epoch": 0.880708294501398,
"grad_norm": 0.44278305768966675,
"learning_rate": 3.8133718135405285e-06,
"loss": 0.0084,
"step": 945
},
{
"epoch": 0.8816402609506058,
"grad_norm": 0.15503761172294617,
"learning_rate": 3.754774956379614e-06,
"loss": 0.0025,
"step": 946
},
{
"epoch": 0.8825722273998136,
"grad_norm": 0.15528859198093414,
"learning_rate": 3.696614228287187e-06,
"loss": 0.0022,
"step": 947
},
{
"epoch": 0.8835041938490215,
"grad_norm": 0.3577142655849457,
"learning_rate": 3.6388901777649477e-06,
"loss": 0.006,
"step": 948
},
{
"epoch": 0.8844361602982292,
"grad_norm": 1.3068500757217407,
"learning_rate": 3.581603349196372e-06,
"loss": 0.0105,
"step": 949
},
{
"epoch": 0.8853681267474371,
"grad_norm": 1.0203229188919067,
"learning_rate": 3.5247542828415747e-06,
"loss": 0.0584,
"step": 950
},
{
"epoch": 0.8863000931966449,
"grad_norm": 1.07167387008667,
"learning_rate": 3.468343514832251e-06,
"loss": 0.0127,
"step": 951
},
{
"epoch": 0.8872320596458527,
"grad_norm": 0.17283280193805695,
"learning_rate": 3.4123715771665786e-06,
"loss": 0.0023,
"step": 952
},
{
"epoch": 0.8881640260950606,
"grad_norm": 0.06770434975624084,
"learning_rate": 3.356838997704226e-06,
"loss": 0.0018,
"step": 953
},
{
"epoch": 0.8890959925442684,
"grad_norm": 0.20179446041584015,
"learning_rate": 3.3017463001613626e-06,
"loss": 0.0036,
"step": 954
},
{
"epoch": 0.8900279589934762,
"grad_norm": 1.307664394378662,
"learning_rate": 3.2470940041057106e-06,
"loss": 0.0415,
"step": 955
},
{
"epoch": 0.8909599254426841,
"grad_norm": 0.3763882517814636,
"learning_rate": 3.1928826249516987e-06,
"loss": 0.008,
"step": 956
},
{
"epoch": 0.8918918918918919,
"grad_norm": 0.4271828532218933,
"learning_rate": 3.139112673955513e-06,
"loss": 0.0088,
"step": 957
},
{
"epoch": 0.8928238583410997,
"grad_norm": 0.5596781969070435,
"learning_rate": 3.08578465821035e-06,
"loss": 0.0064,
"step": 958
},
{
"epoch": 0.8937558247903076,
"grad_norm": 0.9955370426177979,
"learning_rate": 3.0328990806415934e-06,
"loss": 0.0293,
"step": 959
},
{
"epoch": 0.8946877912395154,
"grad_norm": 0.6744177341461182,
"learning_rate": 2.9804564400020994e-06,
"loss": 0.0115,
"step": 960
},
{
"epoch": 0.8956197576887233,
"grad_norm": 1.0342707633972168,
"learning_rate": 2.9284572308674596e-06,
"loss": 0.0688,
"step": 961
},
{
"epoch": 0.896551724137931,
"grad_norm": 0.36630138754844666,
"learning_rate": 2.8769019436313715e-06,
"loss": 0.0069,
"step": 962
},
{
"epoch": 0.8974836905871388,
"grad_norm": 0.9983659982681274,
"learning_rate": 2.825791064500993e-06,
"loss": 0.032,
"step": 963
},
{
"epoch": 0.8984156570363467,
"grad_norm": 0.27666184306144714,
"learning_rate": 2.7751250754923573e-06,
"loss": 0.0053,
"step": 964
},
{
"epoch": 0.8993476234855545,
"grad_norm": 0.03350253403186798,
"learning_rate": 2.7249044544258363e-06,
"loss": 0.0009,
"step": 965
},
{
"epoch": 0.9002795899347623,
"grad_norm": 0.501646876335144,
"learning_rate": 2.675129674921639e-06,
"loss": 0.0064,
"step": 966
},
{
"epoch": 0.9012115563839702,
"grad_norm": 0.07862615585327148,
"learning_rate": 2.625801206395312e-06,
"loss": 0.0015,
"step": 967
},
{
"epoch": 0.902143522833178,
"grad_norm": 0.849259078502655,
"learning_rate": 2.576919514053355e-06,
"loss": 0.0202,
"step": 968
},
{
"epoch": 0.9030754892823858,
"grad_norm": 0.0796375498175621,
"learning_rate": 2.528485058888813e-06,
"loss": 0.0017,
"step": 969
},
{
"epoch": 0.9040074557315937,
"grad_norm": 1.963661789894104,
"learning_rate": 2.48049829767692e-06,
"loss": 0.0798,
"step": 970
},
{
"epoch": 0.9049394221808015,
"grad_norm": 0.7925097346305847,
"learning_rate": 2.4329596829708144e-06,
"loss": 0.0099,
"step": 971
},
{
"epoch": 0.9058713886300093,
"grad_norm": 0.11906258761882782,
"learning_rate": 2.385869663097251e-06,
"loss": 0.002,
"step": 972
},
{
"epoch": 0.9068033550792172,
"grad_norm": 1.8423750400543213,
"learning_rate": 2.3392286821523724e-06,
"loss": 0.0678,
"step": 973
},
{
"epoch": 0.907735321528425,
"grad_norm": 0.6542656421661377,
"learning_rate": 2.2930371799975594e-06,
"loss": 0.0111,
"step": 974
},
{
"epoch": 0.9086672879776329,
"grad_norm": 1.2632553577423096,
"learning_rate": 2.2472955922552163e-06,
"loss": 0.0662,
"step": 975
},
{
"epoch": 0.9095992544268406,
"grad_norm": 0.9758872985839844,
"learning_rate": 2.202004350304715e-06,
"loss": 0.0104,
"step": 976
},
{
"epoch": 0.9105312208760484,
"grad_norm": 0.615419864654541,
"learning_rate": 2.157163881278312e-06,
"loss": 0.0131,
"step": 977
},
{
"epoch": 0.9114631873252563,
"grad_norm": 0.07674716413021088,
"learning_rate": 2.112774608057111e-06,
"loss": 0.001,
"step": 978
},
{
"epoch": 0.9123951537744641,
"grad_norm": 0.2658063471317291,
"learning_rate": 2.068836949267089e-06,
"loss": 0.0017,
"step": 979
},
{
"epoch": 0.9133271202236719,
"grad_norm": 1.0758992433547974,
"learning_rate": 2.0253513192751373e-06,
"loss": 0.0118,
"step": 980
},
{
"epoch": 0.9142590866728798,
"grad_norm": 0.5867937803268433,
"learning_rate": 1.982318128185151e-06,
"loss": 0.0053,
"step": 981
},
{
"epoch": 0.9151910531220876,
"grad_norm": 0.7973155379295349,
"learning_rate": 1.9397377818341944e-06,
"loss": 0.0145,
"step": 982
},
{
"epoch": 0.9161230195712954,
"grad_norm": 0.16484799981117249,
"learning_rate": 1.8976106817886196e-06,
"loss": 0.0033,
"step": 983
},
{
"epoch": 0.9170549860205033,
"grad_norm": 0.21485017240047455,
"learning_rate": 1.855937225340315e-06,
"loss": 0.0038,
"step": 984
},
{
"epoch": 0.9179869524697111,
"grad_norm": 0.0540773943066597,
"learning_rate": 1.8147178055029579e-06,
"loss": 0.0012,
"step": 985
},
{
"epoch": 0.918918918918919,
"grad_norm": 0.10457666218280792,
"learning_rate": 1.7739528110083004e-06,
"loss": 0.0014,
"step": 986
},
{
"epoch": 0.9198508853681268,
"grad_norm": 0.07089216262102127,
"learning_rate": 1.7336426263024897e-06,
"loss": 0.0012,
"step": 987
},
{
"epoch": 0.9207828518173345,
"grad_norm": 0.041863564401865005,
"learning_rate": 1.6937876315424705e-06,
"loss": 0.0008,
"step": 988
},
{
"epoch": 0.9217148182665424,
"grad_norm": 0.1343851238489151,
"learning_rate": 1.6543882025923886e-06,
"loss": 0.0024,
"step": 989
},
{
"epoch": 0.9226467847157502,
"grad_norm": 0.04936739057302475,
"learning_rate": 1.6154447110200255e-06,
"loss": 0.0008,
"step": 990
},
{
"epoch": 0.923578751164958,
"grad_norm": 0.6000242233276367,
"learning_rate": 1.5769575240933422e-06,
"loss": 0.0095,
"step": 991
},
{
"epoch": 0.9245107176141659,
"grad_norm": 0.7469452619552612,
"learning_rate": 1.5389270047769578e-06,
"loss": 0.0163,
"step": 992
},
{
"epoch": 0.9254426840633737,
"grad_norm": 0.09905730932950974,
"learning_rate": 1.5013535117287647e-06,
"loss": 0.0014,
"step": 993
},
{
"epoch": 0.9263746505125815,
"grad_norm": 0.9133400917053223,
"learning_rate": 1.4642373992965364e-06,
"loss": 0.0117,
"step": 994
},
{
"epoch": 0.9273066169617894,
"grad_norm": 1.5601509809494019,
"learning_rate": 1.4275790175145908e-06,
"loss": 0.0671,
"step": 995
},
{
"epoch": 0.9282385834109972,
"grad_norm": 1.1153135299682617,
"learning_rate": 1.3913787121004719e-06,
"loss": 0.0184,
"step": 996
},
{
"epoch": 0.9291705498602051,
"grad_norm": 1.7211925983428955,
"learning_rate": 1.3556368244517114e-06,
"loss": 0.0227,
"step": 997
},
{
"epoch": 0.9301025163094129,
"grad_norm": 0.985527753829956,
"learning_rate": 1.3203536916425841e-06,
"loss": 0.0273,
"step": 998
},
{
"epoch": 0.9310344827586207,
"grad_norm": 1.213148832321167,
"learning_rate": 1.2855296464209688e-06,
"loss": 0.0441,
"step": 999
},
{
"epoch": 0.9319664492078286,
"grad_norm": 1.5918529033660889,
"learning_rate": 1.2511650172051637e-06,
"loss": 0.0944,
"step": 1000
},
{
"epoch": 0.9328984156570364,
"grad_norm": 0.06392987817525864,
"learning_rate": 1.217260128080816e-06,
"loss": 0.0012,
"step": 1001
},
{
"epoch": 0.9338303821062441,
"grad_norm": 1.1391026973724365,
"learning_rate": 1.1838152987978578e-06,
"loss": 0.0308,
"step": 1002
},
{
"epoch": 0.934762348555452,
"grad_norm": 0.05295366048812866,
"learning_rate": 1.1508308447674975e-06,
"loss": 0.0008,
"step": 1003
},
{
"epoch": 0.9356943150046598,
"grad_norm": 1.0572638511657715,
"learning_rate": 1.1183070770592441e-06,
"loss": 0.0262,
"step": 1004
},
{
"epoch": 0.9366262814538676,
"grad_norm": 0.37860023975372314,
"learning_rate": 1.0862443023979651e-06,
"loss": 0.0069,
"step": 1005
},
{
"epoch": 0.9375582479030755,
"grad_norm": 0.15000490844249725,
"learning_rate": 1.0546428231609895e-06,
"loss": 0.0035,
"step": 1006
},
{
"epoch": 0.9384902143522833,
"grad_norm": 0.05710078775882721,
"learning_rate": 1.0235029373752758e-06,
"loss": 0.001,
"step": 1007
},
{
"epoch": 0.9394221808014911,
"grad_norm": 0.5820727348327637,
"learning_rate": 9.928249387145982e-07,
"loss": 0.0101,
"step": 1008
},
{
"epoch": 0.940354147250699,
"grad_norm": 0.19106589257717133,
"learning_rate": 9.6260911649676e-07,
"loss": 0.0041,
"step": 1009
},
{
"epoch": 0.9412861136999068,
"grad_norm": 0.3644753396511078,
"learning_rate": 9.32855755680867e-07,
"loss": 0.0098,
"step": 1010
},
{
"epoch": 0.9422180801491147,
"grad_norm": 0.2526366114616394,
"learning_rate": 9.035651368646648e-07,
"loss": 0.0055,
"step": 1011
},
{
"epoch": 0.9431500465983225,
"grad_norm": 0.6924790143966675,
"learning_rate": 8.747375362818666e-07,
"loss": 0.0188,
"step": 1012
},
{
"epoch": 0.9440820130475303,
"grad_norm": 1.0197707414627075,
"learning_rate": 8.463732257995571e-07,
"loss": 0.0265,
"step": 1013
},
{
"epoch": 0.9450139794967382,
"grad_norm": 0.5194602608680725,
"learning_rate": 8.184724729156379e-07,
"loss": 0.0094,
"step": 1014
},
{
"epoch": 0.9459459459459459,
"grad_norm": 0.5754898190498352,
"learning_rate": 7.910355407562742e-07,
"loss": 0.0105,
"step": 1015
},
{
"epoch": 0.9468779123951537,
"grad_norm": 0.28730660676956177,
"learning_rate": 7.640626880734581e-07,
"loss": 0.0034,
"step": 1016
},
{
"epoch": 0.9478098788443616,
"grad_norm": 1.0178496837615967,
"learning_rate": 7.375541692425326e-07,
"loss": 0.0199,
"step": 1017
},
{
"epoch": 0.9487418452935694,
"grad_norm": 0.2701060473918915,
"learning_rate": 7.115102342598101e-07,
"loss": 0.003,
"step": 1018
},
{
"epoch": 0.9496738117427772,
"grad_norm": 0.5945216417312622,
"learning_rate": 6.859311287402081e-07,
"loss": 0.0105,
"step": 1019
},
{
"epoch": 0.9506057781919851,
"grad_norm": 0.7214066982269287,
"learning_rate": 6.608170939149283e-07,
"loss": 0.0132,
"step": 1020
},
{
"epoch": 0.9515377446411929,
"grad_norm": 0.11991623789072037,
"learning_rate": 6.361683666291973e-07,
"loss": 0.0031,
"step": 1021
},
{
"epoch": 0.9524697110904008,
"grad_norm": 0.24525563418865204,
"learning_rate": 6.119851793400189e-07,
"loss": 0.0026,
"step": 1022
},
{
"epoch": 0.9534016775396086,
"grad_norm": 1.5672634840011597,
"learning_rate": 5.882677601139919e-07,
"loss": 0.0275,
"step": 1023
},
{
"epoch": 0.9543336439888164,
"grad_norm": 0.9412722587585449,
"learning_rate": 5.650163326251345e-07,
"loss": 0.0178,
"step": 1024
},
{
"epoch": 0.9552656104380243,
"grad_norm": 0.39089635014533997,
"learning_rate": 5.422311161528193e-07,
"loss": 0.0224,
"step": 1025
},
{
"epoch": 0.9561975768872321,
"grad_norm": 0.03957606479525566,
"learning_rate": 5.199123255796634e-07,
"loss": 0.0009,
"step": 1026
},
{
"epoch": 0.9571295433364398,
"grad_norm": 1.1148029565811157,
"learning_rate": 4.980601713895305e-07,
"loss": 0.023,
"step": 1027
},
{
"epoch": 0.9580615097856477,
"grad_norm": 0.12970255315303802,
"learning_rate": 4.7667485966552685e-07,
"loss": 0.0021,
"step": 1028
},
{
"epoch": 0.9589934762348555,
"grad_norm": 0.9582773447036743,
"learning_rate": 4.55756592088058e-07,
"loss": 0.0134,
"step": 1029
},
{
"epoch": 0.9599254426840633,
"grad_norm": 0.3440712094306946,
"learning_rate": 4.353055659329419e-07,
"loss": 0.0089,
"step": 1030
},
{
"epoch": 0.9608574091332712,
"grad_norm": 0.0910550057888031,
"learning_rate": 4.153219740695435e-07,
"loss": 0.0022,
"step": 1031
},
{
"epoch": 0.961789375582479,
"grad_norm": 0.5724707841873169,
"learning_rate": 3.958060049589485e-07,
"loss": 0.008,
"step": 1032
},
{
"epoch": 0.9627213420316869,
"grad_norm": 0.7248005867004395,
"learning_rate": 3.767578426521923e-07,
"loss": 0.0209,
"step": 1033
},
{
"epoch": 0.9636533084808947,
"grad_norm": 1.4421683549880981,
"learning_rate": 3.5817766678850615e-07,
"loss": 0.0059,
"step": 1034
},
{
"epoch": 0.9645852749301025,
"grad_norm": 0.555047869682312,
"learning_rate": 3.4006565259366295e-07,
"loss": 0.0095,
"step": 1035
},
{
"epoch": 0.9655172413793104,
"grad_norm": 0.6058024764060974,
"learning_rate": 3.224219708782894e-07,
"loss": 0.0113,
"step": 1036
},
{
"epoch": 0.9664492078285182,
"grad_norm": 0.9548462629318237,
"learning_rate": 3.052467880362675e-07,
"loss": 0.0218,
"step": 1037
},
{
"epoch": 0.967381174277726,
"grad_norm": 0.2721622586250305,
"learning_rate": 2.88540266043158e-07,
"loss": 0.0058,
"step": 1038
},
{
"epoch": 0.9683131407269339,
"grad_norm": 0.2145017385482788,
"learning_rate": 2.723025624546849e-07,
"loss": 0.0034,
"step": 1039
},
{
"epoch": 0.9692451071761417,
"grad_norm": 0.4106197655200958,
"learning_rate": 2.5653383040524227e-07,
"loss": 0.0055,
"step": 1040
},
{
"epoch": 0.9701770736253494,
"grad_norm": 0.16109812259674072,
"learning_rate": 2.412342186064565e-07,
"loss": 0.0024,
"step": 1041
},
{
"epoch": 0.9711090400745573,
"grad_norm": 1.1901835203170776,
"learning_rate": 2.2640387134577058e-07,
"loss": 0.0369,
"step": 1042
},
{
"epoch": 0.9720410065237651,
"grad_norm": 0.8408852815628052,
"learning_rate": 2.1204292848509555e-07,
"loss": 0.019,
"step": 1043
},
{
"epoch": 0.972972972972973,
"grad_norm": 0.14452603459358215,
"learning_rate": 1.98151525459489e-07,
"loss": 0.0023,
"step": 1044
},
{
"epoch": 0.9739049394221808,
"grad_norm": 0.34769976139068604,
"learning_rate": 1.8472979327587292e-07,
"loss": 0.0066,
"step": 1045
},
{
"epoch": 0.9748369058713886,
"grad_norm": 0.8973527550697327,
"learning_rate": 1.717778585118013e-07,
"loss": 0.0171,
"step": 1046
},
{
"epoch": 0.9757688723205965,
"grad_norm": 0.06965801864862442,
"learning_rate": 1.5929584331427216e-07,
"loss": 0.0011,
"step": 1047
},
{
"epoch": 0.9767008387698043,
"grad_norm": 0.5106223225593567,
"learning_rate": 1.4728386539856754e-07,
"loss": 0.0086,
"step": 1048
},
{
"epoch": 0.9776328052190121,
"grad_norm": 1.0167614221572876,
"learning_rate": 1.3574203804713747e-07,
"loss": 0.0193,
"step": 1049
},
{
"epoch": 0.97856477166822,
"grad_norm": 1.6071141958236694,
"learning_rate": 1.246704701085566e-07,
"loss": 0.0923,
"step": 1050
},
{
"epoch": 0.9794967381174278,
"grad_norm": 0.06256900727748871,
"learning_rate": 1.1406926599646373e-07,
"loss": 0.001,
"step": 1051
},
{
"epoch": 0.9804287045666356,
"grad_norm": 0.6707242727279663,
"learning_rate": 1.0393852568860719e-07,
"loss": 0.0157,
"step": 1052
},
{
"epoch": 0.9813606710158435,
"grad_norm": 0.9473837614059448,
"learning_rate": 9.427834472588992e-08,
"loss": 0.0473,
"step": 1053
},
{
"epoch": 0.9822926374650512,
"grad_norm": 0.40152788162231445,
"learning_rate": 8.508881421145365e-08,
"loss": 0.0091,
"step": 1054
},
{
"epoch": 0.983224603914259,
"grad_norm": 0.11309801042079926,
"learning_rate": 7.637002080985168e-08,
"loss": 0.002,
"step": 1055
},
{
"epoch": 0.9841565703634669,
"grad_norm": 0.1226629987359047,
"learning_rate": 6.812204674619959e-08,
"loss": 0.0025,
"step": 1056
},
{
"epoch": 0.9850885368126747,
"grad_norm": 0.6275689601898193,
"learning_rate": 6.034496980542037e-08,
"loss": 0.0149,
"step": 1057
},
{
"epoch": 0.9860205032618826,
"grad_norm": 0.6120526194572449,
"learning_rate": 5.303886333151153e-08,
"loss": 0.0088,
"step": 1058
},
{
"epoch": 0.9869524697110904,
"grad_norm": 0.8378371000289917,
"learning_rate": 4.620379622682358e-08,
"loss": 0.0166,
"step": 1059
},
{
"epoch": 0.9878844361602982,
"grad_norm": 0.06605886667966843,
"learning_rate": 3.9839832951465986e-08,
"loss": 0.0012,
"step": 1060
},
{
"epoch": 0.9888164026095061,
"grad_norm": 0.9707759618759155,
"learning_rate": 3.394703352263551e-08,
"loss": 0.0434,
"step": 1061
},
{
"epoch": 0.9897483690587139,
"grad_norm": 0.12164362519979477,
"learning_rate": 2.8525453514099965e-08,
"loss": 0.0018,
"step": 1062
},
{
"epoch": 0.9906803355079217,
"grad_norm": 0.4129875898361206,
"learning_rate": 2.3575144055643094e-08,
"loss": 0.0093,
"step": 1063
},
{
"epoch": 0.9916123019571296,
"grad_norm": 0.6688860654830933,
"learning_rate": 1.9096151832609375e-08,
"loss": 0.0118,
"step": 1064
},
{
"epoch": 0.9925442684063374,
"grad_norm": 0.1448785960674286,
"learning_rate": 1.5088519085437734e-08,
|
"loss": 0.0021, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 0.9934762348555451, |
|
"grad_norm": 0.17577403783798218, |
|
"learning_rate": 1.1552283609272962e-08, |
|
"loss": 0.0036, |
|
"step": 1066 |
|
}, |
|
{ |
|
"epoch": 0.994408201304753, |
|
"grad_norm": 0.0788675993680954, |
|
"learning_rate": 8.487478753615997e-09, |
|
"loss": 0.0016, |
|
"step": 1067 |
|
}, |
|
{ |
|
"epoch": 0.9953401677539608, |
|
"grad_norm": 0.3536447286605835, |
|
"learning_rate": 5.894133422001957e-09, |
|
"loss": 0.004, |
|
"step": 1068 |
|
}, |
|
{ |
|
"epoch": 0.9962721342031687, |
|
"grad_norm": 1.099785327911377, |
|
"learning_rate": 3.772272071722593e-09, |
|
"loss": 0.016, |
|
"step": 1069 |
|
}, |
|
{ |
|
"epoch": 0.9972041006523765, |
|
"grad_norm": 1.2353144884109497, |
|
"learning_rate": 2.1219147136264382e-09, |
|
"loss": 0.024, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 0.9981360671015843, |
|
"grad_norm": 0.8307344317436218, |
|
"learning_rate": 9.43076911874563e-10, |
|
"loss": 0.0139, |
|
"step": 1071 |
|
}, |
|
{ |
|
"epoch": 0.9990680335507922, |
|
"grad_norm": 2.4767701625823975, |
|
"learning_rate": 2.3576978384065584e-10, |
|
"loss": 0.0363, |
|
"step": 1072 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.866561770439148, |
|
"learning_rate": 0.0, |
|
"loss": 0.0964, |
|
"step": 1073 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 1073, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 269, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 1.5471679295717376e+17, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|