{
  "best_metric": 2.497835874557495,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 3.011764705882353,
  "eval_steps": 50,
  "global_step": 192,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.01568627450980392, "grad_norm": 0.9195303320884705, "learning_rate": 1e-05, "loss": 2.8736, "step": 1 },
    { "epoch": 0.01568627450980392, "eval_loss": 3.324666738510132, "eval_runtime": 7.9726, "eval_samples_per_second": 13.546, "eval_steps_per_second": 3.387, "step": 1 },
    { "epoch": 0.03137254901960784, "grad_norm": 1.0334582328796387, "learning_rate": 2e-05, "loss": 2.8739, "step": 2 },
    { "epoch": 0.047058823529411764, "grad_norm": 1.0187386274337769, "learning_rate": 3e-05, "loss": 2.9073, "step": 3 },
    { "epoch": 0.06274509803921569, "grad_norm": 0.9314672946929932, "learning_rate": 4e-05, "loss": 2.9041, "step": 4 },
    { "epoch": 0.0784313725490196, "grad_norm": 0.9586840867996216, "learning_rate": 5e-05, "loss": 2.807, "step": 5 },
    { "epoch": 0.09411764705882353, "grad_norm": 0.9514013528823853, "learning_rate": 6e-05, "loss": 2.996, "step": 6 },
    { "epoch": 0.10980392156862745, "grad_norm": 0.8477784395217896, "learning_rate": 7e-05, "loss": 2.9011, "step": 7 },
    { "epoch": 0.12549019607843137, "grad_norm": 0.898993968963623, "learning_rate": 8e-05, "loss": 2.7146, "step": 8 },
    { "epoch": 0.1411764705882353, "grad_norm": 0.8038886785507202, "learning_rate": 9e-05, "loss": 2.7318, "step": 9 },
    { "epoch": 0.1568627450980392, "grad_norm": 0.8036936521530151, "learning_rate": 0.0001, "loss": 2.6728, "step": 10 },
    { "epoch": 0.17254901960784313, "grad_norm": 0.9414847493171692, "learning_rate": 9.999255120204248e-05, "loss": 2.6703, "step": 11 },
    { "epoch": 0.18823529411764706, "grad_norm": 1.016950249671936, "learning_rate": 9.997020702755353e-05, "loss": 2.7351, "step": 12 },
    { "epoch": 0.20392156862745098, "grad_norm": 1.0459362268447876, "learning_rate": 9.99329741340228e-05, "loss": 2.6783, "step": 13 },
    { "epoch": 0.2196078431372549, "grad_norm": 1.2477803230285645, "learning_rate": 9.98808636150624e-05, "loss": 2.6327, "step": 14 },
    { "epoch": 0.23529411764705882, "grad_norm": 1.4739620685577393, "learning_rate": 9.981389099710132e-05, "loss": 3.0748, "step": 15 },
    { "epoch": 0.25098039215686274, "grad_norm": 0.5202563405036926, "learning_rate": 9.973207623475965e-05, "loss": 2.41, "step": 16 },
    { "epoch": 0.26666666666666666, "grad_norm": 0.6118998527526855, "learning_rate": 9.96354437049027e-05, "loss": 2.5087, "step": 17 },
    { "epoch": 0.2823529411764706, "grad_norm": 0.5753269791603088, "learning_rate": 9.952402219937816e-05, "loss": 2.5023, "step": 18 },
    { "epoch": 0.2980392156862745, "grad_norm": 0.548793613910675, "learning_rate": 9.939784491643734e-05, "loss": 2.4598, "step": 19 },
    { "epoch": 0.3137254901960784, "grad_norm": 0.5158197283744812, "learning_rate": 9.92569494508437e-05, "loss": 2.626, "step": 20 },
    { "epoch": 0.32941176470588235, "grad_norm": 0.4742230474948883, "learning_rate": 9.910137778267152e-05, "loss": 2.539, "step": 21 },
    { "epoch": 0.34509803921568627, "grad_norm": 0.4577326774597168, "learning_rate": 9.893117626479777e-05, "loss": 2.5212, "step": 22 },
    { "epoch": 0.3607843137254902, "grad_norm": 0.469974547624588, "learning_rate": 9.874639560909117e-05, "loss": 2.4532, "step": 23 },
    { "epoch": 0.3764705882352941, "grad_norm": 0.6523585915565491, "learning_rate": 9.85470908713026e-05, "loss": 2.4785, "step": 24 },
    { "epoch": 0.39215686274509803, "grad_norm": 0.6335188746452332, "learning_rate": 9.833332143466099e-05, "loss": 2.5327, "step": 25 },
    { "epoch": 0.40784313725490196, "grad_norm": 0.5561938881874084, "learning_rate": 9.810515099218003e-05, "loss": 2.7133, "step": 26 },
    { "epoch": 0.4235294117647059, "grad_norm": 0.6505088210105896, "learning_rate": 9.78626475276808e-05, "loss": 2.5733, "step": 27 },
    { "epoch": 0.4392156862745098, "grad_norm": 0.7462759613990784, "learning_rate": 9.760588329553571e-05, "loss": 2.6319, "step": 28 },
    { "epoch": 0.4549019607843137, "grad_norm": 0.8369238376617432, "learning_rate": 9.73349347991403e-05, "loss": 2.5938, "step": 29 },
    { "epoch": 0.47058823529411764, "grad_norm": 1.1839768886566162, "learning_rate": 9.704988276811883e-05, "loss": 2.9516, "step": 30 },
    { "epoch": 0.48627450980392156, "grad_norm": 0.29538607597351074, "learning_rate": 9.675081213427076e-05, "loss": 2.3553, "step": 31 },
    { "epoch": 0.5019607843137255, "grad_norm": 0.40422534942626953, "learning_rate": 9.643781200626511e-05, "loss": 2.426, "step": 32 },
    { "epoch": 0.5176470588235295, "grad_norm": 0.41036108136177063, "learning_rate": 9.611097564309053e-05, "loss": 2.503, "step": 33 },
    { "epoch": 0.5333333333333333, "grad_norm": 0.4156786799430847, "learning_rate": 9.577040042626833e-05, "loss": 2.3799, "step": 34 },
    { "epoch": 0.5490196078431373, "grad_norm": 0.39962074160575867, "learning_rate": 9.54161878308377e-05, "loss": 2.4543, "step": 35 },
    { "epoch": 0.5647058823529412, "grad_norm": 0.4289894998073578, "learning_rate": 9.504844339512095e-05, "loss": 2.4369, "step": 36 },
    { "epoch": 0.5803921568627451, "grad_norm": 0.4545486271381378, "learning_rate": 9.466727668927816e-05, "loss": 2.5761, "step": 37 },
    { "epoch": 0.596078431372549, "grad_norm": 0.431194931268692, "learning_rate": 9.42728012826605e-05, "loss": 2.4045, "step": 38 },
    { "epoch": 0.611764705882353, "grad_norm": 0.43016883730888367, "learning_rate": 9.38651347099721e-05, "loss": 2.3858, "step": 39 },
    { "epoch": 0.6274509803921569, "grad_norm": 0.45133844017982483, "learning_rate": 9.344439843625034e-05, "loss": 2.4977, "step": 40 },
    { "epoch": 0.6431372549019608, "grad_norm": 0.4912121295928955, "learning_rate": 9.301071782067504e-05, "loss": 2.4795, "step": 41 },
    { "epoch": 0.6588235294117647, "grad_norm": 0.6027780175209045, "learning_rate": 9.256422207921757e-05, "loss": 2.5236, "step": 42 },
    { "epoch": 0.6745098039215687, "grad_norm": 0.5655380487442017, "learning_rate": 9.210504424614059e-05, "loss": 2.4498, "step": 43 },
    { "epoch": 0.6901960784313725, "grad_norm": 0.6778488755226135, "learning_rate": 9.163332113436032e-05, "loss": 2.5315, "step": 44 },
    { "epoch": 0.7058823529411765, "grad_norm": 1.1361249685287476, "learning_rate": 9.114919329468282e-05, "loss": 2.8519, "step": 45 },
    { "epoch": 0.7215686274509804, "grad_norm": 0.3818633258342743, "learning_rate": 9.065280497392663e-05, "loss": 2.4607, "step": 46 },
    { "epoch": 0.7372549019607844, "grad_norm": 0.3662630319595337, "learning_rate": 9.014430407194413e-05, "loss": 2.4082, "step": 47 },
    { "epoch": 0.7529411764705882, "grad_norm": 0.35173290967941284, "learning_rate": 8.962384209755452e-05, "loss": 2.5405, "step": 48 },
    { "epoch": 0.7686274509803922, "grad_norm": 0.38660287857055664, "learning_rate": 8.90915741234015e-05, "loss": 2.4206, "step": 49 },
    { "epoch": 0.7843137254901961, "grad_norm": 0.40245190262794495, "learning_rate": 8.854765873974898e-05, "loss": 2.5983, "step": 50 },
    { "epoch": 0.7843137254901961, "eval_loss": 2.5114123821258545, "eval_runtime": 8.157, "eval_samples_per_second": 13.24, "eval_steps_per_second": 3.31, "step": 50 },
    { "epoch": 0.8, "grad_norm": 0.3828006386756897, "learning_rate": 8.799225800722895e-05, "loss": 2.4409, "step": 51 },
    { "epoch": 0.8156862745098039, "grad_norm": 0.4464765787124634, "learning_rate": 8.742553740855506e-05, "loss": 2.4858, "step": 52 },
    { "epoch": 0.8313725490196079, "grad_norm": 0.4025624692440033, "learning_rate": 8.684766579921684e-05, "loss": 2.4596, "step": 53 },
    { "epoch": 0.8470588235294118, "grad_norm": 0.4462139904499054, "learning_rate": 8.625881535716883e-05, "loss": 2.4744, "step": 54 },
    { "epoch": 0.8627450980392157, "grad_norm": 0.4364989995956421, "learning_rate": 8.565916153152983e-05, "loss": 2.4725, "step": 55 },
    { "epoch": 0.8784313725490196, "grad_norm": 0.4733513593673706, "learning_rate": 8.504888299030747e-05, "loss": 2.609, "step": 56 },
    { "epoch": 0.8941176470588236, "grad_norm": 0.5065345764160156, "learning_rate": 8.442816156716385e-05, "loss": 2.5392, "step": 57 },
    { "epoch": 0.9098039215686274, "grad_norm": 0.5636035799980164, "learning_rate": 8.379718220723773e-05, "loss": 2.5665, "step": 58 },
    { "epoch": 0.9254901960784314, "grad_norm": 0.6569843292236328, "learning_rate": 8.315613291203976e-05, "loss": 2.5817, "step": 59 },
    { "epoch": 0.9411764705882353, "grad_norm": 0.907096266746521, "learning_rate": 8.250520468343722e-05, "loss": 2.911, "step": 60 },
    { "epoch": 0.9568627450980393, "grad_norm": 0.29703453183174133, "learning_rate": 8.184459146674446e-05, "loss": 2.3542, "step": 61 },
    { "epoch": 0.9725490196078431, "grad_norm": 0.3698834478855133, "learning_rate": 8.117449009293668e-05, "loss": 2.3618, "step": 62 },
    { "epoch": 0.9882352941176471, "grad_norm": 0.504271388053894, "learning_rate": 8.049510022000364e-05, "loss": 2.5812, "step": 63 },
    { "epoch": 1.003921568627451, "grad_norm": 0.8231114745140076, "learning_rate": 7.980662427346127e-05, "loss": 3.6064, "step": 64 },
    { "epoch": 1.0196078431372548, "grad_norm": 0.24639078974723816, "learning_rate": 7.910926738603854e-05, "loss": 2.1655, "step": 65 },
    { "epoch": 1.035294117647059, "grad_norm": 0.29097798466682434, "learning_rate": 7.840323733655778e-05, "loss": 2.2555, "step": 66 },
    { "epoch": 1.0509803921568628, "grad_norm": 0.3319820165634155, "learning_rate": 7.768874448802665e-05, "loss": 2.4114, "step": 67 },
    { "epoch": 1.0666666666666667, "grad_norm": 0.32010725140571594, "learning_rate": 7.696600172495997e-05, "loss": 2.2756, "step": 68 },
    { "epoch": 1.0823529411764705, "grad_norm": 0.3406822383403778, "learning_rate": 7.62352243899504e-05, "loss": 2.3815, "step": 69 },
    { "epoch": 1.0980392156862746, "grad_norm": 0.37193605303764343, "learning_rate": 7.54966302195068e-05, "loss": 2.406, "step": 70 },
    { "epoch": 1.1137254901960785, "grad_norm": 0.36552101373672485, "learning_rate": 7.475043927917907e-05, "loss": 2.3517, "step": 71 },
    { "epoch": 1.1294117647058823, "grad_norm": 0.3685903251171112, "learning_rate": 7.399687389798933e-05, "loss": 2.262, "step": 72 },
    { "epoch": 1.1450980392156862, "grad_norm": 0.41463446617126465, "learning_rate": 7.323615860218843e-05, "loss": 2.3599, "step": 73 },
    { "epoch": 1.1607843137254903, "grad_norm": 0.47038400173187256, "learning_rate": 7.246852004835807e-05, "loss": 2.3755, "step": 74 },
    { "epoch": 1.1764705882352942, "grad_norm": 0.4810601770877838, "learning_rate": 7.169418695587791e-05, "loss": 2.3339, "step": 75 },
    { "epoch": 1.192156862745098, "grad_norm": 0.5399831533432007, "learning_rate": 7.091339003877826e-05, "loss": 2.3184, "step": 76 },
    { "epoch": 1.2078431372549019, "grad_norm": 0.6069369316101074, "learning_rate": 7.012636193699837e-05, "loss": 2.1125, "step": 77 },
    { "epoch": 1.223529411764706, "grad_norm": 0.7255291938781738, "learning_rate": 6.933333714707094e-05, "loss": 2.3926, "step": 78 },
    { "epoch": 1.2392156862745098, "grad_norm": 0.819170355796814, "learning_rate": 6.853455195225338e-05, "loss": 2.6737, "step": 79 },
    { "epoch": 1.2549019607843137, "grad_norm": 0.3163304626941681, "learning_rate": 6.773024435212678e-05, "loss": 2.1127, "step": 80 },
    { "epoch": 1.2705882352941176, "grad_norm": 0.406746506690979, "learning_rate": 6.692065399168352e-05, "loss": 2.3158, "step": 81 },
    { "epoch": 1.2862745098039214, "grad_norm": 0.4056803584098816, "learning_rate": 6.610602208992454e-05, "loss": 2.2758, "step": 82 },
    { "epoch": 1.3019607843137255, "grad_norm": 0.4088509976863861, "learning_rate": 6.528659136798764e-05, "loss": 2.2759, "step": 83 },
    { "epoch": 1.3176470588235294, "grad_norm": 0.4500698447227478, "learning_rate": 6.446260597682839e-05, "loss": 2.3797, "step": 84 },
    { "epoch": 1.3333333333333333, "grad_norm": 0.45922282338142395, "learning_rate": 6.363431142447469e-05, "loss": 2.3687, "step": 85 },
    { "epoch": 1.3490196078431373, "grad_norm": 0.48098620772361755, "learning_rate": 6.280195450287736e-05, "loss": 2.3443, "step": 86 },
    { "epoch": 1.3647058823529412, "grad_norm": 0.478667289018631, "learning_rate": 6.19657832143779e-05, "loss": 2.4358, "step": 87 },
    { "epoch": 1.380392156862745, "grad_norm": 0.4647475481033325, "learning_rate": 6.112604669781572e-05, "loss": 2.2697, "step": 88 },
    { "epoch": 1.396078431372549, "grad_norm": 0.5217877626419067, "learning_rate": 6.028299515429683e-05, "loss": 2.3142, "step": 89 },
    { "epoch": 1.4117647058823528, "grad_norm": 0.527504026889801, "learning_rate": 5.943687977264584e-05, "loss": 2.3132, "step": 90 },
    { "epoch": 1.427450980392157, "grad_norm": 0.607871413230896, "learning_rate": 5.8587952654563817e-05, "loss": 2.3922, "step": 91 },
    { "epoch": 1.4431372549019608, "grad_norm": 0.6600663661956787, "learning_rate": 5.773646673951406e-05, "loss": 2.2529, "step": 92 },
    { "epoch": 1.4588235294117646, "grad_norm": 0.8639673590660095, "learning_rate": 5.688267572935842e-05, "loss": 2.5246, "step": 93 },
    { "epoch": 1.4745098039215687, "grad_norm": 0.8424491286277771, "learning_rate": 5.602683401276615e-05, "loss": 2.5439, "step": 94 },
    { "epoch": 1.4901960784313726, "grad_norm": 0.33690783381462097, "learning_rate": 5.5169196589418504e-05, "loss": 2.0268, "step": 95 },
    { "epoch": 1.5058823529411764, "grad_norm": 0.40822649002075195, "learning_rate": 5.431001899403098e-05, "loss": 2.3005, "step": 96 },
    { "epoch": 1.5215686274509803, "grad_norm": 0.42489108443260193, "learning_rate": 5.344955722021624e-05, "loss": 2.3231, "step": 97 },
    { "epoch": 1.5372549019607842, "grad_norm": 0.43306756019592285, "learning_rate": 5.258806764421048e-05, "loss": 2.2458, "step": 98 },
    { "epoch": 1.5529411764705883, "grad_norm": 0.4661814868450165, "learning_rate": 5.172580694848541e-05, "loss": 2.3549, "step": 99 },
    { "epoch": 1.5686274509803921, "grad_norm": 0.4894493520259857, "learning_rate": 5.086303204526943e-05, "loss": 2.3509, "step": 100 },
    { "epoch": 1.5686274509803921, "eval_loss": 2.497835874557495, "eval_runtime": 8.1646, "eval_samples_per_second": 13.228, "eval_steps_per_second": 3.307, "step": 100 },
    { "epoch": 1.5843137254901962, "grad_norm": 0.5029212832450867, "learning_rate": 5e-05, "loss": 2.2764, "step": 101 },
    { "epoch": 1.6, "grad_norm": 0.49811941385269165, "learning_rate": 4.913696795473058e-05, "loss": 2.2556, "step": 102 },
    { "epoch": 1.615686274509804, "grad_norm": 0.5826261043548584, "learning_rate": 4.827419305151461e-05, "loss": 2.2982, "step": 103 },
    { "epoch": 1.6313725490196078, "grad_norm": 0.5830192565917969, "learning_rate": 4.741193235578952e-05, "loss": 2.4361, "step": 104 },
    { "epoch": 1.6470588235294117, "grad_norm": 0.6307199597358704, "learning_rate": 4.655044277978375e-05, "loss": 2.3384, "step": 105 },
    { "epoch": 1.6627450980392156, "grad_norm": 0.652266263961792, "learning_rate": 4.568998100596903e-05, "loss": 2.3353, "step": 106 },
    { "epoch": 1.6784313725490196, "grad_norm": 0.7673619389533997, "learning_rate": 4.48308034105815e-05, "loss": 2.2034, "step": 107 },
    { "epoch": 1.6941176470588235, "grad_norm": 0.8802884221076965, "learning_rate": 4.397316598723385e-05, "loss": 2.4979, "step": 108 },
    { "epoch": 1.7098039215686276, "grad_norm": 0.8109568357467651, "learning_rate": 4.31173242706416e-05, "loss": 2.626, "step": 109 },
    { "epoch": 1.7254901960784315, "grad_norm": 0.32692885398864746, "learning_rate": 4.226353326048593e-05, "loss": 1.9041, "step": 110 },
    { "epoch": 1.7411764705882353, "grad_norm": 0.4149419069290161, "learning_rate": 4.1412047345436195e-05, "loss": 2.2416, "step": 111 },
    { "epoch": 1.7568627450980392, "grad_norm": 0.4411557614803314, "learning_rate": 4.056312022735417e-05, "loss": 2.3267, "step": 112 },
    { "epoch": 1.772549019607843, "grad_norm": 0.49479031562805176, "learning_rate": 3.971700484570318e-05, "loss": 2.3624, "step": 113 },
    { "epoch": 1.788235294117647, "grad_norm": 0.4794876277446747, "learning_rate": 3.887395330218429e-05, "loss": 2.3468, "step": 114 },
    { "epoch": 1.803921568627451, "grad_norm": 0.5065494775772095, "learning_rate": 3.803421678562213e-05, "loss": 2.2112, "step": 115 },
    { "epoch": 1.8196078431372549, "grad_norm": 0.5268806219100952, "learning_rate": 3.719804549712265e-05, "loss": 2.238, "step": 116 },
    { "epoch": 1.835294117647059, "grad_norm": 0.5357680320739746, "learning_rate": 3.6365688575525315e-05, "loss": 2.2156, "step": 117 },
    { "epoch": 1.8509803921568628, "grad_norm": 0.5712677240371704, "learning_rate": 3.553739402317162e-05, "loss": 2.3148, "step": 118 },
    { "epoch": 1.8666666666666667, "grad_norm": 0.5927073955535889, "learning_rate": 3.471340863201237e-05, "loss": 2.2461, "step": 119 },
    { "epoch": 1.8823529411764706, "grad_norm": 0.7000199556350708, "learning_rate": 3.389397791007548e-05, "loss": 2.2504, "step": 120 },
    { "epoch": 1.8980392156862744, "grad_norm": 0.7094666361808777, "learning_rate": 3.307934600831648e-05, "loss": 2.198, "step": 121 },
    { "epoch": 1.9137254901960783, "grad_norm": 0.8061943650245667, "learning_rate": 3.226975564787322e-05, "loss": 2.3629, "step": 122 },
    { "epoch": 1.9294117647058824, "grad_norm": 0.9445766806602478, "learning_rate": 3.146544804774663e-05, "loss": 2.2729, "step": 123 },
    { "epoch": 1.9450980392156862, "grad_norm": 0.9857886433601379, "learning_rate": 3.066666285292906e-05, "loss": 2.5454, "step": 124 },
    { "epoch": 1.9607843137254903, "grad_norm": 0.38298577070236206, "learning_rate": 2.9873638063001628e-05, "loss": 1.9603, "step": 125 },
    { "epoch": 1.9764705882352942, "grad_norm": 0.5521575808525085, "learning_rate": 2.9086609961221755e-05, "loss": 2.3363, "step": 126 },
    { "epoch": 1.992156862745098, "grad_norm": 0.9297695755958557, "learning_rate": 2.8305813044122097e-05, "loss": 2.5893, "step": 127 },
    { "epoch": 2.007843137254902, "grad_norm": 0.9833527207374573, "learning_rate": 2.7531479951641924e-05, "loss": 2.6296, "step": 128 },
    { "epoch": 2.023529411764706, "grad_norm": 0.3908270597457886, "learning_rate": 2.6763841397811573e-05, "loss": 2.0195, "step": 129 },
    { "epoch": 2.0392156862745097, "grad_norm": 0.42692381143569946, "learning_rate": 2.6003126102010695e-05, "loss": 2.1701, "step": 130 },
    { "epoch": 2.0549019607843135, "grad_norm": 0.44662317633628845, "learning_rate": 2.5249560720820932e-05, "loss": 2.2962, "step": 131 },
    { "epoch": 2.070588235294118, "grad_norm": 0.4669097363948822, "learning_rate": 2.450336978049322e-05, "loss": 2.1407, "step": 132 },
    { "epoch": 2.0862745098039217, "grad_norm": 0.48035910725593567, "learning_rate": 2.37647756100496e-05, "loss": 2.1402, "step": 133 },
    { "epoch": 2.1019607843137256, "grad_norm": 0.5153262615203857, "learning_rate": 2.3033998275040046e-05, "loss": 2.2411, "step": 134 },
    { "epoch": 2.1176470588235294, "grad_norm": 0.511375367641449, "learning_rate": 2.2311255511973345e-05, "loss": 2.0221, "step": 135 },
    { "epoch": 2.1333333333333333, "grad_norm": 0.5629892349243164, "learning_rate": 2.1596762663442218e-05, "loss": 2.2439, "step": 136 },
    { "epoch": 2.149019607843137, "grad_norm": 0.5920019745826721, "learning_rate": 2.0890732613961478e-05, "loss": 2.1041, "step": 137 },
    { "epoch": 2.164705882352941, "grad_norm": 0.6213743090629578, "learning_rate": 2.0193375726538737e-05, "loss": 2.0556, "step": 138 },
    { "epoch": 2.180392156862745, "grad_norm": 0.7653319239616394, "learning_rate": 1.9504899779996355e-05, "loss": 2.0099, "step": 139 },
    { "epoch": 2.196078431372549, "grad_norm": 0.7903580665588379, "learning_rate": 1.8825509907063327e-05, "loss": 1.9591, "step": 140 },
    { "epoch": 2.211764705882353, "grad_norm": 0.9329378604888916, "learning_rate": 1.8155408533255553e-05, "loss": 2.1217, "step": 141 },
    { "epoch": 2.227450980392157, "grad_norm": 1.1085801124572754, "learning_rate": 1.749479531656279e-05, "loss": 2.0027, "step": 142 },
    { "epoch": 2.243137254901961, "grad_norm": 0.7666586637496948, "learning_rate": 1.684386708796025e-05, "loss": 2.2613, "step": 143 },
    { "epoch": 2.2588235294117647, "grad_norm": 0.4413524270057678, "learning_rate": 1.6202817792762282e-05, "loss": 2.1461, "step": 144 },
    { "epoch": 2.2745098039215685, "grad_norm": 0.48825109004974365, "learning_rate": 1.557183843283614e-05, "loss": 2.2913, "step": 145 },
    { "epoch": 2.2901960784313724, "grad_norm": 0.5957844853401184, "learning_rate": 1.4951117009692528e-05, "loss": 2.2318, "step": 146 },
    { "epoch": 2.3058823529411763, "grad_norm": 0.5846164226531982, "learning_rate": 1.4340838468470197e-05, "loss": 2.193, "step": 147 },
    { "epoch": 2.3215686274509806, "grad_norm": 0.6084363460540771, "learning_rate": 1.3741184642831189e-05, "loss": 2.3104, "step": 148 },
    { "epoch": 2.3372549019607844, "grad_norm": 0.630045473575592, "learning_rate": 1.3152334200783167e-05, "loss": 2.2282, "step": 149 },
    { "epoch": 2.3529411764705883, "grad_norm": 0.6543457508087158, "learning_rate": 1.257446259144494e-05, "loss": 2.1706, "step": 150 },
    { "epoch": 2.3529411764705883, "eval_loss": 2.531811237335205, "eval_runtime": 8.1637, "eval_samples_per_second": 13.229, "eval_steps_per_second": 3.307, "step": 150 },
    { "epoch": 2.368627450980392, "grad_norm": 0.6354141235351562, "learning_rate": 1.2007741992771065e-05, "loss": 2.0553, "step": 151 },
    { "epoch": 2.384313725490196, "grad_norm": 0.7069724798202515, "learning_rate": 1.145234126025102e-05, "loss": 2.1107, "step": 152 },
    { "epoch": 2.4, "grad_norm": 0.7220402359962463, "learning_rate": 1.090842587659851e-05, "loss": 2.0186, "step": 153 },
    { "epoch": 2.4156862745098038, "grad_norm": 0.8067901134490967, "learning_rate": 1.0376157902445488e-05, "loss": 1.997, "step": 154 },
    { "epoch": 2.431372549019608, "grad_norm": 0.8723448514938354, "learning_rate": 9.85569592805588e-06, "loss": 2.0568, "step": 155 },
    { "epoch": 2.447058823529412, "grad_norm": 0.9434618949890137, "learning_rate": 9.347195026073369e-06, "loss": 1.9484, "step": 156 },
    { "epoch": 2.462745098039216, "grad_norm": 1.2919942140579224, "learning_rate": 8.850806705317183e-06, "loss": 2.0413, "step": 157 },
    { "epoch": 2.4784313725490197, "grad_norm": 0.8914510607719421, "learning_rate": 8.366678865639688e-06, "loss": 2.136, "step": 158 },
    { "epoch": 2.4941176470588236, "grad_norm": 0.45241811871528625, "learning_rate": 7.894955753859413e-06, "loss": 1.95, "step": 159 },
    { "epoch": 2.5098039215686274, "grad_norm": 0.5093364715576172, "learning_rate": 7.435777920782444e-06, "loss": 2.1615, "step": 160 },
    { "epoch": 2.5254901960784313, "grad_norm": 0.5468050837516785, "learning_rate": 6.989282179324963e-06, "loss": 2.1543, "step": 161 },
    { "epoch": 2.541176470588235, "grad_norm": 0.5462315082550049, "learning_rate": 6.555601563749675e-06, "loss": 2.1315, "step": 162 },
    { "epoch": 2.556862745098039, "grad_norm": 0.6020755767822266, "learning_rate": 6.1348652900279025e-06, "loss": 2.2727, "step": 163 },
    { "epoch": 2.572549019607843, "grad_norm": 0.6015859246253967, "learning_rate": 5.727198717339511e-06, "loss": 2.1035, "step": 164 },
    { "epoch": 2.588235294117647, "grad_norm": 0.6677731871604919, "learning_rate": 5.332723310721854e-06, "loss": 2.1539, "step": 165 },
    { "epoch": 2.603921568627451, "grad_norm": 0.7184847593307495, "learning_rate": 4.951556604879048e-06, "loss": 2.1104, "step": 166 },
    { "epoch": 2.619607843137255, "grad_norm": 0.7267415523529053, "learning_rate": 4.5838121691623e-06, "loss": 2.0935, "step": 167 },
    { "epoch": 2.635294117647059, "grad_norm": 0.8061967492103577, "learning_rate": 4.229599573731685e-06, "loss": 2.186, "step": 168 },
    { "epoch": 2.6509803921568627, "grad_norm": 0.872096836566925, "learning_rate": 3.8890243569094874e-06, "loss": 2.2811, "step": 169 },
    { "epoch": 2.6666666666666665, "grad_norm": 0.9103512167930603, "learning_rate": 3.5621879937348836e-06, "loss": 2.0644, "step": 170 },
    { "epoch": 2.682352941176471, "grad_norm": 0.9733991622924805, "learning_rate": 3.249187865729264e-06, "loss": 1.9821, "step": 171 },
    { "epoch": 2.6980392156862747, "grad_norm": 1.33379065990448, "learning_rate": 2.950117231881183e-06, "loss": 2.0174, "step": 172 },
    { "epoch": 2.7137254901960786, "grad_norm": 0.9886175394058228, "learning_rate": 2.6650652008597068e-06, "loss": 2.3795, "step": 173 },
    { "epoch": 2.7294117647058824, "grad_norm": 0.4592796266078949, "learning_rate": 2.3941167044642944e-06, "loss": 1.892, "step": 174 },
    { "epoch": 2.7450980392156863, "grad_norm": 0.5413078665733337, "learning_rate": 2.137352472319215e-06, "loss": 2.2523, "step": 175 },
    { "epoch": 2.76078431372549, "grad_norm": 0.5450495481491089, "learning_rate": 1.8948490078199764e-06, "loss": 2.207, "step": 176 },
    { "epoch": 2.776470588235294, "grad_norm": 0.5484597086906433, "learning_rate": 1.6666785653390249e-06, "loss": 2.1261, "step": 177 },
    { "epoch": 2.792156862745098, "grad_norm": 0.6012923121452332, "learning_rate": 1.4529091286973995e-06, "loss": 2.2482, "step": 178 },
    { "epoch": 2.8078431372549018, "grad_norm": 0.6597626805305481, "learning_rate": 1.2536043909088191e-06, "loss": 2.2572, "step": 179 },
    { "epoch": 2.8235294117647056, "grad_norm": 0.6608418822288513, "learning_rate": 1.0688237352022345e-06, "loss": 2.2363, "step": 180 },
    { "epoch": 2.83921568627451, "grad_norm": 0.6887637376785278, "learning_rate": 8.986222173284875e-07, "loss": 2.1163, "step": 181 },
    { "epoch": 2.854901960784314, "grad_norm": 0.728211522102356, "learning_rate": 7.4305054915631e-07, "loss": 2.1268, "step": 182 },
    { "epoch": 2.8705882352941177, "grad_norm": 0.796875536441803, "learning_rate": 6.021550835626777e-07, "loss": 2.1706, "step": 183 },
    { "epoch": 2.8862745098039215, "grad_norm": 0.855240523815155, "learning_rate": 4.7597780062184073e-07, "loss": 2.1735, "step": 184 },
    { "epoch": 2.9019607843137254, "grad_norm": 0.899287760257721, "learning_rate": 3.6455629509730136e-07, "loss": 1.9215, "step": 185 },
    { "epoch": 2.9176470588235293, "grad_norm": 0.957107663154602, "learning_rate": 2.6792376524036877e-07, "loss": 1.8281, "step": 186 },
    { "epoch": 2.9333333333333336, "grad_norm": 1.1885648965835571, "learning_rate": 1.8610900289867673e-07, "loss": 2.0065, "step": 187 },
    { "epoch": 2.9490196078431374, "grad_norm": 0.9298829436302185, "learning_rate": 1.191363849376237e-07, "loss": 2.2486, "step": 188 },
    { "epoch": 2.9647058823529413, "grad_norm": 0.5273903608322144, "learning_rate": 6.702586597719385e-08, "loss": 1.838, "step": 189 },
    { "epoch": 2.980392156862745, "grad_norm": 0.7140518426895142, "learning_rate": 2.9792972446479605e-08, "loss": 2.2171, "step": 190 },
    { "epoch": 2.996078431372549, "grad_norm": 1.283398151397705, "learning_rate": 7.448797957526621e-09, "loss": 2.4425, "step": 191 },
    { "epoch": 3.011764705882353, "grad_norm": 0.6145690083503723, "learning_rate": 0.0, "loss": 2.2341, "step": 192 }
  ],
  "logging_steps": 1,
  "max_steps": 192,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 4,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 1
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.62298517110784e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}