{
  "best_metric": 0.925627350509402,
  "best_model_checkpoint": "checkpoints/BEE-spoke-data-bert-plus-L8-v1.0-allNLI_matryoshka-synthetic-text-similarity-Mar-07_22-56/checkpoint-3600",
  "epoch": 0.35376884422110555,
  "eval_steps": 300,
  "global_step": 4400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0, "grad_norm": 7.8406219482421875, "learning_rate": 8.038585209003216e-08, "loss": 0.1372, "step": 5 },
    { "epoch": 0.0, "grad_norm": 6.115014553070068, "learning_rate": 1.6077170418006432e-07, "loss": 0.1134, "step": 10 },
    { "epoch": 0.0, "grad_norm": 2.3239176273345947, "learning_rate": 2.4115755627009647e-07, "loss": 0.0902, "step": 15 },
    { "epoch": 0.0, "grad_norm": 4.643126964569092, "learning_rate": 3.2154340836012864e-07, "loss": 0.0907, "step": 20 },
    { "epoch": 0.0, "grad_norm": 2.8138389587402344, "learning_rate": 4.019292604501608e-07, "loss": 0.087, "step": 25 },
    { "epoch": 0.0, "grad_norm": 2.0662753582000732, "learning_rate": 4.823151125401929e-07, "loss": 0.0713, "step": 30 },
    { "epoch": 0.0, "grad_norm": 2.2391765117645264, "learning_rate": 5.627009646302252e-07, "loss": 0.0724, "step": 35 },
    { "epoch": 0.0, "grad_norm": 2.7336483001708984, "learning_rate": 6.430868167202573e-07, "loss": 0.0711, "step": 40 },
    { "epoch": 0.0, "grad_norm": 0.5051072239875793, "learning_rate": 7.234726688102894e-07, "loss": 0.0386, "step": 45 },
    { "epoch": 0.0, "grad_norm": 1.8692330121994019, "learning_rate": 8.038585209003216e-07, "loss": 0.07, "step": 50 },
    { "epoch": 0.0, "grad_norm": 2.106194257736206, "learning_rate": 8.842443729903538e-07, "loss": 0.0908, "step": 55 },
    { "epoch": 0.0, "grad_norm": 2.4909865856170654, "learning_rate": 9.646302250803859e-07, "loss": 0.0632, "step": 60 },
    { "epoch": 0.01, "grad_norm": 1.8705946207046509, "learning_rate": 1.045016077170418e-06, "loss": 0.0436, "step": 65 },
    { "epoch": 0.01, "grad_norm": 1.9496204853057861, "learning_rate": 1.1254019292604503e-06, "loss": 0.0481, "step": 70 },
    { "epoch": 0.01, "grad_norm": 2.2997841835021973, "learning_rate": 1.2057877813504825e-06, "loss": 0.0552, "step": 75 },
    { "epoch": 0.01, "grad_norm": 2.0006277561187744, "learning_rate": 1.2861736334405146e-06, "loss": 0.0249, "step": 80 },
    { "epoch": 0.01, "grad_norm": 1.1965364217758179, "learning_rate": 1.3665594855305468e-06, "loss": 0.0261, "step": 85 },
    { "epoch": 0.01, "grad_norm": 0.8660438656806946, "learning_rate": 1.4469453376205788e-06, "loss": 0.0383, "step": 90 },
    { "epoch": 0.01, "grad_norm": 0.8808113932609558, "learning_rate": 1.527331189710611e-06, "loss": 0.0254, "step": 95 },
    { "epoch": 0.01, "grad_norm": 1.918907880783081, "learning_rate": 1.6077170418006432e-06, "loss": 0.0656, "step": 100 },
    { "epoch": 0.01, "grad_norm": 2.735292673110962, "learning_rate": 1.6881028938906755e-06, "loss": 0.0518, "step": 105 },
    { "epoch": 0.01, "grad_norm": 3.8118743896484375, "learning_rate": 1.7684887459807077e-06, "loss": 0.0908, "step": 110 },
    { "epoch": 0.01, "grad_norm": 2.0685555934906006, "learning_rate": 1.84887459807074e-06, "loss": 0.0332, "step": 115 },
    { "epoch": 0.01, "grad_norm": 0.8618605732917786, "learning_rate": 1.9292604501607717e-06, "loss": 0.0456, "step": 120 },
    { "epoch": 0.01, "grad_norm": 1.7448190450668335, "learning_rate": 2.0096463022508037e-06, "loss": 0.0349, "step": 125 },
    { "epoch": 0.01, "grad_norm": 1.6762810945510864, "learning_rate": 2.090032154340836e-06, "loss": 0.025, "step": 130 },
    { "epoch": 0.01, "grad_norm": 2.172909736633301, "learning_rate": 2.170418006430868e-06, "loss": 0.0334, "step": 135 },
    { "epoch": 0.01, "grad_norm": 5.036996364593506, "learning_rate": 2.2508038585209006e-06, "loss": 0.0548, "step": 140 },
    { "epoch": 0.01, "grad_norm": 2.498236894607544, "learning_rate": 2.3311897106109326e-06, "loss": 0.0492, "step": 145 },
    { "epoch": 0.01, "grad_norm": 2.8583147525787354, "learning_rate": 2.411575562700965e-06, "loss": 0.05, "step": 150 },
    { "epoch": 0.01, "grad_norm": 1.241025686264038, "learning_rate": 2.491961414790997e-06, "loss": 0.0264, "step": 155 },
    { "epoch": 0.01, "grad_norm": 2.9725112915039062, "learning_rate": 2.572347266881029e-06, "loss": 0.0321, "step": 160 },
    { "epoch": 0.01, "grad_norm": 0.9365035891532898, "learning_rate": 2.6527331189710615e-06, "loss": 0.0149, "step": 165 },
    { "epoch": 0.01, "grad_norm": 0.9742757081985474, "learning_rate": 2.7331189710610936e-06, "loss": 0.0186, "step": 170 },
    { "epoch": 0.01, "grad_norm": 0.9915574193000793, "learning_rate": 2.813504823151126e-06, "loss": 0.0308, "step": 175 },
    { "epoch": 0.01, "grad_norm": 3.514364719390869, "learning_rate": 2.8938906752411576e-06, "loss": 0.0288, "step": 180 },
    { "epoch": 0.01, "grad_norm": 2.717421293258667, "learning_rate": 2.97427652733119e-06, "loss": 0.0371, "step": 185 },
    { "epoch": 0.02, "grad_norm": 4.118988990783691, "learning_rate": 3.054662379421222e-06, "loss": 0.0494, "step": 190 },
    { "epoch": 0.02, "grad_norm": 2.1433117389678955, "learning_rate": 3.135048231511254e-06, "loss": 0.0515, "step": 195 },
    { "epoch": 0.02, "grad_norm": 0.93222975730896, "learning_rate": 3.2154340836012865e-06, "loss": 0.0276, "step": 200 },
    { "epoch": 0.02, "grad_norm": 0.6795055270195007, "learning_rate": 3.2958199356913185e-06, "loss": 0.0318, "step": 205 },
    { "epoch": 0.02, "grad_norm": 2.950875997543335, "learning_rate": 3.376205787781351e-06, "loss": 0.0335, "step": 210 },
    { "epoch": 0.02, "grad_norm": 2.8429243564605713, "learning_rate": 3.456591639871383e-06, "loss": 0.0434, "step": 215 },
    { "epoch": 0.02, "grad_norm": 2.2567358016967773, "learning_rate": 3.5369774919614154e-06, "loss": 0.0324, "step": 220 },
    { "epoch": 0.02, "grad_norm": 1.407845139503479, "learning_rate": 3.617363344051447e-06, "loss": 0.0309, "step": 225 },
    { "epoch": 0.02, "grad_norm": 3.4068539142608643, "learning_rate": 3.69774919614148e-06, "loss": 0.0454, "step": 230 },
    { "epoch": 0.02, "grad_norm": 1.8196914196014404, "learning_rate": 3.7781350482315114e-06, "loss": 0.0255, "step": 235 },
    { "epoch": 0.02, "grad_norm": 1.0097465515136719, "learning_rate": 3.8585209003215434e-06, "loss": 0.0263, "step": 240 },
    { "epoch": 0.02, "grad_norm": 1.995619773864746, "learning_rate": 3.938906752411576e-06, "loss": 0.0331, "step": 245 },
    { "epoch": 0.02, "grad_norm": 2.168858528137207, "learning_rate": 4.0192926045016075e-06, "loss": 0.0416, "step": 250 },
    { "epoch": 0.02, "grad_norm": 2.8158152103424072, "learning_rate": 4.09967845659164e-06, "loss": 0.0385, "step": 255 },
    { "epoch": 0.02, "grad_norm": 1.8158726692199707, "learning_rate": 4.180064308681672e-06, "loss": 0.0385, "step": 260 },
    { "epoch": 0.02, "grad_norm": 0.6891674995422363, "learning_rate": 4.260450160771704e-06, "loss": 0.0182, "step": 265 },
    { "epoch": 0.02, "grad_norm": 1.5722671747207642, "learning_rate": 4.340836012861736e-06, "loss": 0.0323, "step": 270 },
    { "epoch": 0.02, "grad_norm": 2.8213624954223633, "learning_rate": 4.421221864951769e-06, "loss": 0.0351, "step": 275 },
    { "epoch": 0.02, "grad_norm": 1.2886526584625244, "learning_rate": 4.501607717041801e-06, "loss": 0.0401, "step": 280 },
    { "epoch": 0.02, "grad_norm": 2.6343109607696533, "learning_rate": 4.581993569131833e-06, "loss": 0.0343, "step": 285 },
    { "epoch": 0.02, "grad_norm": 0.7094342708587646, "learning_rate": 4.662379421221865e-06, "loss": 0.0261, "step": 290 },
    { "epoch": 0.02, "grad_norm": 2.2275326251983643, "learning_rate": 4.742765273311897e-06, "loss": 0.0183, "step": 295 },
    { "epoch": 0.02, "grad_norm": 0.9329387545585632, "learning_rate": 4.82315112540193e-06, "loss": 0.0651, "step": 300 },
    {
      "epoch": 0.02,
      "eval_loss": 0.03527999296784401,
      "eval_pearson_cosine": 0.8237974983947806,
      "eval_pearson_dot": 0.79982187310788,
      "eval_pearson_euclidean": 0.8049934987510711,
      "eval_pearson_manhattan": 0.802979519881207,
      "eval_pearson_max": 0.8237974983947806,
      "eval_runtime": 426.4216,
      "eval_samples_per_second": 1.173,
      "eval_spearman_cosine": 0.8749033396133584,
      "eval_spearman_dot": 0.8475867183468735,
      "eval_spearman_euclidean": 0.8703331453325813,
      "eval_spearman_manhattan": 0.8672266689066755,
      "eval_spearman_max": 0.8749033396133584,
      "eval_steps_per_second": 1.173,
      "step": 300
    },
    { "epoch": 0.02, "grad_norm": 0.6371708512306213, "learning_rate": 4.903536977491961e-06, "loss": 0.019, "step": 305 },
    { "epoch": 0.02, "grad_norm": 2.065721035003662, "learning_rate": 4.983922829581994e-06, "loss": 0.0193, "step": 310 },
    { "epoch": 0.03, "grad_norm": 3.1992969512939453, "learning_rate": 5.064308681672026e-06, "loss": 0.0579, "step": 315 },
    { "epoch": 0.03, "grad_norm": 2.422349214553833, "learning_rate": 5.144694533762058e-06, "loss": 0.0342, "step": 320 },
    { "epoch": 0.03, "grad_norm": 2.09092378616333, "learning_rate": 5.22508038585209e-06, "loss": 0.0321, "step": 325 },
    { "epoch": 0.03, "grad_norm": 1.6249920129776, "learning_rate": 5.305466237942123e-06, "loss": 0.05, "step": 330 },
    { "epoch": 0.03, "grad_norm": 1.4493677616119385, "learning_rate": 5.385852090032154e-06, "loss": 0.0371, "step": 335 },
    { "epoch": 0.03, "grad_norm": 1.1457599401474, "learning_rate": 5.466237942122187e-06, "loss": 0.0191, "step": 340 },
    { "epoch": 0.03, "grad_norm": 0.7196341156959534, "learning_rate": 5.546623794212219e-06, "loss": 0.0653, "step": 345 },
    { "epoch": 0.03, "grad_norm": 0.7609484791755676, "learning_rate": 5.627009646302252e-06, "loss": 0.0234, "step": 350 },
    { "epoch": 0.03, "grad_norm": 1.9668811559677124, "learning_rate": 5.707395498392283e-06, "loss": 0.043, "step": 355 },
    { "epoch": 0.03, "grad_norm": 1.4503982067108154, "learning_rate": 5.787781350482315e-06, "loss": 0.0423, "step": 360 },
    { "epoch": 0.03, "grad_norm": 2.6571695804595947, "learning_rate": 5.868167202572348e-06, "loss": 0.0463, "step": 365 },
    { "epoch": 0.03, "grad_norm": 1.0262566804885864, "learning_rate": 5.94855305466238e-06, "loss": 0.0362, "step": 370 },
    { "epoch": 0.03, "grad_norm": 3.1059563159942627, "learning_rate": 6.028938906752412e-06, "loss": 0.0291, "step": 375 },
    { "epoch": 0.03, "grad_norm": 1.3710355758666992, "learning_rate": 6.109324758842444e-06, "loss": 0.0286, "step": 380 },
    { "epoch": 0.03, "grad_norm": 0.9536277055740356, "learning_rate": 6.189710610932477e-06, "loss": 0.048, "step": 385 },
    { "epoch": 0.03, "grad_norm": 0.9812152981758118, "learning_rate": 6.270096463022508e-06, "loss": 0.0211, "step": 390 },
    { "epoch": 0.03, "grad_norm": 1.8256497383117676, "learning_rate": 6.350482315112541e-06, "loss": 0.0248, "step": 395 },
    { "epoch": 0.03, "grad_norm": 1.5534074306488037, "learning_rate": 6.430868167202573e-06, "loss": 0.0406, "step": 400 },
    { "epoch": 0.03, "grad_norm": 2.0551486015319824, "learning_rate": 6.511254019292606e-06, "loss": 0.0299, "step": 405 },
    { "epoch": 0.03, "grad_norm": 1.8460248708724976, "learning_rate": 6.591639871382637e-06, "loss": 0.0182, "step": 410 },
    { "epoch": 0.03, "grad_norm": 0.7704716920852661, "learning_rate": 6.672025723472669e-06, "loss": 0.0223, "step": 415 },
    { "epoch": 0.03, "grad_norm": 8.103775024414062, "learning_rate": 6.752411575562702e-06, "loss": 0.0296, "step": 420 },
    { "epoch": 0.03, "grad_norm": 1.514302134513855, "learning_rate": 6.832797427652733e-06, "loss": 0.0302, "step": 425 },
    { "epoch": 0.03, "grad_norm": 1.5787880420684814, "learning_rate": 6.913183279742766e-06, "loss": 0.0229, "step": 430 },
    { "epoch": 0.03, "grad_norm": 1.478020429611206, "learning_rate": 6.993569131832798e-06, "loss": 0.03, "step": 435 },
    { "epoch": 0.04, "grad_norm": 0.8203696608543396, "learning_rate": 7.073954983922831e-06, "loss": 0.0333, "step": 440 },
    { "epoch": 0.04, "grad_norm": 1.000648856163025, "learning_rate": 7.154340836012862e-06, "loss": 0.0177, "step": 445 },
    { "epoch": 0.04, "grad_norm": 1.5386018753051758, "learning_rate": 7.234726688102894e-06, "loss": 0.0351, "step": 450 },
    { "epoch": 0.04, "grad_norm": 1.0292292833328247, "learning_rate": 7.315112540192927e-06, "loss": 0.0395, "step": 455 },
    { "epoch": 0.04, "grad_norm": 0.5625340938568115, "learning_rate": 7.39549839228296e-06, "loss": 0.0299, "step": 460 },
    { "epoch": 0.04, "grad_norm": 3.6210525035858154, "learning_rate": 7.475884244372991e-06, "loss": 0.0321, "step": 465 },
    { "epoch": 0.04, "grad_norm": 1.8112176656723022, "learning_rate": 7.556270096463023e-06, "loss": 0.0316, "step": 470 },
    { "epoch": 0.04, "grad_norm": 1.0322524309158325, "learning_rate": 7.636655948553056e-06, "loss": 0.0253, "step": 475 },
    { "epoch": 0.04, "grad_norm": 3.308305263519287, "learning_rate": 7.717041800643087e-06, "loss": 0.0387, "step": 480 },
    { "epoch": 0.04, "grad_norm": 1.2584173679351807, "learning_rate": 7.79742765273312e-06, "loss": 0.0353, "step": 485 },
    { "epoch": 0.04, "grad_norm": 0.6924129128456116, "learning_rate": 7.877813504823153e-06, "loss": 0.0157, "step": 490 },
    { "epoch": 0.04, "grad_norm": 2.4199492931365967, "learning_rate": 7.958199356913184e-06, "loss": 0.0401, "step": 495 },
    { "epoch": 0.04, "grad_norm": 0.35832083225250244, "learning_rate": 8.038585209003215e-06, "loss": 0.0227, "step": 500 },
    { "epoch": 0.04, "grad_norm": 1.6043404340744019, "learning_rate": 8.118971061093248e-06, "loss": 0.0199, "step": 505 },
    { "epoch": 0.04, "grad_norm": 2.0943973064422607, "learning_rate": 8.19935691318328e-06, "loss": 0.0213, "step": 510 },
    { "epoch": 0.04, "grad_norm": 0.8544149398803711, "learning_rate": 8.279742765273314e-06, "loss": 0.0226, "step": 515 },
    { "epoch": 0.04, "grad_norm": 4.021656036376953, "learning_rate": 8.360128617363345e-06, "loss": 0.0574, "step": 520 },
    { "epoch": 0.04, "grad_norm": 2.3117215633392334, "learning_rate": 8.440514469453378e-06, "loss": 0.019, "step": 525 },
    { "epoch": 0.04, "grad_norm": 1.2881953716278076, "learning_rate": 8.520900321543409e-06, "loss": 0.0267, "step": 530 },
    { "epoch": 0.04, "grad_norm": 2.115152597427368, "learning_rate": 8.601286173633442e-06, "loss": 0.0233, "step": 535 },
    { "epoch": 0.04, "grad_norm": 1.7420244216918945, "learning_rate": 8.681672025723473e-06, "loss": 0.0216, "step": 540 },
    { "epoch": 0.04, "grad_norm": 1.2626619338989258, "learning_rate": 8.762057877813506e-06, "loss": 0.0182, "step": 545 },
    { "epoch": 0.04, "grad_norm": 1.5143030881881714, "learning_rate": 8.842443729903538e-06, "loss": 0.0261, "step": 550 },
    { "epoch": 0.04, "grad_norm": 1.1002925634384155, "learning_rate": 8.92282958199357e-06, "loss": 0.022, "step": 555 },
    { "epoch": 0.05, "grad_norm": 1.9754141569137573, "learning_rate": 9.003215434083602e-06, "loss": 0.0252, "step": 560 },
    { "epoch": 0.05, "grad_norm": 1.3871146440505981, "learning_rate": 9.083601286173634e-06, "loss": 0.0456, "step": 565 },
    { "epoch": 0.05, "grad_norm": 1.471267580986023, "learning_rate": 9.163987138263667e-06, "loss": 0.0384, "step": 570 },
    { "epoch": 0.05, "grad_norm": 0.8994916677474976, "learning_rate": 9.244372990353698e-06, "loss": 0.036, "step": 575 },
    { "epoch": 0.05, "grad_norm": 1.7490404844284058, "learning_rate": 9.32475884244373e-06, "loss": 0.0221, "step": 580 },
    { "epoch": 0.05, "grad_norm": 1.3971872329711914, "learning_rate": 9.405144694533763e-06, "loss": 0.0302, "step": 585 },
    { "epoch": 0.05, "grad_norm": 0.9706817865371704, "learning_rate": 9.485530546623795e-06, "loss": 0.0291, "step": 590 },
    { "epoch": 0.05, "grad_norm": 2.0623135566711426, "learning_rate": 9.565916398713827e-06, "loss": 0.0332, "step": 595 },
    { "epoch": 0.05, "grad_norm": 2.330348253250122, "learning_rate": 9.64630225080386e-06, "loss": 0.031, "step": 600 },
    {
      "epoch": 0.05,
      "eval_loss": 0.03203292191028595,
      "eval_pearson_cosine": 0.8607192041230748,
      "eval_pearson_dot": 0.8492954726274867,
      "eval_pearson_euclidean": 0.8425777905216318,
      "eval_pearson_manhattan": 0.8394006469079365,
      "eval_pearson_max": 0.8607192041230748,
      "eval_runtime": 425.6285,
      "eval_samples_per_second": 1.175,
      "eval_spearman_cosine": 0.8971169644678579,
      "eval_spearman_dot": 0.8784386497545991,
      "eval_spearman_euclidean": 0.894069240276961,
      "eval_spearman_manhattan": 0.8926532346129386,
      "eval_spearman_max": 0.8971169644678579,
      "eval_steps_per_second": 1.175,
      "step": 600
    },
    { "epoch": 0.05, "grad_norm": 2.334221124649048, "learning_rate": 9.726688102893891e-06, "loss": 0.0273, "step": 605 },
    { "epoch": 0.05, "grad_norm": 0.7424949407577515, "learning_rate": 9.807073954983923e-06, "loss": 0.0647, "step": 610 },
    { "epoch": 0.05, "grad_norm": 1.4894630908966064, "learning_rate": 9.887459807073955e-06, "loss": 0.0361, "step": 615 },
    { "epoch": 0.05, "grad_norm": 0.4873441159725189, "learning_rate": 9.967845659163988e-06, "loss": 0.0264, "step": 620 },
    { "epoch": 0.05, "grad_norm": 1.5973559617996216, "learning_rate": 1.0048231511254021e-05, "loss": 0.029, "step": 625 },
    { "epoch": 0.05, "grad_norm": 1.1919150352478027, "learning_rate": 1.0128617363344052e-05, "loss": 0.0328, "step": 630 },
    { "epoch": 0.05, "grad_norm": 1.3969812393188477, "learning_rate": 1.0209003215434084e-05, "loss": 0.043, "step": 635 },
    { "epoch": 0.05, "grad_norm": 2.33707332611084, "learning_rate": 1.0289389067524116e-05, "loss": 0.0448, "step": 640 },
    { "epoch": 0.05, "grad_norm": 1.3516573905944824, "learning_rate": 1.0369774919614148e-05, "loss": 0.0328, "step": 645 },
    { "epoch": 0.05, "grad_norm": 3.014158248901367, "learning_rate": 1.045016077170418e-05, "loss": 0.0326, "step": 650 },
    { "epoch": 0.05, "grad_norm": 1.1477378606796265, "learning_rate": 1.0530546623794213e-05, "loss": 0.025, "step": 655 },
    { "epoch": 0.05, "grad_norm": 0.8221299648284912, "learning_rate": 1.0610932475884246e-05, "loss": 0.0425, "step": 660 },
    { "epoch": 0.05, "grad_norm": 1.68177330493927, "learning_rate": 1.0691318327974279e-05, "loss": 0.025, "step": 665 },
    { "epoch": 0.05, "grad_norm": 1.4552159309387207, "learning_rate": 1.0771704180064308e-05, "loss": 0.0398, "step": 670 },
    { "epoch": 0.05, "grad_norm": 1.352478265762329, "learning_rate": 1.0852090032154341e-05, "loss": 0.0155, "step": 675 },
    { "epoch": 0.05, "grad_norm": 0.9538177251815796, "learning_rate": 1.0932475884244374e-05, "loss": 0.0299, "step": 680 },
    { "epoch": 0.06, "grad_norm": 2.0173323154449463, "learning_rate": 1.1012861736334405e-05, "loss": 0.0268, "step": 685 },
    { "epoch": 0.06, "grad_norm": 2.0908379554748535, "learning_rate": 1.1093247588424438e-05, "loss": 0.0308, "step": 690 },
    { "epoch": 0.06, "grad_norm": 0.6585212349891663, "learning_rate": 1.1173633440514471e-05, "loss": 0.025, "step": 695 },
    { "epoch": 0.06, "grad_norm": 0.959600567817688, "learning_rate": 1.1254019292604504e-05, "loss": 0.0219, "step": 700 },
    { "epoch": 0.06, "grad_norm": 1.2033023834228516, "learning_rate": 1.1334405144694535e-05, "loss": 0.0315, "step": 705 },
    { "epoch": 0.06, "grad_norm": 1.735729455947876, "learning_rate": 1.1414790996784566e-05, "loss": 0.0249, "step": 710 },
    { "epoch": 0.06, "grad_norm": 1.850893497467041, "learning_rate": 1.14951768488746e-05, "loss": 0.0248, "step": 715 },
    { "epoch": 0.06, "grad_norm": 0.902856171131134, "learning_rate": 1.157556270096463e-05, "loss": 0.0683, "step": 720 },
    { "epoch": 0.06, "grad_norm": 0.6975336670875549, "learning_rate": 1.1655948553054663e-05, "loss": 0.0315, "step": 725 },
    { "epoch": 0.06, "grad_norm": 3.3982033729553223, "learning_rate": 1.1736334405144696e-05, "loss": 0.0479, "step": 730 },
    { "epoch": 0.06, "grad_norm": 2.775419235229492, "learning_rate": 1.1816720257234729e-05, "loss": 0.068, "step": 735 },
    { "epoch": 0.06, "grad_norm": 1.9351024627685547, "learning_rate": 1.189710610932476e-05, "loss": 0.0254, "step": 740 },
    { "epoch": 0.06, "grad_norm": 0.5352950096130371, "learning_rate": 1.1977491961414791e-05, "loss": 0.0193, "step": 745 },
    { "epoch": 0.06, "grad_norm": 1.5528688430786133, "learning_rate": 1.2057877813504824e-05, "loss": 0.039, "step": 750 },
    { "epoch": 0.06, "grad_norm": 1.6287634372711182, "learning_rate": 1.2138263665594855e-05, "loss": 0.017, "step": 755 },
    { "epoch": 0.06, "grad_norm": 1.0720481872558594, "learning_rate": 1.2218649517684888e-05, "loss": 0.0261, "step": 760 },
    { "epoch": 0.06, "grad_norm": 0.8808945417404175, "learning_rate": 1.2299035369774921e-05, "loss": 0.0224, "step": 765 },
    { "epoch": 0.06, "grad_norm": 1.175379753112793, "learning_rate": 1.2379421221864954e-05, "loss": 0.0157, "step": 770 },
    { "epoch": 0.06, "grad_norm": 0.7339066863059998, "learning_rate": 1.2459807073954987e-05, "loss": 0.0242, "step": 775 },
    { "epoch": 0.06, "grad_norm": 0.5107578039169312, "learning_rate": 1.2540192926045016e-05, "loss": 0.0245, "step": 780 },
    { "epoch": 0.06, "grad_norm": 0.5132850408554077, "learning_rate": 1.2620578778135049e-05, "loss": 0.0191, "step": 785 },
    { "epoch": 0.06, "grad_norm": 0.7692009210586548, "learning_rate": 1.2700964630225082e-05, "loss": 0.019, "step": 790 },
    { "epoch": 0.06, "grad_norm": 1.1860913038253784, "learning_rate": 1.2781350482315113e-05, "loss": 0.0121, "step": 795 },
    { "epoch": 0.06, "grad_norm": 0.9162280559539795, "learning_rate": 1.2861736334405146e-05, "loss": 0.0195, "step": 800 },
    { "epoch": 0.06, "grad_norm": 1.2460323572158813, "learning_rate": 1.2942122186495179e-05, "loss": 0.0223, "step": 805 },
    { "epoch": 0.07, "grad_norm": 0.7504421472549438, "learning_rate": 1.3022508038585212e-05, "loss": 0.0325, "step": 810 },
    { "epoch": 0.07, "grad_norm": 1.3735928535461426, "learning_rate": 1.3102893890675241e-05, "loss": 0.0213, "step": 815 },
    { "epoch": 0.07, "grad_norm": 1.1271485090255737, "learning_rate": 1.3183279742765274e-05, "loss": 0.0219, "step": 820 },
    { "epoch": 0.07, "grad_norm": 0.8849589228630066, "learning_rate": 1.3263665594855307e-05, "loss": 0.0143, "step": 825 },
    { "epoch": 0.07, "grad_norm": 0.7527520060539246, "learning_rate": 1.3344051446945338e-05, "loss": 0.0241, "step": 830 },
    { "epoch": 0.07, "grad_norm": 1.0900169610977173, "learning_rate": 1.3424437299035371e-05, "loss": 0.0414, "step": 835 },
    { "epoch": 0.07, "grad_norm": 1.1919902563095093, "learning_rate": 1.3504823151125404e-05, "loss": 0.0234, "step": 840 },
    { "epoch": 0.07, "grad_norm": 0.8385984301567078, "learning_rate": 1.3585209003215437e-05, "loss": 0.0239, "step": 845 },
    { "epoch": 0.07, "grad_norm": 0.9777655005455017, "learning_rate": 1.3665594855305466e-05, "loss": 0.0145, "step": 850 },
    { "epoch": 0.07, "grad_norm": 1.2803690433502197, "learning_rate": 1.3745980707395499e-05, "loss": 0.0274, "step": 855 },
    { "epoch": 0.07, "grad_norm": 1.1460819244384766, "learning_rate": 1.3826366559485532e-05, "loss": 0.0373, "step": 860 },
    { "epoch": 0.07, "grad_norm": 0.8751824498176575, "learning_rate": 1.3906752411575563e-05, "loss": 0.0204, "step": 865 },
    { "epoch": 0.07, "grad_norm": 0.566417932510376, "learning_rate": 1.3987138263665596e-05, "loss": 0.0139, "step": 870 },
    { "epoch": 0.07, "grad_norm": 0.6487017869949341, "learning_rate": 1.4067524115755629e-05, "loss": 0.0458, "step": 875 },
    { "epoch": 0.07, "grad_norm": 1.9419571161270142, "learning_rate": 1.4147909967845662e-05, "loss": 0.0215, "step": 880 },
    { "epoch": 0.07, "grad_norm": 0.7554964423179626, "learning_rate": 1.4228295819935693e-05, "loss": 0.0219, "step": 885 },
    { "epoch": 0.07, "grad_norm": 1.240870475769043, "learning_rate": 1.4308681672025724e-05, "loss": 0.0291, "step": 890 },
    { "epoch": 0.07, "grad_norm": 1.2490875720977783, "learning_rate": 1.4389067524115757e-05, "loss": 0.0256, "step": 895 },
    { "epoch": 0.07, "grad_norm": 0.9301912188529968, "learning_rate": 1.4469453376205788e-05, "loss": 0.0303, "step": 900 },
    {
      "epoch": 0.07,
      "eval_loss": 0.0409804992377758,
      "eval_pearson_cosine": 0.8657545592954407,
      "eval_pearson_dot": 0.8604614808110218,
      "eval_pearson_euclidean": 0.849931010036339,
      "eval_pearson_manhattan": 0.84985721810182,
      "eval_pearson_max": 0.8657545592954407,
      "eval_runtime": 425.4053,
      "eval_samples_per_second": 1.175,
      "eval_spearman_cosine": 0.9034730538922157,
      "eval_spearman_dot": 0.8936158304633218,
      "eval_spearman_euclidean": 0.9027816591266363,
      "eval_spearman_manhattan": 0.9029234516938067,
      "eval_spearman_max": 0.9034730538922157,
      "eval_steps_per_second": 1.175,
      "step": 900
    },
    { "epoch": 0.07, "grad_norm": 0.6353436708450317, "learning_rate": 1.454983922829582e-05, "loss": 0.0306, "step": 905 },
    { "epoch": 0.07, "grad_norm": 3.5071334838867188, "learning_rate": 1.4630225080385854e-05, "loss": 0.0529, "step": 910 },
    { "epoch": 0.07, "grad_norm": 2.511518716812134, "learning_rate": 1.4710610932475886e-05, "loss": 0.0351, "step": 915 },
    { "epoch": 0.07, "grad_norm": 0.8150439858436584, "learning_rate": 1.479099678456592e-05, "loss": 0.0304, "step": 920 },
    { "epoch": 0.07, "grad_norm": 0.909027636051178, "learning_rate": 1.4871382636655949e-05, "loss": 0.0306, "step": 925 },
    { "epoch": 0.07, "grad_norm": 2.117823362350464, "learning_rate": 1.4951768488745982e-05, "loss": 0.0444, "step": 930 },
    { "epoch": 0.08, "grad_norm": 1.0820565223693848, "learning_rate": 1.5032154340836015e-05, "loss": 0.0147, "step": 935 },
    { "epoch": 0.08, "grad_norm": 1.0721031427383423, "learning_rate": 1.5112540192926046e-05, "loss": 0.0379, "step": 940 },
    { "epoch": 0.08, "grad_norm": 0.6273934841156006, "learning_rate": 1.5192926045016079e-05, "loss": 0.0202, "step": 945 },
    { "epoch": 0.08, "grad_norm": 1.4175567626953125, "learning_rate": 1.527331189710611e-05, "loss": 0.0265, "step": 950 },
    { "epoch": 0.08, "grad_norm": 1.7840615510940552, "learning_rate": 1.5353697749196143e-05, "loss": 0.0194, "step": 955 },
    { "epoch": 0.08, "grad_norm": 2.6450772285461426, "learning_rate": 1.5434083601286174e-05, "loss": 0.0271, "step": 960 },
    { "epoch": 0.08, "grad_norm": 1.1013175249099731, "learning_rate": 1.5514469453376205e-05, "loss": 0.035, "step": 965 },
    { "epoch": 0.08, "grad_norm": 0.6244901418685913, "learning_rate": 1.559485530546624e-05, "loss": 0.0183, "step": 970 },
    { "epoch": 0.08, "grad_norm": 1.0193443298339844, "learning_rate": 1.567524115755627e-05, "loss": 0.0234, "step": 975 },
    { "epoch": 0.08, "grad_norm": 1.1802617311477661, "learning_rate": 1.5755627009646305e-05, "loss": 0.0302, "step": 980 },
    { "epoch": 0.08, "grad_norm": 1.1835167407989502, "learning_rate": 1.5836012861736336e-05, "loss": 0.0253, "step": 985 },
    { "epoch": 0.08, "grad_norm": 1.4406228065490723, "learning_rate": 1.5916398713826368e-05, "loss": 0.0316, "step": 990 },
    { "epoch": 0.08, "grad_norm": 0.9229764938354492, "learning_rate": 1.59967845659164e-05, "loss": 0.0408, "step": 995 },
    { "epoch": 0.08, "grad_norm": 0.6133905649185181, "learning_rate": 1.607717041800643e-05, "loss": 0.0163, "step": 1000 },
    { "epoch": 0.08, "grad_norm": 1.388720154762268, "learning_rate": 1.6157556270096464e-05, "loss": 0.0221, "step": 1005 },
    { "epoch": 0.08, "grad_norm": 0.9162605404853821, "learning_rate": 1.6237942122186496e-05, "loss": 0.0292, "step": 1010 },
    { "epoch": 0.08, "grad_norm": 1.0816845893859863, "learning_rate": 1.631832797427653e-05, "loss": 0.0284, "step": 1015 },
    { "epoch": 0.08, "grad_norm": 0.974607527256012, "learning_rate": 1.639871382636656e-05, "loss": 0.0245, "step": 1020 },
    { "epoch": 0.08, "grad_norm": 0.7858002185821533, "learning_rate": 1.6479099678456592e-05, "loss": 0.0208, "step": 1025 },
    { "epoch": 0.08, "grad_norm": 0.8541093468666077, "learning_rate": 1.6559485530546627e-05, "loss": 0.0402, "step": 1030 },
    { "epoch": 0.08, "grad_norm": 0.9254925847053528, "learning_rate": 1.6639871382636655e-05, "loss": 0.0277, "step": 1035 },
    { "epoch": 0.08, "grad_norm": 1.1855261325836182, "learning_rate": 1.672025723472669e-05, "loss": 0.0289, "step": 1040 },
    { "epoch": 0.08, "grad_norm": 1.1872082948684692, "learning_rate": 1.680064308681672e-05, "loss": 0.0255, "step": 1045 },
    { "epoch": 0.08, "grad_norm": 1.9865713119506836, "learning_rate": 1.6881028938906755e-05, "loss": 0.0331, "step": 1050 },
    { "epoch": 0.08, "grad_norm": 1.5213806629180908, "learning_rate": 1.6961414790996786e-05, "loss": 0.0385, "step": 1055 },
    { "epoch": 0.09, "grad_norm": 1.5804238319396973, "learning_rate": 1.7041800643086817e-05, "loss": 0.0447, "step": 1060 },
    { "epoch": 0.09, "grad_norm": 1.2709295749664307, "learning_rate": 1.7122186495176852e-05, "loss": 0.0184, "step": 1065 },
    { "epoch": 0.09, "grad_norm": 0.6574845314025879, "learning_rate": 1.7202572347266883e-05, "loss": 0.0201, "step": 1070 },
    { "epoch": 0.09, "grad_norm": 2.590877056121826, "learning_rate": 1.7282958199356914e-05, "loss": 0.0456, "step": 1075 },
    { "epoch": 0.09, "grad_norm": 1.060797929763794, "learning_rate": 1.7363344051446945e-05, "loss": 0.0375, "step": 1080 },
    { "epoch": 0.09, "grad_norm": 0.38614028692245483, "learning_rate": 1.744372990353698e-05, "loss": 0.0371, "step": 1085 },
    { "epoch": 0.09, "grad_norm": 1.6158710718154907, "learning_rate": 1.752411575562701e-05, "loss": 0.0403, "step": 1090 },
    { "epoch": 0.09, "grad_norm": 1.5008140802383423, "learning_rate": 1.7604501607717042e-05, "loss": 0.0296, "step": 1095 },
    { "epoch": 0.09, "grad_norm": 0.41157564520835876, "learning_rate": 1.7684887459807077e-05, "loss": 0.0255, "step": 1100 },
    { "epoch": 0.09, "grad_norm": 1.3567136526107788, "learning_rate": 1.7765273311897108e-05, "loss": 0.0217, "step": 1105 },
    { "epoch": 0.09, "grad_norm": 0.698455810546875, "learning_rate": 1.784565916398714e-05, "loss": 0.0188, "step": 1110 },
    { "epoch": 0.09, "grad_norm": 1.4908955097198486, "learning_rate": 1.792604501607717e-05, "loss": 0.0188, "step": 1115 },
    { "epoch": 0.09, "grad_norm": 2.2162156105041504, "learning_rate": 1.8006430868167205e-05, "loss": 0.02, "step": 1120 },
    { "epoch": 0.09, "grad_norm": 0.3919101655483246, "learning_rate": 1.8086816720257236e-05, "loss": 0.0133, "step": 1125 },
    { "epoch": 0.09, "grad_norm": 0.4801468551158905, "learning_rate": 1.8167202572347267e-05, "loss": 0.0303, "step": 1130 },
    { "epoch": 0.09, "grad_norm": 0.5365796685218811, "learning_rate": 1.8247588424437302e-05, "loss": 0.0275, "step": 1135 },
    { "epoch": 0.09, "grad_norm": 0.777245819568634, "learning_rate": 1.8327974276527333e-05, "loss": 0.0273, "step": 1140 },
    { "epoch": 0.09, "grad_norm": 1.5682530403137207, "learning_rate": 1.8408360128617364e-05, "loss": 0.0261, "step": 1145 },
    { "epoch": 0.09, "grad_norm": 1.0468765497207642, "learning_rate": 1.8488745980707395e-05, "loss": 0.0288, "step": 1150 },
    { "epoch": 0.09, "grad_norm": 1.2527729272842407, "learning_rate": 1.856913183279743e-05, "loss": 0.0239, "step": 1155 },
    { "epoch": 0.09, "grad_norm": 0.5002691149711609, "learning_rate": 1.864951768488746e-05, "loss": 0.0219, "step": 1160 },
    { "epoch": 0.09, "grad_norm": 1.9675867557525635, "learning_rate": 1.8729903536977496e-05, "loss": 0.0476, "step": 1165 },
    { "epoch": 0.09, "grad_norm": 0.8520388007164001, "learning_rate": 1.8810289389067527e-05, "loss": 0.0258, "step": 1170 },
    { "epoch": 0.09, "grad_norm": 0.5552634596824646, "learning_rate": 1.8890675241157558e-05, "loss": 0.0237, "step": 1175 },
    { "epoch": 0.09, "grad_norm": 0.6992123126983643, "learning_rate": 1.897106109324759e-05, "loss": 0.0265, "step": 1180 },
    { "epoch": 0.1, "grad_norm": 1.1183786392211914, "learning_rate": 1.905144694533762e-05, "loss": 0.027, "step": 1185 },
    { "epoch": 0.1, "grad_norm": 2.0854079723358154, "learning_rate": 1.9131832797427655e-05, "loss": 0.0252, "step": 1190 },
    { "epoch": 0.1, "grad_norm": 1.8095922470092773, "learning_rate": 1.9212218649517686e-05, "loss": 0.037, "step": 1195 },
    { "epoch": 0.1, "grad_norm": 1.6434860229492188, "learning_rate": 1.929260450160772e-05, "loss": 0.0277, "step": 1200 },
    {
      "epoch": 0.1,
      "eval_loss": 0.030386893078684807,
      "eval_pearson_cosine": 0.8622394577970579,
      "eval_pearson_dot": 0.8570136493582737,
      "eval_pearson_euclidean": 0.849571905885398,
      "eval_pearson_manhattan": 0.8505210712126229,
      "eval_pearson_max": 0.8622394577970579,
      "eval_runtime": 425.8405,
      "eval_samples_per_second": 1.174,
      "eval_spearman_cosine": 0.8911063804255216,
      "eval_spearman_dot": 0.8816442945771783,
      "eval_spearman_euclidean": 0.8895569342277367,
      "eval_spearman_manhattan": 0.890007464029856,
      "eval_spearman_max": 0.8911063804255216,
      "eval_steps_per_second": 1.174,
      "step": 1200
    },
    { "epoch": 0.1, "grad_norm": 0.8423680067062378, "learning_rate": 1.9372990353697752e-05, "loss": 0.0313, "step": 1205 },
    { "epoch": 0.1, "grad_norm": 1.2395281791687012, "learning_rate": 1.9453376205787783e-05, "loss": 0.0297, "step": 1210 },
    { "epoch": 0.1, "grad_norm": 1.149891972541809, "learning_rate": 1.9533762057877814e-05, "loss": 0.0208, "step": 1215 },
    { "epoch": 0.1, "grad_norm": 2.4256904125213623, "learning_rate": 1.9614147909967845e-05, "loss": 0.0437, "step": 1220 },
    { "epoch": 0.1, "grad_norm": 0.7631718516349792, "learning_rate": 1.969453376205788e-05, "loss": 0.0335, "step": 1225 },
    { "epoch": 0.1, "grad_norm": 1.0927650928497314, "learning_rate": 1.977491961414791e-05, "loss": 0.0349, "step": 1230 },
    { "epoch": 0.1, "grad_norm": 0.7310745120048523, "learning_rate": 1.9855305466237946e-05, "loss": 0.0305, "step": 1235 },
    { "epoch": 0.1, "grad_norm": 2.624340534210205, "learning_rate": 1.9935691318327977e-05, "loss": 0.0456, "step": 1240 },
    { "epoch": 0.1, "grad_norm": 1.09121835231781, "learning_rate": 1.999821316894488e-05, "loss": 0.0119, "step": 1245 },
    { "epoch": 0.1, "grad_norm": 1.6216334104537964, "learning_rate": 1.998927901366926e-05, "loss": 0.0419, "step": 1250 },
    { "epoch": 0.1, "grad_norm": 1.0775346755981445, "learning_rate": 1.998034485839364e-05, "loss": 0.0317, "step": 1255 },
    { "epoch": 0.1, "grad_norm": 1.044417381286621, "learning_rate": 1.997141070311802e-05, "loss": 0.0245, "step": 1260 },
    { "epoch": 0.1, "grad_norm": 1.581361174583435, "learning_rate": 1.9962476547842403e-05, "loss": 0.0246, "step": 1265 },
    { "epoch": 0.1, "grad_norm": 0.4875144362449646, "learning_rate": 1.9953542392566784e-05, "loss": 0.0218, "step": 1270 },
    { "epoch": 0.1, "grad_norm": 1.2498222589492798, "learning_rate": 1.9944608237291165e-05, "loss": 0.04, "step": 1275 },
    { "epoch": 0.1, "grad_norm": 0.562926173210144, "learning_rate": 1.993567408201555e-05, "loss": 0.0268, "step": 1280 },
    { "epoch": 0.1, "grad_norm": 0.5474754571914673, "learning_rate": 1.992673992673993e-05, "loss": 0.0514, "step": 1285 },
    { "epoch": 0.1, "grad_norm": 0.6975357532501221, "learning_rate": 1.9917805771464308e-05, "loss": 0.0345, "step": 1290 },
    { "epoch": 0.1, "grad_norm": 2.0237133502960205, "learning_rate": 1.9908871616188692e-05, "loss": 0.0523, "step": 1295 },
    { "epoch": 0.1, "grad_norm": 0.604995846748352, "learning_rate": 1.9899937460913073e-05, "loss": 0.0279, "step": 1300 },
    { "epoch": 0.1, "grad_norm": 1.3883243799209595, "learning_rate": 1.9891003305637454e-05, "loss": 0.0227, "step": 1305 },
    { "epoch": 0.11, "grad_norm": 0.5331462621688843, "learning_rate": 1.9882069150361835e-05, "loss": 0.0326, "step": 1310 },
    { "epoch": 0.11, "grad_norm": 0.44855189323425293, "learning_rate": 1.9873134995086216e-05, "loss": 0.0352, "step": 1315 },
    { "epoch": 0.11, "grad_norm": 2.213827610015869, "learning_rate": 1.9864200839810597e-05, "loss": 0.0432, "step": 1320 },
    { "epoch": 0.11, "grad_norm": 0.5794016718864441, "learning_rate": 1.985526668453498e-05, "loss": 0.0245, "step": 1325 },
    { "epoch": 0.11, "grad_norm": 0.6042608618736267, "learning_rate": 1.984633252925936e-05, "loss": 0.0168, "step": 1330 },
    { "epoch": 0.11, "grad_norm": 1.935172200202942, "learning_rate": 1.983739837398374e-05, "loss": 0.0347, "step": 1335 },
    { "epoch": 0.11, "grad_norm": 1.000755786895752, "learning_rate": 1.982846421870812e-05, "loss": 0.0302, "step": 1340 },
    { "epoch": 0.11, "grad_norm": 2.054868698120117, "learning_rate": 1.9819530063432503e-05, "loss": 0.0622, "step": 1345 },
    { "epoch": 0.11, "grad_norm": 0.873913049697876, "learning_rate": 1.9810595908156884e-05, "loss": 0.0178, "step": 1350 },
    { "epoch": 0.11, "grad_norm": 0.6479474306106567, "learning_rate": 1.9801661752881268e-05, "loss": 0.0204, "step": 1355 },
    { "epoch": 0.11, "grad_norm": 1.0146560668945312, "learning_rate": 1.979272759760565e-05, "loss": 0.0312, "step": 1360 },
    { "epoch": 0.11, "grad_norm": 0.7326614856719971, "learning_rate": 1.978379344233003e-05, "loss": 0.0444, "step": 1365 },
    { "epoch": 0.11, "grad_norm": 0.8839195370674133, "learning_rate": 1.977485928705441e-05, "loss": 0.0301, "step": 1370 },
    { "epoch": 0.11, "grad_norm": 0.485784113407135, "learning_rate": 1.9765925131778792e-05, "loss": 0.0349, "step": 1375 },
    { "epoch": 0.11, "grad_norm": 0.7656601071357727, "learning_rate": 1.9756990976503173e-05, "loss": 0.0282, "step": 1380 },
    { "epoch": 0.11, "grad_norm": 0.6562867164611816, "learning_rate": 1.9748056821227554e-05, "loss": 0.033, "step": 1385 },
    { "epoch": 0.11, "grad_norm": 1.971908688545227, "learning_rate": 1.9739122665951935e-05, "loss": 0.0358, "step": 1390 },
    { "epoch": 0.11, "grad_norm": 1.4427505731582642, "learning_rate": 1.9730188510676316e-05, "loss": 0.029, "step": 1395 },
    { "epoch": 0.11, "grad_norm": 1.177832841873169, "learning_rate": 1.9721254355400697e-05, "loss": 0.0296, "step": 1400 },
    { "epoch": 0.11, "grad_norm": 0.5108907222747803, "learning_rate": 1.971232020012508e-05, "loss": 0.0239, "step": 1405 },
    { "epoch": 0.11, "grad_norm": 1.7967244386672974, "learning_rate": 1.9703386044849463e-05, "loss": 0.0336, "step": 1410 },
    { "epoch": 0.11, "grad_norm": 1.008859395980835, "learning_rate": 1.9694451889573844e-05, "loss": 0.0197, "step": 1415 },
    { "epoch": 0.11, "grad_norm": 1.1244670152664185, "learning_rate": 1.968551773429822e-05, "loss": 0.0407, "step": 1420 },
    { "epoch": 0.11, "grad_norm": 1.6256637573242188, "learning_rate": 1.9676583579022606e-05, "loss": 0.0464, "step": 1425 },
    { "epoch": 0.11, "grad_norm": 0.6180126667022705, "learning_rate": 1.9667649423746987e-05, "loss": 0.0185, "step": 1430 },
    { "epoch": 0.12, "grad_norm": 0.8708995580673218, "learning_rate": 1.9658715268471368e-05, "loss": 0.0206, "step": 1435 },
    { "epoch": 0.12, "grad_norm": 1.933128833770752, "learning_rate": 1.964978111319575e-05, "loss": 0.0427, "step": 1440 },
    { "epoch": 0.12, "grad_norm": 0.588142991065979, "learning_rate": 1.964084695792013e-05, "loss": 0.0242, "step": 1445 },
    { "epoch": 0.12, "grad_norm": 0.38248518109321594, "learning_rate": 1.963191280264451e-05, "loss": 0.0252, "step": 1450 },
    { "epoch": 0.12, "grad_norm": 0.8211073279380798, "learning_rate": 1.9622978647368892e-05, "loss": 0.0159, "step": 1455 },
    { "epoch": 0.12, "grad_norm": 0.4323325455188751, "learning_rate": 1.9614044492093273e-05, "loss": 0.0383, "step": 1460 },
    { "epoch": 0.12, "grad_norm": 1.7777843475341797, "learning_rate": 1.9605110336817658e-05, "loss": 0.0399, "step": 1465 },
    { "epoch": 0.12, "grad_norm": 0.7342023253440857, "learning_rate": 1.9596176181542035e-05, "loss": 0.0232, "step": 1470 },
    { "epoch": 0.12, "grad_norm": 0.43286553025245667, "learning_rate": 1.9587242026266416e-05, "loss": 0.03, "step": 1475 },
    { "epoch": 0.12, "grad_norm": 0.4914606809616089, "learning_rate": 1.95783078709908e-05, "loss": 0.0253, "step": 1480 },
    { "epoch": 0.12, "grad_norm": 1.6109514236450195, "learning_rate": 1.9569373715715182e-05, "loss": 0.0244, "step": 1485 },
    { "epoch": 0.12, "grad_norm": 1.099238395690918, "learning_rate": 1.9560439560439563e-05, "loss": 0.0219, "step": 1490 },
    { "epoch": 0.12, "grad_norm": 0.672016978263855, "learning_rate": 1.9551505405163944e-05, "loss": 0.0156, "step": 1495 },
    { "epoch": 0.12, "grad_norm": 0.9910028576850891, "learning_rate": 1.9542571249888325e-05, "loss": 0.025, "step": 1500 },
    {
      "epoch": 0.12,
      "eval_loss": 0.029246093705296516,
      "eval_pearson_cosine": 0.8662779121023975,
      "eval_pearson_dot": 0.8609972822698294,
      "eval_pearson_euclidean": 0.8525960773878268,
      "eval_pearson_manhattan": 0.8525524127391249,
      "eval_pearson_max": 0.8662779121023975,
      "eval_runtime": 425.5831,
      "eval_samples_per_second": 1.175,
      "eval_spearman_cosine": 0.8936378145512582,
      "eval_spearman_dot": 0.8876585266341065,
      "eval_spearman_euclidean": 0.891318445273781,
      "eval_spearman_manhattan": 0.889811431245725,
      "eval_spearman_max": 0.8936378145512582,
      "eval_steps_per_second": 1.175,
      "step": 1500
    },
    { "epoch": 0.12, "grad_norm": 0.7004162073135376, "learning_rate": 1.9533637094612706e-05, "loss": 0.0248, "step": 1505 },
    { "epoch": 0.12, "grad_norm": 1.0799355506896973, "learning_rate": 1.9524702939337087e-05, "loss": 0.0268, "step": 1510 },
    { "epoch": 0.12, "grad_norm": 0.8577656745910645, "learning_rate": 1.9515768784061468e-05, "loss": 0.0363, "step": 1515 },
    { "epoch": 0.12, "grad_norm": 1.811858892440796, "learning_rate": 1.950683462878585e-05, "loss": 0.0408, "step": 1520 },
    { "epoch": 0.12, "grad_norm": 1.2648321390151978, "learning_rate": 1.949790047351023e-05, "loss": 0.0273, "step": 1525 },
    { "epoch": 0.12, "grad_norm": 0.6332646012306213, "learning_rate": 1.948896631823461e-05, "loss": 0.0234, "step": 1530 },
    { "epoch": 0.12, "grad_norm": 1.1494219303131104, "learning_rate": 1.9480032162958992e-05, "loss": 0.0396, "step": 1535 },
    { "epoch": 0.12, "grad_norm": 1.5537444353103638, "learning_rate": 1.9471098007683377e-05, "loss": 0.0392, "step": 1540 },
    { "epoch": 0.12, "grad_norm": 1.7606208324432373, "learning_rate": 1.9462163852407754e-05, "loss": 0.0468, "step": 1545 },
    { "epoch": 0.12, "grad_norm": 0.3266965448856354, "learning_rate": 1.945322969713214e-05, "loss": 0.0164, "step": 1550 },
    { "epoch": 0.13, "grad_norm": 0.4910846948623657, "learning_rate": 1.944429554185652e-05, "loss": 0.0181, "step": 1555 },
    { "epoch": 0.13, "grad_norm": 0.621263861656189, "learning_rate": 1.94353613865809e-05, "loss": 0.021, "step": 1560 },
    { "epoch": 0.13, "grad_norm": 0.8153178691864014, "learning_rate": 1.9426427231305282e-05, "loss": 0.019, "step": 1565 },
    { "epoch": 0.13, "grad_norm": 0.9617846012115479, "learning_rate": 1.9417493076029663e-05, "loss": 0.0186, "step": 1570 },
    { "epoch": 0.13, "grad_norm": 0.6482746005058289, "learning_rate": 1.9408558920754044e-05, "loss": 0.0226, "step": 1575 },
    { "epoch": 0.13, "grad_norm": 0.6985549926757812, "learning_rate": 1.9399624765478425e-05, "loss": 0.0267, "step": 1580 },
    { "epoch": 0.13, "grad_norm": 0.9519606828689575, "learning_rate": 1.9390690610202806e-05, "loss": 0.0187, "step": 1585 },
    { "epoch": 0.13, "grad_norm": 0.9981301426887512, "learning_rate": 1.9381756454927187e-05, "loss": 0.0195, "step": 1590 },
    { "epoch": 0.13, "grad_norm": 0.5453444719314575, "learning_rate": 1.937282229965157e-05, "loss": 0.0185, "step": 1595 },
    { "epoch": 0.13, "grad_norm": 1.3481849431991577, "learning_rate": 1.936388814437595e-05, "loss": 0.0439, "step": 1600 },
    { "epoch": 0.13, "grad_norm": 0.49602463841438293, "learning_rate": 1.935495398910033e-05, "loss": 0.0158, "step": 1605 },
    { "epoch": 0.13, "grad_norm": 0.477524071931839, "learning_rate": 1.9346019833824715e-05, "loss": 0.0276, "step": 1610 },
    { "epoch": 0.13, "grad_norm": 0.8374057412147522, "learning_rate": 1.9337085678549096e-05, "loss": 0.0259, "step": 1615 },
    { "epoch": 0.13, "grad_norm": 0.9524512887001038, "learning_rate": 1.9328151523273477e-05, "loss": 0.0403, "step": 1620 },
    { "epoch": 0.13, "grad_norm": 0.6074572205543518, "learning_rate": 1.9319217367997858e-05, "loss": 0.0217, "step": 1625 },
    { "epoch": 0.13, "grad_norm": 0.7999153137207031, "learning_rate": 1.931028321272224e-05, "loss": 0.0234, "step": 1630 },
    { "epoch": 0.13, "grad_norm": 0.9614180326461792, "learning_rate": 1.930134905744662e-05, "loss": 0.0231, "step": 1635 },
    { "epoch": 0.13, "grad_norm": 1.1887145042419434, "learning_rate": 1.9292414902171e-05, "loss": 0.0414, "step": 1640 },
    { "epoch": 0.13, "grad_norm": 0.8795381784439087, "learning_rate": 1.9283480746895382e-05, "loss": 0.0208, "step": 1645 },
    { "epoch": 0.13, "grad_norm": 1.3685334920883179, "learning_rate": 1.9274546591619766e-05, "loss": 0.054, "step": 1650 },
    { "epoch": 0.13, "grad_norm": 1.7518516778945923, "learning_rate": 1.9265612436344144e-05, "loss": 0.0274, "step": 1655 },
    { "epoch": 0.13, "grad_norm": 1.029801607131958, "learning_rate": 1.9256678281068525e-05, "loss": 0.0174, "step": 1660 },
    { "epoch": 0.13, "grad_norm": 1.7995644807815552, "learning_rate": 1.924774412579291e-05, "loss": 0.037, "step": 1665 },
    { "epoch": 0.13, "grad_norm": 1.5098012685775757, "learning_rate": 1.923880997051729e-05, "loss": 0.0216, "step": 1670 },
    { "epoch": 0.13, "grad_norm": 0.5958812832832336, "learning_rate": 1.9229875815241668e-05, "loss": 0.0142, "step": 1675 },
    { "epoch": 0.14, "grad_norm": 0.7016432285308838, "learning_rate": 1.9220941659966053e-05, "loss": 0.0416, "step": 1680 },
    { "epoch": 0.14, "grad_norm": 0.5232900977134705, "learning_rate": 1.9212007504690434e-05, "loss": 0.0264, "step": 1685 },
    { "epoch": 0.14, "grad_norm": 0.4362630248069763, "learning_rate": 1.9203073349414815e-05, "loss": 0.022, "step": 1690 },
    { "epoch": 0.14, "grad_norm": 1.451072335243225, "learning_rate": 1.9194139194139196e-05, "loss": 0.0262, "step": 1695 },
    { "epoch": 0.14, "grad_norm": 0.5889037251472473, "learning_rate": 1.9185205038863577e-05, "loss": 0.0267, "step": 1700 },
    { "epoch": 0.14, "grad_norm": 0.49965912103652954, "learning_rate": 1.9176270883587958e-05, "loss": 0.0159, "step": 1705 },
    { "epoch": 0.14, "grad_norm": 1.920385479927063, "learning_rate": 1.916733672831234e-05, "loss": 0.0357, "step": 1710 },
    { "epoch": 0.14, "grad_norm": 0.5249290466308594, "learning_rate": 1.915840257303672e-05, "loss": 0.0213, "step": 1715 },
    { "epoch": 0.14, "grad_norm": 0.5141476988792419, "learning_rate": 1.91494684177611e-05, "loss": 0.0196, "step": 1720 },
    { "epoch": 0.14, "grad_norm": 0.5003548264503479, "learning_rate": 1.9140534262485485e-05, "loss": 0.0157, "step": 1725 },
    { "epoch": 0.14, "grad_norm": 0.4535447657108307, "learning_rate": 1.9131600107209863e-05, "loss": 0.019, "step": 1730 },
    { "epoch": 0.14, "grad_norm": 0.4489072263240814, "learning_rate": 1.9122665951934247e-05, "loss": 0.0168, "step": 1735 },
    { "epoch": 0.14, "grad_norm": 0.5539095997810364, "learning_rate": 1.911373179665863e-05, "loss": 0.02, "step": 1740 },
    { "epoch": 0.14, "grad_norm": 1.3125584125518799, "learning_rate": 1.910479764138301e-05, "loss": 0.0202, "step": 1745 },
    { "epoch": 0.14, "grad_norm": 0.6378400921821594, "learning_rate": 1.909586348610739e-05, "loss": 0.0289, "step": 1750 },
    { "epoch": 0.14, "grad_norm": 1.118848443031311, "learning_rate": 1.908692933083177e-05, "loss": 0.0242, "step": 1755 },
    { "epoch": 0.14, "grad_norm": 0.8065999746322632, "learning_rate": 1.9077995175556153e-05, "loss": 0.0277, "step": 1760 },
    { "epoch": 0.14, "grad_norm": 0.9087148904800415, "learning_rate": 1.9069061020280534e-05, "loss": 0.0239, "step": 1765 },
    { "epoch": 0.14, "grad_norm": 0.6891593933105469, "learning_rate": 1.9060126865004915e-05, "loss": 0.0217, "step": 1770 },
    { "epoch": 0.14, "grad_norm": 0.7434559464454651, "learning_rate": 1.9051192709729296e-05, "loss": 0.0145, "step": 1775 },
    { "epoch": 0.14, "grad_norm": 0.585472047328949, "learning_rate": 1.904225855445368e-05, "loss": 0.0287, "step": 1780 },
    { "epoch": 0.14, "grad_norm": 1.561167597770691, "learning_rate": 1.9033324399178058e-05, "loss": 0.0206, "step": 1785 },
    { "epoch": 0.14, "grad_norm": 0.9487882852554321, "learning_rate": 1.902439024390244e-05, "loss": 0.0212, "step": 1790 },
    { "epoch": 0.14, "grad_norm": 0.7181324362754822, "learning_rate": 1.9015456088626823e-05, "loss": 0.0241, "step": 1795 },
    { "epoch": 0.14, "grad_norm": 0.5083106160163879, "learning_rate": 1.9006521933351204e-05, "loss": 0.0159, "step": 1800 },
    {
      "epoch": 0.14,
      "eval_loss": 0.030221089720726013,
      "eval_pearson_cosine": 0.8857723004603547,
      "eval_pearson_dot": 0.874834502431298,
      "eval_pearson_euclidean": 0.8749650474368084,
      "eval_pearson_manhattan": 0.8746243487782971,
      "eval_pearson_max": 0.8857723004603547,
      "eval_runtime": 426.1522,
      "eval_samples_per_second": 1.173,
      "eval_spearman_cosine": 0.9000108960435841,
      "eval_spearman_dot": 0.8859179436717747,
      "eval_spearman_euclidean": 0.9029959319837279,
      "eval_spearman_manhattan": 0.9019226476905907,
      "eval_spearman_max": 0.9029959319837279,
      "eval_steps_per_second": 1.173,
      "step": 1800
    },
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.43535247445106506, |
|
"learning_rate": 1.8997587778075582e-05, |
|
"loss": 0.0153, |
|
"step": 1805 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 2.1544151306152344, |
|
"learning_rate": 1.8988653622799966e-05, |
|
"loss": 0.0241, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.9339686632156372, |
|
"learning_rate": 1.8979719467524347e-05, |
|
"loss": 0.0439, |
|
"step": 1815 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.8600350618362427, |
|
"learning_rate": 1.897078531224873e-05, |
|
"loss": 0.0268, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.7343426942825317, |
|
"learning_rate": 1.896185115697311e-05, |
|
"loss": 0.0134, |
|
"step": 1825 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.237486481666565, |
|
"learning_rate": 1.895291700169749e-05, |
|
"loss": 0.0387, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.8042169213294983, |
|
"learning_rate": 1.8943982846421875e-05, |
|
"loss": 0.0218, |
|
"step": 1835 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.5267782211303711, |
|
"learning_rate": 1.8935048691146253e-05, |
|
"loss": 0.0277, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.7091891169548035, |
|
"learning_rate": 1.8926114535870634e-05, |
|
"loss": 0.0196, |
|
"step": 1845 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.3862394094467163, |
|
"learning_rate": 1.8917180380595018e-05, |
|
"loss": 0.0173, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.8204643726348877, |
|
"learning_rate": 1.89082462253194e-05, |
|
"loss": 0.0174, |
|
"step": 1855 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.6407317519187927, |
|
"learning_rate": 1.8899312070043777e-05, |
|
"loss": 0.0152, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.6078154444694519, |
|
"learning_rate": 1.889037791476816e-05, |
|
"loss": 0.0287, |
|
"step": 1865 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.8564119338989258, |
|
"learning_rate": 1.8881443759492542e-05, |
|
"loss": 0.0185, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.7499640583992004, |
|
"learning_rate": 1.8872509604216923e-05, |
|
"loss": 0.04, |
|
"step": 1875 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.5337907671928406, |
|
"learning_rate": 1.8863575448941304e-05, |
|
"loss": 0.0157, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.525449275970459, |
|
"learning_rate": 1.8854641293665685e-05, |
|
"loss": 0.0252, |
|
"step": 1885 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.069151759147644, |
|
"learning_rate": 1.8845707138390066e-05, |
|
"loss": 0.0261, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.5704050660133362, |
|
"learning_rate": 1.8836772983114447e-05, |
|
"loss": 0.0228, |
|
"step": 1895 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.8183658719062805, |
|
"learning_rate": 1.882783882783883e-05, |
|
"loss": 0.0293, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.5661863088607788, |
|
"learning_rate": 1.881890467256321e-05, |
|
"loss": 0.0214, |
|
"step": 1905 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.24864475429058075, |
|
"learning_rate": 1.8809970517287594e-05, |
|
"loss": 0.0169, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.622249960899353, |
|
"learning_rate": 1.880103636201197e-05, |
|
"loss": 0.0267, |
|
"step": 1915 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 1.6661076545715332, |
|
"learning_rate": 1.8792102206736356e-05, |
|
"loss": 0.0287, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 0.15, |
|
"grad_norm": 0.737390398979187, |
|
"learning_rate": 1.8783168051460737e-05, |
|
"loss": 0.0179, |
|
"step": 1925 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.6081177592277527, |
|
"learning_rate": 1.8774233896185118e-05, |
|
"loss": 0.0194, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.8305076360702515, |
|
"learning_rate": 1.87652997409095e-05, |
|
"loss": 0.0333, |
|
"step": 1935 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.7024769186973572, |
|
"learning_rate": 1.875636558563388e-05, |
|
"loss": 0.0272, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.7568246722221375, |
|
"learning_rate": 1.874743143035826e-05, |
|
"loss": 0.0182, |
|
"step": 1945 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.3228060007095337, |
|
"learning_rate": 1.8738497275082642e-05, |
|
"loss": 0.0223, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.272178053855896, |
|
"learning_rate": 1.8729563119807023e-05, |
|
"loss": 0.0299, |
|
"step": 1955 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.30096063017845154, |
|
"learning_rate": 1.8720628964531404e-05, |
|
"loss": 0.017, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.5879131555557251, |
|
"learning_rate": 1.871169480925579e-05, |
|
"loss": 0.0265, |
|
"step": 1965 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.6623579263687134, |
|
"learning_rate": 1.8702760653980166e-05, |
|
"loss": 0.024, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.7402109503746033, |
|
"learning_rate": 1.8693826498704547e-05, |
|
"loss": 0.0248, |
|
"step": 1975 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.46998223662376404, |
|
"learning_rate": 1.8684892343428932e-05, |
|
"loss": 0.0367, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.8414738178253174, |
|
"learning_rate": 1.8675958188153313e-05, |
|
"loss": 0.0352, |
|
"step": 1985 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.5672045350074768, |
|
"learning_rate": 1.866702403287769e-05, |
|
"loss": 0.0286, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.4115860164165497, |
|
"learning_rate": 1.8658089877602075e-05, |
|
"loss": 0.0235, |
|
"step": 1995 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.6932913064956665, |
|
"learning_rate": 1.8649155722326456e-05, |
|
"loss": 0.0128, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.6872785687446594, |
|
"learning_rate": 1.8640221567050837e-05, |
|
"loss": 0.0205, |
|
"step": 2005 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.590305745601654, |
|
"learning_rate": 1.8631287411775218e-05, |
|
"loss": 0.0207, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.5067042708396912, |
|
"learning_rate": 1.86223532564996e-05, |
|
"loss": 0.038, |
|
"step": 2015 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.46255382895469666, |
|
"learning_rate": 1.861341910122398e-05, |
|
"loss": 0.0237, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.4533158540725708, |
|
"learning_rate": 1.860448494594836e-05, |
|
"loss": 0.0197, |
|
"step": 2025 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.919363796710968, |
|
"learning_rate": 1.8595550790672742e-05, |
|
"loss": 0.023, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.42132195830345154, |
|
"learning_rate": 1.8586616635397127e-05, |
|
"loss": 0.0119, |
|
"step": 2035 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.9792714715003967, |
|
"learning_rate": 1.8577682480121508e-05, |
|
"loss": 0.022, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 1.0622044801712036, |
|
"learning_rate": 1.8568748324845885e-05, |
|
"loss": 0.0191, |
|
"step": 2045 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"grad_norm": 0.3776015341281891, |
|
"learning_rate": 1.855981416957027e-05, |
|
"loss": 0.0173, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.7663435935974121, |
|
"learning_rate": 1.855088001429465e-05, |
|
"loss": 0.0257, |
|
"step": 2055 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.5264787673950195, |
|
"learning_rate": 1.8541945859019032e-05, |
|
"loss": 0.023, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.0220805406570435, |
|
"learning_rate": 1.8533011703743413e-05, |
|
"loss": 0.0215, |
|
"step": 2065 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.47214239835739136, |
|
"learning_rate": 1.8524077548467794e-05, |
|
"loss": 0.0368, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.40313026309013367, |
|
"learning_rate": 1.8515143393192175e-05, |
|
"loss": 0.0199, |
|
"step": 2075 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.7139260768890381, |
|
"learning_rate": 1.8506209237916556e-05, |
|
"loss": 0.0182, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.0911624431610107, |
|
"learning_rate": 1.8497275082640937e-05, |
|
"loss": 0.0215, |
|
"step": 2085 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.5224277973175049, |
|
"learning_rate": 1.8488340927365318e-05, |
|
"loss": 0.0203, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.6372708678245544, |
|
"learning_rate": 1.84794067720897e-05, |
|
"loss": 0.0363, |
|
"step": 2095 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.0599619150161743, |
|
"learning_rate": 1.847047261681408e-05, |
|
"loss": 0.0343, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"eval_loss": 0.023791618645191193, |
|
"eval_pearson_cosine": 0.8906459396898687, |
|
"eval_pearson_dot": 0.8866654230263398, |
|
"eval_pearson_euclidean": 0.8789258913375184, |
|
"eval_pearson_manhattan": 0.8774504409979331, |
|
"eval_pearson_max": 0.8906459396898687, |
|
"eval_runtime": 425.9801, |
|
"eval_samples_per_second": 1.174, |
|
"eval_spearman_cosine": 0.9069881079524318, |
|
"eval_spearman_dot": 0.8997297109188437, |
|
"eval_spearman_euclidean": 0.9081995367981472, |
|
"eval_spearman_manhattan": 0.90537923751695, |
|
"eval_spearman_max": 0.9081995367981472, |
|
"eval_steps_per_second": 1.174, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.0371475219726562, |
|
"learning_rate": 1.8461538461538465e-05, |
|
"loss": 0.0205, |
|
"step": 2105 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.7635193467140198, |
|
"learning_rate": 1.8452604306262846e-05, |
|
"loss": 0.0236, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.4593610167503357, |
|
"learning_rate": 1.8443670150987227e-05, |
|
"loss": 0.0223, |
|
"step": 2115 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.9037809371948242, |
|
"learning_rate": 1.8434735995711608e-05, |
|
"loss": 0.0348, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.4961464405059814, |
|
"learning_rate": 1.842580184043599e-05, |
|
"loss": 0.0278, |
|
"step": 2125 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.0911996364593506, |
|
"learning_rate": 1.841686768516037e-05, |
|
"loss": 0.0124, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.8265781998634338, |
|
"learning_rate": 1.840793352988475e-05, |
|
"loss": 0.0336, |
|
"step": 2135 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.2838935852050781, |
|
"learning_rate": 1.8398999374609132e-05, |
|
"loss": 0.0271, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.8582805395126343, |
|
"learning_rate": 1.8390065219333513e-05, |
|
"loss": 0.0466, |
|
"step": 2145 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 2.2625269889831543, |
|
"learning_rate": 1.8381131064057894e-05, |
|
"loss": 0.0338, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.7238175272941589, |
|
"learning_rate": 1.8372196908782275e-05, |
|
"loss": 0.0149, |
|
"step": 2155 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.4642048478126526, |
|
"learning_rate": 1.8363262753506656e-05, |
|
"loss": 0.017, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.6727991700172424, |
|
"learning_rate": 1.835432859823104e-05, |
|
"loss": 0.0302, |
|
"step": 2165 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 1.1912602186203003, |
|
"learning_rate": 1.834539444295542e-05, |
|
"loss": 0.0244, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 0.17, |
|
"grad_norm": 0.8099949955940247, |
|
"learning_rate": 1.83364602876798e-05, |
|
"loss": 0.013, |
|
"step": 2175 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.6302483081817627, |
|
"learning_rate": 1.8327526132404184e-05, |
|
"loss": 0.0245, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.44703614711761475, |
|
"learning_rate": 1.8318591977128565e-05, |
|
"loss": 0.0179, |
|
"step": 2185 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.4171731472015381, |
|
"learning_rate": 1.8309657821852946e-05, |
|
"loss": 0.0155, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.9061782360076904, |
|
"learning_rate": 1.8300723666577327e-05, |
|
"loss": 0.0222, |
|
"step": 2195 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.616051197052002, |
|
"learning_rate": 1.8291789511301708e-05, |
|
"loss": 0.0167, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.4685133695602417, |
|
"learning_rate": 1.828285535602609e-05, |
|
"loss": 0.0259, |
|
"step": 2205 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.9447664022445679, |
|
"learning_rate": 1.827392120075047e-05, |
|
"loss": 0.0351, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0720714330673218, |
|
"learning_rate": 1.826498704547485e-05, |
|
"loss": 0.0164, |
|
"step": 2215 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.7534575462341309, |
|
"learning_rate": 1.8256052890199235e-05, |
|
"loss": 0.0161, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.8038508892059326, |
|
"learning_rate": 1.8247118734923613e-05, |
|
"loss": 0.0205, |
|
"step": 2225 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.0298678874969482, |
|
"learning_rate": 1.8238184579647994e-05, |
|
"loss": 0.019, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3235474824905396, |
|
"learning_rate": 1.822925042437238e-05, |
|
"loss": 0.0265, |
|
"step": 2235 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.6873222589492798, |
|
"learning_rate": 1.822031626909676e-05, |
|
"loss": 0.0258, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.4005903899669647, |
|
"learning_rate": 1.821138211382114e-05, |
|
"loss": 0.0185, |
|
"step": 2245 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.47836175560951233, |
|
"learning_rate": 1.820244795854552e-05, |
|
"loss": 0.0229, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.653409481048584, |
|
"learning_rate": 1.8193513803269903e-05, |
|
"loss": 0.0155, |
|
"step": 2255 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.2501707077026367, |
|
"learning_rate": 1.8184579647994284e-05, |
|
"loss": 0.0253, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.5146343111991882, |
|
"learning_rate": 1.8175645492718665e-05, |
|
"loss": 0.0355, |
|
"step": 2265 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.7069603204727173, |
|
"learning_rate": 1.8166711337443046e-05, |
|
"loss": 0.0245, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.3391192853450775, |
|
"learning_rate": 1.8157777182167427e-05, |
|
"loss": 0.0125, |
|
"step": 2275 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 1.3187044858932495, |
|
"learning_rate": 1.8148843026891808e-05, |
|
"loss": 0.0314, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.3739178478717804, |
|
"learning_rate": 1.813990887161619e-05, |
|
"loss": 0.0254, |
|
"step": 2285 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.9314795732498169, |
|
"learning_rate": 1.8130974716340573e-05, |
|
"loss": 0.0264, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.9376474022865295, |
|
"learning_rate": 1.8122040561064954e-05, |
|
"loss": 0.0172, |
|
"step": 2295 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"grad_norm": 0.9479859471321106, |
|
"learning_rate": 1.8113106405789332e-05, |
|
"loss": 0.0321, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.8741356730461121, |
|
"learning_rate": 1.8104172250513716e-05, |
|
"loss": 0.0216, |
|
"step": 2305 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.7016437649726868, |
|
"learning_rate": 1.8095238095238097e-05, |
|
"loss": 0.0181, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.6568303108215332, |
|
"learning_rate": 1.808630393996248e-05, |
|
"loss": 0.0374, |
|
"step": 2315 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.4468625783920288, |
|
"learning_rate": 1.807736978468686e-05, |
|
"loss": 0.021, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.8952460885047913, |
|
"learning_rate": 1.806843562941124e-05, |
|
"loss": 0.0215, |
|
"step": 2325 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.5987122654914856, |
|
"learning_rate": 1.805950147413562e-05, |
|
"loss": 0.0182, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.8986082077026367, |
|
"learning_rate": 1.8050567318860003e-05, |
|
"loss": 0.0224, |
|
"step": 2335 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.4435628652572632, |
|
"learning_rate": 1.8041633163584384e-05, |
|
"loss": 0.0246, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.5219196081161499, |
|
"learning_rate": 1.8032699008308765e-05, |
|
"loss": 0.0222, |
|
"step": 2345 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.8987548351287842, |
|
"learning_rate": 1.802376485303315e-05, |
|
"loss": 0.0416, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.3122328817844391, |
|
"learning_rate": 1.8014830697757527e-05, |
|
"loss": 0.0162, |
|
"step": 2355 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.45380455255508423, |
|
"learning_rate": 1.8005896542481908e-05, |
|
"loss": 0.0348, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.7789851427078247, |
|
"learning_rate": 1.7996962387206292e-05, |
|
"loss": 0.0207, |
|
"step": 2365 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.7289481163024902, |
|
"learning_rate": 1.7988028231930673e-05, |
|
"loss": 0.0154, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.5287164449691772, |
|
"learning_rate": 1.7979094076655054e-05, |
|
"loss": 0.0306, |
|
"step": 2375 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 1.236247181892395, |
|
"learning_rate": 1.7970159921379435e-05, |
|
"loss": 0.0346, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.6785498857498169, |
|
"learning_rate": 1.7961225766103816e-05, |
|
"loss": 0.0191, |
|
"step": 2385 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.7532624006271362, |
|
"learning_rate": 1.7952291610828197e-05, |
|
"loss": 0.0247, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.8474083542823792, |
|
"learning_rate": 1.794335745555258e-05, |
|
"loss": 0.028, |
|
"step": 2395 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.8650068640708923, |
|
"learning_rate": 1.793442330027696e-05, |
|
"loss": 0.0234, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"eval_loss": 0.02186727523803711, |
|
"eval_pearson_cosine": 0.9016899300379176, |
|
"eval_pearson_dot": 0.8979110419976675, |
|
"eval_pearson_euclidean": 0.892515905459987, |
|
"eval_pearson_manhattan": 0.8905747154959989, |
|
"eval_pearson_max": 0.9016899300379176, |
|
"eval_runtime": 426.0444, |
|
"eval_samples_per_second": 1.174, |
|
"eval_spearman_cosine": 0.9066876267505071, |
|
"eval_spearman_dot": 0.9012341329365315, |
|
"eval_spearman_euclidean": 0.9076169104676419, |
|
"eval_spearman_manhattan": 0.9052689330757322, |
|
"eval_spearman_max": 0.9076169104676419, |
|
"eval_steps_per_second": 1.174, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.7486416101455688, |
|
"learning_rate": 1.7925489145001344e-05, |
|
"loss": 0.0158, |
|
"step": 2405 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.20279864966869354, |
|
"learning_rate": 1.791655498972572e-05, |
|
"loss": 0.0141, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.4669971466064453, |
|
"learning_rate": 1.7907620834450103e-05, |
|
"loss": 0.019, |
|
"step": 2415 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.6175631880760193, |
|
"learning_rate": 1.7898686679174487e-05, |
|
"loss": 0.0148, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 0.19, |
|
"grad_norm": 0.5767547488212585, |
|
"learning_rate": 1.7889752523898868e-05, |
|
"loss": 0.0252, |
|
"step": 2425 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.5371144413948059, |
|
"learning_rate": 1.7880818368623246e-05, |
|
"loss": 0.0121, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.6145671606063843, |
|
"learning_rate": 1.787188421334763e-05, |
|
"loss": 0.0195, |
|
"step": 2435 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.262970209121704, |
|
"learning_rate": 1.786295005807201e-05, |
|
"loss": 0.0298, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.3572503328323364, |
|
"learning_rate": 1.7854015902796392e-05, |
|
"loss": 0.0152, |
|
"step": 2445 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.6049114465713501, |
|
"learning_rate": 1.7845081747520773e-05, |
|
"loss": 0.0153, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.40164369344711304, |
|
"learning_rate": 1.7836147592245154e-05, |
|
"loss": 0.0171, |
|
"step": 2455 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.33443671464920044, |
|
"learning_rate": 1.7827213436969535e-05, |
|
"loss": 0.0174, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.37140563130378723, |
|
"learning_rate": 1.7818279281693916e-05, |
|
"loss": 0.0147, |
|
"step": 2465 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.44236427545547485, |
|
"learning_rate": 1.7809345126418297e-05, |
|
"loss": 0.019, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.1675889492034912, |
|
"learning_rate": 1.7800410971142682e-05, |
|
"loss": 0.0196, |
|
"step": 2475 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.7172742486000061, |
|
"learning_rate": 1.7791476815867063e-05, |
|
"loss": 0.0262, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.9804342985153198, |
|
"learning_rate": 1.778254266059144e-05, |
|
"loss": 0.0337, |
|
"step": 2485 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 1.989030361175537, |
|
"learning_rate": 1.7773608505315825e-05, |
|
"loss": 0.0238, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.754467785358429, |
|
"learning_rate": 1.7764674350040206e-05, |
|
"loss": 0.0224, |
|
"step": 2495 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.9083816409111023, |
|
"learning_rate": 1.7755740194764587e-05, |
|
"loss": 0.0165, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.5486024022102356, |
|
"learning_rate": 1.7746806039488968e-05, |
|
"loss": 0.0173, |
|
"step": 2505 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.4576135575771332, |
|
"learning_rate": 1.773787188421335e-05, |
|
"loss": 0.0177, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.9265009760856628, |
|
"learning_rate": 1.772893772893773e-05, |
|
"loss": 0.0216, |
|
"step": 2515 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.506006121635437, |
|
"learning_rate": 1.772000357366211e-05, |
|
"loss": 0.0135, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.2627745270729065, |
|
"learning_rate": 1.7711069418386492e-05, |
|
"loss": 0.0244, |
|
"step": 2525 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.7158712148666382, |
|
"learning_rate": 1.7702135263110873e-05, |
|
"loss": 0.0238, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.8237607479095459, |
|
"learning_rate": 1.7693201107835258e-05, |
|
"loss": 0.0272, |
|
"step": 2535 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.266671359539032, |
|
"learning_rate": 1.7684266952559635e-05, |
|
"loss": 0.0202, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"grad_norm": 0.6004403829574585, |
|
"learning_rate": 1.7675332797284016e-05, |
|
"loss": 0.0414, |
|
"step": 2545 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.9549585580825806, |
|
"learning_rate": 1.76663986420084e-05, |
|
"loss": 0.0209, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.9587855935096741, |
|
"learning_rate": 1.7657464486732782e-05, |
|
"loss": 0.0239, |
|
"step": 2555 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.5804222226142883, |
|
"learning_rate": 1.7648530331457163e-05, |
|
"loss": 0.0183, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.506087601184845, |
|
"learning_rate": 1.7639596176181544e-05, |
|
"loss": 0.0202, |
|
"step": 2565 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.8496732711791992, |
|
"learning_rate": 1.7630662020905925e-05, |
|
"loss": 0.0181, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.3317871391773224, |
|
"learning_rate": 1.7621727865630306e-05, |
|
"loss": 0.0218, |
|
"step": 2575 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.3938535451889038, |
|
"learning_rate": 1.7612793710354687e-05, |
|
"loss": 0.0112, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.3293476104736328, |
|
"learning_rate": 1.7603859555079068e-05, |
|
"loss": 0.0181, |
|
"step": 2585 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.28195351362228394, |
|
"learning_rate": 1.7594925399803452e-05, |
|
"loss": 0.0135, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.7966617941856384, |
|
"learning_rate": 1.758599124452783e-05, |
|
"loss": 0.0145, |
|
"step": 2595 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.5484746694564819, |
|
"learning_rate": 1.757705708925221e-05, |
|
"loss": 0.0207, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.576448380947113, |
|
"learning_rate": 1.7568122933976596e-05, |
|
"loss": 0.0127, |
|
"step": 2605 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.28207531571388245, |
|
"learning_rate": 1.7559188778700977e-05, |
|
"loss": 0.0132, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.29697030782699585, |
|
"learning_rate": 1.7550254623425354e-05, |
|
"loss": 0.0127, |
|
"step": 2615 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.122375249862671, |
|
"learning_rate": 1.754132046814974e-05, |
|
"loss": 0.0297, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.5798119902610779, |
|
"learning_rate": 1.753238631287412e-05, |
|
"loss": 0.0171, |
|
"step": 2625 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.6546443104743958, |
|
"learning_rate": 1.75234521575985e-05, |
|
"loss": 0.0436, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.8825979828834534, |
|
"learning_rate": 1.7514518002322882e-05, |
|
"loss": 0.0138, |
|
"step": 2635 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.46849188208580017, |
|
"learning_rate": 1.7505583847047263e-05, |
|
"loss": 0.0328, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 1.324487566947937, |
|
"learning_rate": 1.7496649691771644e-05, |
|
"loss": 0.0241, |
|
"step": 2645 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.5214132070541382, |
|
"learning_rate": 1.7487715536496025e-05, |
|
"loss": 0.0344, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.791459858417511, |
|
"learning_rate": 1.7478781381220406e-05, |
|
"loss": 0.0167, |
|
"step": 2655 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.888538658618927, |
|
"learning_rate": 1.746984722594479e-05, |
|
"loss": 0.0216, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.31034013628959656, |
|
"learning_rate": 1.746091307066917e-05, |
|
"loss": 0.0327, |
|
"step": 2665 |
|
}, |
|
{ |
|
"epoch": 0.21, |
|
"grad_norm": 0.4363711476325989, |
|
"learning_rate": 1.745197891539355e-05, |
|
"loss": 0.0203, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.0624545812606812, |
|
"learning_rate": 1.7443044760117934e-05, |
|
"loss": 0.0216, |
|
"step": 2675 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.4411804676055908, |
|
"learning_rate": 1.7434110604842315e-05, |
|
"loss": 0.0294, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.7424313426017761, |
|
"learning_rate": 1.7425176449566696e-05, |
|
"loss": 0.0233, |
|
"step": 2685 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.6678118705749512, |
|
"learning_rate": 1.7416242294291077e-05, |
|
"loss": 0.0129, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.43497857451438904, |
|
"learning_rate": 1.7407308139015458e-05, |
|
"loss": 0.0272, |
|
"step": 2695 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3903897702693939, |
|
"learning_rate": 1.739837398373984e-05, |
|
"loss": 0.0209, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"eval_loss": 0.027375079691410065, |
|
"eval_pearson_cosine": 0.9001051843718942, |
|
"eval_pearson_dot": 0.9006651139179915, |
|
"eval_pearson_euclidean": 0.8930651682478097, |
|
"eval_pearson_manhattan": 0.891765654050478, |
|
"eval_pearson_max": 0.9006651139179915, |
|
"eval_runtime": 426.2183, |
|
"eval_samples_per_second": 1.173, |
|
"eval_spearman_cosine": 0.9177515750063, |
|
"eval_spearman_dot": 0.9167129468517873, |
|
"eval_spearman_euclidean": 0.9167275389101556, |
|
"eval_spearman_manhattan": 0.9159309277237108, |
|
"eval_spearman_max": 0.9177515750063, |
|
"eval_steps_per_second": 1.173, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.3496129512786865, |
|
"learning_rate": 1.738943982846422e-05, |
|
"loss": 0.0259, |
|
"step": 2705 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.1606944799423218, |
|
"learning_rate": 1.73805056731886e-05, |
|
"loss": 0.0181, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5323413610458374, |
|
"learning_rate": 1.7371571517912982e-05, |
|
"loss": 0.0137, |
|
"step": 2715 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5756454467773438, |
|
"learning_rate": 1.7362637362637363e-05, |
|
"loss": 0.0181, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3727392256259918, |
|
"learning_rate": 1.7353703207361744e-05, |
|
"loss": 0.0123, |
|
"step": 2725 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.441544771194458, |
|
"learning_rate": 1.7344769052086125e-05, |
|
"loss": 0.0157, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.6806672811508179, |
|
"learning_rate": 1.733583489681051e-05, |
|
"loss": 0.0196, |
|
"step": 2735 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5537866353988647, |
|
"learning_rate": 1.732690074153489e-05, |
|
"loss": 0.0163, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5595599412918091, |
|
"learning_rate": 1.731796658625927e-05, |
|
"loss": 0.0229, |
|
"step": 2745 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.48301759362220764, |
|
"learning_rate": 1.7309032430983652e-05, |
|
"loss": 0.0197, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.2349361181259155, |
|
"learning_rate": 1.7300098275708034e-05, |
|
"loss": 0.0198, |
|
"step": 2755 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.3702278137207031, |
|
"learning_rate": 1.7291164120432415e-05, |
|
"loss": 0.0165, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.5885152220726013, |
|
"learning_rate": 1.7282229965156796e-05, |
|
"loss": 0.0126, |
|
"step": 2765 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.2845672369003296, |
|
"learning_rate": 1.7273295809881177e-05, |
|
"loss": 0.0232, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 1.0059493780136108, |
|
"learning_rate": 1.7264361654605558e-05, |
|
"loss": 0.0102, |
|
"step": 2775 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.621356189250946, |
|
"learning_rate": 1.725542749932994e-05, |
|
"loss": 0.0149, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.8701572418212891, |
|
"learning_rate": 1.724649334405432e-05, |
|
"loss": 0.0147, |
|
"step": 2785 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.35073745250701904, |
|
"learning_rate": 1.7237559188778704e-05, |
|
"loss": 0.0176, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 0.22, |
|
"grad_norm": 0.6895755529403687, |
|
"learning_rate": 1.7228625033503085e-05, |
|
"loss": 0.0212, |
|
"step": 2795 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.9424052834510803, |
|
"learning_rate": 1.7219690878227463e-05, |
|
"loss": 0.0213, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.45782536268234253, |
|
"learning_rate": 1.7210756722951847e-05, |
|
"loss": 0.0122, |
|
"step": 2805 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.9347144365310669, |
|
"learning_rate": 1.720182256767623e-05, |
|
"loss": 0.0221, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.4957163333892822, |
|
"learning_rate": 1.719288841240061e-05, |
|
"loss": 0.0154, |
|
"step": 2815 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.40040886402130127, |
|
"learning_rate": 1.718395425712499e-05, |
|
"loss": 0.018, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.4707423150539398, |
|
"learning_rate": 1.717502010184937e-05, |
|
"loss": 0.0122, |
|
"step": 2825 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.6660599708557129, |
|
"learning_rate": 1.7166085946573752e-05, |
|
"loss": 0.0103, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.5606631636619568, |
|
"learning_rate": 1.7157151791298134e-05, |
|
"loss": 0.0246, |
|
"step": 2835 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.0515074729919434, |
|
"learning_rate": 1.7148217636022515e-05, |
|
"loss": 0.0173, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.8896968364715576, |
|
"learning_rate": 1.71392834807469e-05, |
|
"loss": 0.0357, |
|
"step": 2845 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.8183419108390808, |
|
"learning_rate": 1.7130349325471277e-05, |
|
"loss": 0.0125, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.28626278042793274, |
|
"learning_rate": 1.7121415170195658e-05, |
|
"loss": 0.011, |
|
"step": 2855 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.5537832975387573, |
|
"learning_rate": 1.7112481014920042e-05, |
|
"loss": 0.0155, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.3938451111316681, |
|
"learning_rate": 1.7103546859644423e-05, |
|
"loss": 0.0163, |
|
"step": 2865 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.27609241008758545, |
|
"learning_rate": 1.7094612704368804e-05, |
|
"loss": 0.0193, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.3002891540527344, |
|
"learning_rate": 1.7085678549093185e-05, |
|
"loss": 0.0142, |
|
"step": 2875 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.8881809711456299, |
|
"learning_rate": 1.7076744393817566e-05, |
|
"loss": 0.0296, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.99460768699646, |
|
"learning_rate": 1.7067810238541947e-05, |
|
"loss": 0.0157, |
|
"step": 2885 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 1.14095938205719, |
|
"learning_rate": 1.705887608326633e-05, |
|
"loss": 0.0164, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.2840583026409149, |
|
"learning_rate": 1.704994192799071e-05, |
|
"loss": 0.0139, |
|
"step": 2895 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.3837971091270447, |
|
"learning_rate": 1.704100777271509e-05, |
|
"loss": 0.016, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.4202588200569153, |
|
"learning_rate": 1.703207361743947e-05, |
|
"loss": 0.0161, |
|
"step": 2905 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.9836485385894775, |
|
"learning_rate": 1.7023139462163853e-05, |
|
"loss": 0.0336, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.28357017040252686, |
|
"learning_rate": 1.7014205306888234e-05, |
|
"loss": 0.0151, |
|
"step": 2915 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"grad_norm": 0.3464276194572449, |
|
"learning_rate": 1.7005271151612618e-05, |
|
"loss": 0.0151, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.6251973509788513, |
|
"learning_rate": 1.6996336996336996e-05, |
|
"loss": 0.0275, |
|
"step": 2925 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 2.1420838832855225, |
|
"learning_rate": 1.698740284106138e-05, |
|
"loss": 0.0339, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.43490684032440186, |
|
"learning_rate": 1.697846868578576e-05, |
|
"loss": 0.0124, |
|
"step": 2935 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.576763391494751, |
|
"learning_rate": 1.6969534530510142e-05, |
|
"loss": 0.0162, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.8549007773399353, |
|
"learning_rate": 1.6960600375234523e-05, |
|
"loss": 0.0217, |
|
"step": 2945 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.3743274509906769, |
|
"learning_rate": 1.6951666219958904e-05, |
|
"loss": 0.0165, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7152208089828491, |
|
"learning_rate": 1.6942732064683285e-05, |
|
"loss": 0.0158, |
|
"step": 2955 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.5613319873809814, |
|
"learning_rate": 1.6933797909407666e-05, |
|
"loss": 0.0232, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.3946327269077301, |
|
"learning_rate": 1.6924863754132047e-05, |
|
"loss": 0.0156, |
|
"step": 2965 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.5846483707427979, |
|
"learning_rate": 1.691592959885643e-05, |
|
"loss": 0.0118, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.6644983291625977, |
|
"learning_rate": 1.6906995443580813e-05, |
|
"loss": 0.0156, |
|
"step": 2975 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7113365530967712, |
|
"learning_rate": 1.689806128830519e-05, |
|
"loss": 0.0131, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7063427567481995, |
|
"learning_rate": 1.688912713302957e-05, |
|
"loss": 0.0196, |
|
"step": 2985 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.8692206740379333, |
|
"learning_rate": 1.6880192977753956e-05, |
|
"loss": 0.02, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.6556200385093689, |
|
"learning_rate": 1.6871258822478337e-05, |
|
"loss": 0.0273, |
|
"step": 2995 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.9636979103088379, |
|
"learning_rate": 1.6862324667202718e-05, |
|
"loss": 0.0192, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"eval_loss": 0.0198129303753376, |
|
"eval_pearson_cosine": 0.9064648118738065, |
|
"eval_pearson_dot": 0.9033815480817863, |
|
"eval_pearson_euclidean": 0.8939116848670458, |
|
"eval_pearson_manhattan": 0.8927147647429707, |
|
"eval_pearson_max": 0.9064648118738065, |
|
"eval_runtime": 426.1029, |
|
"eval_samples_per_second": 1.173, |
|
"eval_spearman_cosine": 0.9222772251089003, |
|
"eval_spearman_dot": 0.9168591554366217, |
|
"eval_spearman_euclidean": 0.9222438169752679, |
|
"eval_spearman_manhattan": 0.9220269521078085, |
|
"eval_spearman_max": 0.9222772251089003, |
|
"eval_steps_per_second": 1.173, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.5286144614219666, |
|
"learning_rate": 1.68533905119271e-05, |
|
"loss": 0.0228, |
|
"step": 3005 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7542079091072083, |
|
"learning_rate": 1.684445635665148e-05, |
|
"loss": 0.0156, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.6064092516899109, |
|
"learning_rate": 1.683552220137586e-05, |
|
"loss": 0.0258, |
|
"step": 3015 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.7690675258636475, |
|
"learning_rate": 1.6826588046100242e-05, |
|
"loss": 0.0201, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.4456731975078583, |
|
"learning_rate": 1.6817653890824623e-05, |
|
"loss": 0.0159, |
|
"step": 3025 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.6881704330444336, |
|
"learning_rate": 1.6808719735549008e-05, |
|
"loss": 0.0191, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.515715479850769, |
|
"learning_rate": 1.6799785580273385e-05, |
|
"loss": 0.0272, |
|
"step": 3035 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 0.9520334601402283, |
|
"learning_rate": 1.6790851424997766e-05, |
|
"loss": 0.0164, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 0.24, |
|
"grad_norm": 1.4020353555679321, |
|
"learning_rate": 1.678191726972215e-05, |
|
"loss": 0.0224, |
|
"step": 3045 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.868922472000122, |
|
"learning_rate": 1.6772983114446532e-05, |
|
"loss": 0.0343, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.5014618635177612, |
|
"learning_rate": 1.676404895917091e-05, |
|
"loss": 0.0318, |
|
"step": 3055 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.820292592048645, |
|
"learning_rate": 1.6755114803895294e-05, |
|
"loss": 0.0264, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.6107310056686401, |
|
"learning_rate": 1.6746180648619675e-05, |
|
"loss": 0.0188, |
|
"step": 3065 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.3835718631744385, |
|
"learning_rate": 1.6737246493344056e-05, |
|
"loss": 0.012, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.46898066997528076, |
|
"learning_rate": 1.6728312338068437e-05, |
|
"loss": 0.0333, |
|
"step": 3075 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.6556499004364014, |
|
"learning_rate": 1.6719378182792818e-05, |
|
"loss": 0.0184, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.8477081060409546, |
|
"learning_rate": 1.67104440275172e-05, |
|
"loss": 0.0327, |
|
"step": 3085 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.6761387586593628, |
|
"learning_rate": 1.670150987224158e-05, |
|
"loss": 0.0178, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.4892427623271942, |
|
"learning_rate": 1.669257571696596e-05, |
|
"loss": 0.0266, |
|
"step": 3095 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.7155781388282776, |
|
"learning_rate": 1.6683641561690342e-05, |
|
"loss": 0.0242, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.25152644515037537, |
|
"learning_rate": 1.6674707406414727e-05, |
|
"loss": 0.0102, |
|
"step": 3105 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.8297644853591919, |
|
"learning_rate": 1.6665773251139104e-05, |
|
"loss": 0.0209, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.36103129386901855, |
|
"learning_rate": 1.665683909586349e-05, |
|
"loss": 0.0166, |
|
"step": 3115 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.8184224367141724, |
|
"learning_rate": 1.664790494058787e-05, |
|
"loss": 0.0215, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.5235499739646912, |
|
"learning_rate": 1.663897078531225e-05, |
|
"loss": 0.0223, |
|
"step": 3125 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.6001405715942383, |
|
"learning_rate": 1.6630036630036632e-05, |
|
"loss": 0.0177, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.5021240711212158, |
|
"learning_rate": 1.6621102474761013e-05, |
|
"loss": 0.0199, |
|
"step": 3135 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.2629213035106659, |
|
"learning_rate": 1.6612168319485394e-05, |
|
"loss": 0.0375, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.6333101987838745, |
|
"learning_rate": 1.6603234164209775e-05, |
|
"loss": 0.0138, |
|
"step": 3145 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.5239858627319336, |
|
"learning_rate": 1.6594300008934156e-05, |
|
"loss": 0.0211, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.5263399481773376, |
|
"learning_rate": 1.6585365853658537e-05, |
|
"loss": 0.0281, |
|
"step": 3155 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.7547926902770996, |
|
"learning_rate": 1.657643169838292e-05, |
|
"loss": 0.0325, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 1.098939061164856, |
|
"learning_rate": 1.65674975431073e-05, |
|
"loss": 0.0136, |
|
"step": 3165 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"grad_norm": 0.5231286883354187, |
|
"learning_rate": 1.655856338783168e-05, |
|
"loss": 0.0241, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.6786376237869263, |
|
"learning_rate": 1.6549629232556064e-05, |
|
"loss": 0.0264, |
|
"step": 3175 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.402637243270874, |
|
"learning_rate": 1.6540695077280446e-05, |
|
"loss": 0.0214, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.169063687324524, |
|
"learning_rate": 1.6531760922004823e-05, |
|
"loss": 0.038, |
|
"step": 3185 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.9597774147987366, |
|
"learning_rate": 1.6522826766729208e-05, |
|
"loss": 0.0289, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.5611427426338196, |
|
"learning_rate": 1.651389261145359e-05, |
|
"loss": 0.0155, |
|
"step": 3195 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.9891091585159302, |
|
"learning_rate": 1.650495845617797e-05, |
|
"loss": 0.0261, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.445168137550354, |
|
"learning_rate": 1.649602430090235e-05, |
|
"loss": 0.0192, |
|
"step": 3205 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 1.0663169622421265, |
|
"learning_rate": 1.6487090145626732e-05, |
|
"loss": 0.0252, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.9698647856712341, |
|
"learning_rate": 1.6478155990351116e-05, |
|
"loss": 0.0223, |
|
"step": 3215 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.3728877305984497, |
|
"learning_rate": 1.6469221835075494e-05, |
|
"loss": 0.0194, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.654509961605072, |
|
"learning_rate": 1.6460287679799875e-05, |
|
"loss": 0.0255, |
|
"step": 3225 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.8245360851287842, |
|
"learning_rate": 1.645135352452426e-05, |
|
"loss": 0.0151, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.6417776942253113, |
|
"learning_rate": 1.644241936924864e-05, |
|
"loss": 0.0153, |
|
"step": 3235 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.23088638484477997, |
|
"learning_rate": 1.6433485213973018e-05, |
|
"loss": 0.0258, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.4798796474933624, |
|
"learning_rate": 1.6424551058697402e-05, |
|
"loss": 0.0152, |
|
"step": 3245 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.42295366525650024, |
|
"learning_rate": 1.6415616903421783e-05, |
|
"loss": 0.0153, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.3053552210330963, |
|
"learning_rate": 1.6406682748146165e-05, |
|
"loss": 0.0098, |
|
"step": 3255 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.37020963430404663, |
|
"learning_rate": 1.6397748592870546e-05, |
|
"loss": 0.0151, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.5717061758041382, |
|
"learning_rate": 1.6388814437594927e-05, |
|
"loss": 0.0157, |
|
"step": 3265 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.5504093766212463, |
|
"learning_rate": 1.6379880282319308e-05, |
|
"loss": 0.0397, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.6604989767074585, |
|
"learning_rate": 1.637094612704369e-05, |
|
"loss": 0.0158, |
|
"step": 3275 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.7319514155387878, |
|
"learning_rate": 1.636201197176807e-05, |
|
"loss": 0.0118, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.5383831262588501, |
|
"learning_rate": 1.6353077816492454e-05, |
|
"loss": 0.0196, |
|
"step": 3285 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.9546700716018677, |
|
"learning_rate": 1.6344143661216835e-05, |
|
"loss": 0.0204, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 0.26, |
|
"grad_norm": 0.929811954498291, |
|
"learning_rate": 1.6335209505941213e-05, |
|
"loss": 0.0286, |
|
"step": 3295 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.545769453048706, |
|
"learning_rate": 1.6326275350665597e-05, |
|
"loss": 0.0178, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"eval_loss": 0.02022329904139042, |
|
"eval_pearson_cosine": 0.9200674333472122, |
|
"eval_pearson_dot": 0.9176731158869308, |
|
"eval_pearson_euclidean": 0.9061361146439217, |
|
"eval_pearson_manhattan": 0.9047424977345989, |
|
"eval_pearson_max": 0.9200674333472122, |
|
"eval_runtime": 426.1218, |
|
"eval_samples_per_second": 1.173, |
|
"eval_spearman_cosine": 0.9299045516182064, |
|
"eval_spearman_dot": 0.9254067416269663, |
|
"eval_spearman_euclidean": 0.9302610970443881, |
|
"eval_spearman_manhattan": 0.9284666578666315, |
|
"eval_spearman_max": 0.9302610970443881, |
|
"eval_steps_per_second": 1.173, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.5426648259162903, |
|
"learning_rate": 1.6317341195389978e-05, |
|
"loss": 0.0123, |
|
"step": 3305 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4093814492225647, |
|
"learning_rate": 1.630840704011436e-05, |
|
"loss": 0.0176, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.9546383619308472, |
|
"learning_rate": 1.629947288483874e-05, |
|
"loss": 0.0195, |
|
"step": 3315 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.6893852949142456, |
|
"learning_rate": 1.629053872956312e-05, |
|
"loss": 0.0253, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.3756393790245056, |
|
"learning_rate": 1.6281604574287502e-05, |
|
"loss": 0.0106, |
|
"step": 3325 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.563850998878479, |
|
"learning_rate": 1.6272670419011883e-05, |
|
"loss": 0.017, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.3767915964126587, |
|
"learning_rate": 1.6263736263736265e-05, |
|
"loss": 0.0231, |
|
"step": 3335 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.3786735236644745, |
|
"learning_rate": 1.6254802108460646e-05, |
|
"loss": 0.0121, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.2710884213447571, |
|
"learning_rate": 1.624586795318503e-05, |
|
"loss": 0.0121, |
|
"step": 3345 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4059496819972992, |
|
"learning_rate": 1.6236933797909408e-05, |
|
"loss": 0.0138, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.5236977338790894, |
|
"learning_rate": 1.622799964263379e-05, |
|
"loss": 0.023, |
|
"step": 3355 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.2646647691726685, |
|
"learning_rate": 1.6219065487358173e-05, |
|
"loss": 0.026, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.5769827365875244, |
|
"learning_rate": 1.6210131332082554e-05, |
|
"loss": 0.0145, |
|
"step": 3365 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.8552572727203369, |
|
"learning_rate": 1.6201197176806932e-05, |
|
"loss": 0.0132, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.690263271331787, |
|
"learning_rate": 1.6192263021531316e-05, |
|
"loss": 0.04, |
|
"step": 3375 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.6928017735481262, |
|
"learning_rate": 1.6183328866255697e-05, |
|
"loss": 0.0184, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.31537023186683655, |
|
"learning_rate": 1.6174394710980078e-05, |
|
"loss": 0.0156, |
|
"step": 3385 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.826603889465332, |
|
"learning_rate": 1.616546055570446e-05, |
|
"loss": 0.0253, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 1.0481098890304565, |
|
"learning_rate": 1.615652640042884e-05, |
|
"loss": 0.019, |
|
"step": 3395 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.719532310962677, |
|
"learning_rate": 1.614759224515322e-05, |
|
"loss": 0.0152, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.7920011281967163, |
|
"learning_rate": 1.6138658089877602e-05, |
|
"loss": 0.0142, |
|
"step": 3405 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.9310560822486877, |
|
"learning_rate": 1.6129723934601983e-05, |
|
"loss": 0.0187, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.4473055303096771, |
|
"learning_rate": 1.6120789779326368e-05, |
|
"loss": 0.0317, |
|
"step": 3415 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"grad_norm": 0.3835084140300751, |
|
"learning_rate": 1.611185562405075e-05, |
|
"loss": 0.0143, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.6586607098579407, |
|
"learning_rate": 1.6102921468775127e-05, |
|
"loss": 0.027, |
|
"step": 3425 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.510007917881012, |
|
"learning_rate": 1.609398731349951e-05, |
|
"loss": 0.0213, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.7229037284851074, |
|
"learning_rate": 1.6085053158223892e-05, |
|
"loss": 0.014, |
|
"step": 3435 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.35988929867744446, |
|
"learning_rate": 1.6076119002948273e-05, |
|
"loss": 0.0166, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.9715976119041443, |
|
"learning_rate": 1.6067184847672654e-05, |
|
"loss": 0.0202, |
|
"step": 3445 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.5804651975631714, |
|
"learning_rate": 1.6058250692397035e-05, |
|
"loss": 0.0177, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.6329138278961182, |
|
"learning_rate": 1.6049316537121416e-05, |
|
"loss": 0.0184, |
|
"step": 3455 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.46104130148887634, |
|
"learning_rate": 1.6040382381845797e-05, |
|
"loss": 0.0208, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.040407657623291, |
|
"learning_rate": 1.6031448226570178e-05, |
|
"loss": 0.0281, |
|
"step": 3465 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.7342686057090759, |
|
"learning_rate": 1.6022514071294563e-05, |
|
"loss": 0.0163, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.7275007367134094, |
|
"learning_rate": 1.601357991601894e-05, |
|
"loss": 0.0193, |
|
"step": 3475 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.30385661125183105, |
|
"learning_rate": 1.600464576074332e-05, |
|
"loss": 0.0153, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.5035312175750732, |
|
"learning_rate": 1.5995711605467706e-05, |
|
"loss": 0.0189, |
|
"step": 3485 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.2530502676963806, |
|
"learning_rate": 1.5986777450192087e-05, |
|
"loss": 0.017, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.068692922592163, |
|
"learning_rate": 1.5977843294916468e-05, |
|
"loss": 0.0178, |
|
"step": 3495 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.40073007345199585, |
|
"learning_rate": 1.596890913964085e-05, |
|
"loss": 0.0165, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 6.783413887023926, |
|
"learning_rate": 1.595997498436523e-05, |
|
"loss": 0.0235, |
|
"step": 3505 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.46688321232795715, |
|
"learning_rate": 1.595104082908961e-05, |
|
"loss": 0.0188, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.6832770705223083, |
|
"learning_rate": 1.5942106673813992e-05, |
|
"loss": 0.0202, |
|
"step": 3515 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.39423632621765137, |
|
"learning_rate": 1.5933172518538373e-05, |
|
"loss": 0.0118, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.8265882730484009, |
|
"learning_rate": 1.5924238363262754e-05, |
|
"loss": 0.0175, |
|
"step": 3525 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.8301231265068054, |
|
"learning_rate": 1.5915304207987135e-05, |
|
"loss": 0.0396, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 0.45745956897735596, |
|
"learning_rate": 1.5906370052711516e-05, |
|
"loss": 0.0195, |
|
"step": 3535 |
|
}, |
|
{ |
|
"epoch": 0.28, |
|
"grad_norm": 1.464941382408142, |
|
"learning_rate": 1.5897435897435897e-05, |
|
"loss": 0.0207, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.7965016961097717, |
|
"learning_rate": 1.588850174216028e-05, |
|
"loss": 0.0178, |
|
"step": 3545 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.6353152394294739, |
|
"learning_rate": 1.5879567586884663e-05, |
|
"loss": 0.019, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.38456305861473083, |
|
"learning_rate": 1.587063343160904e-05, |
|
"loss": 0.0169, |
|
"step": 3555 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.2627872228622437, |
|
"learning_rate": 1.5861699276333425e-05, |
|
"loss": 0.0226, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.5265514254570007, |
|
"learning_rate": 1.5852765121057806e-05, |
|
"loss": 0.0141, |
|
"step": 3565 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.5938783288002014, |
|
"learning_rate": 1.5843830965782187e-05, |
|
"loss": 0.0178, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.39797863364219666, |
|
"learning_rate": 1.5834896810506568e-05, |
|
"loss": 0.0123, |
|
"step": 3575 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.6035088300704956, |
|
"learning_rate": 1.582596265523095e-05, |
|
"loss": 0.0162, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.34465670585632324, |
|
"learning_rate": 1.581702849995533e-05, |
|
"loss": 0.0135, |
|
"step": 3585 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.40939584374427795, |
|
"learning_rate": 1.580809434467971e-05, |
|
"loss": 0.0123, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.4972923994064331, |
|
"learning_rate": 1.5799160189404092e-05, |
|
"loss": 0.0178, |
|
"step": 3595 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.3403087556362152, |
|
"learning_rate": 1.5790226034128477e-05, |
|
"loss": 0.0104, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"eval_loss": 0.01895288936793804, |
|
"eval_pearson_cosine": 0.9183723675584331, |
|
"eval_pearson_dot": 0.9165141926322535, |
|
"eval_pearson_euclidean": 0.9111139441737433, |
|
"eval_pearson_manhattan": 0.9105498844088082, |
|
"eval_pearson_max": 0.9183723675584331, |
|
"eval_runtime": 425.5624, |
|
"eval_samples_per_second": 1.175, |
|
"eval_spearman_cosine": 0.925627350509402, |
|
"eval_spearman_dot": 0.9219613838455353, |
|
"eval_spearman_euclidean": 0.9258841515366061, |
|
"eval_spearman_manhattan": 0.9260336241344965, |
|
"eval_spearman_max": 0.9260336241344965, |
|
"eval_steps_per_second": 1.175, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.2765352129936218, |
|
"learning_rate": 1.5781291878852854e-05, |
|
"loss": 0.0154, |
|
"step": 3605 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 3.1084377765655518, |
|
"learning_rate": 1.5772357723577235e-05, |
|
"loss": 0.0214, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.42401209473609924, |
|
"learning_rate": 1.576342356830162e-05, |
|
"loss": 0.0111, |
|
"step": 3615 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.4498884081840515, |
|
"learning_rate": 1.5754489413026e-05, |
|
"loss": 0.0144, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.7784323692321777, |
|
"learning_rate": 1.574555525775038e-05, |
|
"loss": 0.0164, |
|
"step": 3625 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.792809247970581, |
|
"learning_rate": 1.5736621102474763e-05, |
|
"loss": 0.0226, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.5559731721878052, |
|
"learning_rate": 1.5727686947199144e-05, |
|
"loss": 0.0157, |
|
"step": 3635 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.5122278928756714, |
|
"learning_rate": 1.5718752791923525e-05, |
|
"loss": 0.0114, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.579097330570221, |
|
"learning_rate": 1.5709818636647906e-05, |
|
"loss": 0.0176, |
|
"step": 3645 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.5125916004180908, |
|
"learning_rate": 1.5700884481372287e-05, |
|
"loss": 0.0112, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.6206502318382263, |
|
"learning_rate": 1.569195032609667e-05, |
|
"loss": 0.0129, |
|
"step": 3655 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 0.578705906867981, |
|
"learning_rate": 1.568301617082105e-05, |
|
"loss": 0.0133, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"grad_norm": 1.273260235786438, |
|
"learning_rate": 1.567408201554543e-05, |
|
"loss": 0.0238, |
|
"step": 3665 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.6316016912460327, |
|
"learning_rate": 1.5665147860269814e-05, |
|
"loss": 0.0107, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.6251145005226135, |
|
"learning_rate": 1.5656213704994195e-05, |
|
"loss": 0.0115, |
|
"step": 3675 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.48484379053115845, |
|
"learning_rate": 1.5647279549718573e-05, |
|
"loss": 0.0171, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.4522140920162201, |
|
"learning_rate": 1.5638345394442958e-05, |
|
"loss": 0.0222, |
|
"step": 3685 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.6267346739768982, |
|
"learning_rate": 1.562941123916734e-05, |
|
"loss": 0.0214, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.9123010635375977, |
|
"learning_rate": 1.562047708389172e-05, |
|
"loss": 0.0214, |
|
"step": 3695 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.2564802169799805, |
|
"learning_rate": 1.56115429286161e-05, |
|
"loss": 0.0209, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.4140920341014862, |
|
"learning_rate": 1.5602608773340482e-05, |
|
"loss": 0.0194, |
|
"step": 3705 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.2953025102615356, |
|
"learning_rate": 1.5593674618064863e-05, |
|
"loss": 0.0175, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.19866344332695007, |
|
"learning_rate": 1.5584740462789244e-05, |
|
"loss": 0.017, |
|
"step": 3715 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.5271120071411133, |
|
"learning_rate": 1.5575806307513625e-05, |
|
"loss": 0.0137, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.6619897484779358, |
|
"learning_rate": 1.5566872152238006e-05, |
|
"loss": 0.0184, |
|
"step": 3725 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.7720787525177002, |
|
"learning_rate": 1.555793799696239e-05, |
|
"loss": 0.0198, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.2014611959457397, |
|
"learning_rate": 1.5549003841686768e-05, |
|
"loss": 0.0176, |
|
"step": 3735 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.42123496532440186, |
|
"learning_rate": 1.554006968641115e-05, |
|
"loss": 0.0136, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 2.8967819213867188, |
|
"learning_rate": 1.5531135531135533e-05, |
|
"loss": 0.0202, |
|
"step": 3745 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.4060193598270416, |
|
"learning_rate": 1.5522201375859914e-05, |
|
"loss": 0.0183, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.6353384256362915, |
|
"learning_rate": 1.5513267220584295e-05, |
|
"loss": 0.0141, |
|
"step": 3755 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.3105901777744293, |
|
"learning_rate": 1.5504333065308677e-05, |
|
"loss": 0.0123, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.33913782238960266, |
|
"learning_rate": 1.5495398910033058e-05, |
|
"loss": 0.0176, |
|
"step": 3765 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.664908766746521, |
|
"learning_rate": 1.548646475475744e-05, |
|
"loss": 0.0135, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.7868657112121582, |
|
"learning_rate": 1.547753059948182e-05, |
|
"loss": 0.0122, |
|
"step": 3775 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.6869320869445801, |
|
"learning_rate": 1.54685964442062e-05, |
|
"loss": 0.0135, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 1.1669894456863403, |
|
"learning_rate": 1.5459662288930585e-05, |
|
"loss": 0.024, |
|
"step": 3785 |
|
}, |
|
{ |
|
"epoch": 0.3, |
|
"grad_norm": 0.32140159606933594, |
|
"learning_rate": 1.5450728133654963e-05, |
|
"loss": 0.0109, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.5477967262268066, |
|
"learning_rate": 1.5441793978379344e-05, |
|
"loss": 0.0132, |
|
"step": 3795 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2863134741783142, |
|
"learning_rate": 1.5432859823103728e-05, |
|
"loss": 0.0225, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.7895782589912415, |
|
"learning_rate": 1.542392566782811e-05, |
|
"loss": 0.0265, |
|
"step": 3805 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.36137330532073975, |
|
"learning_rate": 1.5414991512552487e-05, |
|
"loss": 0.0114, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.5505108833312988, |
|
"learning_rate": 1.540605735727687e-05, |
|
"loss": 0.0244, |
|
"step": 3815 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.2253755331039429, |
|
"learning_rate": 1.5397123202001252e-05, |
|
"loss": 0.0253, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.25381264090538025, |
|
"learning_rate": 1.5388189046725633e-05, |
|
"loss": 0.0215, |
|
"step": 3825 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.59675133228302, |
|
"learning_rate": 1.5379254891450014e-05, |
|
"loss": 0.0229, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.4737885296344757, |
|
"learning_rate": 1.5370320736174395e-05, |
|
"loss": 0.0172, |
|
"step": 3835 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.7271224856376648, |
|
"learning_rate": 1.536138658089878e-05, |
|
"loss": 0.0155, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.3435806930065155, |
|
"learning_rate": 1.5352452425623158e-05, |
|
"loss": 0.0115, |
|
"step": 3845 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.37383875250816345, |
|
"learning_rate": 1.534351827034754e-05, |
|
"loss": 0.0175, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.2769816219806671, |
|
"learning_rate": 1.5334584115071923e-05, |
|
"loss": 0.012, |
|
"step": 3855 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.421824187040329, |
|
"learning_rate": 1.5325649959796304e-05, |
|
"loss": 0.0168, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.48985883593559265, |
|
"learning_rate": 1.5316715804520682e-05, |
|
"loss": 0.0121, |
|
"step": 3865 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.4765077829360962, |
|
"learning_rate": 1.5307781649245066e-05, |
|
"loss": 0.0181, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.48202696442604065, |
|
"learning_rate": 1.5298847493969447e-05, |
|
"loss": 0.0157, |
|
"step": 3875 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 2.033970594406128, |
|
"learning_rate": 1.5289913338693828e-05, |
|
"loss": 0.0136, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.33205658197402954, |
|
"learning_rate": 1.528097918341821e-05, |
|
"loss": 0.012, |
|
"step": 3885 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.4964057207107544, |
|
"learning_rate": 1.527204502814259e-05, |
|
"loss": 0.0109, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.9996275901794434, |
|
"learning_rate": 1.526311087286697e-05, |
|
"loss": 0.0153, |
|
"step": 3895 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.5008103251457214, |
|
"learning_rate": 1.5254176717591354e-05, |
|
"loss": 0.0194, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"eval_loss": 0.019112512469291687, |
|
"eval_pearson_cosine": 0.9187946359889192, |
|
"eval_pearson_dot": 0.916935359224543, |
|
"eval_pearson_euclidean": 0.9078268066209557, |
|
"eval_pearson_manhattan": 0.9068373107836385, |
|
"eval_pearson_max": 0.9187946359889192, |
|
"eval_runtime": 426.0704, |
|
"eval_samples_per_second": 1.174, |
|
"eval_spearman_cosine": 0.9320740642962572, |
|
"eval_spearman_dot": 0.9297467269869079, |
|
"eval_spearman_euclidean": 0.9318172632690529, |
|
"eval_spearman_manhattan": 0.9317702230808924, |
|
"eval_spearman_max": 0.9320740642962572, |
|
"eval_steps_per_second": 1.174, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.5863137245178223, |
|
"learning_rate": 1.5245242562315733e-05, |
|
"loss": 0.0155, |
|
"step": 3905 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 0.9501858353614807, |
|
"learning_rate": 1.5236308407040114e-05, |
|
"loss": 0.023, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 0.31, |
|
"grad_norm": 1.0070725679397583, |
|
"learning_rate": 1.5227374251764497e-05, |
|
"loss": 0.0159, |
|
"step": 3915 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.1535245180130005, |
|
"learning_rate": 1.5218440096488878e-05, |
|
"loss": 0.0271, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.3500936031341553, |
|
"learning_rate": 1.5209505941213261e-05, |
|
"loss": 0.0185, |
|
"step": 3925 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5063101649284363, |
|
"learning_rate": 1.5200571785937642e-05, |
|
"loss": 0.0092, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.48119908571243286, |
|
"learning_rate": 1.5191637630662021e-05, |
|
"loss": 0.0156, |
|
"step": 3935 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.7710624933242798, |
|
"learning_rate": 1.5182703475386404e-05, |
|
"loss": 0.0182, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5304814577102661, |
|
"learning_rate": 1.5173769320110785e-05, |
|
"loss": 0.0127, |
|
"step": 3945 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5894374251365662, |
|
"learning_rate": 1.5164835164835166e-05, |
|
"loss": 0.012, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.48509135842323303, |
|
"learning_rate": 1.5155901009559549e-05, |
|
"loss": 0.0164, |
|
"step": 3955 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 1.00001060962677, |
|
"learning_rate": 1.5146966854283928e-05, |
|
"loss": 0.0105, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5003077983856201, |
|
"learning_rate": 1.513803269900831e-05, |
|
"loss": 0.0191, |
|
"step": 3965 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.4220547378063202, |
|
"learning_rate": 1.5129098543732692e-05, |
|
"loss": 0.0182, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.353935182094574, |
|
"learning_rate": 1.5120164388457073e-05, |
|
"loss": 0.0105, |
|
"step": 3975 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.7862482070922852, |
|
"learning_rate": 1.5111230233181452e-05, |
|
"loss": 0.0153, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.4707240164279938, |
|
"learning_rate": 1.5102296077905835e-05, |
|
"loss": 0.0106, |
|
"step": 3985 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5706127882003784, |
|
"learning_rate": 1.5093361922630216e-05, |
|
"loss": 0.0193, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5268707275390625, |
|
"learning_rate": 1.5084427767354597e-05, |
|
"loss": 0.0115, |
|
"step": 3995 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.42871159315109253, |
|
"learning_rate": 1.507549361207898e-05, |
|
"loss": 0.0111, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.47120198607444763, |
|
"learning_rate": 1.5066559456803361e-05, |
|
"loss": 0.0148, |
|
"step": 4005 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.6913847327232361, |
|
"learning_rate": 1.505762530152774e-05, |
|
"loss": 0.0156, |
|
"step": 4010 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.40243229269981384, |
|
"learning_rate": 1.5048691146252123e-05, |
|
"loss": 0.021, |
|
"step": 4015 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.4630833566188812, |
|
"learning_rate": 1.5039756990976504e-05, |
|
"loss": 0.0142, |
|
"step": 4020 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.7278119325637817, |
|
"learning_rate": 1.5030822835700887e-05, |
|
"loss": 0.0221, |
|
"step": 4025 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.5856547951698303, |
|
"learning_rate": 1.5021888680425268e-05, |
|
"loss": 0.0127, |
|
"step": 4030 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.3612341582775116, |
|
"learning_rate": 1.5012954525149647e-05, |
|
"loss": 0.0142, |
|
"step": 4035 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"grad_norm": 0.3373230993747711, |
|
"learning_rate": 1.500402036987403e-05, |
|
"loss": 0.013, |
|
"step": 4040 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.3555310070514679, |
|
"learning_rate": 1.4995086214598411e-05, |
|
"loss": 0.0188, |
|
"step": 4045 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.27012017369270325, |
|
"learning_rate": 1.4986152059322792e-05, |
|
"loss": 0.016, |
|
"step": 4050 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.7879682183265686, |
|
"learning_rate": 1.4977217904047175e-05, |
|
"loss": 0.019, |
|
"step": 4055 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.4750874638557434, |
|
"learning_rate": 1.4968283748771554e-05, |
|
"loss": 0.0156, |
|
"step": 4060 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.5031574368476868, |
|
"learning_rate": 1.4959349593495935e-05, |
|
"loss": 0.0206, |
|
"step": 4065 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.5057447552680969, |
|
"learning_rate": 1.4950415438220318e-05, |
|
"loss": 0.0144, |
|
"step": 4070 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.0315029621124268, |
|
"learning_rate": 1.4941481282944699e-05, |
|
"loss": 0.0179, |
|
"step": 4075 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.5417433977127075, |
|
"learning_rate": 1.493254712766908e-05, |
|
"loss": 0.0126, |
|
"step": 4080 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.667726993560791, |
|
"learning_rate": 1.4923612972393463e-05, |
|
"loss": 0.0143, |
|
"step": 4085 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.8723002672195435, |
|
"learning_rate": 1.4914678817117842e-05, |
|
"loss": 0.012, |
|
"step": 4090 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.8944787979125977, |
|
"learning_rate": 1.4905744661842223e-05, |
|
"loss": 0.0205, |
|
"step": 4095 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.5145695805549622, |
|
"learning_rate": 1.4896810506566606e-05, |
|
"loss": 0.0118, |
|
"step": 4100 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9632664322853088, |
|
"learning_rate": 1.4887876351290987e-05, |
|
"loss": 0.0233, |
|
"step": 4105 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.4608353078365326, |
|
"learning_rate": 1.487894219601537e-05, |
|
"loss": 0.0192, |
|
"step": 4110 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.8873416781425476, |
|
"learning_rate": 1.4870008040739749e-05, |
|
"loss": 0.0196, |
|
"step": 4115 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.7420912384986877, |
|
"learning_rate": 1.486107388546413e-05, |
|
"loss": 0.0114, |
|
"step": 4120 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.3144592344760895, |
|
"learning_rate": 1.4852139730188513e-05, |
|
"loss": 0.0136, |
|
"step": 4125 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.4143444001674652, |
|
"learning_rate": 1.4843205574912894e-05, |
|
"loss": 0.0102, |
|
"step": 4130 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.3450114727020264, |
|
"learning_rate": 1.4834271419637275e-05, |
|
"loss": 0.0273, |
|
"step": 4135 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.6474159955978394, |
|
"learning_rate": 1.4825337264361657e-05, |
|
"loss": 0.0125, |
|
"step": 4140 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.3440127372741699, |
|
"learning_rate": 1.4816403109086037e-05, |
|
"loss": 0.016, |
|
"step": 4145 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.6236218214035034, |
|
"learning_rate": 1.4807468953810418e-05, |
|
"loss": 0.019, |
|
"step": 4150 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.9320889115333557, |
|
"learning_rate": 1.47985347985348e-05, |
|
"loss": 0.015, |
|
"step": 4155 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 0.36292609572410583, |
|
"learning_rate": 1.4789600643259182e-05, |
|
"loss": 0.0155, |
|
"step": 4160 |
|
}, |
|
{ |
|
"epoch": 0.33, |
|
"grad_norm": 1.278648018836975, |
|
"learning_rate": 1.4780666487983561e-05, |
|
"loss": 0.0229, |
|
"step": 4165 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.24334418773651123, |
|
"learning_rate": 1.4771732332707944e-05, |
|
"loss": 0.0099, |
|
"step": 4170 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.7839992046356201, |
|
"learning_rate": 1.4762798177432325e-05, |
|
"loss": 0.0167, |
|
"step": 4175 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.3292075395584106, |
|
"learning_rate": 1.4753864022156706e-05, |
|
"loss": 0.0168, |
|
"step": 4180 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.4192187488079071, |
|
"learning_rate": 1.4744929866881089e-05, |
|
"loss": 0.0177, |
|
"step": 4185 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.5623775124549866, |
|
"learning_rate": 1.4735995711605468e-05, |
|
"loss": 0.0103, |
|
"step": 4190 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.5713854432106018, |
|
"learning_rate": 1.4727061556329849e-05, |
|
"loss": 0.011, |
|
"step": 4195 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.6574892997741699, |
|
"learning_rate": 1.4718127401054232e-05, |
|
"loss": 0.0169, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"eval_loss": 0.01563582755625248, |
|
"eval_pearson_cosine": 0.93071357698338, |
|
"eval_pearson_dot": 0.9303618828257327, |
|
"eval_pearson_euclidean": 0.9158033535152222, |
|
"eval_pearson_manhattan": 0.9136122746167228, |
|
"eval_pearson_max": 0.93071357698338, |
|
"eval_runtime": 426.316, |
|
"eval_samples_per_second": 1.173, |
|
"eval_spearman_cosine": 0.9357050868203473, |
|
"eval_spearman_dot": 0.9326605306421225, |
|
"eval_spearman_euclidean": 0.9346635786543147, |
|
"eval_spearman_manhattan": 0.9334011736046943, |
|
"eval_spearman_max": 0.9357050868203473, |
|
"eval_steps_per_second": 1.173, |
|
"step": 4200 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.5762566328048706, |
|
"learning_rate": 1.4709193245778613e-05, |
|
"loss": 0.0296, |
|
"step": 4205 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.6294378638267517, |
|
"learning_rate": 1.4700259090502995e-05, |
|
"loss": 0.0158, |
|
"step": 4210 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.5651215314865112, |
|
"learning_rate": 1.4691324935227376e-05, |
|
"loss": 0.0131, |
|
"step": 4215 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.6574472784996033, |
|
"learning_rate": 1.4682390779951756e-05, |
|
"loss": 0.0166, |
|
"step": 4220 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.4540180563926697, |
|
"learning_rate": 1.4673456624676139e-05, |
|
"loss": 0.024, |
|
"step": 4225 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.3493760824203491, |
|
"learning_rate": 1.466452246940052e-05, |
|
"loss": 0.0149, |
|
"step": 4230 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 1.0167644023895264, |
|
"learning_rate": 1.46555883141249e-05, |
|
"loss": 0.0149, |
|
"step": 4235 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.9241525530815125, |
|
"learning_rate": 1.4646654158849283e-05, |
|
"loss": 0.0184, |
|
"step": 4240 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.6745620965957642, |
|
"learning_rate": 1.4637720003573663e-05, |
|
"loss": 0.0158, |
|
"step": 4245 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.4359780251979828, |
|
"learning_rate": 1.4628785848298044e-05, |
|
"loss": 0.0098, |
|
"step": 4250 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.4030389189720154, |
|
"learning_rate": 1.4619851693022426e-05, |
|
"loss": 0.0184, |
|
"step": 4255 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.5261355638504028, |
|
"learning_rate": 1.4610917537746807e-05, |
|
"loss": 0.0141, |
|
"step": 4260 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.38097327947616577, |
|
"learning_rate": 1.4601983382471187e-05, |
|
"loss": 0.0083, |
|
"step": 4265 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.41486430168151855, |
|
"learning_rate": 1.459304922719557e-05, |
|
"loss": 0.0202, |
|
"step": 4270 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.5645167827606201, |
|
"learning_rate": 1.458411507191995e-05, |
|
"loss": 0.0126, |
|
"step": 4275 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.4771600365638733, |
|
"learning_rate": 1.4575180916644332e-05, |
|
"loss": 0.0103, |
|
"step": 4280 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.8267874717712402, |
|
"learning_rate": 1.4566246761368714e-05, |
|
"loss": 0.0182, |
|
"step": 4285 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"grad_norm": 0.7216476202011108, |
|
"learning_rate": 1.4557312606093095e-05, |
|
"loss": 0.0155, |
|
"step": 4290 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.0095362663269043, |
|
"learning_rate": 1.4548378450817478e-05, |
|
"loss": 0.0199, |
|
"step": 4295 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.36790329217910767, |
|
"learning_rate": 1.4539444295541857e-05, |
|
"loss": 0.0231, |
|
"step": 4300 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.4778492748737335, |
|
"learning_rate": 1.4530510140266239e-05, |
|
"loss": 0.0101, |
|
"step": 4305 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.3736858665943146, |
|
"learning_rate": 1.4521575984990621e-05, |
|
"loss": 0.0157, |
|
"step": 4310 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.5705660581588745, |
|
"learning_rate": 1.4512641829715002e-05, |
|
"loss": 0.0133, |
|
"step": 4315 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.7104588747024536, |
|
"learning_rate": 1.4503707674439382e-05, |
|
"loss": 0.0209, |
|
"step": 4320 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.4687543511390686, |
|
"learning_rate": 1.4494773519163764e-05, |
|
"loss": 0.0072, |
|
"step": 4325 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.5142909288406372, |
|
"learning_rate": 1.4485839363888145e-05, |
|
"loss": 0.0127, |
|
"step": 4330 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.3397541046142578, |
|
"learning_rate": 1.4476905208612526e-05, |
|
"loss": 0.0122, |
|
"step": 4335 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.4727453589439392, |
|
"learning_rate": 1.446797105333691e-05, |
|
"loss": 0.0212, |
|
"step": 4340 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.7717282176017761, |
|
"learning_rate": 1.445903689806129e-05, |
|
"loss": 0.0277, |
|
"step": 4345 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.4375796914100647, |
|
"learning_rate": 1.445010274278567e-05, |
|
"loss": 0.0119, |
|
"step": 4350 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.45401817560195923, |
|
"learning_rate": 1.4441168587510052e-05, |
|
"loss": 0.0128, |
|
"step": 4355 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.4388526976108551, |
|
"learning_rate": 1.4432234432234433e-05, |
|
"loss": 0.0181, |
|
"step": 4360 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.4506477415561676, |
|
"learning_rate": 1.4423300276958814e-05, |
|
"loss": 0.0118, |
|
"step": 4365 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.7086319923400879, |
|
"learning_rate": 1.4414366121683197e-05, |
|
"loss": 0.0111, |
|
"step": 4370 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.185771182179451, |
|
"learning_rate": 1.4405431966407576e-05, |
|
"loss": 0.0151, |
|
"step": 4375 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.26904481649398804, |
|
"learning_rate": 1.4396497811131958e-05, |
|
"loss": 0.0116, |
|
"step": 4380 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.8145022392272949, |
|
"learning_rate": 1.438756365585634e-05, |
|
"loss": 0.0097, |
|
"step": 4385 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 1.1931172609329224, |
|
"learning_rate": 1.4378629500580721e-05, |
|
"loss": 0.0158, |
|
"step": 4390 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.37602362036705017, |
|
"learning_rate": 1.4369695345305104e-05, |
|
"loss": 0.0145, |
|
"step": 4395 |
|
}, |
|
{ |
|
"epoch": 0.35, |
|
"grad_norm": 0.5753958225250244, |
|
"learning_rate": 1.4360761190029483e-05, |
|
"loss": 0.0156, |
|
"step": 4400 |
|
} |
|
], |
|
"logging_steps": 5, |
|
"max_steps": 12437, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 1, |
|
"save_steps": 400, |
|
"total_flos": 0.0, |
|
"train_batch_size": 1, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |