{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.2112,
  "eval_steps": 500,
  "global_step": 2640,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0,
      "grad_norm": 62.10089111328125,
      "learning_rate": 1e-06,
      "loss": 4.5777,
      "step": 20
    },
    {
      "epoch": 0.0,
      "grad_norm": 39.39016342163086,
      "learning_rate": 2e-06,
      "loss": 4.4077,
      "step": 40
    },
    {
      "epoch": 0.0,
      "grad_norm": 54.24020767211914,
      "learning_rate": 3e-06,
      "loss": 4.4807,
      "step": 60
    },
    {
      "epoch": 0.01,
      "grad_norm": 30.161609649658203,
      "learning_rate": 4e-06,
      "loss": 4.5756,
      "step": 80
    },
    {
      "epoch": 0.01,
      "grad_norm": 40.131675720214844,
      "learning_rate": 4.9999999999999996e-06,
      "loss": 4.4352,
      "step": 100
    },
    {
      "epoch": 0.01,
      "grad_norm": 52.3621940612793,
      "learning_rate": 6e-06,
      "loss": 4.5096,
      "step": 120
    },
    {
      "epoch": 0.01,
      "grad_norm": 49.86561584472656,
      "learning_rate": 7e-06,
      "loss": 4.493,
      "step": 140
    },
    {
      "epoch": 0.01,
      "grad_norm": 20.034923553466797,
      "learning_rate": 8e-06,
      "loss": 4.4088,
      "step": 160
    },
    {
      "epoch": 0.01,
      "grad_norm": 50.790679931640625,
      "learning_rate": 9e-06,
      "loss": 4.4901,
      "step": 180
    },
    {
      "epoch": 0.02,
      "grad_norm": 48.5693473815918,
      "learning_rate": 9.999999999999999e-06,
      "loss": 4.3628,
      "step": 200
    },
    {
      "epoch": 0.02,
      "grad_norm": 37.95353698730469,
      "learning_rate": 1.1e-05,
      "loss": 4.3298,
      "step": 220
    },
    {
      "epoch": 0.02,
      "grad_norm": 35.7153434753418,
      "learning_rate": 1.2e-05,
      "loss": 4.2839,
      "step": 240
    },
    {
      "epoch": 0.02,
      "grad_norm": 91.47773742675781,
      "learning_rate": 1.3000000000000001e-05,
      "loss": 4.1238,
      "step": 260
    },
    {
      "epoch": 0.02,
      "grad_norm": 23.16193389892578,
      "learning_rate": 1.4e-05,
      "loss": 4.1245,
      "step": 280
    },
    {
      "epoch": 0.02,
      "grad_norm": 28.304485321044922,
      "learning_rate": 1.5e-05,
      "loss": 4.2198,
      "step": 300
    },
    {
      "epoch": 0.03,
      "grad_norm": 34.03230285644531,
      "learning_rate": 1.6e-05,
      "loss": 4.0958,
      "step": 320
    },
    {
      "epoch": 0.03,
      "grad_norm": 29.786975860595703,
      "learning_rate": 1.7e-05,
      "loss": 4.024,
      "step": 340
    },
    {
      "epoch": 0.03,
      "grad_norm": 33.04754638671875,
      "learning_rate": 1.8e-05,
      "loss": 4.0832,
      "step": 360
    },
    {
      "epoch": 0.03,
      "grad_norm": 28.68460464477539,
      "learning_rate": 1.9e-05,
      "loss": 3.9827,
      "step": 380
    },
    {
      "epoch": 0.03,
      "grad_norm": 26.463253021240234,
      "learning_rate": 1.9999999999999998e-05,
      "loss": 3.9454,
      "step": 400
    },
    {
      "epoch": 0.03,
      "grad_norm": 19.407127380371094,
      "learning_rate": 2.1e-05,
      "loss": 4.0119,
      "step": 420
    },
    {
      "epoch": 0.04,
      "grad_norm": 26.383380889892578,
      "learning_rate": 2.2e-05,
      "loss": 3.9554,
      "step": 440
    },
    {
      "epoch": 0.04,
      "grad_norm": 33.225223541259766,
      "learning_rate": 2.3000000000000003e-05,
      "loss": 3.8172,
      "step": 460
    },
    {
      "epoch": 0.04,
      "grad_norm": 26.000978469848633,
      "learning_rate": 2.4e-05,
      "loss": 3.8934,
      "step": 480
    },
    {
      "epoch": 0.04,
      "grad_norm": 28.714366912841797,
      "learning_rate": 2.5e-05,
      "loss": 3.9194,
      "step": 500
    },
    {
      "epoch": 0.04,
      "grad_norm": 28.721248626708984,
      "learning_rate": 2.6000000000000002e-05,
      "loss": 3.8144,
      "step": 520
    },
    {
      "epoch": 0.04,
      "grad_norm": 24.934555053710938,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 3.9166,
      "step": 540
    },
    {
      "epoch": 0.04,
      "grad_norm": 23.113840103149414,
      "learning_rate": 2.8e-05,
      "loss": 3.8248,
      "step": 560
    },
    {
      "epoch": 0.05,
      "grad_norm": 21.58758544921875,
      "learning_rate": 2.9e-05,
      "loss": 3.7538,
      "step": 580
    },
    {
      "epoch": 0.05,
      "grad_norm": 22.339618682861328,
      "learning_rate": 3e-05,
      "loss": 3.726,
      "step": 600
    },
    {
      "epoch": 0.05,
      "grad_norm": 49.25693893432617,
      "learning_rate": 2.999992132854894e-05,
      "loss": 3.8692,
      "step": 620
    },
    {
      "epoch": 0.05,
      "grad_norm": 45.1494026184082,
      "learning_rate": 2.999968531502098e-05,
      "loss": 3.7374,
      "step": 640
    },
    {
      "epoch": 0.05,
      "grad_norm": 72.25853729248047,
      "learning_rate": 2.99992919618918e-05,
      "loss": 3.7735,
      "step": 660
    },
    {
      "epoch": 0.05,
      "grad_norm": 39.445220947265625,
      "learning_rate": 2.999874127328748e-05,
      "loss": 3.759,
      "step": 680
    },
    {
      "epoch": 0.06,
      "grad_norm": 21.18370246887207,
      "learning_rate": 2.9998033254984483e-05,
      "loss": 3.7841,
      "step": 700
    },
    {
      "epoch": 0.06,
      "grad_norm": 24.310373306274414,
      "learning_rate": 2.999716791440959e-05,
      "loss": 3.679,
      "step": 720
    },
    {
      "epoch": 0.06,
      "grad_norm": 36.432350158691406,
      "learning_rate": 2.9996145260639812e-05,
      "loss": 3.6796,
      "step": 740
    },
    {
      "epoch": 0.06,
      "grad_norm": 32.12275314331055,
      "learning_rate": 2.9994965304402304e-05,
      "loss": 3.7613,
      "step": 760
    },
    {
      "epoch": 0.06,
      "grad_norm": 38.32442855834961,
      "learning_rate": 2.999362805807425e-05,
      "loss": 3.7586,
      "step": 780
    },
    {
      "epoch": 0.06,
      "grad_norm": 30.289432525634766,
      "learning_rate": 2.9992133535682725e-05,
      "loss": 3.6919,
      "step": 800
    },
    {
      "epoch": 0.07,
      "grad_norm": 32.69138717651367,
      "learning_rate": 2.9990481752904566e-05,
      "loss": 3.6855,
      "step": 820
    },
    {
      "epoch": 0.07,
      "grad_norm": 46.554874420166016,
      "learning_rate": 2.9988672727066197e-05,
      "loss": 3.7201,
      "step": 840
    },
    {
      "epoch": 0.07,
      "grad_norm": 28.671123504638672,
      "learning_rate": 2.9986706477143436e-05,
      "loss": 3.6594,
      "step": 860
    },
    {
      "epoch": 0.07,
      "grad_norm": 49.44480895996094,
      "learning_rate": 2.9984583023761318e-05,
      "loss": 3.7271,
      "step": 880
    },
    {
      "epoch": 0.07,
      "grad_norm": 26.61457061767578,
      "learning_rate": 2.998230238919386e-05,
      "loss": 3.7376,
      "step": 900
    },
    {
      "epoch": 0.07,
      "grad_norm": 27.453275680541992,
      "learning_rate": 2.9979864597363846e-05,
      "loss": 3.6716,
      "step": 920
    },
    {
      "epoch": 0.08,
      "grad_norm": 22.791175842285156,
      "learning_rate": 2.9977269673842554e-05,
      "loss": 3.6172,
      "step": 940
    },
    {
      "epoch": 0.08,
      "grad_norm": 58.2718620300293,
      "learning_rate": 2.997451764584951e-05,
      "loss": 3.7494,
      "step": 960
    },
    {
      "epoch": 0.08,
      "grad_norm": 33.610286712646484,
      "learning_rate": 2.9971608542252175e-05,
      "loss": 3.7077,
      "step": 980
    },
    {
      "epoch": 0.08,
      "grad_norm": 25.48147201538086,
      "learning_rate": 2.9968542393565674e-05,
      "loss": 3.6721,
      "step": 1000
    },
    {
      "epoch": 0.08,
      "grad_norm": 27.07135581970215,
      "learning_rate": 2.996531923195246e-05,
      "loss": 3.7106,
      "step": 1020
    },
    {
      "epoch": 0.08,
      "grad_norm": 37.24673843383789,
      "learning_rate": 2.996193909122197e-05,
      "loss": 3.7447,
      "step": 1040
    },
    {
      "epoch": 0.08,
      "grad_norm": 26.41890525817871,
      "learning_rate": 2.995840200683028e-05,
      "loss": 3.5839,
      "step": 1060
    },
    {
      "epoch": 0.09,
      "grad_norm": 32.88002014160156,
      "learning_rate": 2.995470801587973e-05,
      "loss": 3.6606,
      "step": 1080
    },
    {
      "epoch": 0.09,
      "grad_norm": 32.0895881652832,
      "learning_rate": 2.9950857157118544e-05,
      "loss": 3.677,
      "step": 1100
    },
    {
      "epoch": 0.09,
      "grad_norm": 37.726783752441406,
      "learning_rate": 2.9946849470940395e-05,
      "loss": 3.5546,
      "step": 1120
    },
    {
      "epoch": 0.09,
      "grad_norm": 56.246299743652344,
      "learning_rate": 2.9942684999384034e-05,
      "loss": 3.6391,
      "step": 1140
    },
    {
      "epoch": 0.09,
      "grad_norm": 35.675662994384766,
      "learning_rate": 2.993836378613278e-05,
      "loss": 3.5918,
      "step": 1160
    },
    {
      "epoch": 0.09,
      "grad_norm": 26.685134887695312,
      "learning_rate": 2.993388587651412e-05,
      "loss": 3.6331,
      "step": 1180
    },
    {
      "epoch": 0.1,
      "grad_norm": 27.400333404541016,
      "learning_rate": 2.992925131749921e-05,
      "loss": 3.6214,
      "step": 1200
    },
    {
      "epoch": 0.1,
      "grad_norm": 28.501314163208008,
      "learning_rate": 2.9924460157702378e-05,
      "loss": 3.6619,
      "step": 1220
    },
    {
      "epoch": 0.1,
      "grad_norm": 30.773778915405273,
      "learning_rate": 2.991951244738063e-05,
      "loss": 3.6453,
      "step": 1240
    },
    {
      "epoch": 0.1,
      "grad_norm": 24.701374053955078,
      "learning_rate": 2.9914408238433095e-05,
      "loss": 3.7282,
      "step": 1260
    },
    {
      "epoch": 0.1,
      "grad_norm": 27.605117797851562,
      "learning_rate": 2.990914758440052e-05,
      "loss": 3.6635,
      "step": 1280
    },
    {
      "epoch": 0.1,
      "grad_norm": 27.829086303710938,
      "learning_rate": 2.9903730540464668e-05,
      "loss": 3.5293,
      "step": 1300
    },
    {
      "epoch": 0.11,
      "grad_norm": 40.916263580322266,
      "learning_rate": 2.9898157163447767e-05,
      "loss": 3.6976,
      "step": 1320
    },
    {
      "epoch": 0.11,
      "grad_norm": 33.31068420410156,
      "learning_rate": 2.9892427511811912e-05,
      "loss": 3.548,
      "step": 1340
    },
    {
      "epoch": 0.11,
      "grad_norm": 29.932533264160156,
      "learning_rate": 2.9886541645658435e-05,
      "loss": 3.7486,
      "step": 1360
    },
    {
      "epoch": 0.11,
      "grad_norm": 35.59455490112305,
      "learning_rate": 2.9880499626727284e-05,
      "loss": 3.6342,
      "step": 1380
    },
    {
      "epoch": 0.11,
      "grad_norm": 29.93869400024414,
      "learning_rate": 2.9874301518396377e-05,
      "loss": 3.6615,
      "step": 1400
    },
    {
      "epoch": 0.11,
      "grad_norm": 43.417213439941406,
      "learning_rate": 2.986794738568094e-05,
      "loss": 3.607,
      "step": 1420
    },
    {
      "epoch": 0.12,
      "grad_norm": 52.483917236328125,
      "learning_rate": 2.9861437295232825e-05,
      "loss": 3.5937,
      "step": 1440
    },
    {
      "epoch": 0.12,
      "grad_norm": 30.312334060668945,
      "learning_rate": 2.9854771315339787e-05,
      "loss": 3.5991,
      "step": 1460
    },
    {
      "epoch": 0.12,
      "grad_norm": 49.459136962890625,
      "learning_rate": 2.984794951592481e-05,
      "loss": 3.5261,
      "step": 1480
    },
    {
      "epoch": 0.12,
      "grad_norm": 34.81111526489258,
      "learning_rate": 2.984097196854534e-05,
      "loss": 3.6818,
      "step": 1500
    },
    {
      "epoch": 0.12,
      "grad_norm": 34.721946716308594,
      "learning_rate": 2.9833838746392544e-05,
      "loss": 3.5636,
      "step": 1520
    },
    {
      "epoch": 0.12,
      "grad_norm": 31.46621322631836,
      "learning_rate": 2.982654992429056e-05,
      "loss": 3.5597,
      "step": 1540
    },
    {
      "epoch": 0.12,
      "grad_norm": 38.78512191772461,
      "learning_rate": 2.981910557869566e-05,
      "loss": 3.661,
      "step": 1560
    },
    {
      "epoch": 0.13,
      "grad_norm": 27.38837432861328,
      "learning_rate": 2.981150578769553e-05,
      "loss": 3.6173,
      "step": 1580
    },
    {
      "epoch": 0.13,
      "grad_norm": 45.619632720947266,
      "learning_rate": 2.980375063100836e-05,
      "loss": 3.6632,
      "step": 1600
    },
    {
      "epoch": 0.13,
      "grad_norm": 30.708433151245117,
      "learning_rate": 2.979584018998209e-05,
      "loss": 3.5165,
      "step": 1620
    },
    {
      "epoch": 0.13,
      "grad_norm": 35.472938537597656,
      "learning_rate": 2.97877745475935e-05,
      "loss": 3.5157,
      "step": 1640
    },
    {
      "epoch": 0.13,
      "grad_norm": 39.029415130615234,
      "learning_rate": 2.9779553788447358e-05,
      "loss": 3.6259,
      "step": 1660
    },
    {
      "epoch": 0.13,
      "grad_norm": 57.90769577026367,
      "learning_rate": 2.977117799877554e-05,
      "loss": 3.6378,
      "step": 1680
    },
    {
      "epoch": 0.14,
      "grad_norm": 36.95255661010742,
      "learning_rate": 2.9762647266436115e-05,
      "loss": 3.5845,
      "step": 1700
    },
    {
      "epoch": 0.14,
      "grad_norm": 27.456787109375,
      "learning_rate": 2.9753961680912432e-05,
      "loss": 3.6647,
      "step": 1720
    },
    {
      "epoch": 0.14,
      "grad_norm": 27.383285522460938,
      "learning_rate": 2.9745121333312166e-05,
      "loss": 3.6668,
      "step": 1740
    },
    {
      "epoch": 0.14,
      "grad_norm": 26.555049896240234,
      "learning_rate": 2.9736126316366385e-05,
      "loss": 3.6617,
      "step": 1760
    },
    {
      "epoch": 0.14,
      "grad_norm": 34.009620666503906,
      "learning_rate": 2.9726976724428563e-05,
      "loss": 3.572,
      "step": 1780
    },
    {
      "epoch": 0.14,
      "grad_norm": 45.44181823730469,
      "learning_rate": 2.9717672653473588e-05,
      "loss": 3.6354,
      "step": 1800
    },
    {
      "epoch": 0.15,
      "grad_norm": 30.79588508605957,
      "learning_rate": 2.9708214201096758e-05,
      "loss": 3.6953,
      "step": 1820
    },
    {
      "epoch": 0.15,
      "grad_norm": 46.61872482299805,
      "learning_rate": 2.9698601466512767e-05,
      "loss": 3.5373,
      "step": 1840
    },
    {
      "epoch": 0.15,
      "grad_norm": 42.86500930786133,
      "learning_rate": 2.9688834550554647e-05,
      "loss": 3.5982,
      "step": 1860
    },
    {
      "epoch": 0.15,
      "grad_norm": 33.480289459228516,
      "learning_rate": 2.9678913555672733e-05,
      "loss": 3.6024,
      "step": 1880
    },
    {
      "epoch": 0.15,
      "grad_norm": 36.41415786743164,
      "learning_rate": 2.966883858593356e-05,
      "loss": 3.4843,
      "step": 1900
    },
    {
      "epoch": 0.15,
      "grad_norm": 41.39873123168945,
      "learning_rate": 2.9658609747018796e-05,
      "loss": 3.5257,
      "step": 1920
    },
    {
      "epoch": 0.16,
      "grad_norm": 31.24024200439453,
      "learning_rate": 2.964822714622412e-05,
      "loss": 3.5927,
      "step": 1940
    },
    {
      "epoch": 0.16,
      "grad_norm": 52.78026580810547,
      "learning_rate": 2.9637690892458103e-05,
      "loss": 3.4678,
      "step": 1960
    },
    {
      "epoch": 0.16,
      "grad_norm": 27.40117835998535,
      "learning_rate": 2.962700109624106e-05,
      "loss": 3.5541,
      "step": 1980
    },
    {
      "epoch": 0.16,
      "grad_norm": 23.172683715820312,
      "learning_rate": 2.961615786970389e-05,
      "loss": 3.5713,
      "step": 2000
    },
    {
      "epoch": 0.16,
      "grad_norm": 24.177541732788086,
      "learning_rate": 2.960516132658692e-05,
      "loss": 3.585,
      "step": 2020
    },
    {
      "epoch": 0.16,
      "grad_norm": 44.673912048339844,
      "learning_rate": 2.9594011582238672e-05,
      "loss": 3.5035,
      "step": 2040
    },
    {
      "epoch": 0.16,
      "grad_norm": 35.91664505004883,
      "learning_rate": 2.95827087536147e-05,
      "loss": 3.6404,
      "step": 2060
    },
    {
      "epoch": 0.17,
      "grad_norm": 27.3450870513916,
      "learning_rate": 2.9571252959276313e-05,
      "loss": 3.5121,
      "step": 2080
    },
    {
      "epoch": 0.17,
      "grad_norm": 25.66405487060547,
      "learning_rate": 2.955964431938939e-05,
      "loss": 3.5009,
      "step": 2100
    },
    {
      "epoch": 0.17,
      "grad_norm": 24.1674861907959,
      "learning_rate": 2.9547882955723052e-05,
      "loss": 3.5482,
      "step": 2120
    },
    {
      "epoch": 0.17,
      "grad_norm": 49.72268295288086,
      "learning_rate": 2.953596899164846e-05,
      "loss": 3.4969,
      "step": 2140
    },
    {
      "epoch": 0.17,
      "grad_norm": 26.238168716430664,
      "learning_rate": 2.9523902552137436e-05,
      "loss": 3.5541,
      "step": 2160
    },
    {
      "epoch": 0.17,
      "grad_norm": 30.524545669555664,
      "learning_rate": 2.951168376376124e-05,
      "loss": 3.6343,
      "step": 2180
    },
    {
      "epoch": 0.18,
      "grad_norm": 38.179908752441406,
      "learning_rate": 2.9499312754689168e-05,
      "loss": 3.4795,
      "step": 2200
    },
    {
      "epoch": 0.18,
      "grad_norm": 32.98453903198242,
      "learning_rate": 2.9486789654687256e-05,
      "loss": 3.6333,
      "step": 2220
    },
    {
      "epoch": 0.18,
      "grad_norm": 26.77848243713379,
      "learning_rate": 2.94741145951169e-05,
      "loss": 3.5654,
      "step": 2240
    },
    {
      "epoch": 0.18,
      "grad_norm": 27.737852096557617,
      "learning_rate": 2.9461287708933475e-05,
      "loss": 3.5044,
      "step": 2260
    },
    {
      "epoch": 0.18,
      "grad_norm": 34.2584342956543,
      "learning_rate": 2.9448309130684944e-05,
      "loss": 3.5979,
      "step": 2280
    },
    {
      "epoch": 0.18,
      "grad_norm": 57.86616897583008,
      "learning_rate": 2.9435178996510456e-05,
      "loss": 3.5726,
      "step": 2300
    },
    {
      "epoch": 0.19,
      "grad_norm": 37.64597702026367,
      "learning_rate": 2.9421897444138902e-05,
      "loss": 3.5913,
      "step": 2320
    },
    {
      "epoch": 0.19,
      "grad_norm": 35.22037124633789,
      "learning_rate": 2.9408464612887484e-05,
      "loss": 3.5959,
      "step": 2340
    },
    {
      "epoch": 0.19,
      "grad_norm": 31.878395080566406,
      "learning_rate": 2.9394880643660242e-05,
      "loss": 3.5974,
      "step": 2360
    },
    {
      "epoch": 0.19,
      "grad_norm": 43.614994049072266,
      "learning_rate": 2.938114567894659e-05,
      "loss": 3.4834,
      "step": 2380
    },
    {
      "epoch": 0.19,
      "grad_norm": 27.587766647338867,
      "learning_rate": 2.9367259862819805e-05,
      "loss": 3.6154,
      "step": 2400
    },
    {
      "epoch": 0.19,
      "grad_norm": 30.223772048950195,
      "learning_rate": 2.9353223340935533e-05,
      "loss": 3.4871,
      "step": 2420
    },
    {
      "epoch": 0.2,
      "grad_norm": 34.057884216308594,
      "learning_rate": 2.933903626053024e-05,
      "loss": 3.605,
      "step": 2440
    },
    {
      "epoch": 0.2,
      "grad_norm": 39.219242095947266,
      "learning_rate": 2.932469877041969e-05,
      "loss": 3.6091,
      "step": 2460
    },
    {
      "epoch": 0.2,
      "grad_norm": 33.33955001831055,
      "learning_rate": 2.931021102099737e-05,
      "loss": 3.4862,
      "step": 2480
    },
    {
      "epoch": 0.2,
      "grad_norm": 37.07484436035156,
      "learning_rate": 2.9295573164232913e-05,
      "loss": 3.5267,
      "step": 2500
    },
    {
      "epoch": 0.2,
      "grad_norm": 27.145864486694336,
      "learning_rate": 2.9280785353670514e-05,
      "loss": 3.4369,
      "step": 2520
    },
    {
      "epoch": 0.2,
      "grad_norm": 30.31035041809082,
      "learning_rate": 2.9265847744427305e-05,
      "loss": 3.6056,
      "step": 2540
    },
    {
      "epoch": 0.2,
      "grad_norm": 40.823490142822266,
      "learning_rate": 2.925076049319174e-05,
      "loss": 3.5916,
      "step": 2560
    },
    {
      "epoch": 0.21,
      "grad_norm": 44.224796295166016,
      "learning_rate": 2.9235523758221944e-05,
      "loss": 3.5881,
      "step": 2580
    },
    {
      "epoch": 0.21,
      "grad_norm": 33.34773254394531,
      "learning_rate": 2.922013769934406e-05,
      "loss": 3.5315,
      "step": 2600
    },
    {
      "epoch": 0.21,
      "grad_norm": 25.755775451660156,
      "learning_rate": 2.920460247795056e-05,
      "loss": 3.621,
      "step": 2620
    },
    {
      "epoch": 0.21,
      "grad_norm": 29.75731086730957,
      "learning_rate": 2.918891825699857e-05,
      "loss": 3.6067,
      "step": 2640
    }
  ],
  "logging_steps": 20,
  "max_steps": 20000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 20,
  "total_flos": 6257137317445632.0,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}