|
{
  "best_metric": 0.8684313725490196,
  "best_model_checkpoint": "vit-base-patch16-224-finetuned-food102/checkpoint-2151",
  "epoch": 2.998954339491112,
  "eval_steps": 500,
  "global_step": 2151,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013942140118508192,
      "grad_norm": 11.970505714416504,
      "learning_rate": 2.3148148148148148e-06,
      "loss": 19.054,
      "step": 10
    },
    {
      "epoch": 0.027884280237016383,
      "grad_norm": 11.977810859680176,
      "learning_rate": 4.6296296296296296e-06,
      "loss": 19.0411,
      "step": 20
    },
    {
      "epoch": 0.04182642035552457,
      "grad_norm": 12.143768310546875,
      "learning_rate": 6.944444444444445e-06,
      "loss": 19.0188,
      "step": 30
    },
    {
      "epoch": 0.055768560474032766,
      "grad_norm": 11.653632164001465,
      "learning_rate": 9.259259259259259e-06,
      "loss": 18.7585,
      "step": 40
    },
    {
      "epoch": 0.06971070059254096,
      "grad_norm": 13.25544261932373,
      "learning_rate": 1.1574074074074075e-05,
      "loss": 18.4989,
      "step": 50
    },
    {
      "epoch": 0.08365284071104914,
      "grad_norm": 12.296609878540039,
      "learning_rate": 1.388888888888889e-05,
      "loss": 18.199,
      "step": 60
    },
    {
      "epoch": 0.09759498082955734,
      "grad_norm": 11.88782024383545,
      "learning_rate": 1.6203703703703704e-05,
      "loss": 17.9952,
      "step": 70
    },
    {
      "epoch": 0.11153712094806553,
      "grad_norm": 11.943421363830566,
      "learning_rate": 1.8518518518518518e-05,
      "loss": 17.5776,
      "step": 80
    },
    {
      "epoch": 0.12547926106657373,
      "grad_norm": 12.558123588562012,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 17.0623,
      "step": 90
    },
    {
      "epoch": 0.13942140118508192,
      "grad_norm": 13.311689376831055,
      "learning_rate": 2.314814814814815e-05,
      "loss": 16.5161,
      "step": 100
    },
    {
      "epoch": 0.1533635413035901,
      "grad_norm": 14.52517318725586,
      "learning_rate": 2.5462962962962965e-05,
      "loss": 15.9671,
      "step": 110
    },
    {
      "epoch": 0.16730568142209828,
      "grad_norm": 12.310554504394531,
      "learning_rate": 2.777777777777778e-05,
      "loss": 15.2662,
      "step": 120
    },
    {
      "epoch": 0.18124782154060648,
      "grad_norm": 12.67236042022705,
      "learning_rate": 3.0092592592592593e-05,
      "loss": 14.4554,
      "step": 130
    },
    {
      "epoch": 0.19518996165911467,
      "grad_norm": 12.847444534301758,
      "learning_rate": 3.240740740740741e-05,
      "loss": 13.7588,
      "step": 140
    },
    {
      "epoch": 0.20913210177762287,
      "grad_norm": 12.77316665649414,
      "learning_rate": 3.472222222222222e-05,
      "loss": 13.0135,
      "step": 150
    },
    {
      "epoch": 0.22307424189613106,
      "grad_norm": 13.008152961730957,
      "learning_rate": 3.7037037037037037e-05,
      "loss": 12.324,
      "step": 160
    },
    {
      "epoch": 0.23701638201463926,
      "grad_norm": 13.326498985290527,
      "learning_rate": 3.935185185185186e-05,
      "loss": 11.2685,
      "step": 170
    },
    {
      "epoch": 0.25095852213314745,
      "grad_norm": 13.166327476501465,
      "learning_rate": 4.166666666666667e-05,
      "loss": 10.491,
      "step": 180
    },
    {
      "epoch": 0.26490066225165565,
      "grad_norm": 12.491602897644043,
      "learning_rate": 4.3981481481481486e-05,
      "loss": 9.8871,
      "step": 190
    },
    {
      "epoch": 0.27884280237016384,
      "grad_norm": 12.803274154663086,
      "learning_rate": 4.62962962962963e-05,
      "loss": 9.0965,
      "step": 200
    },
    {
      "epoch": 0.292784942488672,
      "grad_norm": 13.318122863769531,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 8.2912,
      "step": 210
    },
    {
      "epoch": 0.3067270826071802,
      "grad_norm": 12.178855895996094,
      "learning_rate": 4.989664082687339e-05,
      "loss": 8.0104,
      "step": 220
    },
    {
      "epoch": 0.3206692227256884,
      "grad_norm": 11.760112762451172,
      "learning_rate": 4.963824289405685e-05,
      "loss": 7.2478,
      "step": 230
    },
    {
      "epoch": 0.33461136284419657,
      "grad_norm": 12.436824798583984,
      "learning_rate": 4.9379844961240315e-05,
      "loss": 6.8172,
      "step": 240
    },
    {
      "epoch": 0.34855350296270476,
      "grad_norm": 11.301292419433594,
      "learning_rate": 4.912144702842377e-05,
      "loss": 6.5386,
      "step": 250
    },
    {
      "epoch": 0.36249564308121296,
      "grad_norm": 12.099160194396973,
      "learning_rate": 4.886304909560724e-05,
      "loss": 6.1708,
      "step": 260
    },
    {
      "epoch": 0.37643778319972115,
      "grad_norm": 12.640030860900879,
      "learning_rate": 4.86046511627907e-05,
      "loss": 6.0798,
      "step": 270
    },
    {
      "epoch": 0.39037992331822935,
      "grad_norm": 12.914190292358398,
      "learning_rate": 4.834625322997416e-05,
      "loss": 5.5983,
      "step": 280
    },
    {
      "epoch": 0.40432206343673754,
      "grad_norm": 13.290894508361816,
      "learning_rate": 4.8087855297157624e-05,
      "loss": 5.6591,
      "step": 290
    },
    {
      "epoch": 0.41826420355524574,
      "grad_norm": 12.26599407196045,
      "learning_rate": 4.782945736434109e-05,
      "loss": 5.0549,
      "step": 300
    },
    {
      "epoch": 0.43220634367375393,
      "grad_norm": 13.02130126953125,
      "learning_rate": 4.757105943152455e-05,
      "loss": 4.8798,
      "step": 310
    },
    {
      "epoch": 0.44614848379226213,
      "grad_norm": 11.812455177307129,
      "learning_rate": 4.7312661498708014e-05,
      "loss": 4.8651,
      "step": 320
    },
    {
      "epoch": 0.4600906239107703,
      "grad_norm": 15.11469554901123,
      "learning_rate": 4.705426356589148e-05,
      "loss": 4.8677,
      "step": 330
    },
    {
      "epoch": 0.4740327640292785,
      "grad_norm": 13.028066635131836,
      "learning_rate": 4.679586563307494e-05,
      "loss": 4.7999,
      "step": 340
    },
    {
      "epoch": 0.48797490414778666,
      "grad_norm": 14.58755111694336,
      "learning_rate": 4.65374677002584e-05,
      "loss": 4.7765,
      "step": 350
    },
    {
      "epoch": 0.5019170442662949,
      "grad_norm": 11.573223114013672,
      "learning_rate": 4.627906976744186e-05,
      "loss": 4.6033,
      "step": 360
    },
    {
      "epoch": 0.515859184384803,
      "grad_norm": 13.456429481506348,
      "learning_rate": 4.602067183462532e-05,
      "loss": 4.406,
      "step": 370
    },
    {
      "epoch": 0.5298013245033113,
      "grad_norm": 13.06003475189209,
      "learning_rate": 4.5762273901808786e-05,
      "loss": 4.4271,
      "step": 380
    },
    {
      "epoch": 0.5437434646218194,
      "grad_norm": 12.669313430786133,
      "learning_rate": 4.550387596899225e-05,
      "loss": 4.1339,
      "step": 390
    },
    {
      "epoch": 0.5576856047403277,
      "grad_norm": 12.845880508422852,
      "learning_rate": 4.524547803617571e-05,
      "loss": 4.3616,
      "step": 400
    },
    {
      "epoch": 0.5716277448588358,
      "grad_norm": 13.643671035766602,
      "learning_rate": 4.4987080103359175e-05,
      "loss": 4.03,
      "step": 410
    },
    {
      "epoch": 0.585569884977344,
      "grad_norm": 12.903581619262695,
      "learning_rate": 4.472868217054264e-05,
      "loss": 4.0456,
      "step": 420
    },
    {
      "epoch": 0.5995120250958522,
      "grad_norm": 11.996821403503418,
      "learning_rate": 4.44702842377261e-05,
      "loss": 4.1508,
      "step": 430
    },
    {
      "epoch": 0.6134541652143604,
      "grad_norm": 12.505786895751953,
      "learning_rate": 4.421188630490956e-05,
      "loss": 4.112,
      "step": 440
    },
    {
      "epoch": 0.6273963053328686,
      "grad_norm": 12.409116744995117,
      "learning_rate": 4.395348837209303e-05,
      "loss": 3.8887,
      "step": 450
    },
    {
      "epoch": 0.6413384454513767,
      "grad_norm": 12.7697114944458,
      "learning_rate": 4.3695090439276484e-05,
      "loss": 3.7454,
      "step": 460
    },
    {
      "epoch": 0.655280585569885,
      "grad_norm": 13.008294105529785,
      "learning_rate": 4.3436692506459954e-05,
      "loss": 3.879,
      "step": 470
    },
    {
      "epoch": 0.6692227256883931,
      "grad_norm": 12.83389949798584,
      "learning_rate": 4.317829457364341e-05,
      "loss": 4.032,
      "step": 480
    },
    {
      "epoch": 0.6831648658069014,
      "grad_norm": 13.29801082611084,
      "learning_rate": 4.2919896640826874e-05,
      "loss": 3.9523,
      "step": 490
    },
    {
      "epoch": 0.6971070059254095,
      "grad_norm": 13.225083351135254,
      "learning_rate": 4.266149870801034e-05,
      "loss": 3.7454,
      "step": 500
    },
    {
      "epoch": 0.7110491460439178,
      "grad_norm": 13.245360374450684,
      "learning_rate": 4.24031007751938e-05,
      "loss": 3.5761,
      "step": 510
    },
    {
      "epoch": 0.7249912861624259,
      "grad_norm": 12.166769981384277,
      "learning_rate": 4.214470284237726e-05,
      "loss": 3.8125,
      "step": 520
    },
    {
      "epoch": 0.7389334262809342,
      "grad_norm": 13.542282104492188,
      "learning_rate": 4.1886304909560726e-05,
      "loss": 3.6979,
      "step": 530
    },
    {
      "epoch": 0.7528755663994423,
      "grad_norm": 17.77124786376953,
      "learning_rate": 4.162790697674418e-05,
      "loss": 3.4578,
      "step": 540
    },
    {
      "epoch": 0.7668177065179506,
      "grad_norm": 13.022871971130371,
      "learning_rate": 4.136950904392765e-05,
      "loss": 3.4832,
      "step": 550
    },
    {
      "epoch": 0.7807598466364587,
      "grad_norm": 15.890631675720215,
      "learning_rate": 4.111111111111111e-05,
      "loss": 3.5997,
      "step": 560
    },
    {
      "epoch": 0.7947019867549668,
      "grad_norm": 14.40475845336914,
      "learning_rate": 4.085271317829458e-05,
      "loss": 3.6687,
      "step": 570
    },
    {
      "epoch": 0.8086441268734751,
      "grad_norm": 10.683858871459961,
      "learning_rate": 4.0594315245478035e-05,
      "loss": 3.3821,
      "step": 580
    },
    {
      "epoch": 0.8225862669919832,
      "grad_norm": 14.170791625976562,
      "learning_rate": 4.0335917312661505e-05,
      "loss": 3.4403,
      "step": 590
    },
    {
      "epoch": 0.8365284071104915,
      "grad_norm": 12.744939804077148,
      "learning_rate": 4.007751937984496e-05,
      "loss": 3.1932,
      "step": 600
    },
    {
      "epoch": 0.8504705472289996,
      "grad_norm": 12.260272979736328,
      "learning_rate": 3.9819121447028425e-05,
      "loss": 3.5403,
      "step": 610
    },
    {
      "epoch": 0.8644126873475079,
      "grad_norm": 14.6715669631958,
      "learning_rate": 3.956072351421189e-05,
      "loss": 3.4033,
      "step": 620
    },
    {
      "epoch": 0.878354827466016,
      "grad_norm": 12.32928466796875,
      "learning_rate": 3.930232558139535e-05,
      "loss": 3.2728,
      "step": 630
    },
    {
      "epoch": 0.8922969675845243,
      "grad_norm": 13.14637279510498,
      "learning_rate": 3.9043927648578814e-05,
      "loss": 3.3631,
      "step": 640
    },
    {
      "epoch": 0.9062391077030324,
      "grad_norm": 11.95271110534668,
      "learning_rate": 3.878552971576228e-05,
      "loss": 3.2798,
      "step": 650
    },
    {
      "epoch": 0.9201812478215406,
      "grad_norm": 12.834661483764648,
      "learning_rate": 3.8527131782945734e-05,
      "loss": 3.5264,
      "step": 660
    },
    {
      "epoch": 0.9341233879400488,
      "grad_norm": 13.227917671203613,
      "learning_rate": 3.8268733850129204e-05,
      "loss": 3.5221,
      "step": 670
    },
    {
      "epoch": 0.948065528058557,
      "grad_norm": 13.852123260498047,
      "learning_rate": 3.801033591731266e-05,
      "loss": 3.1419,
      "step": 680
    },
    {
      "epoch": 0.9620076681770652,
      "grad_norm": 16.48082160949707,
      "learning_rate": 3.775193798449613e-05,
      "loss": 3.4653,
      "step": 690
    },
    {
      "epoch": 0.9759498082955733,
      "grad_norm": 11.60905933380127,
      "learning_rate": 3.7493540051679586e-05,
      "loss": 3.1008,
      "step": 700
    },
    {
      "epoch": 0.9898919484140816,
      "grad_norm": 25.217321395874023,
      "learning_rate": 3.7235142118863056e-05,
      "loss": 3.3941,
      "step": 710
    },
    {
      "epoch": 0.9996514464970373,
      "eval_accuracy": 0.8350980392156863,
      "eval_loss": 0.6625040769577026,
      "eval_runtime": 286.4591,
      "eval_samples_per_second": 35.607,
      "eval_steps_per_second": 1.114,
      "step": 717
    },
    {
      "epoch": 1.0038340885325898,
      "grad_norm": 10.983098983764648,
      "learning_rate": 3.697674418604651e-05,
      "loss": 3.2282,
      "step": 720
    },
    {
      "epoch": 1.0177762286510978,
      "grad_norm": 12.984674453735352,
      "learning_rate": 3.6718346253229976e-05,
      "loss": 2.9876,
      "step": 730
    },
    {
      "epoch": 1.031718368769606,
      "grad_norm": 11.363933563232422,
      "learning_rate": 3.645994832041344e-05,
      "loss": 3.1042,
      "step": 740
    },
    {
      "epoch": 1.0456605088881143,
      "grad_norm": 12.959346771240234,
      "learning_rate": 3.62015503875969e-05,
      "loss": 2.8314,
      "step": 750
    },
    {
      "epoch": 1.0596026490066226,
      "grad_norm": 13.28182601928711,
      "learning_rate": 3.5943152454780365e-05,
      "loss": 2.9215,
      "step": 760
    },
    {
      "epoch": 1.0735447891251306,
      "grad_norm": 12.953139305114746,
      "learning_rate": 3.568475452196383e-05,
      "loss": 2.9363,
      "step": 770
    },
    {
      "epoch": 1.0874869292436389,
      "grad_norm": 12.209274291992188,
      "learning_rate": 3.5426356589147285e-05,
      "loss": 2.9331,
      "step": 780
    },
    {
      "epoch": 1.1014290693621471,
      "grad_norm": 12.68194580078125,
      "learning_rate": 3.5167958656330755e-05,
      "loss": 2.9515,
      "step": 790
    },
    {
      "epoch": 1.1153712094806554,
      "grad_norm": 11.559549331665039,
      "learning_rate": 3.490956072351421e-05,
      "loss": 2.7768,
      "step": 800
    },
    {
      "epoch": 1.1293133495991634,
      "grad_norm": 13.342930793762207,
      "learning_rate": 3.465116279069768e-05,
      "loss": 2.8238,
      "step": 810
    },
    {
      "epoch": 1.1432554897176717,
      "grad_norm": 14.305107116699219,
      "learning_rate": 3.439276485788114e-05,
      "loss": 2.9598,
      "step": 820
    },
    {
      "epoch": 1.15719762983618,
      "grad_norm": 12.436918258666992,
      "learning_rate": 3.41343669250646e-05,
      "loss": 2.8277,
      "step": 830
    },
    {
      "epoch": 1.1711397699546882,
      "grad_norm": 12.794795036315918,
      "learning_rate": 3.3875968992248064e-05,
      "loss": 2.7674,
      "step": 840
    },
    {
      "epoch": 1.1850819100731962,
      "grad_norm": 12.380959510803223,
      "learning_rate": 3.361757105943153e-05,
      "loss": 2.6084,
      "step": 850
    },
    {
      "epoch": 1.1990240501917044,
      "grad_norm": 16.531293869018555,
      "learning_rate": 3.335917312661499e-05,
      "loss": 2.6154,
      "step": 860
    },
    {
      "epoch": 1.2129661903102127,
      "grad_norm": 12.086883544921875,
      "learning_rate": 3.310077519379845e-05,
      "loss": 2.9116,
      "step": 870
    },
    {
      "epoch": 1.2269083304287207,
      "grad_norm": 11.90904712677002,
      "learning_rate": 3.2842377260981916e-05,
      "loss": 2.7052,
      "step": 880
    },
    {
      "epoch": 1.240850470547229,
      "grad_norm": 13.168009757995605,
      "learning_rate": 3.258397932816537e-05,
      "loss": 2.5401,
      "step": 890
    },
    {
      "epoch": 1.2547926106657372,
      "grad_norm": 13.133540153503418,
      "learning_rate": 3.2325581395348836e-05,
      "loss": 3.089,
      "step": 900
    },
    {
      "epoch": 1.2687347507842452,
      "grad_norm": 10.270415306091309,
      "learning_rate": 3.20671834625323e-05,
      "loss": 2.5938,
      "step": 910
    },
    {
      "epoch": 1.2826768909027535,
      "grad_norm": 13.742632865905762,
      "learning_rate": 3.180878552971576e-05,
      "loss": 2.6841,
      "step": 920
    },
    {
      "epoch": 1.2966190310212617,
      "grad_norm": 10.14291000366211,
      "learning_rate": 3.1550387596899225e-05,
      "loss": 2.5288,
      "step": 930
    },
    {
      "epoch": 1.31056117113977,
      "grad_norm": 11.036438941955566,
      "learning_rate": 3.129198966408269e-05,
      "loss": 2.8172,
      "step": 940
    },
    {
      "epoch": 1.3245033112582782,
      "grad_norm": 11.163543701171875,
      "learning_rate": 3.103359173126615e-05,
      "loss": 2.8402,
      "step": 950
    },
    {
      "epoch": 1.3384454513767863,
      "grad_norm": 16.406246185302734,
      "learning_rate": 3.0775193798449615e-05,
      "loss": 2.8161,
      "step": 960
    },
    {
      "epoch": 1.3523875914952945,
      "grad_norm": 11.284817695617676,
      "learning_rate": 3.0516795865633074e-05,
      "loss": 2.448,
      "step": 970
    },
    {
      "epoch": 1.3663297316138028,
      "grad_norm": 12.130209922790527,
      "learning_rate": 3.025839793281654e-05,
      "loss": 2.8401,
      "step": 980
    },
    {
      "epoch": 1.3802718717323108,
      "grad_norm": 11.599443435668945,
      "learning_rate": 3e-05,
      "loss": 2.6025,
      "step": 990
    },
    {
      "epoch": 1.394214011850819,
      "grad_norm": 11.705489158630371,
      "learning_rate": 2.9741602067183467e-05,
      "loss": 2.6507,
      "step": 1000
    },
    {
      "epoch": 1.4081561519693273,
      "grad_norm": 11.031926155090332,
      "learning_rate": 2.9483204134366927e-05,
      "loss": 2.8557,
      "step": 1010
    },
    {
      "epoch": 1.4220982920878356,
      "grad_norm": 13.046774864196777,
      "learning_rate": 2.9224806201550387e-05,
      "loss": 2.972,
      "step": 1020
    },
    {
      "epoch": 1.4360404322063438,
      "grad_norm": 12.700828552246094,
      "learning_rate": 2.8966408268733853e-05,
      "loss": 2.7807,
      "step": 1030
    },
    {
      "epoch": 1.4499825723248518,
      "grad_norm": 15.926816940307617,
      "learning_rate": 2.8708010335917313e-05,
      "loss": 2.5443,
      "step": 1040
    },
    {
      "epoch": 1.46392471244336,
      "grad_norm": 14.657876968383789,
      "learning_rate": 2.844961240310078e-05,
      "loss": 2.8499,
      "step": 1050
    },
    {
      "epoch": 1.4778668525618683,
      "grad_norm": 12.2283935546875,
      "learning_rate": 2.819121447028424e-05,
      "loss": 2.6669,
      "step": 1060
    },
    {
      "epoch": 1.4918089926803764,
      "grad_norm": 14.132109642028809,
      "learning_rate": 2.7932816537467703e-05,
      "loss": 2.75,
      "step": 1070
    },
    {
      "epoch": 1.5057511327988846,
      "grad_norm": 15.497398376464844,
      "learning_rate": 2.7674418604651166e-05,
      "loss": 2.6658,
      "step": 1080
    },
    {
      "epoch": 1.5196932729173929,
      "grad_norm": 39.895477294921875,
      "learning_rate": 2.7416020671834625e-05,
      "loss": 2.8124,
      "step": 1090
    },
    {
      "epoch": 1.533635413035901,
      "grad_norm": 12.836750030517578,
      "learning_rate": 2.7157622739018092e-05,
      "loss": 2.7151,
      "step": 1100
    },
    {
      "epoch": 1.5475775531544091,
      "grad_norm": 12.65983772277832,
      "learning_rate": 2.6899224806201552e-05,
      "loss": 2.5187,
      "step": 1110
    },
    {
      "epoch": 1.5615196932729174,
      "grad_norm": 12.410466194152832,
      "learning_rate": 2.6640826873385015e-05,
      "loss": 2.7021,
      "step": 1120
    },
    {
      "epoch": 1.5754618333914254,
      "grad_norm": 16.45282554626465,
      "learning_rate": 2.6382428940568478e-05,
      "loss": 2.8277,
      "step": 1130
    },
    {
      "epoch": 1.589403973509934,
      "grad_norm": 12.401958465576172,
      "learning_rate": 2.6124031007751938e-05,
      "loss": 2.6465,
      "step": 1140
    },
    {
      "epoch": 1.603346113628442,
      "grad_norm": 13.451824188232422,
      "learning_rate": 2.58656330749354e-05,
      "loss": 2.7568,
      "step": 1150
    },
    {
      "epoch": 1.6172882537469502,
      "grad_norm": 12.007116317749023,
      "learning_rate": 2.5607235142118864e-05,
      "loss": 2.5732,
      "step": 1160
    },
    {
      "epoch": 1.6312303938654584,
      "grad_norm": 13.369595527648926,
      "learning_rate": 2.5348837209302327e-05,
      "loss": 2.5993,
      "step": 1170
    },
    {
      "epoch": 1.6451725339839665,
      "grad_norm": 13.079323768615723,
      "learning_rate": 2.5090439276485787e-05,
      "loss": 2.557,
      "step": 1180
    },
    {
      "epoch": 1.6591146741024747,
      "grad_norm": 12.571920394897461,
      "learning_rate": 2.483204134366925e-05,
      "loss": 2.5582,
      "step": 1190
    },
    {
      "epoch": 1.673056814220983,
      "grad_norm": 12.451131820678711,
      "learning_rate": 2.4573643410852713e-05,
      "loss": 2.485,
      "step": 1200
    },
    {
      "epoch": 1.686998954339491,
      "grad_norm": 11.426173210144043,
      "learning_rate": 2.4315245478036177e-05,
      "loss": 2.8228,
      "step": 1210
    },
    {
      "epoch": 1.7009410944579995,
      "grad_norm": 13.152283668518066,
      "learning_rate": 2.405684754521964e-05,
      "loss": 2.9743,
      "step": 1220
    },
    {
      "epoch": 1.7148832345765075,
      "grad_norm": 12.218315124511719,
      "learning_rate": 2.37984496124031e-05,
      "loss": 2.5617,
      "step": 1230
    },
    {
      "epoch": 1.7288253746950157,
      "grad_norm": 16.104169845581055,
      "learning_rate": 2.3540051679586563e-05,
      "loss": 2.5886,
      "step": 1240
    },
    {
      "epoch": 1.742767514813524,
      "grad_norm": 9.216697692871094,
      "learning_rate": 2.3281653746770026e-05,
      "loss": 2.2698,
      "step": 1250
    },
    {
      "epoch": 1.756709654932032,
      "grad_norm": 12.568550109863281,
      "learning_rate": 2.302325581395349e-05,
      "loss": 2.7008,
      "step": 1260
    },
    {
      "epoch": 1.7706517950505403,
      "grad_norm": 13.100058555603027,
      "learning_rate": 2.2764857881136952e-05,
      "loss": 2.5122,
      "step": 1270
    },
    {
      "epoch": 1.7845939351690485,
      "grad_norm": 14.60105037689209,
      "learning_rate": 2.2506459948320415e-05,
      "loss": 2.7869,
      "step": 1280
    },
    {
      "epoch": 1.7985360752875565,
      "grad_norm": 11.809743881225586,
      "learning_rate": 2.2248062015503875e-05,
      "loss": 3.0241,
      "step": 1290
    },
    {
      "epoch": 1.8124782154060648,
      "grad_norm": 11.19316291809082,
      "learning_rate": 2.1989664082687338e-05,
      "loss": 2.3842,
      "step": 1300
    },
    {
      "epoch": 1.826420355524573,
      "grad_norm": 11.623698234558105,
      "learning_rate": 2.17312661498708e-05,
      "loss": 2.5165,
      "step": 1310
    },
    {
      "epoch": 1.840362495643081,
      "grad_norm": 12.553630828857422,
      "learning_rate": 2.1472868217054264e-05,
      "loss": 2.6636,
      "step": 1320
    },
    {
      "epoch": 1.8543046357615895,
      "grad_norm": 16.92484474182129,
      "learning_rate": 2.1214470284237728e-05,
      "loss": 2.8168,
      "step": 1330
    },
    {
      "epoch": 1.8682467758800976,
      "grad_norm": 11.145873069763184,
      "learning_rate": 2.095607235142119e-05,
      "loss": 2.498,
      "step": 1340
    },
    {
      "epoch": 1.8821889159986058,
      "grad_norm": 13.681021690368652,
      "learning_rate": 2.069767441860465e-05,
      "loss": 2.5061,
      "step": 1350
    },
    {
      "epoch": 1.896131056117114,
      "grad_norm": 13.941627502441406,
      "learning_rate": 2.0439276485788114e-05,
      "loss": 2.5961,
      "step": 1360
    },
    {
      "epoch": 1.910073196235622,
      "grad_norm": 10.970531463623047,
      "learning_rate": 2.0180878552971577e-05,
      "loss": 2.5375,
      "step": 1370
    },
    {
      "epoch": 1.9240153363541304,
      "grad_norm": 11.011059761047363,
      "learning_rate": 1.992248062015504e-05,
      "loss": 2.6054,
      "step": 1380
    },
    {
      "epoch": 1.9379574764726386,
      "grad_norm": 11.878082275390625,
      "learning_rate": 1.9664082687338503e-05,
      "loss": 2.487,
      "step": 1390
    },
    {
      "epoch": 1.9518996165911466,
      "grad_norm": 12.48755931854248,
      "learning_rate": 1.9405684754521966e-05,
      "loss": 2.8289,
      "step": 1400
    },
    {
      "epoch": 1.9658417567096549,
      "grad_norm": 14.737470626831055,
      "learning_rate": 1.9147286821705426e-05,
      "loss": 2.5594,
      "step": 1410
    },
    {
      "epoch": 1.9797838968281631,
      "grad_norm": 11.880918502807617,
      "learning_rate": 1.888888888888889e-05,
      "loss": 2.4736,
      "step": 1420
    },
    {
      "epoch": 1.9937260369466712,
      "grad_norm": 14.554669380187988,
      "learning_rate": 1.8630490956072352e-05,
      "loss": 2.6442,
      "step": 1430
    },
    {
      "epoch": 1.9993028929940746,
      "eval_accuracy": 0.8597058823529412,
      "eval_loss": 0.5420134663581848,
      "eval_runtime": 226.1399,
      "eval_samples_per_second": 45.105,
      "eval_steps_per_second": 1.411,
      "step": 1434
    },
    {
      "epoch": 2.0076681770651796,
      "grad_norm": 11.852781295776367,
      "learning_rate": 1.8372093023255815e-05,
      "loss": 2.2307,
      "step": 1440
    },
    {
      "epoch": 2.0216103171836877,
      "grad_norm": 12.653249740600586,
      "learning_rate": 1.811369509043928e-05,
      "loss": 2.3448,
      "step": 1450
    },
    {
      "epoch": 2.0355524573021957,
      "grad_norm": 15.625718116760254,
      "learning_rate": 1.7855297157622742e-05,
      "loss": 2.4701,
      "step": 1460
    },
    {
      "epoch": 2.049494597420704,
      "grad_norm": 10.152451515197754,
      "learning_rate": 1.75968992248062e-05,
      "loss": 2.2279,
      "step": 1470
    },
    {
      "epoch": 2.063436737539212,
      "grad_norm": 11.112920761108398,
      "learning_rate": 1.7338501291989665e-05,
      "loss": 2.5778,
      "step": 1480
    },
    {
      "epoch": 2.0773788776577207,
      "grad_norm": 13.829360008239746,
      "learning_rate": 1.7080103359173128e-05,
      "loss": 2.2665,
      "step": 1490
    },
    {
      "epoch": 2.0913210177762287,
      "grad_norm": 12.493084907531738,
      "learning_rate": 1.682170542635659e-05,
      "loss": 2.5002,
      "step": 1500
    },
    {
      "epoch": 2.1052631578947367,
      "grad_norm": 11.931866645812988,
      "learning_rate": 1.6563307493540054e-05,
      "loss": 2.3208,
      "step": 1510
    },
    {
      "epoch": 2.119205298013245,
      "grad_norm": 10.55782413482666,
      "learning_rate": 1.6304909560723514e-05,
      "loss": 2.2226,
      "step": 1520
    },
    {
      "epoch": 2.1331474381317532,
      "grad_norm": 14.521842002868652,
      "learning_rate": 1.6046511627906977e-05,
      "loss": 2.1835,
      "step": 1530
    },
    {
      "epoch": 2.1470895782502613,
      "grad_norm": 12.054328918457031,
      "learning_rate": 1.578811369509044e-05,
      "loss": 2.3775,
      "step": 1540
    },
    {
      "epoch": 2.1610317183687697,
      "grad_norm": 15.088787078857422,
      "learning_rate": 1.5529715762273903e-05,
      "loss": 2.4388,
      "step": 1550
    },
    {
      "epoch": 2.1749738584872778,
      "grad_norm": 13.146328926086426,
      "learning_rate": 1.5271317829457366e-05,
      "loss": 2.5277,
      "step": 1560
    },
    {
      "epoch": 2.188915998605786,
      "grad_norm": 12.280598640441895,
      "learning_rate": 1.5012919896640828e-05,
      "loss": 2.3422,
      "step": 1570
    },
    {
      "epoch": 2.2028581387242943,
      "grad_norm": 12.004185676574707,
      "learning_rate": 1.475452196382429e-05,
      "loss": 2.4908,
      "step": 1580
    },
    {
      "epoch": 2.2168002788428023,
      "grad_norm": 10.80855941772461,
      "learning_rate": 1.4496124031007753e-05,
      "loss": 2.357,
      "step": 1590
    },
    {
      "epoch": 2.2307424189613108,
      "grad_norm": 11.332206726074219,
      "learning_rate": 1.4237726098191214e-05,
      "loss": 2.2628,
      "step": 1600
    },
    {
      "epoch": 2.244684559079819,
      "grad_norm": 13.836607933044434,
      "learning_rate": 1.3979328165374677e-05,
      "loss": 2.1193,
      "step": 1610
    },
    {
      "epoch": 2.258626699198327,
      "grad_norm": 11.600666999816895,
      "learning_rate": 1.372093023255814e-05,
      "loss": 2.4166,
      "step": 1620
    },
    {
      "epoch": 2.2725688393168353,
      "grad_norm": 14.399880409240723,
      "learning_rate": 1.3462532299741603e-05,
      "loss": 2.2878,
      "step": 1630
    },
    {
      "epoch": 2.2865109794353433,
      "grad_norm": 16.899677276611328,
      "learning_rate": 1.3204134366925063e-05,
      "loss": 2.26,
      "step": 1640
    },
    {
      "epoch": 2.3004531195538513,
      "grad_norm": 12.676505088806152,
      "learning_rate": 1.2945736434108526e-05,
      "loss": 2.5939,
      "step": 1650
    },
    {
      "epoch": 2.31439525967236,
      "grad_norm": 11.750486373901367,
      "learning_rate": 1.268733850129199e-05,
      "loss": 2.2246,
      "step": 1660
    },
    {
      "epoch": 2.328337399790868,
      "grad_norm": 14.333374977111816,
      "learning_rate": 1.2428940568475453e-05,
      "loss": 2.4163,
      "step": 1670
    },
    {
      "epoch": 2.3422795399093763,
      "grad_norm": 12.187148094177246,
      "learning_rate": 1.2170542635658916e-05,
      "loss": 2.3381,
      "step": 1680
    },
    {
      "epoch": 2.3562216800278843,
      "grad_norm": 10.980198860168457,
      "learning_rate": 1.1912144702842377e-05,
      "loss": 2.3502,
      "step": 1690
    },
    {
      "epoch": 2.3701638201463924,
      "grad_norm": 13.697635650634766,
      "learning_rate": 1.165374677002584e-05,
      "loss": 2.2907,
      "step": 1700
    },
    {
      "epoch": 2.384105960264901,
      "grad_norm": 12.195317268371582,
      "learning_rate": 1.1395348837209304e-05,
      "loss": 2.2207,
      "step": 1710
    },
    {
      "epoch": 2.398048100383409,
      "grad_norm": 39.64952850341797,
      "learning_rate": 1.1136950904392765e-05,
      "loss": 2.2268,
      "step": 1720
    },
    {
      "epoch": 2.411990240501917,
      "grad_norm": 11.313281059265137,
      "learning_rate": 1.0878552971576228e-05,
      "loss": 2.2816,
      "step": 1730
    },
    {
      "epoch": 2.4259323806204254,
      "grad_norm": 13.669633865356445,
      "learning_rate": 1.0620155038759691e-05,
      "loss": 2.4572,
      "step": 1740
    },
    {
      "epoch": 2.4398745207389334,
      "grad_norm": 13.479631423950195,
      "learning_rate": 1.0361757105943153e-05,
      "loss": 2.094,
      "step": 1750
    },
    {
      "epoch": 2.4538166608574414,
      "grad_norm": 10.151293754577637,
      "learning_rate": 1.0103359173126616e-05,
      "loss": 2.0609,
      "step": 1760
    },
    {
      "epoch": 2.46775880097595,
      "grad_norm": 14.358535766601562,
      "learning_rate": 9.844961240310079e-06,
      "loss": 2.4335,
      "step": 1770
    },
    {
      "epoch": 2.481700941094458,
      "grad_norm": 14.903388023376465,
      "learning_rate": 9.58656330749354e-06,
      "loss": 2.2056,
      "step": 1780
    },
    {
      "epoch": 2.4956430812129664,
      "grad_norm": 12.196182250976562,
      "learning_rate": 9.328165374677004e-06,
      "loss": 2.358,
      "step": 1790
    },
    {
      "epoch": 2.5095852213314744,
      "grad_norm": 10.977519989013672,
      "learning_rate": 9.069767441860467e-06,
      "loss": 2.1615,
      "step": 1800
    },
    {
      "epoch": 2.5235273614499825,
      "grad_norm": 10.606942176818848,
      "learning_rate": 8.811369509043928e-06,
      "loss": 2.1802,
      "step": 1810
    },
    {
      "epoch": 2.5374695015684905,
      "grad_norm": 11.948677062988281,
      "learning_rate": 8.552971576227391e-06,
      "loss": 2.3221,
      "step": 1820
    },
    {
      "epoch": 2.551411641686999,
      "grad_norm": 8.935708999633789,
      "learning_rate": 8.294573643410853e-06,
      "loss": 2.0471,
      "step": 1830
    },
    {
      "epoch": 2.565353781805507,
      "grad_norm": 11.44541072845459,
      "learning_rate": 8.036175710594316e-06,
      "loss": 2.1508,
      "step": 1840
    },
    {
      "epoch": 2.5792959219240155,
      "grad_norm": 12.39538288116455,
      "learning_rate": 7.777777777777777e-06,
      "loss": 2.253,
      "step": 1850
    },
    {
      "epoch": 2.5932380620425235,
      "grad_norm": 16.37424087524414,
      "learning_rate": 7.5193798449612415e-06,
      "loss": 2.2232,
      "step": 1860
    },
    {
      "epoch": 2.6071802021610315,
      "grad_norm": 11.820206642150879,
      "learning_rate": 7.260981912144703e-06,
      "loss": 2.4425,
      "step": 1870
    },
    {
      "epoch": 2.62112234227954,
      "grad_norm": 12.931533813476562,
      "learning_rate": 7.002583979328166e-06,
      "loss": 2.2895,
      "step": 1880
    },
    {
      "epoch": 2.635064482398048,
      "grad_norm": 13.43673038482666,
      "learning_rate": 6.744186046511629e-06,
      "loss": 2.2439,
      "step": 1890
    },
    {
      "epoch": 2.6490066225165565,
      "grad_norm": 10.057219505310059,
      "learning_rate": 6.485788113695091e-06,
      "loss": 2.2317,
      "step": 1900
    },
    {
      "epoch": 2.6629487626350645,
      "grad_norm": 15.320812225341797,
      "learning_rate": 6.227390180878554e-06,
      "loss": 2.3445,
      "step": 1910
    },
    {
      "epoch": 2.6768909027535726,
      "grad_norm": 13.997590065002441,
      "learning_rate": 5.968992248062015e-06,
      "loss": 2.1295,
      "step": 1920
    },
    {
      "epoch": 2.690833042872081,
      "grad_norm": 10.897136688232422,
      "learning_rate": 5.710594315245478e-06,
      "loss": 2.0328,
      "step": 1930
    },
    {
      "epoch": 2.704775182990589,
      "grad_norm": 11.251324653625488,
      "learning_rate": 5.452196382428941e-06,
      "loss": 2.2964,
      "step": 1940
    },
    {
      "epoch": 2.718717323109097,
      "grad_norm": 12.688488960266113,
      "learning_rate": 5.193798449612403e-06,
      "loss": 2.585,
      "step": 1950
    },
    {
      "epoch": 2.7326594632276056,
      "grad_norm": 15.246435165405273,
      "learning_rate": 4.935400516795865e-06,
      "loss": 2.4457,
      "step": 1960
    },
    {
      "epoch": 2.7466016033461136,
      "grad_norm": 11.022492408752441,
      "learning_rate": 4.6770025839793285e-06,
      "loss": 1.9957,
      "step": 1970
    },
    {
      "epoch": 2.7605437434646216,
      "grad_norm": 15.17203426361084,
      "learning_rate": 4.418604651162791e-06,
      "loss": 2.3373,
      "step": 1980
    },
    {
      "epoch": 2.77448588358313,
      "grad_norm": 13.542671203613281,
      "learning_rate": 4.160206718346253e-06,
      "loss": 2.392,
      "step": 1990
    },
    {
      "epoch": 2.788428023701638,
      "grad_norm": 15.951026916503906,
      "learning_rate": 3.901808785529716e-06,
      "loss": 1.9829,
      "step": 2000
    },
    {
      "epoch": 2.8023701638201466,
      "grad_norm": 13.995731353759766,
      "learning_rate": 3.6434108527131786e-06,
      "loss": 2.4399,
      "step": 2010
    },
    {
      "epoch": 2.8163123039386546,
      "grad_norm": 11.442358016967773,
      "learning_rate": 3.385012919896641e-06,
      "loss": 2.2327,
      "step": 2020
    },
    {
      "epoch": 2.8302544440571626,
      "grad_norm": 13.158218383789062,
      "learning_rate": 3.1266149870801036e-06,
      "loss": 2.1585,
      "step": 2030
    },
    {
      "epoch": 2.844196584175671,
      "grad_norm": 14.946197509765625,
      "learning_rate": 2.868217054263566e-06,
      "loss": 2.2202,
      "step": 2040
    },
    {
      "epoch": 2.858138724294179,
      "grad_norm": 13.085625648498535,
      "learning_rate": 2.6098191214470286e-06,
      "loss": 2.3849,
      "step": 2050
    },
    {
      "epoch": 2.8720808644126876,
      "grad_norm": 11.152565002441406,
      "learning_rate": 2.351421188630491e-06,
      "loss": 2.3714,
      "step": 2060
    },
    {
      "epoch": 2.8860230045311956,
      "grad_norm": 13.09467601776123,
      "learning_rate": 2.0930232558139536e-06,
      "loss": 2.1876,
      "step": 2070
    },
    {
      "epoch": 2.8999651446497037,
      "grad_norm": 17.58858871459961,
      "learning_rate": 1.8346253229974162e-06,
      "loss": 2.2959,
      "step": 2080
    },
    {
      "epoch": 2.9139072847682117,
      "grad_norm": 16.1025447845459,
      "learning_rate": 1.5762273901808785e-06,
      "loss": 2.3838,
      "step": 2090
    },
    {
      "epoch": 2.92784942488672,
      "grad_norm": 13.546030044555664,
      "learning_rate": 1.3178294573643412e-06,
      "loss": 2.14,
      "step": 2100
    },
    {
      "epoch": 2.941791565005228,
      "grad_norm": 12.100831985473633,
      "learning_rate": 1.0594315245478035e-06,
      "loss": 2.0447,
      "step": 2110
    },
    {
      "epoch": 2.9557337051237367,
      "grad_norm": 12.555295944213867,
      "learning_rate": 8.010335917312662e-07,
      "loss": 2.4814,
      "step": 2120
    },
    {
      "epoch": 2.9696758452422447,
      "grad_norm": 11.319598197937012,
      "learning_rate": 5.426356589147287e-07,
      "loss": 2.3802,
      "step": 2130
    },
    {
      "epoch": 2.9836179853607527,
      "grad_norm": 10.149794578552246,
      "learning_rate": 2.842377260981912e-07,
      "loss": 2.3564,
      "step": 2140
    },
    {
      "epoch": 2.997560125479261,
      "grad_norm": 13.360361099243164,
      "learning_rate": 2.5839793281653747e-08,
      "loss": 2.1182,
      "step": 2150
    },
    {
      "epoch": 2.998954339491112,
      "eval_accuracy": 0.8684313725490196,
      "eval_loss": 0.5095656514167786,
      "eval_runtime": 245.9211,
      "eval_samples_per_second": 41.477,
      "eval_steps_per_second": 1.297,
      "step": 2151
    },
    {
      "epoch": 2.998954339491112,
      "step": 2151,
      "total_flos": 2.1353596059581743e+19,
      "train_loss": 4.144423897296315,
      "train_runtime": 8280.2172,
      "train_samples_per_second": 33.26,
      "train_steps_per_second": 0.26
    }
  ],
  "logging_steps": 10,
  "max_steps": 2151,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 2.1353596059581743e+19,
  "train_batch_size": 32,
  "trial_name": null,
  "trial_params": null
}
|
|