{
  "best_metric": 0.8529411764705882,
  "best_model_checkpoint": "videomae-base-finetuned-ucf101-subset-hand\\checkpoint-1500",
  "epoch": 177.004375,
  "eval_steps": 500,
  "global_step": 1600,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 1.000625,
      "grad_norm": 15.27369499206543,
      "learning_rate": 3.125e-06,
      "loss": 1.689,
      "step": 10
    },
    {
      "epoch": 2.00125,
      "grad_norm": 12.487483024597168,
      "learning_rate": 6.25e-06,
      "loss": 1.6397,
      "step": 20
    },
    {
      "epoch": 3.001875,
      "grad_norm": 21.91889762878418,
      "learning_rate": 9.375000000000001e-06,
      "loss": 1.6462,
      "step": 30
    },
    {
      "epoch": 4.0025,
      "grad_norm": 10.232108116149902,
      "learning_rate": 1.25e-05,
      "loss": 1.6326,
      "step": 40
    },
    {
      "epoch": 5.003125,
      "grad_norm": 15.612605094909668,
      "learning_rate": 1.5625e-05,
      "loss": 1.5915,
      "step": 50
    },
    {
      "epoch": 6.00375,
      "grad_norm": 16.83026123046875,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 1.6447,
      "step": 60
    },
    {
      "epoch": 7.004375,
      "grad_norm": 11.247737884521484,
      "learning_rate": 2.1875e-05,
      "loss": 1.6781,
      "step": 70
    },
    {
      "epoch": 8.005,
      "grad_norm": 10.558433532714844,
      "learning_rate": 2.5e-05,
      "loss": 1.6657,
      "step": 80
    },
    {
      "epoch": 9.005625,
      "grad_norm": 20.691017150878906,
      "learning_rate": 2.8125000000000003e-05,
      "loss": 1.709,
      "step": 90
    },
    {
      "epoch": 11.000625,
      "grad_norm": 11.228839874267578,
      "learning_rate": 3.125e-05,
      "loss": 1.6518,
      "step": 100
    },
    {
      "epoch": 12.00125,
      "grad_norm": 12.333444595336914,
      "learning_rate": 3.4375e-05,
      "loss": 1.6348,
      "step": 110
    },
    {
      "epoch": 13.001875,
      "grad_norm": 8.79792308807373,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.7793,
      "step": 120
    },
    {
      "epoch": 14.0025,
      "grad_norm": 11.819356918334961,
      "learning_rate": 4.0625000000000005e-05,
      "loss": 1.7018,
      "step": 130
    },
    {
      "epoch": 15.003125,
      "grad_norm": 10.883867263793945,
      "learning_rate": 4.375e-05,
      "loss": 1.6195,
      "step": 140
    },
    {
      "epoch": 16.00375,
      "grad_norm": 11.140106201171875,
      "learning_rate": 4.6875e-05,
      "loss": 1.6123,
      "step": 150
    },
    {
      "epoch": 17.004375,
      "grad_norm": 9.668269157409668,
      "learning_rate": 5e-05,
      "loss": 1.7115,
      "step": 160
    },
    {
      "epoch": 18.005,
      "grad_norm": 7.844611167907715,
      "learning_rate": 4.965277777777778e-05,
      "loss": 1.6477,
      "step": 170
    },
    {
      "epoch": 19.005625,
      "grad_norm": 15.249470710754395,
      "learning_rate": 4.930555555555556e-05,
      "loss": 1.6387,
      "step": 180
    },
    {
      "epoch": 21.000625,
      "grad_norm": 7.852767467498779,
      "learning_rate": 4.8958333333333335e-05,
      "loss": 1.6663,
      "step": 190
    },
    {
      "epoch": 22.00125,
      "grad_norm": 10.084273338317871,
      "learning_rate": 4.8611111111111115e-05,
      "loss": 1.6515,
      "step": 200
    },
    {
      "epoch": 23.001875,
      "grad_norm": 6.365440368652344,
      "learning_rate": 4.8263888888888895e-05,
      "loss": 1.6323,
      "step": 210
    },
    {
      "epoch": 24.0025,
      "grad_norm": 7.19619607925415,
      "learning_rate": 4.791666666666667e-05,
      "loss": 1.6334,
      "step": 220
    },
    {
      "epoch": 25.003125,
      "grad_norm": 9.672527313232422,
      "learning_rate": 4.756944444444444e-05,
      "loss": 1.6093,
      "step": 230
    },
    {
      "epoch": 26.00375,
      "grad_norm": 6.754970073699951,
      "learning_rate": 4.722222222222222e-05,
      "loss": 1.6237,
      "step": 240
    },
    {
      "epoch": 27.004375,
      "grad_norm": 8.783395767211914,
      "learning_rate": 4.6875e-05,
      "loss": 1.5612,
      "step": 250
    },
    {
      "epoch": 28.005,
      "grad_norm": 9.81071949005127,
      "learning_rate": 4.652777777777778e-05,
      "loss": 1.6906,
      "step": 260
    },
    {
      "epoch": 29.005625,
      "grad_norm": 14.682830810546875,
      "learning_rate": 4.618055555555556e-05,
      "loss": 1.5813,
      "step": 270
    },
    {
      "epoch": 31.000625,
      "grad_norm": 6.165945529937744,
      "learning_rate": 4.5833333333333334e-05,
      "loss": 1.6764,
      "step": 280
    },
    {
      "epoch": 32.00125,
      "grad_norm": 10.008849143981934,
      "learning_rate": 4.5486111111111114e-05,
      "loss": 1.657,
      "step": 290
    },
    {
      "epoch": 33.001875,
      "grad_norm": 8.975122451782227,
      "learning_rate": 4.5138888888888894e-05,
      "loss": 1.4942,
      "step": 300
    },
    {
      "epoch": 34.0025,
      "grad_norm": 9.013062477111816,
      "learning_rate": 4.4791666666666673e-05,
      "loss": 1.5295,
      "step": 310
    },
    {
      "epoch": 35.003125,
      "grad_norm": 5.317986965179443,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 1.4473,
      "step": 320
    },
    {
      "epoch": 36.00375,
      "grad_norm": 6.352290153503418,
      "learning_rate": 4.4097222222222226e-05,
      "loss": 1.378,
      "step": 330
    },
    {
      "epoch": 37.004375,
      "grad_norm": 6.145012378692627,
      "learning_rate": 4.375e-05,
      "loss": 1.2513,
      "step": 340
    },
    {
      "epoch": 38.005,
      "grad_norm": 9.207514762878418,
      "learning_rate": 4.340277777777778e-05,
      "loss": 1.7593,
      "step": 350
    },
    {
      "epoch": 39.005625,
      "grad_norm": 33.3981819152832,
      "learning_rate": 4.305555555555556e-05,
      "loss": 1.5538,
      "step": 360
    },
    {
      "epoch": 41.000625,
      "grad_norm": 8.981507301330566,
      "learning_rate": 4.270833333333333e-05,
      "loss": 1.3785,
      "step": 370
    },
    {
      "epoch": 42.00125,
      "grad_norm": 9.220932006835938,
      "learning_rate": 4.236111111111111e-05,
      "loss": 1.4187,
      "step": 380
    },
    {
      "epoch": 43.001875,
      "grad_norm": 6.985467433929443,
      "learning_rate": 4.201388888888889e-05,
      "loss": 1.3837,
      "step": 390
    },
    {
      "epoch": 44.0025,
      "grad_norm": 14.327313423156738,
      "learning_rate": 4.166666666666667e-05,
      "loss": 1.5855,
      "step": 400
    },
    {
      "epoch": 45.003125,
      "grad_norm": 5.2364821434021,
      "learning_rate": 4.1319444444444445e-05,
      "loss": 1.5322,
      "step": 410
    },
    {
      "epoch": 46.00375,
      "grad_norm": 11.791383743286133,
      "learning_rate": 4.0972222222222225e-05,
      "loss": 1.2774,
      "step": 420
    },
    {
      "epoch": 47.004375,
      "grad_norm": 14.766942977905273,
      "learning_rate": 4.0625000000000005e-05,
      "loss": 1.5675,
      "step": 430
    },
    {
      "epoch": 48.005,
      "grad_norm": 15.439301490783691,
      "learning_rate": 4.027777777777778e-05,
      "loss": 1.2914,
      "step": 440
    },
    {
      "epoch": 49.005625,
      "grad_norm": 18.859539031982422,
      "learning_rate": 3.993055555555556e-05,
      "loss": 1.4614,
      "step": 450
    },
    {
      "epoch": 51.000625,
      "grad_norm": 6.83065128326416,
      "learning_rate": 3.958333333333333e-05,
      "loss": 1.4297,
      "step": 460
    },
    {
      "epoch": 52.00125,
      "grad_norm": 7.136291027069092,
      "learning_rate": 3.923611111111111e-05,
      "loss": 1.4145,
      "step": 470
    },
    {
      "epoch": 53.001875,
      "grad_norm": 6.295926094055176,
      "learning_rate": 3.888888888888889e-05,
      "loss": 1.2577,
      "step": 480
    },
    {
      "epoch": 54.0025,
      "grad_norm": 5.792518615722656,
      "learning_rate": 3.854166666666667e-05,
      "loss": 1.0809,
      "step": 490
    },
    {
      "epoch": 55.003125,
      "grad_norm": 14.563287734985352,
      "learning_rate": 3.8194444444444444e-05,
      "loss": 0.9168,
      "step": 500
    },
    {
      "epoch": 55.003125,
      "eval_accuracy": 0.5294117647058824,
      "eval_loss": 1.1491186618804932,
      "eval_runtime": 6.3087,
      "eval_samples_per_second": 5.389,
      "eval_steps_per_second": 1.902,
      "step": 500
    },
    {
      "epoch": 56.00375,
      "grad_norm": 13.170215606689453,
      "learning_rate": 3.7847222222222224e-05,
      "loss": 1.3475,
      "step": 510
    },
    {
      "epoch": 57.004375,
      "grad_norm": 14.003524780273438,
      "learning_rate": 3.7500000000000003e-05,
      "loss": 1.1146,
      "step": 520
    },
    {
      "epoch": 58.005,
      "grad_norm": 57.568973541259766,
      "learning_rate": 3.715277777777778e-05,
      "loss": 1.0694,
      "step": 530
    },
    {
      "epoch": 59.005625,
      "grad_norm": 18.669395446777344,
      "learning_rate": 3.6805555555555556e-05,
      "loss": 1.1377,
      "step": 540
    },
    {
      "epoch": 61.000625,
      "grad_norm": 8.16624927520752,
      "learning_rate": 3.6458333333333336e-05,
      "loss": 1.0386,
      "step": 550
    },
    {
      "epoch": 62.00125,
      "grad_norm": 2.5847628116607666,
      "learning_rate": 3.611111111111111e-05,
      "loss": 0.9063,
      "step": 560
    },
    {
      "epoch": 63.001875,
      "grad_norm": 18.86676788330078,
      "learning_rate": 3.576388888888889e-05,
      "loss": 0.7986,
      "step": 570
    },
    {
      "epoch": 64.0025,
      "grad_norm": 3.9623453617095947,
      "learning_rate": 3.541666666666667e-05,
      "loss": 0.6622,
      "step": 580
    },
    {
      "epoch": 65.003125,
      "grad_norm": 2.4036800861358643,
      "learning_rate": 3.506944444444444e-05,
      "loss": 0.6984,
      "step": 590
    },
    {
      "epoch": 66.00375,
      "grad_norm": 10.653820037841797,
      "learning_rate": 3.472222222222222e-05,
      "loss": 0.9318,
      "step": 600
    },
    {
      "epoch": 67.004375,
      "grad_norm": 10.026162147521973,
      "learning_rate": 3.4375e-05,
      "loss": 1.4336,
      "step": 610
    },
    {
      "epoch": 68.005,
      "grad_norm": 17.375564575195312,
      "learning_rate": 3.402777777777778e-05,
      "loss": 0.7662,
      "step": 620
    },
    {
      "epoch": 69.005625,
      "grad_norm": 1.6536837816238403,
      "learning_rate": 3.368055555555556e-05,
      "loss": 0.6028,
      "step": 630
    },
    {
      "epoch": 71.000625,
      "grad_norm": 3.789870262145996,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.4272,
      "step": 640
    },
    {
      "epoch": 72.00125,
      "grad_norm": 22.276479721069336,
      "learning_rate": 3.2986111111111115e-05,
      "loss": 1.0059,
      "step": 650
    },
    {
      "epoch": 73.001875,
      "grad_norm": 10.886910438537598,
      "learning_rate": 3.263888888888889e-05,
      "loss": 0.4979,
      "step": 660
    },
    {
      "epoch": 74.0025,
      "grad_norm": 14.757866859436035,
      "learning_rate": 3.229166666666667e-05,
      "loss": 1.3088,
      "step": 670
    },
    {
      "epoch": 75.003125,
      "grad_norm": 6.947025775909424,
      "learning_rate": 3.194444444444444e-05,
      "loss": 0.6417,
      "step": 680
    },
    {
      "epoch": 76.00375,
      "grad_norm": 10.190616607666016,
      "learning_rate": 3.159722222222222e-05,
      "loss": 0.5463,
      "step": 690
    },
    {
      "epoch": 77.004375,
      "grad_norm": 12.709113121032715,
      "learning_rate": 3.125e-05,
      "loss": 0.5217,
      "step": 700
    },
    {
      "epoch": 78.005,
      "grad_norm": 0.8149201273918152,
      "learning_rate": 3.090277777777778e-05,
      "loss": 0.6461,
      "step": 710
    },
    {
      "epoch": 79.005625,
      "grad_norm": 0.7477602958679199,
      "learning_rate": 3.055555555555556e-05,
      "loss": 0.3186,
      "step": 720
    },
    {
      "epoch": 81.000625,
      "grad_norm": 20.247188568115234,
      "learning_rate": 3.0208333333333334e-05,
      "loss": 0.4566,
      "step": 730
    },
    {
      "epoch": 82.00125,
      "grad_norm": 2.7397711277008057,
      "learning_rate": 2.9861111111111113e-05,
      "loss": 0.673,
      "step": 740
    },
    {
      "epoch": 83.001875,
      "grad_norm": 0.6794953346252441,
      "learning_rate": 2.951388888888889e-05,
      "loss": 0.1364,
      "step": 750
    },
    {
      "epoch": 84.0025,
      "grad_norm": 30.472972869873047,
      "learning_rate": 2.916666666666667e-05,
      "loss": 0.4537,
      "step": 760
    },
    {
      "epoch": 85.003125,
      "grad_norm": 25.230920791625977,
      "learning_rate": 2.8819444444444443e-05,
      "loss": 0.5033,
      "step": 770
    },
    {
      "epoch": 86.00375,
      "grad_norm": 12.282685279846191,
      "learning_rate": 2.8472222222222223e-05,
      "loss": 0.4521,
      "step": 780
    },
    {
      "epoch": 87.004375,
      "grad_norm": 21.0340633392334,
      "learning_rate": 2.8125000000000003e-05,
      "loss": 0.4044,
      "step": 790
    },
    {
      "epoch": 88.005,
      "grad_norm": 10.24821949005127,
      "learning_rate": 2.777777777777778e-05,
      "loss": 0.1864,
      "step": 800
    },
    {
      "epoch": 89.005625,
      "grad_norm": 0.11952634900808334,
      "learning_rate": 2.743055555555556e-05,
      "loss": 0.0229,
      "step": 810
    },
    {
      "epoch": 91.000625,
      "grad_norm": 116.37321472167969,
      "learning_rate": 2.7083333333333332e-05,
      "loss": 0.4937,
      "step": 820
    },
    {
      "epoch": 92.00125,
      "grad_norm": 137.15133666992188,
      "learning_rate": 2.6736111111111112e-05,
      "loss": 0.4143,
      "step": 830
    },
    {
      "epoch": 93.001875,
      "grad_norm": 0.07484932988882065,
      "learning_rate": 2.6388888888888892e-05,
      "loss": 0.1536,
      "step": 840
    },
    {
      "epoch": 94.0025,
      "grad_norm": 6.392551422119141,
      "learning_rate": 2.604166666666667e-05,
      "loss": 0.1022,
      "step": 850
    },
    {
      "epoch": 95.003125,
      "grad_norm": 0.13532483577728271,
      "learning_rate": 2.5694444444444445e-05,
      "loss": 0.508,
      "step": 860
    },
    {
      "epoch": 96.00375,
      "grad_norm": 29.035808563232422,
      "learning_rate": 2.534722222222222e-05,
      "loss": 0.5411,
      "step": 870
    },
    {
      "epoch": 97.004375,
      "grad_norm": 0.5214412808418274,
      "learning_rate": 2.5e-05,
      "loss": 0.2972,
      "step": 880
    },
    {
      "epoch": 98.005,
      "grad_norm": 3.654513120651245,
      "learning_rate": 2.465277777777778e-05,
      "loss": 0.7897,
      "step": 890
    },
    {
      "epoch": 99.005625,
      "grad_norm": 11.014652252197266,
      "learning_rate": 2.4305555555555558e-05,
      "loss": 0.3799,
      "step": 900
    },
    {
      "epoch": 101.000625,
      "grad_norm": 0.1979626715183258,
      "learning_rate": 2.3958333333333334e-05,
      "loss": 0.0879,
      "step": 910
    },
    {
      "epoch": 102.00125,
      "grad_norm": 56.55485153198242,
      "learning_rate": 2.361111111111111e-05,
      "loss": 0.1458,
      "step": 920
    },
    {
      "epoch": 103.001875,
      "grad_norm": 0.8609234690666199,
      "learning_rate": 2.326388888888889e-05,
      "loss": 0.2159,
      "step": 930
    },
    {
      "epoch": 104.0025,
      "grad_norm": 53.709442138671875,
      "learning_rate": 2.2916666666666667e-05,
      "loss": 0.6802,
      "step": 940
    },
    {
      "epoch": 105.003125,
      "grad_norm": 0.06115364283323288,
      "learning_rate": 2.2569444444444447e-05,
      "loss": 0.0572,
      "step": 950
    },
    {
      "epoch": 106.00375,
      "grad_norm": 0.18275491893291473,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.0073,
      "step": 960
    },
    {
      "epoch": 107.004375,
      "grad_norm": 0.8914717435836792,
      "learning_rate": 2.1875e-05,
      "loss": 0.043,
      "step": 970
    },
    {
      "epoch": 108.005,
      "grad_norm": 0.08716151118278503,
      "learning_rate": 2.152777777777778e-05,
      "loss": 0.0249,
      "step": 980
    },
    {
      "epoch": 109.005625,
      "grad_norm": 0.025017954409122467,
      "learning_rate": 2.1180555555555556e-05,
      "loss": 0.2096,
      "step": 990
    },
    {
      "epoch": 111.000625,
      "grad_norm": 2.6935932636260986,
      "learning_rate": 2.0833333333333336e-05,
      "loss": 0.3827,
      "step": 1000
    },
    {
      "epoch": 111.000625,
      "eval_accuracy": 0.8235294117647058,
      "eval_loss": 0.7478592395782471,
      "eval_runtime": 6.5367,
      "eval_samples_per_second": 5.201,
      "eval_steps_per_second": 1.836,
      "step": 1000
    },
    {
      "epoch": 112.00125,
      "grad_norm": 54.73932647705078,
      "learning_rate": 2.0486111111111113e-05,
      "loss": 0.198,
      "step": 1010
    },
    {
      "epoch": 113.001875,
      "grad_norm": 0.056511297821998596,
      "learning_rate": 2.013888888888889e-05,
      "loss": 0.3577,
      "step": 1020
    },
    {
      "epoch": 114.0025,
      "grad_norm": 5.711917877197266,
      "learning_rate": 1.9791666666666665e-05,
      "loss": 0.0365,
      "step": 1030
    },
    {
      "epoch": 115.003125,
      "grad_norm": 0.0672488734126091,
      "learning_rate": 1.9444444444444445e-05,
      "loss": 0.3771,
      "step": 1040
    },
    {
      "epoch": 116.00375,
      "grad_norm": 137.31533813476562,
      "learning_rate": 1.9097222222222222e-05,
      "loss": 0.2123,
      "step": 1050
    },
    {
      "epoch": 117.004375,
      "grad_norm": 31.655075073242188,
      "learning_rate": 1.8750000000000002e-05,
      "loss": 0.19,
      "step": 1060
    },
    {
      "epoch": 118.005,
      "grad_norm": 0.03197183832526207,
      "learning_rate": 1.8402777777777778e-05,
      "loss": 0.5969,
      "step": 1070
    },
    {
      "epoch": 119.005625,
      "grad_norm": 0.11024000495672226,
      "learning_rate": 1.8055555555555555e-05,
      "loss": 0.0148,
      "step": 1080
    },
    {
      "epoch": 121.000625,
      "grad_norm": 0.566511332988739,
      "learning_rate": 1.7708333333333335e-05,
      "loss": 0.2468,
      "step": 1090
    },
    {
      "epoch": 122.00125,
      "grad_norm": 0.12642063200473785,
      "learning_rate": 1.736111111111111e-05,
      "loss": 0.016,
      "step": 1100
    },
    {
      "epoch": 123.001875,
      "grad_norm": 69.47509002685547,
      "learning_rate": 1.701388888888889e-05,
      "loss": 0.3782,
      "step": 1110
    },
    {
      "epoch": 124.0025,
      "grad_norm": 8.828600883483887,
      "learning_rate": 1.6666666666666667e-05,
      "loss": 0.5294,
      "step": 1120
    },
    {
      "epoch": 125.003125,
      "grad_norm": 0.020838657394051552,
      "learning_rate": 1.6319444444444444e-05,
      "loss": 0.0505,
      "step": 1130
    },
    {
      "epoch": 126.00375,
      "grad_norm": 0.021230794489383698,
      "learning_rate": 1.597222222222222e-05,
      "loss": 0.075,
      "step": 1140
    },
    {
      "epoch": 127.004375,
      "grad_norm": 0.053505346179008484,
      "learning_rate": 1.5625e-05,
      "loss": 0.0074,
      "step": 1150
    },
    {
      "epoch": 128.005,
      "grad_norm": 0.09297660738229752,
      "learning_rate": 1.527777777777778e-05,
      "loss": 0.0192,
      "step": 1160
    },
    {
      "epoch": 129.005625,
      "grad_norm": 144.81719970703125,
      "learning_rate": 1.4930555555555557e-05,
      "loss": 0.1825,
      "step": 1170
    },
    {
      "epoch": 131.000625,
      "grad_norm": 0.07185923308134079,
      "learning_rate": 1.4583333333333335e-05,
      "loss": 0.0079,
      "step": 1180
    },
    {
      "epoch": 132.00125,
      "grad_norm": 0.029854485765099525,
      "learning_rate": 1.4236111111111111e-05,
      "loss": 0.002,
      "step": 1190
    },
    {
      "epoch": 133.001875,
      "grad_norm": 0.027790095657110214,
      "learning_rate": 1.388888888888889e-05,
      "loss": 0.0227,
      "step": 1200
    },
    {
      "epoch": 134.0025,
      "grad_norm": 0.0227198526263237,
      "learning_rate": 1.3541666666666666e-05,
      "loss": 0.1834,
      "step": 1210
    },
    {
      "epoch": 135.003125,
      "grad_norm": 0.016053209081292152,
      "learning_rate": 1.3194444444444446e-05,
      "loss": 0.0285,
      "step": 1220
    },
    {
      "epoch": 136.00375,
      "grad_norm": 0.01930040493607521,
      "learning_rate": 1.2847222222222222e-05,
      "loss": 0.1674,
      "step": 1230
    },
    {
      "epoch": 137.004375,
      "grad_norm": 0.019892092794179916,
      "learning_rate": 1.25e-05,
      "loss": 0.1771,
      "step": 1240
    },
    {
      "epoch": 138.005,
      "grad_norm": 0.09665486216545105,
      "learning_rate": 1.2152777777777779e-05,
      "loss": 0.0505,
      "step": 1250
    },
    {
      "epoch": 139.005625,
      "grad_norm": 0.0465998500585556,
      "learning_rate": 1.1805555555555555e-05,
      "loss": 0.0802,
      "step": 1260
    },
    {
      "epoch": 141.000625,
      "grad_norm": 0.03291332721710205,
      "learning_rate": 1.1458333333333333e-05,
      "loss": 0.002,
      "step": 1270
    },
    {
      "epoch": 142.00125,
      "grad_norm": 67.8191146850586,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.1477,
      "step": 1280
    },
    {
      "epoch": 143.001875,
      "grad_norm": 0.019737206399440765,
      "learning_rate": 1.076388888888889e-05,
      "loss": 0.1562,
      "step": 1290
    },
    {
      "epoch": 144.0025,
      "grad_norm": 0.018836895003914833,
      "learning_rate": 1.0416666666666668e-05,
      "loss": 0.2501,
      "step": 1300
    },
    {
      "epoch": 145.003125,
      "grad_norm": 0.024592338129878044,
      "learning_rate": 1.0069444444444445e-05,
      "loss": 0.1302,
      "step": 1310
    },
    {
      "epoch": 146.00375,
      "grad_norm": 0.02780103124678135,
      "learning_rate": 9.722222222222223e-06,
      "loss": 0.0105,
      "step": 1320
    },
    {
      "epoch": 147.004375,
      "grad_norm": 0.03460738807916641,
      "learning_rate": 9.375000000000001e-06,
      "loss": 0.0255,
      "step": 1330
    },
    {
      "epoch": 148.005,
      "grad_norm": 0.025903472676873207,
      "learning_rate": 9.027777777777777e-06,
      "loss": 0.1031,
      "step": 1340
    },
    {
      "epoch": 149.005625,
      "grad_norm": 0.03954395651817322,
      "learning_rate": 8.680555555555556e-06,
      "loss": 0.0041,
      "step": 1350
    },
    {
      "epoch": 151.000625,
      "grad_norm": 0.04806604981422424,
      "learning_rate": 8.333333333333334e-06,
      "loss": 0.0054,
      "step": 1360
    },
    {
      "epoch": 152.00125,
      "grad_norm": 51.491668701171875,
      "learning_rate": 7.98611111111111e-06,
      "loss": 0.1716,
      "step": 1370
    },
    {
      "epoch": 153.001875,
      "grad_norm": 28.071439743041992,
      "learning_rate": 7.63888888888889e-06,
      "loss": 0.1991,
      "step": 1380
    },
    {
      "epoch": 154.0025,
      "grad_norm": 5.893643379211426,
      "learning_rate": 7.2916666666666674e-06,
      "loss": 0.0053,
      "step": 1390
    },
    {
      "epoch": 155.003125,
      "grad_norm": 44.624385833740234,
      "learning_rate": 6.944444444444445e-06,
      "loss": 0.0189,
      "step": 1400
    },
    {
      "epoch": 156.00375,
      "grad_norm": 0.021997345611453056,
      "learning_rate": 6.597222222222223e-06,
      "loss": 0.0069,
      "step": 1410
    },
    {
      "epoch": 157.004375,
      "grad_norm": 0.01736549474298954,
      "learning_rate": 6.25e-06,
      "loss": 0.0037,
      "step": 1420
    },
    {
      "epoch": 158.005,
      "grad_norm": 49.95341491699219,
      "learning_rate": 5.902777777777778e-06,
      "loss": 0.2011,
      "step": 1430
    },
    {
      "epoch": 159.005625,
      "grad_norm": 0.027713486924767494,
      "learning_rate": 5.555555555555556e-06,
      "loss": 0.0452,
      "step": 1440
    },
    {
      "epoch": 161.000625,
      "grad_norm": 0.01983625814318657,
      "learning_rate": 5.208333333333334e-06,
      "loss": 0.147,
      "step": 1450
    },
    {
      "epoch": 162.00125,
      "grad_norm": 0.021740276366472244,
      "learning_rate": 4.861111111111111e-06,
      "loss": 0.0015,
      "step": 1460
    },
    {
      "epoch": 163.001875,
      "grad_norm": 0.012222817167639732,
      "learning_rate": 4.513888888888889e-06,
      "loss": 0.0274,
      "step": 1470
    },
    {
      "epoch": 164.0025,
      "grad_norm": 0.017902400344610214,
      "learning_rate": 4.166666666666667e-06,
      "loss": 0.017,
      "step": 1480
    },
    {
      "epoch": 165.003125,
      "grad_norm": 7.585049629211426,
      "learning_rate": 3.819444444444445e-06,
      "loss": 0.1415,
      "step": 1490
    },
    {
      "epoch": 166.00375,
      "grad_norm": 2.5589301586151123,
      "learning_rate": 3.4722222222222224e-06,
      "loss": 0.0043,
      "step": 1500
    },
    {
      "epoch": 166.00375,
      "eval_accuracy": 0.8529411764705882,
      "eval_loss": 0.5559400916099548,
      "eval_runtime": 6.6757,
      "eval_samples_per_second": 5.093,
      "eval_steps_per_second": 1.798,
      "step": 1500
    },
    {
      "epoch": 167.004375,
      "grad_norm": 0.014812835492193699,
      "learning_rate": 3.125e-06,
      "loss": 0.0019,
      "step": 1510
    },
    {
      "epoch": 168.005,
      "grad_norm": 0.4938020706176758,
      "learning_rate": 2.777777777777778e-06,
      "loss": 0.0317,
      "step": 1520
    },
    {
      "epoch": 169.005625,
      "grad_norm": 0.0188869908452034,
      "learning_rate": 2.4305555555555557e-06,
      "loss": 0.0038,
      "step": 1530
    },
    {
      "epoch": 171.000625,
      "grad_norm": 0.01809411682188511,
      "learning_rate": 2.0833333333333334e-06,
      "loss": 0.0016,
      "step": 1540
    },
    {
      "epoch": 172.00125,
      "grad_norm": 35.047969818115234,
      "learning_rate": 1.7361111111111112e-06,
      "loss": 0.0828,
      "step": 1550
    },
    {
      "epoch": 173.001875,
      "grad_norm": 0.020055020228028297,
      "learning_rate": 1.388888888888889e-06,
      "loss": 0.4192,
      "step": 1560
    },
    {
      "epoch": 174.0025,
      "grad_norm": 0.026819027960300446,
      "learning_rate": 1.0416666666666667e-06,
      "loss": 0.0251,
      "step": 1570
    },
    {
      "epoch": 175.003125,
      "grad_norm": 21.63642692565918,
      "learning_rate": 6.944444444444445e-07,
      "loss": 0.2385,
      "step": 1580
    },
    {
      "epoch": 176.00375,
      "grad_norm": 0.058039579540491104,
      "learning_rate": 3.4722222222222224e-07,
      "loss": 0.161,
      "step": 1590
    },
    {
      "epoch": 177.004375,
      "grad_norm": 0.018306700512766838,
      "learning_rate": 0.0,
      "loss": 0.1146,
      "step": 1600
    },
    {
      "epoch": 177.004375,
      "step": 1600,
      "total_flos": 5.540151384371036e+18,
      "train_loss": 0.6977802323055221,
      "train_runtime": 1181.4515,
      "train_samples_per_second": 4.063,
      "train_steps_per_second": 1.354
    },
    {
      "epoch": 177.004375,
      "eval_accuracy": 0.7916666666666666,
      "eval_loss": 0.8197056651115417,
      "eval_runtime": 9.9982,
      "eval_samples_per_second": 4.801,
      "eval_steps_per_second": 1.6,
      "step": 1600
    }
  ],
  "logging_steps": 10,
  "max_steps": 1600,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 5.540151384371036e+18,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}