{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.999350804660539,
  "global_step": 4570,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "learning_rate": 1.5e-06,
      "loss": 2.6965,
      "step": 50
    },
    {
      "epoch": 0.11,
      "learning_rate": 3e-06,
      "loss": 1.85,
      "step": 100
    },
    {
      "epoch": 0.16,
      "learning_rate": 4.5e-06,
      "loss": 1.7546,
      "step": 150
    },
    {
      "epoch": 0.22,
      "learning_rate": 6e-06,
      "loss": 1.6255,
      "step": 200
    },
    {
      "epoch": 0.27,
      "learning_rate": 7.5e-06,
      "loss": 1.5579,
      "step": 250
    },
    {
      "epoch": 0.33,
      "learning_rate": 9e-06,
      "loss": 1.5321,
      "step": 300
    },
    {
      "epoch": 0.38,
      "learning_rate": 1.05e-05,
      "loss": 1.4919,
      "step": 350
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.2e-05,
      "loss": 1.4036,
      "step": 400
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.3500000000000001e-05,
      "loss": 1.4189,
      "step": 450
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.5e-05,
      "loss": 1.4814,
      "step": 500
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.65e-05,
      "loss": 1.5028,
      "step": 550
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.8e-05,
      "loss": 1.4442,
      "step": 600
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.95e-05,
      "loss": 1.4614,
      "step": 650
    },
    {
      "epoch": 0.77,
      "learning_rate": 2.1e-05,
      "loss": 1.3753,
      "step": 700
    },
    {
      "epoch": 0.82,
      "learning_rate": 2.25e-05,
      "loss": 1.3651,
      "step": 750
    },
    {
      "epoch": 0.87,
      "learning_rate": 2.4e-05,
      "loss": 1.4083,
      "step": 800
    },
    {
      "epoch": 0.93,
      "learning_rate": 2.55e-05,
      "loss": 1.4364,
      "step": 850
    },
    {
      "epoch": 0.98,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 1.448,
      "step": 900
    },
    {
      "HasAns_exact": 58.954531394989175,
      "HasAns_f1": 74.09900890720559,
      "HasAns_total": 3233,
      "NoAns_exact": 71.16564417177914,
      "NoAns_f1": 71.16564417177914,
      "NoAns_total": 163,
      "best_exact": 60.924617196702,
      "best_exact_thresh": 0.0,
      "best_f1": 75.3421954643685,
      "best_f1_thresh": 0.0,
      "epoch": 1.0,
      "exact": 59.540636042402824,
      "f1": 73.9582143100694,
      "step": 914,
      "total": 3396
    },
    {
      "epoch": 1.04,
      "learning_rate": 2.8499999999999998e-05,
      "loss": 1.3679,
      "step": 950
    },
    {
      "epoch": 1.09,
      "learning_rate": 3e-05,
      "loss": 1.2567,
      "step": 1000
    },
    {
      "epoch": 1.15,
      "learning_rate": 2.957983193277311e-05,
      "loss": 1.2319,
      "step": 1050
    },
    {
      "epoch": 1.2,
      "learning_rate": 2.9159663865546218e-05,
      "loss": 1.2453,
      "step": 1100
    },
    {
      "epoch": 1.26,
      "learning_rate": 2.8739495798319328e-05,
      "loss": 1.3484,
      "step": 1150
    },
    {
      "epoch": 1.31,
      "learning_rate": 2.831932773109244e-05,
      "loss": 1.2464,
      "step": 1200
    },
    {
      "epoch": 1.37,
      "learning_rate": 2.789915966386555e-05,
      "loss": 1.2314,
      "step": 1250
    },
    {
      "epoch": 1.42,
      "learning_rate": 2.7478991596638655e-05,
      "loss": 1.3351,
      "step": 1300
    },
    {
      "epoch": 1.48,
      "learning_rate": 2.7058823529411766e-05,
      "loss": 1.2563,
      "step": 1350
    },
    {
      "epoch": 1.53,
      "learning_rate": 2.6638655462184873e-05,
      "loss": 1.3376,
      "step": 1400
    },
    {
      "epoch": 1.59,
      "learning_rate": 2.6218487394957983e-05,
      "loss": 1.2481,
      "step": 1450
    },
    {
      "epoch": 1.64,
      "learning_rate": 2.5798319327731093e-05,
      "loss": 1.1875,
      "step": 1500
    },
    {
      "epoch": 1.7,
      "learning_rate": 2.5378151260504203e-05,
      "loss": 1.2709,
      "step": 1550
    },
    {
      "epoch": 1.75,
      "learning_rate": 2.4957983193277314e-05,
      "loss": 1.1849,
      "step": 1600
    },
    {
      "epoch": 1.8,
      "learning_rate": 2.453781512605042e-05,
      "loss": 1.2105,
      "step": 1650
    },
    {
      "epoch": 1.86,
      "learning_rate": 2.411764705882353e-05,
      "loss": 1.2137,
      "step": 1700
    },
    {
      "epoch": 1.91,
      "learning_rate": 2.3697478991596638e-05,
      "loss": 1.2807,
      "step": 1750
    },
    {
      "epoch": 1.97,
      "learning_rate": 2.3277310924369748e-05,
      "loss": 1.2816,
      "step": 1800
    },
    {
      "HasAns_exact": 61.58366841942468,
      "HasAns_f1": 76.76180559350382,
      "HasAns_total": 3233,
      "NoAns_exact": 54.60122699386503,
      "NoAns_f1": 54.60122699386503,
      "NoAns_total": 163,
      "best_exact": 63.42756183745583,
      "best_exact_thresh": 0.0,
      "best_f1": 77.87718418250819,
      "best_f1_thresh": 0.0,
      "epoch": 2.0,
      "exact": 61.24852767962309,
      "f1": 75.69815002467547,
      "step": 1828,
      "total": 3396
    },
    {
      "epoch": 2.02,
      "learning_rate": 2.2857142857142858e-05,
      "loss": 1.0327,
      "step": 1850
    },
    {
      "epoch": 2.08,
      "learning_rate": 2.2436974789915968e-05,
      "loss": 0.896,
      "step": 1900
    },
    {
      "epoch": 2.13,
      "learning_rate": 2.2016806722689075e-05,
      "loss": 0.9569,
      "step": 1950
    },
    {
      "epoch": 2.19,
      "learning_rate": 2.1596638655462185e-05,
      "loss": 0.8276,
      "step": 2000
    },
    {
      "epoch": 2.24,
      "learning_rate": 2.1176470588235296e-05,
      "loss": 0.9038,
      "step": 2050
    },
    {
      "epoch": 2.3,
      "learning_rate": 2.0756302521008403e-05,
      "loss": 0.9296,
      "step": 2100
    },
    {
      "epoch": 2.35,
      "learning_rate": 2.0336134453781513e-05,
      "loss": 0.868,
      "step": 2150
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.9915966386554623e-05,
      "loss": 0.884,
      "step": 2200
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.9495798319327733e-05,
      "loss": 0.8594,
      "step": 2250
    },
    {
      "epoch": 2.52,
      "learning_rate": 1.907563025210084e-05,
      "loss": 0.8973,
      "step": 2300
    },
    {
      "epoch": 2.57,
      "learning_rate": 1.865546218487395e-05,
      "loss": 0.9248,
      "step": 2350
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.8235294117647057e-05,
      "loss": 0.8629,
      "step": 2400
    },
    {
      "epoch": 2.68,
      "learning_rate": 1.7815126050420167e-05,
      "loss": 0.9535,
      "step": 2450
    },
    {
      "epoch": 2.73,
      "learning_rate": 1.7394957983193278e-05,
      "loss": 0.9778,
      "step": 2500
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.6974789915966388e-05,
      "loss": 0.8858,
      "step": 2550
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.6554621848739498e-05,
      "loss": 0.8855,
      "step": 2600
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.6134453781512605e-05,
      "loss": 0.9669,
      "step": 2650
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.5714285714285715e-05,
      "loss": 0.8746,
      "step": 2700
    },
    {
      "HasAns_exact": 60.81039282400248,
      "HasAns_f1": 76.48517908547677,
      "HasAns_total": 3233,
      "NoAns_exact": 57.668711656441715,
      "NoAns_f1": 57.668711656441715,
      "NoAns_total": 163,
      "best_exact": 62.69140164899882,
      "best_exact_thresh": 0.0,
      "best_f1": 77.61383509521386,
      "best_f1_thresh": 0.0,
      "epoch": 3.0,
      "exact": 60.65959952885748,
      "f1": 75.58203297507256,
      "step": 2742,
      "total": 3396
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.5294117647058822e-05,
      "loss": 0.8392,
      "step": 2750
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.4873949579831934e-05,
      "loss": 0.6246,
      "step": 2800
    },
    {
      "epoch": 3.12,
      "learning_rate": 1.4453781512605043e-05,
      "loss": 0.6114,
      "step": 2850
    },
    {
      "epoch": 3.17,
      "learning_rate": 1.4033613445378151e-05,
      "loss": 0.6607,
      "step": 2900
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.3613445378151261e-05,
      "loss": 0.6362,
      "step": 2950
    },
    {
      "epoch": 3.28,
      "learning_rate": 1.319327731092437e-05,
      "loss": 0.6954,
      "step": 3000
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.2773109243697479e-05,
      "loss": 0.5845,
      "step": 3050
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.2352941176470587e-05,
      "loss": 0.6107,
      "step": 3100
    },
    {
      "epoch": 3.45,
      "learning_rate": 1.1932773109243697e-05,
      "loss": 0.5863,
      "step": 3150
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.1512605042016808e-05,
      "loss": 0.6415,
      "step": 3200
    },
    {
      "epoch": 3.56,
      "learning_rate": 1.1092436974789916e-05,
      "loss": 0.6494,
      "step": 3250
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.0672268907563026e-05,
      "loss": 0.6521,
      "step": 3300
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.0252100840336135e-05,
      "loss": 0.6182,
      "step": 3350
    },
    {
      "epoch": 3.72,
      "learning_rate": 9.831932773109244e-06,
      "loss": 0.6293,
      "step": 3400
    },
    {
      "epoch": 3.77,
      "learning_rate": 9.411764705882354e-06,
      "loss": 0.5783,
      "step": 3450
    },
    {
      "epoch": 3.83,
      "learning_rate": 8.991596638655462e-06,
      "loss": 0.5962,
      "step": 3500
    },
    {
      "epoch": 3.88,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.6096,
      "step": 3550
    },
    {
      "epoch": 3.94,
      "learning_rate": 8.15126050420168e-06,
      "loss": 0.6828,
      "step": 3600
    },
    {
      "epoch": 3.99,
      "learning_rate": 7.73109243697479e-06,
      "loss": 0.6435,
      "step": 3650
    },
    {
      "HasAns_exact": 61.24342715743891,
      "HasAns_f1": 76.51996301360617,
      "HasAns_total": 3233,
      "NoAns_exact": 60.736196319018404,
      "NoAns_f1": 60.736196319018404,
      "NoAns_total": 163,
      "best_exact": 63.10365135453475,
      "best_exact_thresh": 0.0,
      "best_f1": 77.64694947673401,
      "best_f1_thresh": 0.0,
      "epoch": 4.0,
      "exact": 61.21908127208481,
      "f1": 75.76237939428408,
      "step": 3656,
      "total": 3396
    },
    {
      "epoch": 4.05,
      "learning_rate": 7.310924369747899e-06,
      "loss": 0.4823,
      "step": 3700
    },
    {
      "epoch": 4.1,
      "learning_rate": 6.8907563025210085e-06,
      "loss": 0.4437,
      "step": 3750
    },
    {
      "epoch": 4.16,
      "learning_rate": 6.470588235294118e-06,
      "loss": 0.4424,
      "step": 3800
    },
    {
      "epoch": 4.21,
      "learning_rate": 6.050420168067227e-06,
      "loss": 0.4736,
      "step": 3850
    },
    {
      "epoch": 4.27,
      "learning_rate": 5.630252100840336e-06,
      "loss": 0.4747,
      "step": 3900
    },
    {
      "epoch": 4.32,
      "learning_rate": 5.210084033613445e-06,
      "loss": 0.4094,
      "step": 3950
    },
    {
      "epoch": 4.38,
      "learning_rate": 4.789915966386555e-06,
      "loss": 0.4644,
      "step": 4000
    },
    {
      "epoch": 4.43,
      "learning_rate": 4.369747899159664e-06,
      "loss": 0.4356,
      "step": 4050
    },
    {
      "epoch": 4.49,
      "learning_rate": 3.9495798319327735e-06,
      "loss": 0.4324,
      "step": 4100
    },
    {
      "epoch": 4.54,
      "learning_rate": 3.5294117647058825e-06,
      "loss": 0.4429,
      "step": 4150
    },
    {
      "epoch": 4.59,
      "learning_rate": 3.109243697478992e-06,
      "loss": 0.4524,
      "step": 4200
    },
    {
      "epoch": 4.65,
      "learning_rate": 2.689075630252101e-06,
      "loss": 0.4388,
      "step": 4250
    },
    {
      "epoch": 4.7,
      "learning_rate": 2.26890756302521e-06,
      "loss": 0.4409,
      "step": 4300
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.8487394957983194e-06,
      "loss": 0.4438,
      "step": 4350
    },
    {
      "epoch": 4.81,
      "learning_rate": 1.4285714285714286e-06,
      "loss": 0.3694,
      "step": 4400
    },
    {
      "epoch": 4.87,
      "learning_rate": 1.0084033613445378e-06,
      "loss": 0.4524,
      "step": 4450
    },
    {
      "epoch": 4.92,
      "learning_rate": 5.882352941176471e-07,
      "loss": 0.4175,
      "step": 4500
    },
    {
      "epoch": 4.98,
      "learning_rate": 1.680672268907563e-07,
      "loss": 0.4197,
      "step": 4550
    },
    {
      "HasAns_exact": 61.11970306217136,
      "HasAns_f1": 76.44576049759735,
      "HasAns_total": 3233,
      "NoAns_exact": 65.03067484662577,
      "NoAns_f1": 65.03067484662577,
      "NoAns_total": 163,
      "best_exact": 62.985865724381625,
      "best_exact_thresh": 0.0,
      "best_f1": 77.5763085066938,
      "best_f1_thresh": 0.0,
      "epoch": 5.0,
      "exact": 61.30742049469965,
      "f1": 75.89786327701185,
      "step": 4570,
      "total": 3396
    },
    {
      "epoch": 5.0,
      "step": 4570,
      "total_flos": 1.5925721822134272e+17,
      "train_runtime": 26138.3161,
      "train_samples_per_second": 0.175
    }
  ],
  "max_steps": 4570,
  "num_train_epochs": 5,
  "total_flos": 1.5925721822134272e+17,
  "trial_name": null,
  "trial_params": null
}