{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 10.0,
  "global_step": 1110,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
"log_history": [ |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 4.504504504504505e-07, |
|
"loss": 3.2559, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.00900900900901e-07, |
|
"loss": 3.2281, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 1.3513513513513515e-06, |
|
"loss": 3.1953, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 1.801801801801802e-06, |
|
"loss": 3.0844, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 2.2522522522522524e-06, |
|
"loss": 3.2563, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 2.702702702702703e-06, |
|
"loss": 3.098, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 3.1531531531531532e-06, |
|
"loss": 2.9754, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 3.603603603603604e-06, |
|
"loss": 3.1168, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 4.0540540540540545e-06, |
|
"loss": 3.2441, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 4.504504504504505e-06, |
|
"loss": 3.084, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 4.954954954954955e-06, |
|
"loss": 3.1965, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 5.405405405405406e-06, |
|
"loss": 2.9336, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 5.855855855855856e-06, |
|
"loss": 2.977, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 6.3063063063063065e-06, |
|
"loss": 3.0297, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 6.7567567567567575e-06, |
|
"loss": 2.9363, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 7.207207207207208e-06, |
|
"loss": 2.9477, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 7.657657657657658e-06, |
|
"loss": 3.0219, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 8.108108108108109e-06, |
|
"loss": 2.9004, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 8.55855855855856e-06, |
|
"loss": 2.991, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.00900900900901e-06, |
|
"loss": 2.8973, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.45945945945946e-06, |
|
"loss": 2.9184, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.90990990990991e-06, |
|
"loss": 2.868, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 9.95995995995996e-06, |
|
"loss": 2.8301, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 9.90990990990991e-06, |
|
"loss": 2.716, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 9.85985985985986e-06, |
|
"loss": 2.8859, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 9.80980980980981e-06, |
|
"loss": 2.8418, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 9.75975975975976e-06, |
|
"loss": 2.8004, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 9.70970970970971e-06, |
|
"loss": 2.8203, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 9.65965965965966e-06, |
|
"loss": 2.8668, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 9.60960960960961e-06, |
|
"loss": 2.8055, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 9.55955955955956e-06, |
|
"loss": 2.7934, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 9.50950950950951e-06, |
|
"loss": 2.9164, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 9.45945945945946e-06, |
|
"loss": 2.9328, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 9.40940940940941e-06, |
|
"loss": 2.9363, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 9.35935935935936e-06, |
|
"loss": 2.841, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 1.62, |
|
"learning_rate": 9.30930930930931e-06, |
|
"loss": 2.7398, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 9.25925925925926e-06, |
|
"loss": 2.8695, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 1.71, |
|
"learning_rate": 9.20920920920921e-06, |
|
"loss": 2.8363, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 9.15915915915916e-06, |
|
"loss": 2.8844, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 1.8, |
|
"learning_rate": 9.10910910910911e-06, |
|
"loss": 2.7594, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 9.05905905905906e-06, |
|
"loss": 2.8078, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.89, |
|
"learning_rate": 9.00900900900901e-06, |
|
"loss": 2.9926, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 8.95895895895896e-06, |
|
"loss": 2.8766, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.98, |
|
"learning_rate": 8.90890890890891e-06, |
|
"loss": 2.8145, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 8.85885885885886e-06, |
|
"loss": 2.7383, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 2.07, |
|
"learning_rate": 8.80880880880881e-06, |
|
"loss": 2.7613, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 8.75875875875876e-06, |
|
"loss": 2.9582, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 2.16, |
|
"learning_rate": 8.70870870870871e-06, |
|
"loss": 2.8043, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 8.65865865865866e-06, |
|
"loss": 2.7922, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 2.25, |
|
"learning_rate": 8.60860860860861e-06, |
|
"loss": 2.9848, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 8.55855855855856e-06, |
|
"loss": 2.8777, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 2.34, |
|
"learning_rate": 8.50850850850851e-06, |
|
"loss": 2.9402, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 8.45845845845846e-06, |
|
"loss": 2.7785, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 2.43, |
|
"learning_rate": 8.408408408408409e-06, |
|
"loss": 2.8871, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 8.358358358358359e-06, |
|
"loss": 2.8348, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 2.52, |
|
"learning_rate": 8.308308308308309e-06, |
|
"loss": 2.7664, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 8.258258258258259e-06, |
|
"loss": 2.7973, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 2.61, |
|
"learning_rate": 8.208208208208209e-06, |
|
"loss": 2.7527, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 8.158158158158159e-06, |
|
"loss": 2.7844, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 2.7, |
|
"learning_rate": 8.108108108108109e-06, |
|
"loss": 2.8102, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 8.058058058058059e-06, |
|
"loss": 2.923, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 2.79, |
|
"learning_rate": 8.00800800800801e-06, |
|
"loss": 2.7535, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 7.95795795795796e-06, |
|
"loss": 2.8164, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 2.88, |
|
"learning_rate": 7.90790790790791e-06, |
|
"loss": 2.8312, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 7.85785785785786e-06, |
|
"loss": 2.8027, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 2.97, |
|
"learning_rate": 7.807807807807808e-06, |
|
"loss": 2.7281, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 7.757757757757758e-06, |
|
"loss": 2.8328, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 3.06, |
|
"learning_rate": 7.707707707707708e-06, |
|
"loss": 2.8695, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 3.11, |
|
"learning_rate": 7.657657657657658e-06, |
|
"loss": 2.8, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 3.15, |
|
"learning_rate": 7.607607607607608e-06, |
|
"loss": 2.8832, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 3.2, |
|
"learning_rate": 7.557557557557558e-06, |
|
"loss": 2.7445, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 3.24, |
|
"learning_rate": 7.507507507507507e-06, |
|
"loss": 2.7359, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 3.29, |
|
"learning_rate": 7.457457457457457e-06, |
|
"loss": 2.6559, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 3.33, |
|
"learning_rate": 7.4074074074074075e-06, |
|
"loss": 2.7598, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 3.38, |
|
"learning_rate": 7.3573573573573575e-06, |
|
"loss": 2.8637, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 3.42, |
|
"learning_rate": 7.307307307307308e-06, |
|
"loss": 2.7266, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 3.47, |
|
"learning_rate": 7.257257257257258e-06, |
|
"loss": 2.7289, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 3.51, |
|
"learning_rate": 7.207207207207208e-06, |
|
"loss": 2.7895, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 3.56, |
|
"learning_rate": 7.157157157157158e-06, |
|
"loss": 2.7336, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 3.6, |
|
"learning_rate": 7.107107107107107e-06, |
|
"loss": 2.6094, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 3.65, |
|
"learning_rate": 7.057057057057057e-06, |
|
"loss": 2.9809, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 3.69, |
|
"learning_rate": 7.007007007007007e-06, |
|
"loss": 2.8156, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 3.74, |
|
"learning_rate": 6.956956956956957e-06, |
|
"loss": 2.798, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 3.78, |
|
"learning_rate": 6.906906906906907e-06, |
|
"loss": 2.6703, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 3.83, |
|
"learning_rate": 6.856856856856857e-06, |
|
"loss": 2.8105, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 3.87, |
|
"learning_rate": 6.8068068068068075e-06, |
|
"loss": 2.6707, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 3.92, |
|
"learning_rate": 6.7567567567567575e-06, |
|
"loss": 2.7289, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 3.96, |
|
"learning_rate": 6.706706706706707e-06, |
|
"loss": 2.7004, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 4.01, |
|
"learning_rate": 6.656656656656657e-06, |
|
"loss": 2.8719, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 4.05, |
|
"learning_rate": 6.606606606606607e-06, |
|
"loss": 2.6273, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 4.1, |
|
"learning_rate": 6.556556556556557e-06, |
|
"loss": 2.8629, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 4.14, |
|
"learning_rate": 6.506506506506507e-06, |
|
"loss": 2.8672, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 4.19, |
|
"learning_rate": 6.456456456456457e-06, |
|
"loss": 2.725, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 4.23, |
|
"learning_rate": 6.406406406406407e-06, |
|
"loss": 2.8004, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 4.28, |
|
"learning_rate": 6.356356356356357e-06, |
|
"loss": 2.882, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 4.32, |
|
"learning_rate": 6.3063063063063065e-06, |
|
"loss": 2.8027, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 4.37, |
|
"learning_rate": 6.2562562562562565e-06, |
|
"loss": 2.8988, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 4.41, |
|
"learning_rate": 6.206206206206207e-06, |
|
"loss": 2.6898, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 4.46, |
|
"learning_rate": 6.156156156156157e-06, |
|
"loss": 2.9855, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 4.5, |
|
"learning_rate": 6.106106106106107e-06, |
|
"loss": 2.8246, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 4.55, |
|
"learning_rate": 6.056056056056057e-06, |
|
"loss": 2.8918, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 4.59, |
|
"learning_rate": 6.006006006006007e-06, |
|
"loss": 2.8988, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 4.64, |
|
"learning_rate": 5.955955955955957e-06, |
|
"loss": 2.7313, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 4.68, |
|
"learning_rate": 5.905905905905906e-06, |
|
"loss": 2.8082, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 4.73, |
|
"learning_rate": 5.855855855855856e-06, |
|
"loss": 2.8164, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 4.77, |
|
"learning_rate": 5.805805805805806e-06, |
|
"loss": 2.7496, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 4.82, |
|
"learning_rate": 5.755755755755756e-06, |
|
"loss": 2.5914, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 4.86, |
|
"learning_rate": 5.7057057057057065e-06, |
|
"loss": 2.7824, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 4.91, |
|
"learning_rate": 5.6556556556556565e-06, |
|
"loss": 2.6574, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 4.95, |
|
"learning_rate": 5.605605605605607e-06, |
|
"loss": 2.8434, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 5.0, |
|
"learning_rate": 5.555555555555557e-06, |
|
"loss": 2.8793, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 5.05, |
|
"learning_rate": 5.505505505505506e-06, |
|
"loss": 2.7293, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 5.09, |
|
"learning_rate": 5.455455455455456e-06, |
|
"loss": 2.7664, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 5.14, |
|
"learning_rate": 5.405405405405406e-06, |
|
"loss": 2.7891, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 5.18, |
|
"learning_rate": 5.355355355355356e-06, |
|
"loss": 2.868, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 5.23, |
|
"learning_rate": 5.305305305305306e-06, |
|
"loss": 2.8664, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 5.27, |
|
"learning_rate": 5.255255255255256e-06, |
|
"loss": 2.8324, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 5.32, |
|
"learning_rate": 5.205205205205206e-06, |
|
"loss": 2.7469, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 5.36, |
|
"learning_rate": 5.155155155155156e-06, |
|
"loss": 2.7973, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 5.41, |
|
"learning_rate": 5.105105105105106e-06, |
|
"loss": 2.8254, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 5.45, |
|
"learning_rate": 5.055055055055056e-06, |
|
"loss": 2.7656, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 5.5, |
|
"learning_rate": 5.005005005005006e-06, |
|
"loss": 2.766, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 5.54, |
|
"learning_rate": 4.954954954954955e-06, |
|
"loss": 2.8266, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 5.59, |
|
"learning_rate": 4.904904904904905e-06, |
|
"loss": 2.868, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 5.63, |
|
"learning_rate": 4.854854854854855e-06, |
|
"loss": 2.7523, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 5.68, |
|
"learning_rate": 4.804804804804805e-06, |
|
"loss": 2.8625, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 5.72, |
|
"learning_rate": 4.754754754754755e-06, |
|
"loss": 2.7098, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 5.77, |
|
"learning_rate": 4.704704704704705e-06, |
|
"loss": 2.584, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 5.81, |
|
"learning_rate": 4.654654654654655e-06, |
|
"loss": 2.7652, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 5.86, |
|
"learning_rate": 4.604604604604605e-06, |
|
"loss": 2.7668, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 5.9, |
|
"learning_rate": 4.554554554554555e-06, |
|
"loss": 2.9934, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 5.95, |
|
"learning_rate": 4.504504504504505e-06, |
|
"loss": 2.807, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 5.99, |
|
"learning_rate": 4.454454454454455e-06, |
|
"loss": 2.7543, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 6.04, |
|
"learning_rate": 4.404404404404405e-06, |
|
"loss": 2.7055, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 6.08, |
|
"learning_rate": 4.354354354354355e-06, |
|
"loss": 2.6199, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 6.13, |
|
"learning_rate": 4.304304304304305e-06, |
|
"loss": 2.757, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 6.17, |
|
"learning_rate": 4.254254254254255e-06, |
|
"loss": 2.7941, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 6.22, |
|
"learning_rate": 4.204204204204204e-06, |
|
"loss": 2.7836, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 6.26, |
|
"learning_rate": 4.154154154154154e-06, |
|
"loss": 2.7062, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 6.31, |
|
"learning_rate": 4.1041041041041045e-06, |
|
"loss": 2.807, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 6.35, |
|
"learning_rate": 4.0540540540540545e-06, |
|
"loss": 2.8246, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 6.4, |
|
"learning_rate": 4.004004004004005e-06, |
|
"loss": 2.8395, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 6.44, |
|
"learning_rate": 3.953953953953955e-06, |
|
"loss": 2.7781, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 6.49, |
|
"learning_rate": 3.903903903903904e-06, |
|
"loss": 2.732, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 6.53, |
|
"learning_rate": 3.853853853853854e-06, |
|
"loss": 2.7281, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 6.58, |
|
"learning_rate": 3.803803803803804e-06, |
|
"loss": 2.6957, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 6.62, |
|
"learning_rate": 3.7537537537537537e-06, |
|
"loss": 2.7516, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 6.67, |
|
"learning_rate": 3.7037037037037037e-06, |
|
"loss": 2.741, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 6.71, |
|
"learning_rate": 3.653653653653654e-06, |
|
"loss": 2.798, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 6.76, |
|
"learning_rate": 3.603603603603604e-06, |
|
"loss": 2.741, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 6.8, |
|
"learning_rate": 3.5535535535535535e-06, |
|
"loss": 2.7691, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 6.85, |
|
"learning_rate": 3.5035035035035036e-06, |
|
"loss": 2.7039, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 6.89, |
|
"learning_rate": 3.4534534534534537e-06, |
|
"loss": 2.7797, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 6.94, |
|
"learning_rate": 3.4034034034034037e-06, |
|
"loss": 2.918, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.3533533533533534e-06, |
|
"loss": 2.6195, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 7.03, |
|
"learning_rate": 3.3033033033033035e-06, |
|
"loss": 2.5387, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 3.2532532532532535e-06, |
|
"loss": 2.7352, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 7.12, |
|
"learning_rate": 3.2032032032032036e-06, |
|
"loss": 2.5961, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 3.1531531531531532e-06, |
|
"loss": 2.8273, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 7.21, |
|
"learning_rate": 3.1031031031031033e-06, |
|
"loss": 2.6625, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 3.0530530530530534e-06, |
|
"loss": 2.7, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 7.3, |
|
"learning_rate": 3.0030030030030034e-06, |
|
"loss": 2.7527, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 2.952952952952953e-06, |
|
"loss": 2.7172, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 7.39, |
|
"learning_rate": 2.902902902902903e-06, |
|
"loss": 2.7395, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 2.8528528528528532e-06, |
|
"loss": 2.6105, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 7.48, |
|
"learning_rate": 2.8028028028028033e-06, |
|
"loss": 2.7566, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 2.752752752752753e-06, |
|
"loss": 2.6984, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 7.57, |
|
"learning_rate": 2.702702702702703e-06, |
|
"loss": 2.8969, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 2.652652652652653e-06, |
|
"loss": 2.6301, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 7.66, |
|
"learning_rate": 2.602602602602603e-06, |
|
"loss": 2.7102, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 2.552552552552553e-06, |
|
"loss": 2.9152, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 7.75, |
|
"learning_rate": 2.502502502502503e-06, |
|
"loss": 2.6543, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 2.4524524524524525e-06, |
|
"loss": 2.6109, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 7.84, |
|
"learning_rate": 2.4024024024024026e-06, |
|
"loss": 2.5871, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 2.3523523523523527e-06, |
|
"loss": 2.752, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 7.93, |
|
"learning_rate": 2.3023023023023023e-06, |
|
"loss": 2.716, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.2522522522522524e-06, |
|
"loss": 2.807, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 8.02, |
|
"learning_rate": 2.2022022022022024e-06, |
|
"loss": 2.877, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 2.1521521521521525e-06, |
|
"loss": 2.716, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 8.11, |
|
"learning_rate": 2.102102102102102e-06, |
|
"loss": 2.6879, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 2.0520520520520522e-06, |
|
"loss": 2.7137, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 8.2, |
|
"learning_rate": 2.0020020020020023e-06, |
|
"loss": 2.7133, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 1.951951951951952e-06, |
|
"loss": 2.7453, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 8.29, |
|
"learning_rate": 1.901901901901902e-06, |
|
"loss": 2.8281, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 1.8518518518518519e-06, |
|
"loss": 2.857, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 8.38, |
|
"learning_rate": 1.801801801801802e-06, |
|
"loss": 2.7105, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 1.7517517517517518e-06, |
|
"loss": 2.7879, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 1.7017017017017019e-06, |
|
"loss": 2.8539, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 1.6516516516516517e-06, |
|
"loss": 2.7656, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 1.6016016016016018e-06, |
|
"loss": 2.677, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 1.5515515515515517e-06, |
|
"loss": 2.809, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 1.5015015015015017e-06, |
|
"loss": 2.5945, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 1.4514514514514516e-06, |
|
"loss": 2.8148, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 1.4014014014014016e-06, |
|
"loss": 2.7242, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 1.3513513513513515e-06, |
|
"loss": 2.9172, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 1.3013013013013016e-06, |
|
"loss": 2.7965, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 1.2512512512512514e-06, |
|
"loss": 2.8918, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 1.2012012012012013e-06, |
|
"loss": 2.6551, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 1.1511511511511512e-06, |
|
"loss": 2.8613, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 1.1011011011011012e-06, |
|
"loss": 2.7629, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 9.05, |
|
"learning_rate": 1.051051051051051e-06, |
|
"loss": 2.5543, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 9.1, |
|
"learning_rate": 1.0010010010010011e-06, |
|
"loss": 2.9523, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 9.14, |
|
"learning_rate": 9.50950950950951e-07, |
|
"loss": 2.7785, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 9.19, |
|
"learning_rate": 9.00900900900901e-07, |
|
"loss": 2.6758, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 9.23, |
|
"learning_rate": 8.508508508508509e-07, |
|
"loss": 2.5539, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 9.28, |
|
"learning_rate": 8.008008008008009e-07, |
|
"loss": 2.6223, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 9.32, |
|
"learning_rate": 7.507507507507509e-07, |
|
"loss": 2.882, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 9.37, |
|
"learning_rate": 7.007007007007008e-07, |
|
"loss": 2.9254, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 9.41, |
|
"learning_rate": 6.506506506506508e-07, |
|
"loss": 2.825, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 9.46, |
|
"learning_rate": 6.006006006006006e-07, |
|
"loss": 2.7879, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 9.5, |
|
"learning_rate": 5.505505505505506e-07, |
|
"loss": 2.7621, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 9.55, |
|
"learning_rate": 5.005005005005006e-07, |
|
"loss": 2.7711, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 9.59, |
|
"learning_rate": 4.504504504504505e-07, |
|
"loss": 2.7539, |
|
"step": 1065 |
|
}, |
|
{ |
|
"epoch": 9.64, |
|
"learning_rate": 4.0040040040040045e-07, |
|
"loss": 2.7781, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 9.68, |
|
"learning_rate": 3.503503503503504e-07, |
|
"loss": 2.7246, |
|
"step": 1075 |
|
}, |
|
{ |
|
"epoch": 9.73, |
|
"learning_rate": 3.003003003003003e-07, |
|
"loss": 2.9125, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 9.77, |
|
"learning_rate": 2.502502502502503e-07, |
|
"loss": 2.757, |
|
"step": 1085 |
|
}, |
|
{ |
|
"epoch": 9.82, |
|
"learning_rate": 2.0020020020020022e-07, |
|
"loss": 2.7824, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 9.86, |
|
"learning_rate": 1.5015015015015016e-07, |
|
"loss": 2.6824, |
|
"step": 1095 |
|
}, |
|
{ |
|
"epoch": 9.91, |
|
"learning_rate": 1.0010010010010011e-07, |
|
"loss": 2.6035, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 9.95, |
|
"learning_rate": 5.0050050050050056e-08, |
|
"loss": 2.7152, |
|
"step": 1105 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"learning_rate": 0.0, |
|
"loss": 2.7191, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 10.0, |
|
"step": 1110, |
|
"total_flos": 2.1618654360131705e+21, |
|
"train_loss": 2.8070910754504506, |
|
"train_runtime": 11217.8167, |
|
"train_samples_per_second": 101.845, |
|
"train_steps_per_second": 0.099 |
|
} |
|
], |
|
"max_steps": 1110, |
|
"num_train_epochs": 10, |
|
"total_flos": 2.1618654360131705e+21, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|
|