|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.996845425867508,
  "eval_steps": 500,
  "global_step": 1108,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0,
      "eval_f1": 0.1456953642384106,
      "eval_fn": 11,
      "eval_fp": 4117,
      "eval_loss": 0.7862394452095032,
      "eval_precision": 0.07876482434549116,
      "eval_recall": 0.9696969696969697,
      "eval_runtime": 48.4563,
      "eval_samples_per_second": 96.437,
      "eval_steps_per_second": 6.047,
      "eval_tn": 193,
      "eval_tp": 352,
      "step": 0
    },
    {
      "epoch": 0.018026137899954935,
      "grad_norm": 5.399981498718262,
      "learning_rate": 4.9548736462093865e-05,
      "loss": 0.5179,
      "step": 10
    },
    {
      "epoch": 0.03605227579990987,
      "grad_norm": 1.6319377422332764,
      "learning_rate": 4.909747292418773e-05,
      "loss": 0.2969,
      "step": 20
    },
    {
      "epoch": 0.054078413699864804,
      "grad_norm": 1.8271431922912598,
      "learning_rate": 4.864620938628159e-05,
      "loss": 0.3075,
      "step": 30
    },
    {
      "epoch": 0.07210455159981974,
      "grad_norm": 2.5912694931030273,
      "learning_rate": 4.819494584837546e-05,
      "loss": 0.3096,
      "step": 40
    },
    {
      "epoch": 0.09013068949977468,
      "grad_norm": 3.3950061798095703,
      "learning_rate": 4.7743682310469314e-05,
      "loss": 0.2585,
      "step": 50
    },
    {
      "epoch": 0.10815682739972961,
      "grad_norm": 1.6037261486053467,
      "learning_rate": 4.7292418772563177e-05,
      "loss": 0.2461,
      "step": 60
    },
    {
      "epoch": 0.12618296529968454,
      "grad_norm": 3.6747143268585205,
      "learning_rate": 4.684115523465704e-05,
      "loss": 0.2803,
      "step": 70
    },
    {
      "epoch": 0.14420910319963948,
      "grad_norm": 6.330898284912109,
      "learning_rate": 4.63898916967509e-05,
      "loss": 0.2735,
      "step": 80
    },
    {
      "epoch": 0.16223524109959442,
      "grad_norm": 4.724136829376221,
      "learning_rate": 4.5938628158844764e-05,
      "loss": 0.2551,
      "step": 90
    },
    {
      "epoch": 0.18026137899954936,
      "grad_norm": 3.852949619293213,
      "learning_rate": 4.548736462093863e-05,
      "loss": 0.2286,
      "step": 100
    },
    {
      "epoch": 0.19828751689950427,
      "grad_norm": 10.743219375610352,
      "learning_rate": 4.5036101083032495e-05,
      "loss": 0.2511,
      "step": 110
    },
    {
      "epoch": 0.21631365479945922,
      "grad_norm": 24.62398910522461,
      "learning_rate": 4.458483754512636e-05,
      "loss": 0.1756,
      "step": 120
    },
    {
      "epoch": 0.23433979269941416,
      "grad_norm": 6.864269256591797,
      "learning_rate": 4.413357400722022e-05,
      "loss": 0.1947,
      "step": 130
    },
    {
      "epoch": 0.25236593059936907,
      "grad_norm": 11.810012817382812,
      "learning_rate": 4.368231046931408e-05,
      "loss": 0.1976,
      "step": 140
    },
    {
      "epoch": 0.270392068499324,
      "grad_norm": 4.794726848602295,
      "learning_rate": 4.3231046931407945e-05,
      "loss": 0.2081,
      "step": 150
    },
    {
      "epoch": 0.28841820639927895,
      "grad_norm": 7.1115875244140625,
      "learning_rate": 4.277978339350181e-05,
      "loss": 0.2055,
      "step": 160
    },
    {
      "epoch": 0.3064443442992339,
      "grad_norm": 6.199843883514404,
      "learning_rate": 4.232851985559567e-05,
      "loss": 0.1764,
      "step": 170
    },
    {
      "epoch": 0.32447048219918884,
      "grad_norm": 5.7516679763793945,
      "learning_rate": 4.187725631768953e-05,
      "loss": 0.185,
      "step": 180
    },
    {
      "epoch": 0.3424966200991438,
      "grad_norm": 17.204120635986328,
      "learning_rate": 4.1425992779783394e-05,
      "loss": 0.175,
      "step": 190
    },
    {
      "epoch": 0.3605227579990987,
      "grad_norm": 6.416110038757324,
      "learning_rate": 4.0974729241877256e-05,
      "loss": 0.1197,
      "step": 200
    },
    {
      "epoch": 0.3785488958990536,
      "grad_norm": 8.525187492370605,
      "learning_rate": 4.052346570397112e-05,
      "loss": 0.1968,
      "step": 210
    },
    {
      "epoch": 0.39657503379900855,
      "grad_norm": 42.04795455932617,
      "learning_rate": 4.007220216606498e-05,
      "loss": 0.1718,
      "step": 220
    },
    {
      "epoch": 0.4146011716989635,
      "grad_norm": 4.502872467041016,
      "learning_rate": 3.962093862815885e-05,
      "loss": 0.1442,
      "step": 230
    },
    {
      "epoch": 0.43262730959891843,
      "grad_norm": 3.215198278427124,
      "learning_rate": 3.916967509025271e-05,
      "loss": 0.1675,
      "step": 240
    },
    {
      "epoch": 0.45065344749887337,
      "grad_norm": 6.314309120178223,
      "learning_rate": 3.8718411552346575e-05,
      "loss": 0.1492,
      "step": 250
    },
    {
      "epoch": 0.4686795853988283,
      "grad_norm": 8.839702606201172,
      "learning_rate": 3.826714801444044e-05,
      "loss": 0.1399,
      "step": 260
    },
    {
      "epoch": 0.48670572329878325,
      "grad_norm": 183.9261016845703,
      "learning_rate": 3.78158844765343e-05,
      "loss": 0.1801,
      "step": 270
    },
    {
      "epoch": 0.5047318611987381,
      "grad_norm": 5.393545150756836,
      "learning_rate": 3.7364620938628155e-05,
      "loss": 0.1353,
      "step": 280
    },
    {
      "epoch": 0.5227579990986931,
      "grad_norm": 3.587885856628418,
      "learning_rate": 3.6913357400722025e-05,
      "loss": 0.1428,
      "step": 290
    },
    {
      "epoch": 0.540784136998648,
      "grad_norm": 9.456450462341309,
      "learning_rate": 3.646209386281589e-05,
      "loss": 0.1758,
      "step": 300
    },
    {
      "epoch": 0.558810274898603,
      "grad_norm": 4.051924705505371,
      "learning_rate": 3.601083032490975e-05,
      "loss": 0.1874,
      "step": 310
    },
    {
      "epoch": 0.5768364127985579,
      "grad_norm": 6.4390363693237305,
      "learning_rate": 3.555956678700361e-05,
      "loss": 0.1472,
      "step": 320
    },
    {
      "epoch": 0.5948625506985128,
      "grad_norm": 3.86102557182312,
      "learning_rate": 3.5108303249097474e-05,
      "loss": 0.1659,
      "step": 330
    },
    {
      "epoch": 0.6128886885984678,
      "grad_norm": 6.6616644859313965,
      "learning_rate": 3.4657039711191336e-05,
      "loss": 0.191,
      "step": 340
    },
    {
      "epoch": 0.6309148264984227,
      "grad_norm": 6.867959976196289,
      "learning_rate": 3.42057761732852e-05,
      "loss": 0.1343,
      "step": 350
    },
    {
      "epoch": 0.6489409643983777,
      "grad_norm": 8.724226951599121,
      "learning_rate": 3.375451263537907e-05,
      "loss": 0.145,
      "step": 360
    },
    {
      "epoch": 0.6669671022983326,
      "grad_norm": 3.5500874519348145,
      "learning_rate": 3.330324909747293e-05,
      "loss": 0.1153,
      "step": 370
    },
    {
      "epoch": 0.6849932401982876,
      "grad_norm": 9.760895729064941,
      "learning_rate": 3.2851985559566786e-05,
      "loss": 0.1362,
      "step": 380
    },
    {
      "epoch": 0.7030193780982424,
      "grad_norm": 3.9465765953063965,
      "learning_rate": 3.240072202166065e-05,
      "loss": 0.1281,
      "step": 390
    },
    {
      "epoch": 0.7210455159981974,
      "grad_norm": 3.495382070541382,
      "learning_rate": 3.194945848375451e-05,
      "loss": 0.1792,
      "step": 400
    },
    {
      "epoch": 0.7390716538981523,
      "grad_norm": 2.8898510932922363,
      "learning_rate": 3.149819494584837e-05,
      "loss": 0.1085,
      "step": 410
    },
    {
      "epoch": 0.7570977917981072,
      "grad_norm": 3.3261382579803467,
      "learning_rate": 3.104693140794224e-05,
      "loss": 0.203,
      "step": 420
    },
    {
      "epoch": 0.7751239296980622,
      "grad_norm": 3.417875051498413,
      "learning_rate": 3.0595667870036104e-05,
      "loss": 0.1491,
      "step": 430
    },
    {
      "epoch": 0.7931500675980171,
      "grad_norm": 7.868630409240723,
      "learning_rate": 3.0144404332129967e-05,
      "loss": 0.1777,
      "step": 440
    },
    {
      "epoch": 0.8111762054979721,
      "grad_norm": 2.16607403755188,
      "learning_rate": 2.969314079422383e-05,
      "loss": 0.1406,
      "step": 450
    },
    {
      "epoch": 0.829202343397927,
      "grad_norm": 5.121480464935303,
      "learning_rate": 2.924187725631769e-05,
      "loss": 0.1387,
      "step": 460
    },
    {
      "epoch": 0.847228481297882,
      "grad_norm": 6.610152244567871,
      "learning_rate": 2.879061371841155e-05,
      "loss": 0.1158,
      "step": 470
    },
    {
      "epoch": 0.8652546191978369,
      "grad_norm": 5.962329387664795,
      "learning_rate": 2.8339350180505413e-05,
      "loss": 0.127,
      "step": 480
    },
    {
      "epoch": 0.8832807570977917,
      "grad_norm": 5.11064338684082,
      "learning_rate": 2.7888086642599282e-05,
      "loss": 0.1232,
      "step": 490
    },
    {
      "epoch": 0.9013068949977467,
      "grad_norm": 5.128263473510742,
      "learning_rate": 2.7436823104693144e-05,
      "loss": 0.1084,
      "step": 500
    },
    {
      "epoch": 0.9193330328977016,
      "grad_norm": 8.249767303466797,
      "learning_rate": 2.6985559566787007e-05,
      "loss": 0.1453,
      "step": 510
    },
    {
      "epoch": 0.9373591707976566,
      "grad_norm": 14.850152969360352,
      "learning_rate": 2.6534296028880866e-05,
      "loss": 0.1102,
      "step": 520
    },
    {
      "epoch": 0.9553853086976115,
      "grad_norm": 4.934476852416992,
      "learning_rate": 2.6083032490974728e-05,
      "loss": 0.1493,
      "step": 530
    },
    {
      "epoch": 0.9734114465975665,
      "grad_norm": 6.984730243682861,
      "learning_rate": 2.563176895306859e-05,
      "loss": 0.1344,
      "step": 540
    },
    {
      "epoch": 0.9914375844975214,
      "grad_norm": 4.595370292663574,
      "learning_rate": 2.518050541516246e-05,
      "loss": 0.1246,
      "step": 550
    },
    {
      "epoch": 1.0,
      "eval_f1": 0.7272727272727273,
      "eval_fn": 95,
      "eval_fp": 106,
      "eval_loss": 0.11311017721891403,
      "eval_precision": 0.7165775401069518,
      "eval_recall": 0.7382920110192838,
      "eval_runtime": 56.8141,
      "eval_samples_per_second": 82.251,
      "eval_steps_per_second": 5.157,
      "eval_tn": 4204,
      "eval_tp": 268,
      "step": 555
    },
    {
      "epoch": 1.0090130689499774,
      "grad_norm": 3.8490426540374756,
      "learning_rate": 2.472924187725632e-05,
      "loss": 0.131,
      "step": 560
    },
    {
      "epoch": 1.0270392068499323,
      "grad_norm": 8.608445167541504,
      "learning_rate": 2.427797833935018e-05,
      "loss": 0.0894,
      "step": 570
    },
    {
      "epoch": 1.0450653447498874,
      "grad_norm": 7.926303863525391,
      "learning_rate": 2.3826714801444043e-05,
      "loss": 0.1462,
      "step": 580
    },
    {
      "epoch": 1.0630914826498423,
      "grad_norm": 5.411970138549805,
      "learning_rate": 2.3375451263537906e-05,
      "loss": 0.1556,
      "step": 590
    },
    {
      "epoch": 1.0811176205497972,
      "grad_norm": 6.707615375518799,
      "learning_rate": 2.292418772563177e-05,
      "loss": 0.0999,
      "step": 600
    },
    {
      "epoch": 1.099143758449752,
      "grad_norm": 5.215950012207031,
      "learning_rate": 2.2472924187725634e-05,
      "loss": 0.0854,
      "step": 610
    },
    {
      "epoch": 1.117169896349707,
      "grad_norm": 1.6493467092514038,
      "learning_rate": 2.2021660649819496e-05,
      "loss": 0.0806,
      "step": 620
    },
    {
      "epoch": 1.135196034249662,
      "grad_norm": 8.559683799743652,
      "learning_rate": 2.157039711191336e-05,
      "loss": 0.1422,
      "step": 630
    },
    {
      "epoch": 1.153222172149617,
      "grad_norm": 3.0874404907226562,
      "learning_rate": 2.111913357400722e-05,
      "loss": 0.1436,
      "step": 640
    },
    {
      "epoch": 1.1712483100495719,
      "grad_norm": 13.975804328918457,
      "learning_rate": 2.0667870036101083e-05,
      "loss": 0.1294,
      "step": 650
    },
    {
      "epoch": 1.1892744479495267,
      "grad_norm": 52.61249923706055,
      "learning_rate": 2.0216606498194946e-05,
      "loss": 0.127,
      "step": 660
    },
    {
      "epoch": 1.2073005858494819,
      "grad_norm": 4.427943706512451,
      "learning_rate": 1.976534296028881e-05,
      "loss": 0.1101,
      "step": 670
    },
    {
      "epoch": 1.2253267237494367,
      "grad_norm": 3.466806411743164,
      "learning_rate": 1.9314079422382674e-05,
      "loss": 0.0733,
      "step": 680
    },
    {
      "epoch": 1.2433528616493916,
      "grad_norm": 7.252458572387695,
      "learning_rate": 1.8862815884476533e-05,
      "loss": 0.1129,
      "step": 690
    },
    {
      "epoch": 1.2613789995493465,
      "grad_norm": 7.521577835083008,
      "learning_rate": 1.84115523465704e-05,
      "loss": 0.0894,
      "step": 700
    },
    {
      "epoch": 1.2794051374493014,
      "grad_norm": 2.96766996383667,
      "learning_rate": 1.796028880866426e-05,
      "loss": 0.0828,
      "step": 710
    },
    {
      "epoch": 1.2974312753492563,
      "grad_norm": 3.5544350147247314,
      "learning_rate": 1.7509025270758123e-05,
      "loss": 0.1205,
      "step": 720
    },
    {
      "epoch": 1.3154574132492114,
      "grad_norm": 2.323742389678955,
      "learning_rate": 1.7057761732851986e-05,
      "loss": 0.1114,
      "step": 730
    },
    {
      "epoch": 1.3334835511491663,
      "grad_norm": 4.532501220703125,
      "learning_rate": 1.6606498194945848e-05,
      "loss": 0.1242,
      "step": 740
    },
    {
      "epoch": 1.3515096890491212,
      "grad_norm": 4.962907314300537,
      "learning_rate": 1.615523465703971e-05,
      "loss": 0.1222,
      "step": 750
    },
    {
      "epoch": 1.3695358269490763,
      "grad_norm": 2.745126724243164,
      "learning_rate": 1.5703971119133576e-05,
      "loss": 0.1089,
      "step": 760
    },
    {
      "epoch": 1.3875619648490312,
      "grad_norm": 1.2508844137191772,
      "learning_rate": 1.5252707581227438e-05,
      "loss": 0.086,
      "step": 770
    },
    {
      "epoch": 1.405588102748986,
      "grad_norm": 9.282389640808105,
      "learning_rate": 1.4801444043321299e-05,
      "loss": 0.1142,
      "step": 780
    },
    {
      "epoch": 1.423614240648941,
      "grad_norm": 3.1791019439697266,
      "learning_rate": 1.4350180505415165e-05,
      "loss": 0.1237,
      "step": 790
    },
    {
      "epoch": 1.4416403785488958,
      "grad_norm": 6.121169567108154,
      "learning_rate": 1.3898916967509026e-05,
      "loss": 0.1346,
      "step": 800
    },
    {
      "epoch": 1.4596665164488507,
      "grad_norm": 19.356491088867188,
      "learning_rate": 1.3447653429602888e-05,
      "loss": 0.0937,
      "step": 810
    },
    {
      "epoch": 1.4776926543488058,
      "grad_norm": 6.563851356506348,
      "learning_rate": 1.299638989169675e-05,
      "loss": 0.1136,
      "step": 820
    },
    {
      "epoch": 1.4957187922487607,
      "grad_norm": 5.794551372528076,
      "learning_rate": 1.2545126353790614e-05,
      "loss": 0.119,
      "step": 830
    },
    {
      "epoch": 1.5137449301487156,
      "grad_norm": 2.885023832321167,
      "learning_rate": 1.2093862815884477e-05,
      "loss": 0.1073,
      "step": 840
    },
    {
      "epoch": 1.5317710680486707,
      "grad_norm": 3.519407272338867,
      "learning_rate": 1.164259927797834e-05,
      "loss": 0.122,
      "step": 850
    },
    {
      "epoch": 1.5497972059486256,
      "grad_norm": 3.4222164154052734,
      "learning_rate": 1.1191335740072201e-05,
      "loss": 0.1002,
      "step": 860
    },
    {
      "epoch": 1.5678233438485805,
      "grad_norm": 8.442163467407227,
      "learning_rate": 1.0740072202166066e-05,
      "loss": 0.1301,
      "step": 870
    },
    {
      "epoch": 1.5858494817485354,
      "grad_norm": 5.987030982971191,
      "learning_rate": 1.028880866425993e-05,
      "loss": 0.1197,
      "step": 880
    },
    {
      "epoch": 1.6038756196484902,
      "grad_norm": 3.641470432281494,
      "learning_rate": 9.83754512635379e-06,
      "loss": 0.094,
      "step": 890
    },
    {
      "epoch": 1.6219017575484451,
      "grad_norm": 4.365111827850342,
      "learning_rate": 9.386281588447654e-06,
      "loss": 0.1309,
      "step": 900
    },
    {
      "epoch": 1.6399278954484002,
      "grad_norm": 6.229074478149414,
      "learning_rate": 8.935018050541517e-06,
      "loss": 0.1002,
      "step": 910
    },
    {
      "epoch": 1.6579540333483551,
      "grad_norm": 8.32435417175293,
      "learning_rate": 8.483754512635379e-06,
      "loss": 0.1155,
      "step": 920
    },
    {
      "epoch": 1.67598017124831,
      "grad_norm": 2.6587719917297363,
      "learning_rate": 8.032490974729243e-06,
      "loss": 0.1047,
      "step": 930
    },
    {
      "epoch": 1.694006309148265,
      "grad_norm": 31.095243453979492,
      "learning_rate": 7.581227436823105e-06,
      "loss": 0.0957,
      "step": 940
    },
    {
      "epoch": 1.71203244704822,
      "grad_norm": 148.5176544189453,
      "learning_rate": 7.129963898916968e-06,
      "loss": 0.1138,
      "step": 950
    },
    {
      "epoch": 1.7300585849481749,
      "grad_norm": 4.901301383972168,
      "learning_rate": 6.678700361010831e-06,
      "loss": 0.0939,
      "step": 960
    },
    {
      "epoch": 1.7480847228481298,
      "grad_norm": 4.844261646270752,
      "learning_rate": 6.227436823104693e-06,
      "loss": 0.0905,
      "step": 970
    },
    {
      "epoch": 1.7661108607480847,
      "grad_norm": 6.100379943847656,
      "learning_rate": 5.776173285198557e-06,
      "loss": 0.1169,
      "step": 980
    },
    {
      "epoch": 1.7841369986480395,
      "grad_norm": 9.385725021362305,
      "learning_rate": 5.324909747292419e-06,
      "loss": 0.0843,
      "step": 990
    },
    {
      "epoch": 1.8021631365479944,
      "grad_norm": 8.386088371276855,
      "learning_rate": 4.873646209386281e-06,
      "loss": 0.1044,
      "step": 1000
    },
    {
      "epoch": 1.8201892744479495,
      "grad_norm": 13.418025970458984,
      "learning_rate": 4.422382671480145e-06,
      "loss": 0.1351,
      "step": 1010
    },
    {
      "epoch": 1.8382154123479044,
      "grad_norm": 13.804243087768555,
      "learning_rate": 3.971119133574008e-06,
      "loss": 0.0919,
      "step": 1020
    },
    {
      "epoch": 1.8562415502478595,
      "grad_norm": 7.5179924964904785,
      "learning_rate": 3.51985559566787e-06,
      "loss": 0.1305,
      "step": 1030
    },
    {
      "epoch": 1.8742676881478144,
      "grad_norm": 4.629436016082764,
      "learning_rate": 3.068592057761733e-06,
      "loss": 0.1278,
      "step": 1040
    },
    {
      "epoch": 1.8922938260477693,
      "grad_norm": 5.896274566650391,
      "learning_rate": 2.6173285198555957e-06,
      "loss": 0.1211,
      "step": 1050
    },
    {
      "epoch": 1.9103199639477242,
      "grad_norm": 7.580524921417236,
      "learning_rate": 2.1660649819494585e-06,
      "loss": 0.0993,
      "step": 1060
    },
    {
      "epoch": 1.928346101847679,
      "grad_norm": 12.92769718170166,
      "learning_rate": 1.7148014440433215e-06,
      "loss": 0.1557,
      "step": 1070
    },
    {
      "epoch": 1.946372239747634,
      "grad_norm": 12.059624671936035,
      "learning_rate": 1.263537906137184e-06,
      "loss": 0.0929,
      "step": 1080
    },
    {
      "epoch": 1.9643983776475888,
      "grad_norm": 4.273173809051514,
      "learning_rate": 8.122743682310469e-07,
      "loss": 0.1053,
      "step": 1090
    },
    {
      "epoch": 1.982424515547544,
      "grad_norm": 7.6728596687316895,
      "learning_rate": 3.610108303249098e-07,
      "loss": 0.0997,
      "step": 1100
    },
    {
      "epoch": 1.996845425867508,
      "eval_f1": 0.7513513513513513,
      "eval_fn": 85,
      "eval_fp": 99,
      "eval_loss": 0.10865739732980728,
      "eval_precision": 0.7374005305039788,
      "eval_recall": 0.7658402203856749,
      "eval_runtime": 59.9339,
      "eval_samples_per_second": 77.969,
      "eval_steps_per_second": 4.889,
      "eval_tn": 4211,
      "eval_tp": 278,
      "step": 1108
    }
  ],
  "logging_steps": 10,
  "max_steps": 1108,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 7.68611172962304e+16,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}
|
|