{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1022,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.009784735812133072,
      "grad_norm": 4.04649150142379,
      "learning_rate": 9.70873786407767e-07,
      "loss": 1.7598,
      "step": 10
    },
    {
      "epoch": 0.019569471624266144,
      "grad_norm": 1.8898281767230303,
      "learning_rate": 1.941747572815534e-06,
      "loss": 1.6964,
      "step": 20
    },
    {
      "epoch": 0.029354207436399216,
      "grad_norm": 1.483970324647816,
      "learning_rate": 2.912621359223301e-06,
      "loss": 1.611,
      "step": 30
    },
    {
      "epoch": 0.03913894324853229,
      "grad_norm": 1.3044067583298566,
      "learning_rate": 3.883495145631068e-06,
      "loss": 1.4946,
      "step": 40
    },
    {
      "epoch": 0.04892367906066536,
      "grad_norm": 1.1307579978177575,
      "learning_rate": 4.854368932038836e-06,
      "loss": 1.4397,
      "step": 50
    },
    {
      "epoch": 0.05870841487279843,
      "grad_norm": 1.1230632491576633,
      "learning_rate": 5.825242718446602e-06,
      "loss": 1.3915,
      "step": 60
    },
    {
      "epoch": 0.0684931506849315,
      "grad_norm": 1.2171487481951635,
      "learning_rate": 6.79611650485437e-06,
      "loss": 1.3412,
      "step": 70
    },
    {
      "epoch": 0.07827788649706457,
      "grad_norm": 1.1602684245071337,
      "learning_rate": 7.766990291262136e-06,
      "loss": 1.3442,
      "step": 80
    },
    {
      "epoch": 0.08806262230919765,
      "grad_norm": 1.1297981589874249,
      "learning_rate": 8.737864077669904e-06,
      "loss": 1.307,
      "step": 90
    },
    {
      "epoch": 0.09784735812133072,
      "grad_norm": 1.1989099168675559,
      "learning_rate": 9.708737864077671e-06,
      "loss": 1.3041,
      "step": 100
    },
    {
      "epoch": 0.10763209393346379,
      "grad_norm": 1.0923068630227757,
      "learning_rate": 9.998568523945896e-06,
      "loss": 1.2911,
      "step": 110
    },
    {
      "epoch": 0.11741682974559686,
      "grad_norm": 1.2597514623600858,
      "learning_rate": 9.991559185756013e-06,
      "loss": 1.2871,
      "step": 120
    },
    {
      "epoch": 0.12720156555772993,
      "grad_norm": 1.137541898001787,
      "learning_rate": 9.978717241383576e-06,
      "loss": 1.261,
      "step": 130
    },
    {
      "epoch": 0.136986301369863,
      "grad_norm": 1.1658887896761427,
      "learning_rate": 9.960057696561538e-06,
      "loss": 1.2554,
      "step": 140
    },
    {
      "epoch": 0.14677103718199608,
      "grad_norm": 1.2229261479256421,
      "learning_rate": 9.935602354853163e-06,
      "loss": 1.2542,
      "step": 150
    },
    {
      "epoch": 0.15655577299412915,
      "grad_norm": 1.1243006143779628,
      "learning_rate": 9.905379792174697e-06,
      "loss": 1.2521,
      "step": 160
    },
    {
      "epoch": 0.16634050880626222,
      "grad_norm": 1.0968643854275186,
      "learning_rate": 9.869425323404593e-06,
      "loss": 1.2677,
      "step": 170
    },
    {
      "epoch": 0.1761252446183953,
      "grad_norm": 1.1045669372583213,
      "learning_rate": 9.82778096111829e-06,
      "loss": 1.2511,
      "step": 180
    },
    {
      "epoch": 0.18590998043052837,
      "grad_norm": 1.1112118778947822,
      "learning_rate": 9.780495366496772e-06,
      "loss": 1.2446,
      "step": 190
    },
    {
      "epoch": 0.19569471624266144,
      "grad_norm": 1.0826613870591981,
      "learning_rate": 9.727623792466285e-06,
      "loss": 1.2314,
      "step": 200
    },
    {
      "epoch": 0.2054794520547945,
      "grad_norm": 1.098249884945961,
      "learning_rate": 9.669228019135625e-06,
      "loss": 1.2458,
      "step": 210
    },
    {
      "epoch": 0.21526418786692758,
      "grad_norm": 1.1612733633383774,
      "learning_rate": 9.605376281606472e-06,
      "loss": 1.2237,
      "step": 220
    },
    {
      "epoch": 0.22504892367906065,
      "grad_norm": 1.125936251992049,
      "learning_rate": 9.53614319024108e-06,
      "loss": 1.2146,
      "step": 230
    },
    {
      "epoch": 0.23483365949119372,
      "grad_norm": 1.1467535473229693,
      "learning_rate": 9.461609643480541e-06,
      "loss": 1.2138,
      "step": 240
    },
    {
      "epoch": 0.2446183953033268,
      "grad_norm": 1.0542912347811166,
      "learning_rate": 9.381862733315444e-06,
      "loss": 1.2111,
      "step": 250
    },
    {
      "epoch": 0.25440313111545987,
      "grad_norm": 1.149152919711082,
      "learning_rate": 9.296995643519432e-06,
      "loss": 1.2127,
      "step": 260
    },
    {
      "epoch": 0.26418786692759294,
      "grad_norm": 1.0920081819134964,
      "learning_rate": 9.207107540764519e-06,
      "loss": 1.2338,
      "step": 270
    },
    {
      "epoch": 0.273972602739726,
      "grad_norm": 1.1780253225746746,
      "learning_rate": 9.112303458745462e-06,
      "loss": 1.2054,
      "step": 280
    },
    {
      "epoch": 0.2837573385518591,
      "grad_norm": 1.0205557268076189,
      "learning_rate": 9.012694175448526e-06,
      "loss": 1.2223,
      "step": 290
    },
    {
      "epoch": 0.29354207436399216,
      "grad_norm": 1.182238809951801,
      "learning_rate": 8.90839608370809e-06,
      "loss": 1.2137,
      "step": 300
    },
    {
      "epoch": 0.30332681017612523,
      "grad_norm": 1.0623824819302699,
      "learning_rate": 8.799531055202332e-06,
      "loss": 1.2021,
      "step": 310
    },
    {
      "epoch": 0.3131115459882583,
      "grad_norm": 1.103614217675967,
      "learning_rate": 8.686226298046938e-06,
      "loss": 1.2107,
      "step": 320
    },
    {
      "epoch": 0.32289628180039137,
      "grad_norm": 1.0562421868073395,
      "learning_rate": 8.568614208153181e-06,
      "loss": 1.2051,
      "step": 330
    },
    {
      "epoch": 0.33268101761252444,
      "grad_norm": 1.061840954937612,
      "learning_rate": 8.446832214524132e-06,
      "loss": 1.1906,
      "step": 340
    },
    {
      "epoch": 0.3424657534246575,
      "grad_norm": 1.1618629276892396,
      "learning_rate": 8.321022618669706e-06,
      "loss": 1.2189,
      "step": 350
    },
    {
      "epoch": 0.3522504892367906,
      "grad_norm": 0.9952732308566709,
      "learning_rate": 8.191332428328227e-06,
      "loss": 1.2056,
      "step": 360
    },
    {
      "epoch": 0.36203522504892366,
      "grad_norm": 1.08417836514905,
      "learning_rate": 8.057913185688789e-06,
      "loss": 1.2058,
      "step": 370
    },
    {
      "epoch": 0.37181996086105673,
      "grad_norm": 1.0664180337103972,
      "learning_rate": 7.920920790315139e-06,
      "loss": 1.1787,
      "step": 380
    },
    {
      "epoch": 0.3816046966731898,
      "grad_norm": 1.0237212990645388,
      "learning_rate": 7.780515316978003e-06,
      "loss": 1.1868,
      "step": 390
    },
    {
      "epoch": 0.3913894324853229,
      "grad_norm": 1.0196131058576328,
      "learning_rate": 7.6368608286087e-06,
      "loss": 1.1945,
      "step": 400
    },
    {
      "epoch": 0.40117416829745595,
      "grad_norm": 1.095727756369159,
      "learning_rate": 7.490125184592607e-06,
      "loss": 1.1813,
      "step": 410
    },
    {
      "epoch": 0.410958904109589,
      "grad_norm": 1.0260192699123412,
      "learning_rate": 7.340479844626516e-06,
      "loss": 1.1996,
      "step": 420
    },
    {
      "epoch": 0.4207436399217221,
      "grad_norm": 1.0159524740262653,
      "learning_rate": 7.188099668369015e-06,
      "loss": 1.193,
      "step": 430
    },
    {
      "epoch": 0.43052837573385516,
      "grad_norm": 1.1094365729981088,
      "learning_rate": 7.033162711118077e-06,
      "loss": 1.182,
      "step": 440
    },
    {
      "epoch": 0.44031311154598823,
      "grad_norm": 1.0230643234668957,
      "learning_rate": 6.8758500157545185e-06,
      "loss": 1.1781,
      "step": 450
    },
    {
      "epoch": 0.4500978473581213,
      "grad_norm": 1.0282046095119801,
      "learning_rate": 6.716345401194519e-06,
      "loss": 1.1613,
      "step": 460
    },
    {
      "epoch": 0.4598825831702544,
      "grad_norm": 1.0383674282070607,
      "learning_rate": 6.554835247598347e-06,
      "loss": 1.1798,
      "step": 470
    },
    {
      "epoch": 0.46966731898238745,
      "grad_norm": 0.9837704553579479,
      "learning_rate": 6.391508278586287e-06,
      "loss": 1.181,
      "step": 480
    },
    {
      "epoch": 0.4794520547945205,
      "grad_norm": 1.0580246864370755,
      "learning_rate": 6.226555340716258e-06,
      "loss": 1.1887,
      "step": 490
    },
    {
      "epoch": 0.4892367906066536,
      "grad_norm": 1.017013165650618,
      "learning_rate": 6.060169180480776e-06,
      "loss": 1.1714,
      "step": 500
    },
    {
      "epoch": 0.49902152641878667,
      "grad_norm": 1.0072748086969616,
      "learning_rate": 5.8925442190838735e-06,
      "loss": 1.1818,
      "step": 510
    },
    {
      "epoch": 0.5088062622309197,
      "grad_norm": 1.0791337064960191,
      "learning_rate": 5.723876325261109e-06,
      "loss": 1.1766,
      "step": 520
    },
    {
      "epoch": 0.5185909980430529,
      "grad_norm": 1.0401219276204419,
      "learning_rate": 5.554362586408164e-06,
      "loss": 1.188,
      "step": 530
    },
    {
      "epoch": 0.5283757338551859,
      "grad_norm": 1.04026227937617,
      "learning_rate": 5.3842010782854306e-06,
      "loss": 1.1793,
      "step": 540
    },
    {
      "epoch": 0.538160469667319,
      "grad_norm": 1.17417294090351,
      "learning_rate": 5.2135906335677075e-06,
      "loss": 1.1734,
      "step": 550
    },
    {
      "epoch": 0.547945205479452,
      "grad_norm": 1.014702815772571,
      "learning_rate": 5.0427306095094365e-06,
      "loss": 1.169,
      "step": 560
    },
    {
      "epoch": 0.5577299412915852,
      "grad_norm": 1.0275982848917589,
      "learning_rate": 4.871820654996985e-06,
      "loss": 1.1856,
      "step": 570
    },
    {
      "epoch": 0.5675146771037182,
      "grad_norm": 1.0530785296526888,
      "learning_rate": 4.701060477260152e-06,
      "loss": 1.1662,
      "step": 580
    },
    {
      "epoch": 0.5772994129158513,
      "grad_norm": 1.023986668412493,
      "learning_rate": 4.5306496085154905e-06,
      "loss": 1.1786,
      "step": 590
    },
    {
      "epoch": 0.5870841487279843,
      "grad_norm": 1.0639406354816834,
      "learning_rate": 4.360787172814148e-06,
      "loss": 1.1516,
      "step": 600
    },
    {
      "epoch": 0.5968688845401174,
      "grad_norm": 0.9500520632946152,
      "learning_rate": 4.191671653366638e-06,
      "loss": 1.175,
      "step": 610
    },
    {
      "epoch": 0.6066536203522505,
      "grad_norm": 0.984391446028831,
      "learning_rate": 4.02350066061643e-06,
      "loss": 1.1656,
      "step": 620
    },
    {
      "epoch": 0.6164383561643836,
      "grad_norm": 1.005147372886877,
      "learning_rate": 3.85647070133336e-06,
      "loss": 1.1655,
      "step": 630
    },
    {
      "epoch": 0.6262230919765166,
      "grad_norm": 1.015642402945713,
      "learning_rate": 3.69077694899668e-06,
      "loss": 1.1572,
      "step": 640
    },
    {
      "epoch": 0.6360078277886497,
      "grad_norm": 1.0201007215429072,
      "learning_rate": 3.5266130157360557e-06,
      "loss": 1.1707,
      "step": 650
    },
    {
      "epoch": 0.6457925636007827,
      "grad_norm": 0.9956935095703923,
      "learning_rate": 3.3641707260969724e-06,
      "loss": 1.1619,
      "step": 660
    },
    {
      "epoch": 0.6555772994129159,
      "grad_norm": 0.9644023257276766,
      "learning_rate": 3.2036398928949475e-06,
      "loss": 1.1681,
      "step": 670
    },
    {
      "epoch": 0.6653620352250489,
      "grad_norm": 1.029502516764066,
      "learning_rate": 3.0452080954204133e-06,
      "loss": 1.1578,
      "step": 680
    },
    {
      "epoch": 0.675146771037182,
      "grad_norm": 0.9452739847782414,
      "learning_rate": 2.8890604602534777e-06,
      "loss": 1.166,
      "step": 690
    },
    {
      "epoch": 0.684931506849315,
      "grad_norm": 0.9549493201312858,
      "learning_rate": 2.735379444944655e-06,
      "loss": 1.156,
      "step": 700
    },
    {
      "epoch": 0.6947162426614482,
      "grad_norm": 0.9808822395078565,
      "learning_rate": 2.5843446248143308e-06,
      "loss": 1.1498,
      "step": 710
    },
    {
      "epoch": 0.7045009784735812,
      "grad_norm": 0.9725284687123298,
      "learning_rate": 2.436132483120121e-06,
      "loss": 1.1414,
      "step": 720
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 1.0146025684804074,
      "learning_rate": 2.290916204837262e-06,
      "loss": 1.1494,
      "step": 730
    },
    {
      "epoch": 0.7240704500978473,
      "grad_norm": 0.9942038199138818,
      "learning_rate": 2.148865474293036e-06,
      "loss": 1.1532,
      "step": 740
    },
    {
      "epoch": 0.7338551859099804,
      "grad_norm": 0.9634937304590753,
      "learning_rate": 2.010146276891675e-06,
      "loss": 1.1524,
      "step": 750
    },
    {
      "epoch": 0.7436399217221135,
      "grad_norm": 0.9646041486253625,
      "learning_rate": 1.8749207051614448e-06,
      "loss": 1.1553,
      "step": 760
    },
    {
      "epoch": 0.7534246575342466,
      "grad_norm": 0.9645946065696461,
      "learning_rate": 1.743346769350519e-06,
      "loss": 1.1493,
      "step": 770
    },
    {
      "epoch": 0.7632093933463796,
      "grad_norm": 0.9987538300542498,
      "learning_rate": 1.6155782127929787e-06,
      "loss": 1.1544,
      "step": 780
    },
    {
      "epoch": 0.7729941291585127,
      "grad_norm": 0.9706627230186383,
      "learning_rate": 1.4917643322606668e-06,
      "loss": 1.169,
      "step": 790
    },
    {
      "epoch": 0.7827788649706457,
      "grad_norm": 0.995763457209775,
      "learning_rate": 1.3720498035108393e-06,
      "loss": 1.1406,
      "step": 800
    },
    {
      "epoch": 0.7925636007827789,
      "grad_norm": 0.9780490169698375,
      "learning_rate": 1.256574512233426e-06,
      "loss": 1.1464,
      "step": 810
    },
    {
      "epoch": 0.8023483365949119,
      "grad_norm": 1.058131773284045,
      "learning_rate": 1.1454733905954679e-06,
      "loss": 1.1682,
      "step": 820
    },
    {
      "epoch": 0.812133072407045,
      "grad_norm": 0.9663337521771117,
      "learning_rate": 1.0388762595737012e-06,
      "loss": 1.1514,
      "step": 830
    },
    {
      "epoch": 0.821917808219178,
      "grad_norm": 1.0060251032305096,
      "learning_rate": 9.36907677259562e-07,
      "loss": 1.1381,
      "step": 840
    },
    {
      "epoch": 0.8317025440313112,
      "grad_norm": 1.0006702241602894,
      "learning_rate": 8.396867933138031e-07,
      "loss": 1.1544,
      "step": 850
    },
    {
      "epoch": 0.8414872798434442,
      "grad_norm": 0.9655200991402652,
      "learning_rate": 7.473272097408657e-07,
      "loss": 1.1572,
      "step": 860
    },
    {
      "epoch": 0.8512720156555773,
      "grad_norm": 1.0010147904810007,
      "learning_rate": 6.599368481456237e-07,
      "loss": 1.1645,
      "step": 870
    },
    {
      "epoch": 0.8610567514677103,
      "grad_norm": 0.9615849194888773,
      "learning_rate": 5.776178236276642e-07,
      "loss": 1.1402,
      "step": 880
    },
    {
      "epoch": 0.8708414872798435,
      "grad_norm": 0.9899952335825283,
      "learning_rate": 5.004663254604125e-07,
      "loss": 1.1434,
      "step": 890
    },
    {
      "epoch": 0.8806262230919765,
      "grad_norm": 0.9522480445427419,
      "learning_rate": 4.28572504694566e-07,
      "loss": 1.1404,
      "step": 900
    },
    {
      "epoch": 0.8904109589041096,
      "grad_norm": 0.974551459961046,
      "learning_rate": 3.620203688171381e-07,
      "loss": 1.1577,
      "step": 910
    },
    {
      "epoch": 0.9001956947162426,
      "grad_norm": 0.9668235111962397,
      "learning_rate": 3.0088768358923545e-07,
      "loss": 1.1427,
      "step": 920
    },
    {
      "epoch": 0.9099804305283757,
      "grad_norm": 1.0319353813022967,
      "learning_rate": 2.4524588217723746e-07,
      "loss": 1.1373,
      "step": 930
    },
    {
      "epoch": 0.9197651663405088,
      "grad_norm": 1.029861953702673,
      "learning_rate": 1.9515998168358809e-07,
      "loss": 1.1565,
      "step": 940
    },
    {
      "epoch": 0.9295499021526419,
      "grad_norm": 0.950517130129004,
      "learning_rate": 1.506885071747033e-07,
      "loss": 1.1409,
      "step": 950
    },
    {
      "epoch": 0.9393346379647749,
      "grad_norm": 0.9918632320485316,
      "learning_rate": 1.1188342329479962e-07,
      "loss": 1.1592,
      "step": 960
    },
    {
      "epoch": 0.949119373776908,
      "grad_norm": 0.9513450607234911,
      "learning_rate": 7.879007354552271e-08,
      "loss": 1.1331,
      "step": 970
    },
    {
      "epoch": 0.958904109589041,
      "grad_norm": 0.9860328317263265,
      "learning_rate": 5.14471273023448e-08,
      "loss": 1.1588,
      "step": 980
    },
    {
      "epoch": 0.9686888454011742,
      "grad_norm": 0.9557954337884952,
      "learning_rate": 2.9886534629638975e-08,
      "loss": 1.1482,
      "step": 990
    },
    {
      "epoch": 0.9784735812133072,
      "grad_norm": 0.982073690453174,
      "learning_rate": 1.4133488947225548e-08,
      "loss": 1.17,
      "step": 1000
    },
    {
      "epoch": 0.9882583170254403,
      "grad_norm": 0.9769182862134684,
      "learning_rate": 4.2063975920153545e-09,
      "loss": 1.155,
      "step": 1010
    },
    {
      "epoch": 0.9980430528375733,
      "grad_norm": 0.9371852604717477,
      "learning_rate": 1.168603091489251e-10,
      "loss": 1.1449,
      "step": 1020
    },
    {
      "epoch": 1.0,
      "step": 1022,
      "total_flos": 1.3956152400113828e+18,
      "train_loss": 1.210920991029758,
      "train_runtime": 4090.7445,
      "train_samples_per_second": 31.964,
      "train_steps_per_second": 0.25
    }
  ],
  "logging_steps": 10,
  "max_steps": 1022,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3956152400113828e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}