{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 24.0,
  "global_step": 1500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.32,
      "learning_rate": 1.978494623655914e-05,
      "loss": 2.3806,
      "step": 20
    },
    {
      "epoch": 0.64,
      "learning_rate": 1.956989247311828e-05,
      "loss": 2.2445,
      "step": 40
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.935483870967742e-05,
      "loss": 1.9394,
      "step": 60
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.9150537634408603e-05,
      "loss": 1.7953,
      "step": 80
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.8935483870967742e-05,
      "loss": 1.6449,
      "step": 100
    },
    {
      "epoch": 1.92,
      "learning_rate": 1.8720430107526882e-05,
      "loss": 1.4618,
      "step": 120
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.8505376344086025e-05,
      "loss": 1.2538,
      "step": 140
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.8290322580645165e-05,
      "loss": 1.0844,
      "step": 160
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.8075268817204305e-05,
      "loss": 0.9909,
      "step": 180
    },
    {
      "epoch": 3.2,
      "learning_rate": 1.7860215053763444e-05,
      "loss": 0.9419,
      "step": 200
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.764516129032258e-05,
      "loss": 0.8202,
      "step": 220
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.743010752688172e-05,
      "loss": 0.7706,
      "step": 240
    },
    {
      "epoch": 4.16,
      "learning_rate": 1.721505376344086e-05,
      "loss": 0.7102,
      "step": 260
    },
    {
      "epoch": 4.48,
      "learning_rate": 1.7e-05,
      "loss": 0.6723,
      "step": 280
    },
    {
      "epoch": 4.8,
      "learning_rate": 1.678494623655914e-05,
      "loss": 0.6262,
      "step": 300
    },
    {
      "epoch": 5.12,
      "learning_rate": 1.656989247311828e-05,
      "loss": 0.5895,
      "step": 320
    },
    {
      "epoch": 5.44,
      "learning_rate": 1.6354838709677422e-05,
      "loss": 0.5636,
      "step": 340
    },
    {
      "epoch": 5.76,
      "learning_rate": 1.6139784946236562e-05,
      "loss": 0.5095,
      "step": 360
    },
    {
      "epoch": 6.08,
      "learning_rate": 1.5924731182795702e-05,
      "loss": 0.5093,
      "step": 380
    },
    {
      "epoch": 6.4,
      "learning_rate": 1.570967741935484e-05,
      "loss": 0.4729,
      "step": 400
    },
    {
      "epoch": 6.72,
      "learning_rate": 1.549462365591398e-05,
      "loss": 0.4805,
      "step": 420
    },
    {
      "epoch": 7.04,
      "learning_rate": 1.527956989247312e-05,
      "loss": 0.4508,
      "step": 440
    },
    {
      "epoch": 7.36,
      "learning_rate": 1.5064516129032259e-05,
      "loss": 0.4254,
      "step": 460
    },
    {
      "epoch": 7.68,
      "learning_rate": 1.4849462365591399e-05,
      "loss": 0.4104,
      "step": 480
    },
    {
      "epoch": 8.0,
      "learning_rate": 1.4634408602150539e-05,
      "loss": 0.4135,
      "step": 500
    },
    {
      "epoch": 8.32,
      "learning_rate": 1.4419354838709678e-05,
      "loss": 0.3828,
      "step": 520
    },
    {
      "epoch": 8.64,
      "learning_rate": 1.4204301075268818e-05,
      "loss": 0.3924,
      "step": 540
    },
    {
      "epoch": 8.96,
      "learning_rate": 1.3989247311827958e-05,
      "loss": 0.3782,
      "step": 560
    },
    {
      "epoch": 9.28,
      "learning_rate": 1.3774193548387098e-05,
      "loss": 0.3569,
      "step": 580
    },
    {
      "epoch": 9.6,
      "learning_rate": 1.3559139784946237e-05,
      "loss": 0.3609,
      "step": 600
    },
    {
      "epoch": 9.92,
      "learning_rate": 1.3344086021505379e-05,
      "loss": 0.3385,
      "step": 620
    },
    {
      "epoch": 10.24,
      "learning_rate": 1.3129032258064518e-05,
      "loss": 0.3246,
      "step": 640
    },
    {
      "epoch": 10.56,
      "learning_rate": 1.2913978494623658e-05,
      "loss": 0.3229,
      "step": 660
    },
    {
      "epoch": 10.88,
      "learning_rate": 1.2698924731182796e-05,
      "loss": 0.3051,
      "step": 680
    },
    {
      "epoch": 11.2,
      "learning_rate": 1.2483870967741936e-05,
      "loss": 0.3159,
      "step": 700
    },
    {
      "epoch": 11.52,
      "learning_rate": 1.2268817204301076e-05,
      "loss": 0.2884,
      "step": 720
    },
    {
      "epoch": 11.84,
      "learning_rate": 1.2053763440860215e-05,
      "loss": 0.2976,
      "step": 740
    },
    {
      "epoch": 12.16,
      "learning_rate": 1.1838709677419355e-05,
      "loss": 0.2934,
      "step": 760
    },
    {
      "epoch": 12.48,
      "learning_rate": 1.1623655913978495e-05,
      "loss": 0.2739,
      "step": 780
    },
    {
      "epoch": 12.8,
      "learning_rate": 1.1408602150537636e-05,
      "loss": 0.2807,
      "step": 800
    },
    {
      "epoch": 13.12,
      "learning_rate": 1.1193548387096776e-05,
      "loss": 0.2593,
      "step": 820
    },
    {
      "epoch": 13.44,
      "learning_rate": 1.0978494623655916e-05,
      "loss": 0.2589,
      "step": 840
    },
    {
      "epoch": 13.76,
      "learning_rate": 1.0763440860215055e-05,
      "loss": 0.2679,
      "step": 860
    },
    {
      "epoch": 14.08,
      "learning_rate": 1.0548387096774195e-05,
      "loss": 0.2463,
      "step": 880
    },
    {
      "epoch": 14.4,
      "learning_rate": 1.0333333333333335e-05,
      "loss": 0.2435,
      "step": 900
    },
    {
      "epoch": 14.72,
      "learning_rate": 1.0118279569892473e-05,
      "loss": 0.2469,
      "step": 920
    },
    {
      "epoch": 15.04,
      "learning_rate": 9.903225806451614e-06,
      "loss": 0.2406,
      "step": 940
    },
    {
      "epoch": 15.36,
      "learning_rate": 9.688172043010754e-06,
      "loss": 0.2231,
      "step": 960
    },
    {
      "epoch": 15.68,
      "learning_rate": 9.473118279569892e-06,
      "loss": 0.2317,
      "step": 980
    },
    {
      "epoch": 16.0,
      "learning_rate": 9.258064516129034e-06,
      "loss": 0.2305,
      "step": 1000
    },
    {
      "epoch": 16.32,
      "learning_rate": 9.043010752688173e-06,
      "loss": 0.2227,
      "step": 1020
    },
    {
      "epoch": 16.64,
      "learning_rate": 8.827956989247313e-06,
      "loss": 0.2253,
      "step": 1040
    },
    {
      "epoch": 16.96,
      "learning_rate": 8.612903225806453e-06,
      "loss": 0.2174,
      "step": 1060
    },
    {
      "epoch": 17.28,
      "learning_rate": 8.397849462365592e-06,
      "loss": 0.2054,
      "step": 1080
    },
    {
      "epoch": 17.6,
      "learning_rate": 8.182795698924732e-06,
      "loss": 0.2171,
      "step": 1100
    },
    {
      "epoch": 17.92,
      "learning_rate": 7.967741935483872e-06,
      "loss": 0.2104,
      "step": 1120
    },
    {
      "epoch": 18.24,
      "learning_rate": 7.752688172043012e-06,
      "loss": 0.2194,
      "step": 1140
    },
    {
      "epoch": 18.56,
      "learning_rate": 7.537634408602151e-06,
      "loss": 0.1948,
      "step": 1160
    },
    {
      "epoch": 18.88,
      "learning_rate": 7.322580645161291e-06,
      "loss": 0.2013,
      "step": 1180
    },
    {
      "epoch": 19.2,
      "learning_rate": 7.10752688172043e-06,
      "loss": 0.2051,
      "step": 1200
    },
    {
      "epoch": 19.52,
      "learning_rate": 6.89247311827957e-06,
      "loss": 0.1946,
      "step": 1220
    },
    {
      "epoch": 19.84,
      "learning_rate": 6.67741935483871e-06,
      "loss": 0.1979,
      "step": 1240
    },
    {
      "epoch": 20.16,
      "learning_rate": 6.46236559139785e-06,
      "loss": 0.1954,
      "step": 1260
    },
    {
      "epoch": 20.48,
      "learning_rate": 6.24731182795699e-06,
      "loss": 0.1846,
      "step": 1280
    },
    {
      "epoch": 20.8,
      "learning_rate": 6.0322580645161295e-06,
      "loss": 0.194,
      "step": 1300
    },
    {
      "epoch": 21.12,
      "learning_rate": 5.817204301075268e-06,
      "loss": 0.19,
      "step": 1320
    },
    {
      "epoch": 21.44,
      "learning_rate": 5.602150537634409e-06,
      "loss": 0.1869,
      "step": 1340
    },
    {
      "epoch": 21.76,
      "learning_rate": 5.387096774193549e-06,
      "loss": 0.1948,
      "step": 1360
    },
    {
      "epoch": 22.08,
      "learning_rate": 5.1720430107526885e-06,
      "loss": 0.1773,
      "step": 1380
    },
    {
      "epoch": 22.4,
      "learning_rate": 4.956989247311829e-06,
      "loss": 0.1742,
      "step": 1400
    },
    {
      "epoch": 22.72,
      "learning_rate": 4.741935483870968e-06,
      "loss": 0.1834,
      "step": 1420
    },
    {
      "epoch": 23.04,
      "learning_rate": 4.526881720430108e-06,
      "loss": 0.1927,
      "step": 1440
    },
    {
      "epoch": 23.36,
      "learning_rate": 4.311827956989247e-06,
      "loss": 0.1817,
      "step": 1460
    },
    {
      "epoch": 23.68,
      "learning_rate": 4.096774193548387e-06,
      "loss": 0.1723,
      "step": 1480
    },
    {
      "epoch": 24.0,
      "learning_rate": 3.881720430107528e-06,
      "loss": 0.1869,
      "step": 1500
    }
  ],
  "max_steps": 1860,
  "num_train_epochs": 30,
  "total_flos": 7.7973833613312e+18,
  "trial_name": null,
  "trial_params": null
}