pythia-exp / trainer_state.json
{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 2.983219390926041,
"eval_steps": 500,
"global_step": 300,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.02,
"learning_rate": 4.444444444444444e-06,
"loss": 2.6769,
"step": 2
},
{
"epoch": 0.04,
"learning_rate": 8.888888888888888e-06,
"loss": 2.6837,
"step": 4
},
{
"epoch": 0.06,
"learning_rate": 1.3333333333333333e-05,
"loss": 2.6307,
"step": 6
},
{
"epoch": 0.08,
"learning_rate": 1.7777777777777777e-05,
"loss": 2.5928,
"step": 8
},
{
"epoch": 0.1,
"learning_rate": 1.9999417253661235e-05,
"loss": 2.6261,
"step": 10
},
{
"epoch": 0.12,
"learning_rate": 1.9994755690455154e-05,
"loss": 2.6851,
"step": 12
},
{
"epoch": 0.14,
"learning_rate": 1.998543473718677e-05,
"loss": 2.6421,
"step": 14
},
{
"epoch": 0.16,
"learning_rate": 1.9971458739130598e-05,
"loss": 2.7906,
"step": 16
},
{
"epoch": 0.18,
"learning_rate": 1.995283421166614e-05,
"loss": 2.4095,
"step": 18
},
{
"epoch": 0.2,
"learning_rate": 1.9929569837240567e-05,
"loss": 2.2509,
"step": 20
},
{
"epoch": 0.22,
"learning_rate": 1.990167646132107e-05,
"loss": 2.3942,
"step": 22
},
{
"epoch": 0.24,
"learning_rate": 1.9869167087338908e-05,
"loss": 2.4104,
"step": 24
},
{
"epoch": 0.26,
"learning_rate": 1.983205687062742e-05,
"loss": 2.5293,
"step": 26
},
{
"epoch": 0.28,
"learning_rate": 1.9790363111356838e-05,
"loss": 2.6231,
"step": 28
},
{
"epoch": 0.3,
"learning_rate": 1.9744105246469264e-05,
"loss": 2.5915,
"step": 30
},
{
"epoch": 0.32,
"learning_rate": 1.9693304840617456e-05,
"loss": 2.4235,
"step": 32
},
{
"epoch": 0.34,
"learning_rate": 1.963798557611178e-05,
"loss": 2.5622,
"step": 34
},
{
"epoch": 0.36,
"learning_rate": 1.957817324187987e-05,
"loss": 2.3644,
"step": 36
},
{
"epoch": 0.38,
"learning_rate": 1.9513895721444286e-05,
"loss": 2.3714,
"step": 38
},
{
"epoch": 0.4,
"learning_rate": 1.9445182979923657e-05,
"loss": 2.5419,
"step": 40
},
{
"epoch": 0.42,
"learning_rate": 1.937206705006344e-05,
"loss": 2.4557,
"step": 42
},
{
"epoch": 0.44,
"learning_rate": 1.9294582017302797e-05,
"loss": 2.3226,
"step": 44
},
{
"epoch": 0.46,
"learning_rate": 1.921276400388451e-05,
"loss": 2.3402,
"step": 46
},
{
"epoch": 0.48,
"learning_rate": 1.9126651152015404e-05,
"loss": 2.3476,
"step": 48
},
{
"epoch": 0.5,
"learning_rate": 1.9036283606085057e-05,
"loss": 2.3808,
"step": 50
},
{
"epoch": 0.52,
"learning_rate": 1.8941703493951163e-05,
"loss": 2.4262,
"step": 52
},
{
"epoch": 0.54,
"learning_rate": 1.8842954907300236e-05,
"loss": 2.3338,
"step": 54
},
{
"epoch": 0.56,
"learning_rate": 1.874008388109276e-05,
"loss": 2.4052,
"step": 56
},
{
"epoch": 0.58,
"learning_rate": 1.863313837210247e-05,
"loss": 2.2928,
"step": 58
},
{
"epoch": 0.6,
"learning_rate": 1.8522168236559693e-05,
"loss": 2.4616,
"step": 60
},
{
"epoch": 0.62,
"learning_rate": 1.840722520690921e-05,
"loss": 2.2975,
"step": 62
},
{
"epoch": 0.64,
"learning_rate": 1.8288362867693414e-05,
"loss": 2.379,
"step": 64
},
{
"epoch": 0.66,
"learning_rate": 1.816563663057211e-05,
"loss": 2.3342,
"step": 66
},
{
"epoch": 0.68,
"learning_rate": 1.8039103708490503e-05,
"loss": 2.2533,
"step": 68
},
{
"epoch": 0.7,
"learning_rate": 1.790882308900746e-05,
"loss": 2.2486,
"step": 70
},
{
"epoch": 0.72,
"learning_rate": 1.7774855506796497e-05,
"loss": 2.2496,
"step": 72
},
{
"epoch": 0.74,
"learning_rate": 1.7637263415332272e-05,
"loss": 2.3153,
"step": 74
},
{
"epoch": 0.76,
"learning_rate": 1.749611095777581e-05,
"loss": 2.2758,
"step": 76
},
{
"epoch": 0.78,
"learning_rate": 1.7351463937072008e-05,
"loss": 2.5391,
"step": 78
},
{
"epoch": 0.8,
"learning_rate": 1.7203389785273402e-05,
"loss": 2.3089,
"step": 80
},
{
"epoch": 0.82,
"learning_rate": 1.705195753210446e-05,
"loss": 2.2669,
"step": 82
},
{
"epoch": 0.84,
"learning_rate": 1.6897237772781046e-05,
"loss": 2.3467,
"step": 84
},
{
"epoch": 0.86,
"learning_rate": 1.673930263510011e-05,
"loss": 2.3333,
"step": 86
},
{
"epoch": 0.88,
"learning_rate": 1.6578225745814907e-05,
"loss": 2.4031,
"step": 88
},
{
"epoch": 0.89,
"learning_rate": 1.6414082196311402e-05,
"loss": 2.3823,
"step": 90
},
{
"epoch": 0.91,
"learning_rate": 1.6246948507601915e-05,
"loss": 2.2399,
"step": 92
},
{
"epoch": 0.93,
"learning_rate": 1.607690259465229e-05,
"loss": 2.1444,
"step": 94
},
{
"epoch": 0.95,
"learning_rate": 1.5904023730059227e-05,
"loss": 2.2844,
"step": 96
},
{
"epoch": 0.97,
"learning_rate": 1.57283925070947e-05,
"loss": 2.2505,
"step": 98
},
{
"epoch": 0.99,
"learning_rate": 1.55500908021347e-05,
"loss": 2.0644,
"step": 100
},
{
"epoch": 1.01,
"learning_rate": 1.536920173648984e-05,
"loss": 2.2254,
"step": 102
},
{
"epoch": 1.03,
"learning_rate": 1.5185809637655548e-05,
"loss": 2.3203,
"step": 104
},
{
"epoch": 1.05,
"learning_rate": 1.5000000000000002e-05,
"loss": 2.2265,
"step": 106
},
{
"epoch": 1.07,
"learning_rate": 1.4811859444908053e-05,
"loss": 2.3855,
"step": 108
},
{
"epoch": 1.09,
"learning_rate": 1.4621475680399771e-05,
"loss": 2.358,
"step": 110
},
{
"epoch": 1.11,
"learning_rate": 1.4428937460242417e-05,
"loss": 2.3075,
"step": 112
},
{
"epoch": 1.13,
"learning_rate": 1.4234334542574906e-05,
"loss": 2.267,
"step": 114
},
{
"epoch": 1.15,
"learning_rate": 1.4037757648064019e-05,
"loss": 2.2067,
"step": 116
},
{
"epoch": 1.17,
"learning_rate": 1.3839298417611964e-05,
"loss": 2.1989,
"step": 118
},
{
"epoch": 1.19,
"learning_rate": 1.3639049369634878e-05,
"loss": 2.1818,
"step": 120
},
{
"epoch": 1.21,
"learning_rate": 1.3437103856932266e-05,
"loss": 2.1537,
"step": 122
},
{
"epoch": 1.23,
"learning_rate": 1.3233556023167487e-05,
"loss": 2.158,
"step": 124
},
{
"epoch": 1.25,
"learning_rate": 1.3028500758979507e-05,
"loss": 2.3302,
"step": 126
},
{
"epoch": 1.27,
"learning_rate": 1.2822033657746478e-05,
"loss": 2.3653,
"step": 128
},
{
"epoch": 1.29,
"learning_rate": 1.2614250971021658e-05,
"loss": 2.1906,
"step": 130
},
{
"epoch": 1.31,
"learning_rate": 1.2405249563662539e-05,
"loss": 2.3488,
"step": 132
},
{
"epoch": 1.33,
"learning_rate": 1.2195126868674052e-05,
"loss": 2.3102,
"step": 134
},
{
"epoch": 1.35,
"learning_rate": 1.1983980841786899e-05,
"loss": 2.4081,
"step": 136
},
{
"epoch": 1.37,
"learning_rate": 1.177190991579223e-05,
"loss": 2.2191,
"step": 138
},
{
"epoch": 1.39,
"learning_rate": 1.1559012954653865e-05,
"loss": 2.2446,
"step": 140
},
{
"epoch": 1.41,
"learning_rate": 1.1345389207419588e-05,
"loss": 2.3967,
"step": 142
},
{
"epoch": 1.43,
"learning_rate": 1.1131138261952845e-05,
"loss": 2.0867,
"step": 144
},
{
"epoch": 1.45,
"learning_rate": 1.0916359998506549e-05,
"loss": 2.0844,
"step": 146
},
{
"epoch": 1.47,
"learning_rate": 1.070115454316054e-05,
"loss": 2.1256,
"step": 148
},
{
"epoch": 1.49,
"learning_rate": 1.0485622221144485e-05,
"loss": 2.2274,
"step": 150
},
{
"epoch": 1.51,
"learning_rate": 1.0269863510067872e-05,
"loss": 2.1116,
"step": 152
},
{
"epoch": 1.53,
"learning_rate": 1.0053978993079046e-05,
"loss": 2.4296,
"step": 154
},
{
"epoch": 1.55,
"learning_rate": 9.838069311974986e-06,
"loss": 2.3066,
"step": 156
},
{
"epoch": 1.57,
"learning_rate": 9.622235120283769e-06,
"loss": 2.1659,
"step": 158
},
{
"epoch": 1.59,
"learning_rate": 9.406577036341548e-06,
"loss": 2.3285,
"step": 160
},
{
"epoch": 1.61,
"learning_rate": 9.19119559638596e-06,
"loss": 2.2788,
"step": 162
},
{
"epoch": 1.63,
"learning_rate": 8.976191207687775e-06,
"loss": 2.2487,
"step": 164
},
{
"epoch": 1.65,
"learning_rate": 8.7616641017427e-06,
"loss": 2.1421,
"step": 166
},
{
"epoch": 1.67,
"learning_rate": 8.5477142875451e-06,
"loss": 2.3432,
"step": 168
},
{
"epoch": 1.69,
"learning_rate": 8.334441504965456e-06,
"loss": 1.9564,
"step": 170
},
{
"epoch": 1.71,
"learning_rate": 8.1219451782533e-06,
"loss": 2.1576,
"step": 172
},
{
"epoch": 1.73,
"learning_rate": 7.91032436968725e-06,
"loss": 2.2133,
"step": 174
},
{
"epoch": 1.75,
"learning_rate": 7.699677733393827e-06,
"loss": 2.2797,
"step": 176
},
{
"epoch": 1.77,
"learning_rate": 7.490103469356513e-06,
"loss": 2.3495,
"step": 178
},
{
"epoch": 1.79,
"learning_rate": 7.2816992776365714e-06,
"loss": 2.2715,
"step": 180
},
{
"epoch": 1.81,
"learning_rate": 7.0745623128268605e-06,
"loss": 2.3838,
"step": 182
},
{
"epoch": 1.83,
"learning_rate": 6.868789138759977e-06,
"loss": 2.3654,
"step": 184
},
{
"epoch": 1.85,
"learning_rate": 6.664475683491797e-06,
"loss": 2.2189,
"step": 186
},
{
"epoch": 1.87,
"learning_rate": 6.461717194581394e-06,
"loss": 2.3374,
"step": 188
},
{
"epoch": 1.89,
"learning_rate": 6.260608194688207e-06,
"loss": 2.3332,
"step": 190
},
{
"epoch": 1.91,
"learning_rate": 6.061242437507131e-06,
"loss": 2.2279,
"step": 192
},
{
"epoch": 1.93,
"learning_rate": 5.863712864062089e-06,
"loss": 2.102,
"step": 194
},
{
"epoch": 1.95,
"learning_rate": 5.6681115593784705e-06,
"loss": 2.1584,
"step": 196
},
{
"epoch": 1.97,
"learning_rate": 5.4745297095546125e-06,
"loss": 2.1382,
"step": 198
},
{
"epoch": 1.99,
"learning_rate": 5.2830575592523415e-06,
"loss": 2.0084,
"step": 200
},
{
"epoch": 2.01,
"learning_rate": 5.093784369626397e-06,
"loss": 1.9967,
"step": 202
},
{
"epoch": 2.03,
"learning_rate": 4.9067983767123736e-06,
"loss": 2.3797,
"step": 204
},
{
"epoch": 2.05,
"learning_rate": 4.722186750292511e-06,
"loss": 2.3558,
"step": 206
},
{
"epoch": 2.07,
"learning_rate": 4.54003555325862e-06,
"loss": 2.1337,
"step": 208
},
{
"epoch": 2.09,
"learning_rate": 4.360429701490935e-06,
"loss": 2.2661,
"step": 210
},
{
"epoch": 2.11,
"learning_rate": 4.183452924271776e-06,
"loss": 2.1616,
"step": 212
},
{
"epoch": 2.13,
"learning_rate": 4.009187725252309e-06,
"loss": 2.355,
"step": 214
},
{
"epoch": 2.15,
"learning_rate": 3.837715343990727e-06,
"loss": 2.224,
"step": 216
},
{
"epoch": 2.17,
"learning_rate": 3.669115718079702e-06,
"loss": 2.2079,
"step": 218
},
{
"epoch": 2.19,
"learning_rate": 3.5034674458807893e-06,
"loss": 2.086,
"step": 220
},
{
"epoch": 2.21,
"learning_rate": 3.3408477498831917e-06,
"loss": 2.126,
"step": 222
},
{
"epoch": 2.23,
"learning_rate": 3.1813324407038826e-06,
"loss": 2.1389,
"step": 224
},
{
"epoch": 2.25,
"learning_rate": 3.024995881745972e-06,
"loss": 2.1352,
"step": 226
},
{
"epoch": 2.27,
"learning_rate": 2.8719109545317102e-06,
"loss": 2.2183,
"step": 228
},
{
"epoch": 2.29,
"learning_rate": 2.722149024726307e-06,
"loss": 2.2478,
"step": 230
},
{
"epoch": 2.31,
"learning_rate": 2.5757799088684654e-06,
"loss": 2.2587,
"step": 232
},
{
"epoch": 2.33,
"learning_rate": 2.432871841823047e-06,
"loss": 2.3191,
"step": 234
},
{
"epoch": 2.35,
"learning_rate": 2.293491444971109e-06,
"loss": 2.2398,
"step": 236
},
{
"epoch": 2.37,
"learning_rate": 2.157703695152109e-06,
"loss": 2.3926,
"step": 238
},
{
"epoch": 2.39,
"learning_rate": 2.025571894372794e-06,
"loss": 2.2304,
"step": 240
},
{
"epoch": 2.41,
"learning_rate": 1.897157640296825e-06,
"loss": 2.2469,
"step": 242
},
{
"epoch": 2.43,
"learning_rate": 1.7725207975289883e-06,
"loss": 2.1065,
"step": 244
},
{
"epoch": 2.45,
"learning_rate": 1.6517194697072903e-06,
"loss": 2.1856,
"step": 246
},
{
"epoch": 2.47,
"learning_rate": 1.534809972415998e-06,
"loss": 2.1236,
"step": 248
},
{
"epoch": 2.49,
"learning_rate": 1.4218468069322576e-06,
"loss": 2.1961,
"step": 250
},
{
"epoch": 2.51,
"learning_rate": 1.3128826348184886e-06,
"loss": 2.0173,
"step": 252
},
{
"epoch": 2.53,
"learning_rate": 1.207968253372438e-06,
"loss": 2.2847,
"step": 254
},
{
"epoch": 2.55,
"learning_rate": 1.1071525719463094e-06,
"loss": 2.3931,
"step": 256
},
{
"epoch": 2.57,
"learning_rate": 1.010482589146048e-06,
"loss": 2.2776,
"step": 258
},
{
"epoch": 2.59,
"learning_rate": 9.180033709213454e-07,
"loss": 2.3249,
"step": 260
},
{
"epoch": 2.61,
"learning_rate": 8.297580295566576e-07,
"loss": 2.1791,
"step": 262
},
{
"epoch": 2.63,
"learning_rate": 7.457877035729588e-07,
"loss": 2.2177,
"step": 264
},
{
"epoch": 2.65,
"learning_rate": 6.661315385496426e-07,
"loss": 2.2903,
"step": 266
},
{
"epoch": 2.67,
"learning_rate": 5.908266688755049e-07,
"loss": 2.2478,
"step": 268
},
{
"epoch": 2.68,
"learning_rate": 5.199082004372958e-07,
"loss": 2.2076,
"step": 270
},
{
"epoch": 2.7,
"learning_rate": 4.534091942539476e-07,
"loss": 2.0342,
"step": 272
},
{
"epoch": 2.72,
"learning_rate": 3.913606510640644e-07,
"loss": 2.2668,
"step": 274
},
{
"epoch": 2.74,
"learning_rate": 3.3379149687388866e-07,
"loss": 2.1174,
"step": 276
},
{
"epoch": 2.76,
"learning_rate": 2.807285694724804e-07,
"loss": 2.2882,
"step": 278
},
{
"epoch": 2.78,
"learning_rate": 2.3219660592038285e-07,
"loss": 2.1654,
"step": 280
},
{
"epoch": 2.8,
"learning_rate": 1.8821823101760949e-07,
"loss": 2.2209,
"step": 282
},
{
"epoch": 2.82,
"learning_rate": 1.4881394675633543e-07,
"loss": 2.1442,
"step": 284
},
{
"epoch": 2.84,
"learning_rate": 1.1400212276321377e-07,
"loss": 2.3807,
"step": 286
},
{
"epoch": 2.86,
"learning_rate": 8.379898773574924e-08,
"loss": 2.4242,
"step": 288
},
{
"epoch": 2.88,
"learning_rate": 5.821862187675775e-08,
"loss": 2.2171,
"step": 290
},
{
"epoch": 2.9,
"learning_rate": 3.727295033040035e-08,
"loss": 2.277,
"step": 292
},
{
"epoch": 2.92,
"learning_rate": 2.0971737622883515e-08,
"loss": 2.0345,
"step": 294
},
{
"epoch": 2.94,
"learning_rate": 9.322583110392692e-09,
"loss": 2.0743,
"step": 296
},
{
"epoch": 2.96,
"learning_rate": 2.330917436402791e-09,
"loss": 2.0902,
"step": 298
},
{
"epoch": 2.98,
"learning_rate": 0.0,
"loss": 2.07,
"step": 300
}
],
"logging_steps": 2,
"max_steps": 300,
"num_train_epochs": 3,
"save_steps": 500,
"total_flos": 2.591436490764288e+16,
"trial_name": null,
"trial_params": null
}
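
The state above follows the Hugging Face Transformers Trainer checkpoint format: "log_history" is a list of per-logging-step records (here one every 2 steps, per "logging_steps"), each carrying "step", "epoch", "learning_rate", and "loss". Below is a minimal sketch for inspecting the run offline, assuming the file has been saved locally as "trainer_state.json" and that matplotlib is installed; it simply plots the logged loss and the learning-rate schedule against the global step.

```python
# Minimal sketch (assumption: file saved locally as "trainer_state.json",
# matplotlib available). Reads the Trainer state and plots loss + LR schedule.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry here has "step", "epoch", "learning_rate", "loss".
history = state["log_history"]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(6, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("global step")
fig.tight_layout()
plt.show()
```

For this particular run the plot would show the loss drifting from roughly 2.68 down to about 2.07 over 300 steps, with the learning rate ramping up over the first few steps and then decaying to 0 by "max_steps".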