{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9972041006523766,
  "eval_steps": 500,
  "global_step": 201,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03,
      "learning_rate": 0.0002,
      "loss": 2.2028,
      "step": 2
    },
    {
      "epoch": 0.06,
      "learning_rate": 0.0002,
      "loss": 1.8921,
      "step": 4
    },
    {
      "epoch": 0.09,
      "learning_rate": 0.0002,
      "loss": 1.837,
      "step": 6
    },
    {
      "epoch": 0.12,
      "learning_rate": 0.0002,
      "loss": 1.8034,
      "step": 8
    },
    {
      "epoch": 0.15,
      "learning_rate": 0.0002,
      "loss": 1.531,
      "step": 10
    },
    {
      "epoch": 0.18,
      "learning_rate": 0.0002,
      "loss": 1.7161,
      "step": 12
    },
    {
      "epoch": 0.21,
      "learning_rate": 0.0002,
      "loss": 1.9213,
      "step": 14
    },
    {
      "epoch": 0.24,
      "learning_rate": 0.0002,
      "loss": 1.8214,
      "step": 16
    },
    {
      "epoch": 0.27,
      "learning_rate": 0.0002,
      "loss": 2.8503,
      "step": 18
    },
    {
      "epoch": 0.3,
      "learning_rate": 0.0002,
      "loss": 1.7335,
      "step": 20
    },
    {
      "epoch": 0.33,
      "learning_rate": 0.0002,
      "loss": 1.81,
      "step": 22
    },
    {
      "epoch": 0.36,
      "learning_rate": 0.0002,
      "loss": 1.7934,
      "step": 24
    },
    {
      "epoch": 0.39,
      "learning_rate": 0.0002,
      "loss": 1.7097,
      "step": 26
    },
    {
      "epoch": 0.42,
      "learning_rate": 0.0002,
      "loss": 1.6223,
      "step": 28
    },
    {
      "epoch": 0.45,
      "learning_rate": 0.0002,
      "loss": 1.8643,
      "step": 30
    },
    {
      "epoch": 0.48,
      "learning_rate": 0.0002,
      "loss": 1.7201,
      "step": 32
    },
    {
      "epoch": 0.51,
      "learning_rate": 0.0002,
      "loss": 2.6561,
      "step": 34
    },
    {
      "epoch": 0.54,
      "learning_rate": 0.0002,
      "loss": 2.0588,
      "step": 36
    },
    {
      "epoch": 0.57,
      "learning_rate": 0.0002,
      "loss": 1.6402,
      "step": 38
    },
    {
      "epoch": 0.6,
      "learning_rate": 0.0002,
      "loss": 1.6665,
      "step": 40
    },
    {
      "epoch": 0.63,
      "learning_rate": 0.0002,
      "loss": 1.4429,
      "step": 42
    },
    {
      "epoch": 0.66,
      "learning_rate": 0.0002,
      "loss": 1.5136,
      "step": 44
    },
    {
      "epoch": 0.69,
      "learning_rate": 0.0002,
      "loss": 1.7228,
      "step": 46
    },
    {
      "epoch": 0.72,
      "learning_rate": 0.0002,
      "loss": 1.7019,
      "step": 48
    },
    {
      "epoch": 0.75,
      "learning_rate": 0.0002,
      "loss": 2.2361,
      "step": 50
    },
    {
      "epoch": 0.78,
      "learning_rate": 0.0002,
      "loss": 1.7823,
      "step": 52
    },
    {
      "epoch": 0.81,
      "learning_rate": 0.0002,
      "loss": 1.7056,
      "step": 54
    },
    {
      "epoch": 0.84,
      "learning_rate": 0.0002,
      "loss": 1.7234,
      "step": 56
    },
    {
      "epoch": 0.86,
      "learning_rate": 0.0002,
      "loss": 1.4893,
      "step": 58
    },
    {
      "epoch": 0.89,
      "learning_rate": 0.0002,
      "loss": 1.6342,
      "step": 60
    },
    {
      "epoch": 0.92,
      "learning_rate": 0.0002,
      "loss": 1.7274,
      "step": 62
    },
    {
      "epoch": 0.95,
      "learning_rate": 0.0002,
      "loss": 1.4569,
      "step": 64
    },
    {
      "epoch": 0.98,
      "learning_rate": 0.0002,
      "loss": 1.4923,
      "step": 66
    },
    {
      "epoch": 1.01,
      "learning_rate": 0.0002,
      "loss": 2.0466,
      "step": 68
    },
    {
      "epoch": 1.04,
      "learning_rate": 0.0002,
      "loss": 2.034,
      "step": 70
    },
    {
      "epoch": 1.07,
      "learning_rate": 0.0002,
      "loss": 1.5523,
      "step": 72
    },
    {
      "epoch": 1.1,
      "learning_rate": 0.0002,
      "loss": 1.6654,
      "step": 74
    },
    {
      "epoch": 1.13,
      "learning_rate": 0.0002,
      "loss": 1.5583,
      "step": 76
    },
    {
      "epoch": 1.16,
      "learning_rate": 0.0002,
      "loss": 1.4325,
      "step": 78
    },
    {
      "epoch": 1.19,
      "learning_rate": 0.0002,
      "loss": 1.5099,
      "step": 80
    },
    {
      "epoch": 1.22,
      "learning_rate": 0.0002,
      "loss": 1.5582,
      "step": 82
    },
    {
      "epoch": 1.25,
      "learning_rate": 0.0002,
      "loss": 1.6519,
      "step": 84
    },
    {
      "epoch": 1.28,
      "learning_rate": 0.0002,
      "loss": 1.9358,
      "step": 86
    },
    {
      "epoch": 1.31,
      "learning_rate": 0.0002,
      "loss": 1.6292,
      "step": 88
    },
    {
      "epoch": 1.34,
      "learning_rate": 0.0002,
      "loss": 1.5512,
      "step": 90
    },
    {
      "epoch": 1.37,
      "learning_rate": 0.0002,
      "loss": 1.3745,
      "step": 92
    },
    {
      "epoch": 1.4,
      "learning_rate": 0.0002,
      "loss": 1.3224,
      "step": 94
    },
    {
      "epoch": 1.43,
      "learning_rate": 0.0002,
      "loss": 1.4843,
      "step": 96
    },
    {
      "epoch": 1.46,
      "learning_rate": 0.0002,
      "loss": 1.6006,
      "step": 98
    },
    {
      "epoch": 1.49,
      "learning_rate": 0.0002,
      "loss": 1.6436,
      "step": 100
    },
    {
      "epoch": 1.52,
      "learning_rate": 0.0002,
      "loss": 1.8413,
      "step": 102
    },
    {
      "epoch": 1.55,
      "learning_rate": 0.0002,
      "loss": 1.6665,
      "step": 104
    },
    {
      "epoch": 1.58,
      "learning_rate": 0.0002,
      "loss": 1.6881,
      "step": 106
    },
    {
      "epoch": 1.61,
      "learning_rate": 0.0002,
      "loss": 1.6871,
      "step": 108
    },
    {
      "epoch": 1.64,
      "learning_rate": 0.0002,
      "loss": 1.357,
      "step": 110
    },
    {
      "epoch": 1.67,
      "learning_rate": 0.0002,
      "loss": 1.349,
      "step": 112
    },
    {
      "epoch": 1.7,
      "learning_rate": 0.0002,
      "loss": 1.5072,
      "step": 114
    },
    {
      "epoch": 1.73,
      "learning_rate": 0.0002,
      "loss": 1.8975,
      "step": 116
    },
    {
      "epoch": 1.76,
      "learning_rate": 0.0002,
      "loss": 2.2641,
      "step": 118
    },
    {
      "epoch": 1.79,
      "learning_rate": 0.0002,
      "loss": 1.7053,
      "step": 120
    },
    {
      "epoch": 1.82,
      "learning_rate": 0.0002,
      "loss": 1.6115,
      "step": 122
    },
    {
      "epoch": 1.85,
      "learning_rate": 0.0002,
      "loss": 1.5428,
      "step": 124
    },
    {
      "epoch": 1.88,
      "learning_rate": 0.0002,
      "loss": 1.2018,
      "step": 126
    },
    {
      "epoch": 1.91,
      "learning_rate": 0.0002,
      "loss": 1.3601,
      "step": 128
    },
    {
      "epoch": 1.94,
      "learning_rate": 0.0002,
      "loss": 1.5072,
      "step": 130
    },
    {
      "epoch": 1.97,
      "learning_rate": 0.0002,
      "loss": 1.4283,
      "step": 132
    },
    {
      "epoch": 2.0,
      "learning_rate": 0.0002,
      "loss": 1.4977,
      "step": 134
    },
    {
      "epoch": 2.03,
      "learning_rate": 0.0002,
      "loss": 2.1986,
      "step": 136
    },
    {
      "epoch": 2.06,
      "learning_rate": 0.0002,
      "loss": 2.0282,
      "step": 138
    },
    {
      "epoch": 2.09,
      "learning_rate": 0.0002,
      "loss": 1.6776,
      "step": 140
    },
    {
      "epoch": 2.12,
      "learning_rate": 0.0002,
      "loss": 1.5972,
      "step": 142
    },
    {
      "epoch": 2.15,
      "learning_rate": 0.0002,
      "loss": 1.3801,
      "step": 144
    },
    {
      "epoch": 2.18,
      "learning_rate": 0.0002,
      "loss": 1.2358,
      "step": 146
    },
    {
      "epoch": 2.21,
      "learning_rate": 0.0002,
      "loss": 1.3283,
      "step": 148
    },
    {
      "epoch": 2.24,
      "learning_rate": 0.0002,
      "loss": 1.1866,
      "step": 150
    },
    {
      "epoch": 2.27,
      "learning_rate": 0.0002,
      "loss": 1.8487,
      "step": 152
    },
    {
      "epoch": 2.3,
      "learning_rate": 0.0002,
      "loss": 1.8731,
      "step": 154
    },
    {
      "epoch": 2.33,
      "learning_rate": 0.0002,
      "loss": 1.5395,
      "step": 156
    },
    {
      "epoch": 2.36,
      "learning_rate": 0.0002,
      "loss": 1.4486,
      "step": 158
    },
    {
      "epoch": 2.39,
      "learning_rate": 0.0002,
      "loss": 1.2876,
      "step": 160
    },
    {
      "epoch": 2.42,
      "learning_rate": 0.0002,
      "loss": 1.2185,
      "step": 162
    },
    {
      "epoch": 2.45,
      "learning_rate": 0.0002,
      "loss": 1.3274,
      "step": 164
    },
    {
      "epoch": 2.48,
      "learning_rate": 0.0002,
      "loss": 1.1431,
      "step": 166
    },
    {
      "epoch": 2.51,
      "learning_rate": 0.0002,
      "loss": 2.1022,
      "step": 168
    },
    {
      "epoch": 2.53,
      "learning_rate": 0.0002,
      "loss": 1.3974,
      "step": 170
    },
    {
      "epoch": 2.56,
      "learning_rate": 0.0002,
      "loss": 1.5858,
      "step": 172
    },
    {
      "epoch": 2.59,
      "learning_rate": 0.0002,
      "loss": 1.555,
      "step": 174
    },
    {
      "epoch": 2.62,
      "learning_rate": 0.0002,
      "loss": 1.3075,
      "step": 176
    },
    {
      "epoch": 2.65,
      "learning_rate": 0.0002,
      "loss": 1.1968,
      "step": 178
    },
    {
      "epoch": 2.68,
      "learning_rate": 0.0002,
      "loss": 1.2942,
      "step": 180
    },
    {
      "epoch": 2.71,
      "learning_rate": 0.0002,
      "loss": 1.0941,
      "step": 182
    },
    {
      "epoch": 2.74,
      "learning_rate": 0.0002,
      "loss": 2.4209,
      "step": 184
    },
    {
      "epoch": 2.77,
      "learning_rate": 0.0002,
      "loss": 1.5967,
      "step": 186
    },
    {
      "epoch": 2.8,
      "learning_rate": 0.0002,
      "loss": 1.623,
      "step": 188
    },
    {
      "epoch": 2.83,
      "learning_rate": 0.0002,
      "loss": 1.5194,
      "step": 190
    },
    {
      "epoch": 2.86,
      "learning_rate": 0.0002,
      "loss": 1.4854,
      "step": 192
    },
    {
      "epoch": 2.89,
      "learning_rate": 0.0002,
      "loss": 1.3782,
      "step": 194
    },
    {
      "epoch": 2.92,
      "learning_rate": 0.0002,
      "loss": 1.3657,
      "step": 196
    },
    {
      "epoch": 2.95,
      "learning_rate": 0.0002,
      "loss": 1.1328,
      "step": 198
    },
    {
      "epoch": 2.98,
      "learning_rate": 0.0002,
      "loss": 1.8249,
      "step": 200
    }
  ],
  "logging_steps": 2,
  "max_steps": 201,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "total_flos": 8935930385571840.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}