{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.76056338028169,
  "eval_steps": 42,
  "global_step": 500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0035211267605633804,
      "eval_loss": 0.71390700340271,
      "eval_runtime": 33.4366,
      "eval_samples_per_second": 57.213,
      "eval_steps_per_second": 1.794,
      "step": 1
    },
    {
      "epoch": 0.01056338028169014,
      "grad_norm": 17.077974319458008,
      "learning_rate": 1.5e-05,
      "loss": 2.8897,
      "step": 3
    },
    {
      "epoch": 0.02112676056338028,
      "grad_norm": 11.338067054748535,
      "learning_rate": 3e-05,
      "loss": 2.4422,
      "step": 6
    },
    {
      "epoch": 0.03169014084507042,
      "grad_norm": 4.915891170501709,
      "learning_rate": 4.5e-05,
      "loss": 1.4287,
      "step": 9
    },
    {
      "epoch": 0.04225352112676056,
      "grad_norm": 4.167311668395996,
      "learning_rate": 4.9997944716957985e-05,
      "loss": 1.0401,
      "step": 12
    },
    {
      "epoch": 0.0528169014084507,
      "grad_norm": 3.024125576019287,
      "learning_rate": 4.99871554050172e-05,
      "loss": 0.8671,
      "step": 15
    },
    {
      "epoch": 0.06338028169014084,
      "grad_norm": 3.4865105152130127,
      "learning_rate": 4.996712222958461e-05,
      "loss": 0.8196,
      "step": 18
    },
    {
      "epoch": 0.07394366197183098,
      "grad_norm": 2.5807318687438965,
      "learning_rate": 4.993785260182552e-05,
      "loss": 0.78,
      "step": 21
    },
    {
      "epoch": 0.08450704225352113,
      "grad_norm": 2.547455310821533,
      "learning_rate": 4.989935734988098e-05,
      "loss": 0.7355,
      "step": 24
    },
    {
      "epoch": 0.09507042253521127,
      "grad_norm": 2.533689022064209,
      "learning_rate": 4.9851650714862006e-05,
      "loss": 0.7632,
      "step": 27
    },
    {
      "epoch": 0.1056338028169014,
      "grad_norm": 2.3361666202545166,
      "learning_rate": 4.979475034558115e-05,
      "loss": 0.6511,
      "step": 30
    },
    {
      "epoch": 0.11619718309859155,
      "grad_norm": 2.409919023513794,
      "learning_rate": 4.9728677292023405e-05,
      "loss": 0.7696,
      "step": 33
    },
    {
      "epoch": 0.1267605633802817,
      "grad_norm": 2.3986947536468506,
      "learning_rate": 4.965345599755887e-05,
      "loss": 0.7176,
      "step": 36
    },
    {
      "epoch": 0.13732394366197184,
      "grad_norm": 2.424473762512207,
      "learning_rate": 4.95691142899001e-05,
      "loss": 0.657,
      "step": 39
    },
    {
      "epoch": 0.14788732394366197,
      "grad_norm": 2.3283729553222656,
      "learning_rate": 4.9475683370807326e-05,
      "loss": 0.6722,
      "step": 42
    },
    {
      "epoch": 0.14788732394366197,
      "eval_loss": 0.16498678922653198,
      "eval_runtime": 33.548,
      "eval_samples_per_second": 57.023,
      "eval_steps_per_second": 1.788,
      "step": 42
    },
    {
      "epoch": 0.15845070422535212,
      "grad_norm": 2.373122215270996,
      "learning_rate": 4.937319780454559e-05,
      "loss": 0.6457,
      "step": 45
    },
    {
      "epoch": 0.16901408450704225,
      "grad_norm": 2.525294303894043,
      "learning_rate": 4.926169550509787e-05,
      "loss": 0.7483,
      "step": 48
    },
    {
      "epoch": 0.1795774647887324,
      "grad_norm": 1.940788745880127,
      "learning_rate": 4.914121772213898e-05,
      "loss": 0.6246,
      "step": 51
    },
    {
      "epoch": 0.19014084507042253,
      "grad_norm": 4.233572483062744,
      "learning_rate": 4.9011809025775486e-05,
      "loss": 0.7226,
      "step": 54
    },
    {
      "epoch": 0.2007042253521127,
      "grad_norm": 1.9542790651321411,
      "learning_rate": 4.887351729005726e-05,
      "loss": 0.6427,
      "step": 57
    },
    {
      "epoch": 0.2112676056338028,
      "grad_norm": 1.8455359935760498,
      "learning_rate": 4.8726393675266716e-05,
      "loss": 0.5821,
      "step": 60
    },
    {
      "epoch": 0.22183098591549297,
      "grad_norm": 1.9550554752349854,
      "learning_rate": 4.8570492608992325e-05,
      "loss": 0.6654,
      "step": 63
    },
    {
      "epoch": 0.2323943661971831,
      "grad_norm": 2.3653812408447266,
      "learning_rate": 4.8405871765993433e-05,
      "loss": 0.6337,
      "step": 66
    },
    {
      "epoch": 0.24295774647887325,
      "grad_norm": 2.590679883956909,
      "learning_rate": 4.82325920468638e-05,
      "loss": 0.6181,
      "step": 69
    },
    {
      "epoch": 0.2535211267605634,
      "grad_norm": 1.894047737121582,
      "learning_rate": 4.805071755550177e-05,
      "loss": 0.5873,
      "step": 72
    },
    {
      "epoch": 0.2640845070422535,
      "grad_norm": 1.9947123527526855,
      "learning_rate": 4.7860315575395316e-05,
      "loss": 0.5982,
      "step": 75
    },
    {
      "epoch": 0.2746478873239437,
      "grad_norm": 1.8202632665634155,
      "learning_rate": 4.766145654473095e-05,
      "loss": 0.6069,
      "step": 78
    },
    {
      "epoch": 0.2852112676056338,
      "grad_norm": 1.9413880109786987,
      "learning_rate": 4.745421403033548e-05,
      "loss": 0.5623,
      "step": 81
    },
    {
      "epoch": 0.29577464788732394,
      "grad_norm": 2.8411378860473633,
      "learning_rate": 4.72386647004603e-05,
      "loss": 0.6329,
      "step": 84
    },
    {
      "epoch": 0.29577464788732394,
      "eval_loss": 0.1504964828491211,
      "eval_runtime": 33.5299,
      "eval_samples_per_second": 57.054,
      "eval_steps_per_second": 1.789,
      "step": 84
    },
    {
      "epoch": 0.30633802816901406,
      "grad_norm": 1.8375580310821533,
      "learning_rate": 4.701488829641845e-05,
      "loss": 0.6443,
      "step": 87
    },
    {
      "epoch": 0.31690140845070425,
      "grad_norm": 1.757625699043274,
      "learning_rate": 4.678296760308474e-05,
      "loss": 0.59,
      "step": 90
    },
    {
      "epoch": 0.3274647887323944,
      "grad_norm": 2.1442489624023438,
      "learning_rate": 4.6542988418269876e-05,
      "loss": 0.5946,
      "step": 93
    },
    {
      "epoch": 0.3380281690140845,
      "grad_norm": 1.9878476858139038,
      "learning_rate": 4.629503952098011e-05,
      "loss": 0.6107,
      "step": 96
    },
    {
      "epoch": 0.3485915492957746,
      "grad_norm": 1.8092180490493774,
      "learning_rate": 4.6039212638573833e-05,
      "loss": 0.6134,
      "step": 99
    },
    {
      "epoch": 0.3591549295774648,
      "grad_norm": 2.0128672122955322,
      "learning_rate": 4.5775602412827604e-05,
      "loss": 0.5717,
      "step": 102
    },
    {
      "epoch": 0.36971830985915494,
      "grad_norm": 2.2072324752807617,
      "learning_rate": 4.55043063649239e-05,
      "loss": 0.5671,
      "step": 105
    },
    {
      "epoch": 0.38028169014084506,
      "grad_norm": 1.711063027381897,
      "learning_rate": 4.522542485937369e-05,
      "loss": 0.6111,
      "step": 108
    },
    {
      "epoch": 0.3908450704225352,
      "grad_norm": 2.159810781478882,
      "learning_rate": 4.493906106688712e-05,
      "loss": 0.5901,
      "step": 111
    },
    {
      "epoch": 0.4014084507042254,
      "grad_norm": 1.8703676462173462,
      "learning_rate": 4.4645320926206064e-05,
      "loss": 0.5546,
      "step": 114
    },
    {
      "epoch": 0.4119718309859155,
      "grad_norm": 2.7771778106689453,
      "learning_rate": 4.434431310491267e-05,
      "loss": 0.5974,
      "step": 117
    },
    {
      "epoch": 0.4225352112676056,
      "grad_norm": 2.062894105911255,
      "learning_rate": 4.4036148959228365e-05,
      "loss": 0.592,
      "step": 120
    },
    {
      "epoch": 0.43309859154929575,
      "grad_norm": 4.134242534637451,
      "learning_rate": 4.372094249281821e-05,
      "loss": 0.5704,
      "step": 123
    },
    {
      "epoch": 0.44366197183098594,
      "grad_norm": 2.1971914768218994,
      "learning_rate": 4.3398810314615876e-05,
      "loss": 0.5936,
      "step": 126
    },
    {
      "epoch": 0.44366197183098594,
      "eval_loss": 0.1457224041223526,
      "eval_runtime": 33.5854,
      "eval_samples_per_second": 56.959,
      "eval_steps_per_second": 1.786,
      "step": 126
    },
    {
      "epoch": 0.45422535211267606,
      "grad_norm": 2.5988810062408447,
      "learning_rate": 4.306987159568479e-05,
      "loss": 0.618,
      "step": 129
    },
    {
      "epoch": 0.4647887323943662,
      "grad_norm": 1.7022452354431152,
      "learning_rate": 4.273424802513145e-05,
      "loss": 0.5488,
      "step": 132
    },
    {
      "epoch": 0.4753521126760563,
      "grad_norm": 7.090500831604004,
      "learning_rate": 4.239206376508717e-05,
      "loss": 0.6025,
      "step": 135
    },
    {
      "epoch": 0.4859154929577465,
      "grad_norm": 13.125855445861816,
      "learning_rate": 4.204344540477499e-05,
      "loss": 0.5575,
      "step": 138
    },
    {
      "epoch": 0.4964788732394366,
      "grad_norm": 2.1748900413513184,
      "learning_rate": 4.16885219136787e-05,
      "loss": 0.5685,
      "step": 141
    },
    {
      "epoch": 0.5070422535211268,
      "grad_norm": 1.8787870407104492,
      "learning_rate": 4.132742459383122e-05,
      "loss": 0.5615,
      "step": 144
    },
    {
      "epoch": 0.5176056338028169,
      "grad_norm": 1.8326219320297241,
      "learning_rate": 4.096028703124014e-05,
      "loss": 0.5695,
      "step": 147
    },
    {
      "epoch": 0.528169014084507,
      "grad_norm": 1.6756395101547241,
      "learning_rate": 4.058724504646834e-05,
      "loss": 0.5926,
      "step": 150
    },
    {
      "epoch": 0.5387323943661971,
      "grad_norm": 1.572495937347412,
      "learning_rate": 4.0208436644387834e-05,
      "loss": 0.6206,
      "step": 153
    },
    {
      "epoch": 0.5492957746478874,
      "grad_norm": 1.5724139213562012,
      "learning_rate": 3.982400196312564e-05,
      "loss": 0.6059,
      "step": 156
    },
    {
      "epoch": 0.5598591549295775,
      "grad_norm": 2.1546130180358887,
      "learning_rate": 3.943408322222049e-05,
      "loss": 0.6087,
      "step": 159
    },
    {
      "epoch": 0.5704225352112676,
      "grad_norm": 1.74432373046875,
      "learning_rate": 3.903882467000937e-05,
      "loss": 0.5395,
      "step": 162
    },
    {
      "epoch": 0.5809859154929577,
      "grad_norm": 2.2862908840179443,
      "learning_rate": 3.8638372530263715e-05,
      "loss": 0.5786,
      "step": 165
    },
    {
      "epoch": 0.5915492957746479,
      "grad_norm": 1.8650025129318237,
      "learning_rate": 3.823287494809469e-05,
      "loss": 0.5783,
      "step": 168
    },
    {
      "epoch": 0.5915492957746479,
      "eval_loss": 0.14048069715499878,
      "eval_runtime": 33.5379,
      "eval_samples_per_second": 57.04,
      "eval_steps_per_second": 1.789,
      "step": 168
    },
    {
      "epoch": 0.602112676056338,
      "grad_norm": 1.719423770904541,
      "learning_rate": 3.782248193514766e-05,
      "loss": 0.5673,
      "step": 171
    },
    {
      "epoch": 0.6126760563380281,
      "grad_norm": 1.777215600013733,
      "learning_rate": 3.740734531410626e-05,
      "loss": 0.5777,
      "step": 174
    },
    {
      "epoch": 0.6232394366197183,
      "grad_norm": 1.7212867736816406,
      "learning_rate": 3.698761866252635e-05,
      "loss": 0.5956,
      "step": 177
    },
    {
      "epoch": 0.6338028169014085,
      "grad_norm": 1.849625825881958,
      "learning_rate": 3.656345725602089e-05,
      "loss": 0.5805,
      "step": 180
    },
    {
      "epoch": 0.6443661971830986,
      "grad_norm": 1.5129181146621704,
      "learning_rate": 3.6135018010816477e-05,
      "loss": 0.5271,
      "step": 183
    },
    {
      "epoch": 0.6549295774647887,
      "grad_norm": 1.7070224285125732,
      "learning_rate": 3.570245942570315e-05,
      "loss": 0.5715,
      "step": 186
    },
    {
      "epoch": 0.6654929577464789,
      "grad_norm": 1.416464924812317,
      "learning_rate": 3.526594152339845e-05,
      "loss": 0.528,
      "step": 189
    },
    {
      "epoch": 0.676056338028169,
      "grad_norm": 1.569150686264038,
      "learning_rate": 3.4825625791348096e-05,
      "loss": 0.5646,
      "step": 192
    },
    {
      "epoch": 0.6866197183098591,
      "grad_norm": 1.3740211725234985,
      "learning_rate": 3.438167512198436e-05,
      "loss": 0.5574,
      "step": 195
    },
    {
      "epoch": 0.6971830985915493,
      "grad_norm": 1.7749651670455933,
      "learning_rate": 3.393425375246503e-05,
      "loss": 0.5988,
      "step": 198
    },
    {
      "epoch": 0.7077464788732394,
      "grad_norm": 1.5180374383926392,
      "learning_rate": 3.348352720391469e-05,
      "loss": 0.5696,
      "step": 201
    },
    {
      "epoch": 0.7183098591549296,
      "grad_norm": 1.4863420724868774,
      "learning_rate": 3.3029662220191144e-05,
      "loss": 0.5903,
      "step": 204
    },
    {
      "epoch": 0.7288732394366197,
      "grad_norm": 1.5769625902175903,
      "learning_rate": 3.2572826706199305e-05,
      "loss": 0.5445,
      "step": 207
    },
    {
      "epoch": 0.7394366197183099,
      "grad_norm": 1.7347217798233032,
      "learning_rate": 3.211318966577581e-05,
      "loss": 0.5684,
      "step": 210
    },
    {
      "epoch": 0.7394366197183099,
      "eval_loss": 0.13805150985717773,
      "eval_runtime": 33.5478,
      "eval_samples_per_second": 57.023,
      "eval_steps_per_second": 1.788,
      "step": 210
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.053368330001831,
      "learning_rate": 3.165092113916688e-05,
      "loss": 0.573,
      "step": 213
    },
    {
      "epoch": 0.7605633802816901,
      "grad_norm": 1.5371932983398438,
      "learning_rate": 3.118619214012286e-05,
      "loss": 0.5915,
      "step": 216
    },
    {
      "epoch": 0.7711267605633803,
      "grad_norm": 1.4873214960098267,
      "learning_rate": 3.071917459263264e-05,
      "loss": 0.5577,
      "step": 219
    },
    {
      "epoch": 0.7816901408450704,
      "grad_norm": 1.5104900598526,
      "learning_rate": 3.0250041267321232e-05,
      "loss": 0.5432,
      "step": 222
    },
    {
      "epoch": 0.7922535211267606,
      "grad_norm": 1.6719636917114258,
      "learning_rate": 2.9778965717534313e-05,
      "loss": 0.5495,
      "step": 225
    },
    {
      "epoch": 0.8028169014084507,
      "grad_norm": 1.4213000535964966,
      "learning_rate": 2.9306122215132976e-05,
      "loss": 0.5193,
      "step": 228
    },
    {
      "epoch": 0.8133802816901409,
      "grad_norm": 1.4301376342773438,
      "learning_rate": 2.8831685686022897e-05,
      "loss": 0.5207,
      "step": 231
    },
    {
      "epoch": 0.823943661971831,
      "grad_norm": 1.6136512756347656,
      "learning_rate": 2.8355831645441388e-05,
      "loss": 0.5428,
      "step": 234
    },
    {
      "epoch": 0.8345070422535211,
      "grad_norm": 1.4768859148025513,
      "learning_rate": 2.787873613302649e-05,
      "loss": 0.5475,
      "step": 237
    },
    {
      "epoch": 0.8450704225352113,
      "grad_norm": 1.5305073261260986,
      "learning_rate": 2.7400575647692046e-05,
      "loss": 0.5587,
      "step": 240
    },
    {
      "epoch": 0.8556338028169014,
      "grad_norm": 1.3817962408065796,
      "learning_rate": 2.692152708233292e-05,
      "loss": 0.5434,
      "step": 243
    },
    {
      "epoch": 0.8661971830985915,
      "grad_norm": 1.2823965549468994,
      "learning_rate": 2.6441767658384366e-05,
      "loss": 0.5301,
      "step": 246
    },
    {
      "epoch": 0.8767605633802817,
      "grad_norm": 1.4110352993011475,
      "learning_rate": 2.596147486025996e-05,
      "loss": 0.5135,
      "step": 249
    },
    {
      "epoch": 0.8873239436619719,
      "grad_norm": 1.5290179252624512,
      "learning_rate": 2.5480826369692178e-05,
      "loss": 0.5662,
      "step": 252
    },
    {
      "epoch": 0.8873239436619719,
      "eval_loss": 0.13341283798217773,
      "eval_runtime": 33.5418,
      "eval_samples_per_second": 57.033,
      "eval_steps_per_second": 1.789,
      "step": 252
    },
    {
      "epoch": 0.897887323943662,
      "grad_norm": 1.5652258396148682,
      "learning_rate": 2.5e-05,
      "loss": 0.5204,
      "step": 255
    },
    {
      "epoch": 0.9084507042253521,
      "grad_norm": 1.7161169052124023,
      "learning_rate": 2.4519173630307825e-05,
      "loss": 0.5507,
      "step": 258
    },
    {
      "epoch": 0.9190140845070423,
      "grad_norm": 1.455829381942749,
      "learning_rate": 2.403852513974004e-05,
      "loss": 0.5352,
      "step": 261
    },
    {
      "epoch": 0.9295774647887324,
      "grad_norm": 1.7038428783416748,
      "learning_rate": 2.3558232341615643e-05,
      "loss": 0.5343,
      "step": 264
    },
    {
      "epoch": 0.9401408450704225,
      "grad_norm": 1.3477355241775513,
      "learning_rate": 2.3078472917667092e-05,
      "loss": 0.5213,
      "step": 267
    },
    {
      "epoch": 0.9507042253521126,
      "grad_norm": 1.510071039199829,
      "learning_rate": 2.2599424352307957e-05,
      "loss": 0.5502,
      "step": 270
    },
    {
      "epoch": 0.9612676056338029,
      "grad_norm": 1.5797042846679688,
      "learning_rate": 2.212126386697352e-05,
      "loss": 0.5568,
      "step": 273
    },
    {
      "epoch": 0.971830985915493,
      "grad_norm": 1.6164652109146118,
      "learning_rate": 2.164416835455862e-05,
      "loss": 0.5321,
      "step": 276
    },
    {
      "epoch": 0.9823943661971831,
      "grad_norm": 1.586676836013794,
      "learning_rate": 2.11683143139771e-05,
      "loss": 0.5525,
      "step": 279
    },
    {
      "epoch": 0.9929577464788732,
      "grad_norm": 1.4788326025009155,
      "learning_rate": 2.069387778486703e-05,
      "loss": 0.5534,
      "step": 282
    },
    {
      "epoch": 1.0035211267605635,
      "grad_norm": 1.3445155620574951,
      "learning_rate": 2.02210342824657e-05,
      "loss": 0.5317,
      "step": 285
    },
    {
      "epoch": 1.0140845070422535,
      "grad_norm": 1.3771849870681763,
      "learning_rate": 1.9749958732678767e-05,
      "loss": 0.5034,
      "step": 288
    },
    {
      "epoch": 1.0246478873239437,
      "grad_norm": 1.3646653890609741,
      "learning_rate": 1.928082540736737e-05,
      "loss": 0.4771,
      "step": 291
    },
    {
      "epoch": 1.0352112676056338,
      "grad_norm": 1.2086783647537231,
      "learning_rate": 1.8813807859877147e-05,
      "loss": 0.4207,
      "step": 294
    },
    {
      "epoch": 1.0352112676056338,
      "eval_loss": 0.13161411881446838,
      "eval_runtime": 33.5416,
      "eval_samples_per_second": 57.034,
      "eval_steps_per_second": 1.789,
      "step": 294
    },
    {
      "epoch": 1.045774647887324,
      "grad_norm": 1.3563522100448608,
      "learning_rate": 1.8349078860833123e-05,
      "loss": 0.4868,
      "step": 297
    },
    {
      "epoch": 1.056338028169014,
      "grad_norm": 1.4925919771194458,
      "learning_rate": 1.7886810334224192e-05,
      "loss": 0.4677,
      "step": 300
    },
    {
      "epoch": 1.0669014084507042,
      "grad_norm": 1.559565544128418,
      "learning_rate": 1.74271732938007e-05,
      "loss": 0.4977,
      "step": 303
    },
    {
      "epoch": 1.0774647887323943,
      "grad_norm": 1.6586874723434448,
      "learning_rate": 1.6970337779808862e-05,
      "loss": 0.4578,
      "step": 306
    },
    {
      "epoch": 1.0880281690140845,
      "grad_norm": 1.4285913705825806,
      "learning_rate": 1.6516472796085315e-05,
      "loss": 0.4942,
      "step": 309
    },
    {
      "epoch": 1.0985915492957747,
      "grad_norm": 1.3942043781280518,
      "learning_rate": 1.6065746247534984e-05,
      "loss": 0.4937,
      "step": 312
    },
    {
      "epoch": 1.1091549295774648,
      "grad_norm": 1.3517378568649292,
      "learning_rate": 1.561832487801565e-05,
      "loss": 0.5066,
      "step": 315
    },
    {
      "epoch": 1.119718309859155,
      "grad_norm": 1.447427749633789,
      "learning_rate": 1.5174374208651912e-05,
      "loss": 0.4476,
      "step": 318
    },
    {
      "epoch": 1.130281690140845,
      "grad_norm": 1.5619040727615356,
      "learning_rate": 1.4734058476601553e-05,
      "loss": 0.4542,
      "step": 321
    },
    {
      "epoch": 1.1408450704225352,
      "grad_norm": 1.5647931098937988,
      "learning_rate": 1.4297540574296869e-05,
      "loss": 0.4721,
      "step": 324
    },
    {
      "epoch": 1.1514084507042253,
      "grad_norm": 1.5523713827133179,
      "learning_rate": 1.386498198918352e-05,
      "loss": 0.4627,
      "step": 327
    },
    {
      "epoch": 1.1619718309859155,
      "grad_norm": 1.444485068321228,
      "learning_rate": 1.3436542743979125e-05,
      "loss": 0.4618,
      "step": 330
    },
    {
      "epoch": 1.1725352112676055,
      "grad_norm": 1.5264121294021606,
      "learning_rate": 1.3012381337473656e-05,
      "loss": 0.465,
      "step": 333
    },
    {
      "epoch": 1.1830985915492958,
      "grad_norm": 1.390081763267517,
      "learning_rate": 1.2592654685893757e-05,
      "loss": 0.4356,
      "step": 336
    },
    {
      "epoch": 1.1830985915492958,
      "eval_loss": 0.13032591342926025,
      "eval_runtime": 33.5427,
      "eval_samples_per_second": 57.032,
      "eval_steps_per_second": 1.789,
      "step": 336
    },
    {
      "epoch": 1.193661971830986,
      "grad_norm": 1.4353517293930054,
      "learning_rate": 1.217751806485235e-05,
      "loss": 0.4514,
      "step": 339
    },
    {
      "epoch": 1.204225352112676,
      "grad_norm": 1.5341055393218994,
      "learning_rate": 1.1767125051905315e-05,
      "loss": 0.4656,
      "step": 342
    },
    {
      "epoch": 1.2147887323943662,
      "grad_norm": 4.069514274597168,
      "learning_rate": 1.1361627469736285e-05,
      "loss": 0.4822,
      "step": 345
    },
    {
      "epoch": 1.2253521126760563,
      "grad_norm": 1.6193852424621582,
      "learning_rate": 1.096117532999063e-05,
      "loss": 0.4966,
      "step": 348
    },
    {
      "epoch": 1.2359154929577465,
      "grad_norm": 1.5094631910324097,
      "learning_rate": 1.0565916777779519e-05,
      "loss": 0.4881,
      "step": 351
    },
    {
      "epoch": 1.2464788732394365,
      "grad_norm": 1.4816490411758423,
      "learning_rate": 1.0175998036874356e-05,
      "loss": 0.4703,
      "step": 354
    },
    {
      "epoch": 1.2570422535211268,
      "grad_norm": 1.4219728708267212,
      "learning_rate": 9.791563355612172e-06,
      "loss": 0.4566,
      "step": 357
    },
    {
      "epoch": 1.267605633802817,
      "grad_norm": 1.435050129890442,
      "learning_rate": 9.412754953531663e-06,
      "loss": 0.4399,
      "step": 360
    },
    {
      "epoch": 1.278169014084507,
      "grad_norm": 1.5008336305618286,
      "learning_rate": 9.039712968759864e-06,
      "loss": 0.4941,
      "step": 363
    },
    {
      "epoch": 1.2887323943661972,
      "grad_norm": 1.4802623987197876,
      "learning_rate": 8.672575406168782e-06,
      "loss": 0.464,
      "step": 366
    },
    {
      "epoch": 1.2992957746478873,
      "grad_norm": 1.5385937690734863,
      "learning_rate": 8.3114780863213e-06,
      "loss": 0.4723,
      "step": 369
    },
    {
      "epoch": 1.3098591549295775,
      "grad_norm": 1.5559135675430298,
      "learning_rate": 7.956554595225016e-06,
      "loss": 0.4874,
      "step": 372
    },
    {
      "epoch": 1.3204225352112675,
      "grad_norm": 2.4486403465270996,
      "learning_rate": 7.607936234912841e-06,
      "loss": 0.4517,
      "step": 375
    },
    {
      "epoch": 1.3309859154929577,
      "grad_norm": 1.5418450832366943,
      "learning_rate": 7.265751974868554e-06,
      "loss": 0.4508,
      "step": 378
    },
    {
      "epoch": 1.3309859154929577,
      "eval_loss": 0.12915180623531342,
      "eval_runtime": 33.5598,
      "eval_samples_per_second": 57.003,
      "eval_steps_per_second": 1.788,
      "step": 378
    },
    {
      "epoch": 1.341549295774648,
      "grad_norm": 1.704418420791626,
      "learning_rate": 6.930128404315214e-06,
      "loss": 0.4565,
      "step": 381
    },
    {
      "epoch": 1.352112676056338,
      "grad_norm": 1.593887448310852,
      "learning_rate": 6.601189685384126e-06,
      "loss": 0.442,
      "step": 384
    },
    {
      "epoch": 1.3626760563380282,
      "grad_norm": 1.6920288801193237,
      "learning_rate": 6.279057507181796e-06,
      "loss": 0.4766,
      "step": 387
    },
    {
      "epoch": 1.3732394366197183,
      "grad_norm": 1.4904849529266357,
      "learning_rate": 5.9638510407716394e-06,
      "loss": 0.4798,
      "step": 390
    },
    {
      "epoch": 1.3838028169014085,
      "grad_norm": 1.403585433959961,
      "learning_rate": 5.655686895087329e-06,
      "loss": 0.4288,
      "step": 393
    },
    {
      "epoch": 1.3943661971830985,
      "grad_norm": 1.7020981311798096,
      "learning_rate": 5.354679073793942e-06,
      "loss": 0.4431,
      "step": 396
    },
    {
      "epoch": 1.4049295774647887,
      "grad_norm": 1.389512538909912,
      "learning_rate": 5.060938933112891e-06,
      "loss": 0.4269,
      "step": 399
    },
    {
      "epoch": 1.415492957746479,
      "grad_norm": 1.4572571516036987,
      "learning_rate": 4.7745751406263165e-06,
      "loss": 0.4362,
      "step": 402
    },
    {
      "epoch": 1.426056338028169,
      "grad_norm": 1.386979341506958,
      "learning_rate": 4.495693635076101e-06,
      "loss": 0.4528,
      "step": 405
    },
    {
      "epoch": 1.436619718309859,
      "grad_norm": 1.5919663906097412,
      "learning_rate": 4.224397587172402e-06,
      "loss": 0.4448,
      "step": 408
    },
    {
      "epoch": 1.4471830985915493,
      "grad_norm": 1.4099351167678833,
      "learning_rate": 3.9607873614261715e-06,
      "loss": 0.4233,
      "step": 411
    },
    {
      "epoch": 1.4577464788732395,
      "grad_norm": 1.6724328994750977,
      "learning_rate": 3.7049604790198976e-06,
      "loss": 0.4849,
      "step": 414
    },
    {
      "epoch": 1.4683098591549295,
      "grad_norm": 1.8909960985183716,
      "learning_rate": 3.4570115817301243e-06,
      "loss": 0.4443,
      "step": 417
    },
    {
      "epoch": 1.4788732394366197,
      "grad_norm": 1.5311111211776733,
      "learning_rate": 3.217032396915265e-06,
      "loss": 0.4651,
      "step": 420
    },
    {
      "epoch": 1.4788732394366197,
      "eval_loss": 0.12859447300434113,
      "eval_runtime": 33.526,
      "eval_samples_per_second": 57.06,
      "eval_steps_per_second": 1.79,
      "step": 420
    },
    {
      "epoch": 1.48943661971831,
      "grad_norm": 1.5658754110336304,
      "learning_rate": 2.98511170358155e-06,
      "loss": 0.452,
      "step": 423
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.5586401224136353,
      "learning_rate": 2.7613352995397078e-06,
      "loss": 0.4307,
      "step": 426
    },
    {
      "epoch": 1.51056338028169,
      "grad_norm": 1.8097403049468994,
      "learning_rate": 2.545785969664524e-06,
      "loss": 0.4415,
      "step": 429
    },
    {
      "epoch": 1.5211267605633803,
      "grad_norm": 2.813575267791748,
      "learning_rate": 2.338543455269046e-06,
      "loss": 0.4346,
      "step": 432
    },
    {
      "epoch": 1.5316901408450705,
      "grad_norm": 1.5137277841567993,
      "learning_rate": 2.1396844246046903e-06,
      "loss": 0.4203,
      "step": 435
    },
    {
      "epoch": 1.5422535211267605,
      "grad_norm": 1.5360363721847534,
      "learning_rate": 1.949282444498238e-06,
      "loss": 0.4264,
      "step": 438
    },
    {
      "epoch": 1.5528169014084507,
      "grad_norm": 1.4588457345962524,
      "learning_rate": 1.767407953136202e-06,
      "loss": 0.4363,
      "step": 441
    },
    {
      "epoch": 1.563380281690141,
      "grad_norm": 1.5039774179458618,
      "learning_rate": 1.59412823400657e-06,
      "loss": 0.4399,
      "step": 444
    },
    {
      "epoch": 1.573943661971831,
      "grad_norm": 1.6695863008499146,
      "learning_rate": 1.4295073910076757e-06,
      "loss": 0.4678,
      "step": 447
    },
    {
      "epoch": 1.584507042253521,
      "grad_norm": 1.5582469701766968,
      "learning_rate": 1.273606324733284e-06,
      "loss": 0.4264,
      "step": 450
    },
    {
      "epoch": 1.5950704225352113,
      "grad_norm": 1.6020346879959106,
      "learning_rate": 1.1264827099427417e-06,
      "loss": 0.4423,
      "step": 453
    },
    {
      "epoch": 1.6056338028169015,
      "grad_norm": 1.8120399713516235,
      "learning_rate": 9.881909742245177e-07,
      "loss": 0.4793,
      "step": 456
    },
    {
      "epoch": 1.6161971830985915,
      "grad_norm": 1.804922342300415,
      "learning_rate": 8.587822778610283e-07,
      "loss": 0.4396,
      "step": 459
    },
    {
      "epoch": 1.6267605633802817,
      "grad_norm": 1.4649447202682495,
      "learning_rate": 7.383044949021339e-07,
      "loss": 0.4505,
      "step": 462
    },
    {
      "epoch": 1.6267605633802817,
      "eval_loss": 0.12798862159252167,
      "eval_runtime": 33.5293,
      "eval_samples_per_second": 57.055,
      "eval_steps_per_second": 1.789,
      "step": 462
    },
    {
      "epoch": 1.637323943661972,
      "grad_norm": 1.6809414625167847,
      "learning_rate": 6.268021954544096e-07,
      "loss": 0.4512,
      "step": 465
    },
    {
      "epoch": 1.647887323943662,
      "grad_norm": 1.5099436044692993,
      "learning_rate": 5.243166291926782e-07,
      "loss": 0.4482,
      "step": 468
    },
    {
      "epoch": 1.658450704225352,
      "grad_norm": 1.7300583124160767,
      "learning_rate": 4.308857100999042e-07,
      "loss": 0.465,
      "step": 471
    },
    {
      "epoch": 1.6690140845070423,
      "grad_norm": 1.4459805488586426,
      "learning_rate": 3.465440024411265e-07,
      "loss": 0.4559,
      "step": 474
    },
    {
      "epoch": 1.6795774647887325,
      "grad_norm": 1.5714209079742432,
      "learning_rate": 2.7132270797659563e-07,
      "loss": 0.4662,
      "step": 477
    },
    {
      "epoch": 1.6901408450704225,
      "grad_norm": 1.716829776763916,
      "learning_rate": 2.052496544188487e-07,
      "loss": 0.4866,
      "step": 480
    },
    {
      "epoch": 1.7007042253521125,
      "grad_norm": 1.6884804964065552,
      "learning_rate": 1.483492851379914e-07,
      "loss": 0.4441,
      "step": 483
    },
    {
      "epoch": 1.711267605633803,
      "grad_norm": 1.439448356628418,
      "learning_rate": 1.006426501190233e-07,
      "loss": 0.4588,
      "step": 486
    },
    {
      "epoch": 1.721830985915493,
      "grad_norm": 1.6845688819885254,
      "learning_rate": 6.214739817448633e-08,
      "loss": 0.4704,
      "step": 489
    },
    {
      "epoch": 1.732394366197183,
      "grad_norm": 2.208343029022217,
      "learning_rate": 3.287777041539042e-08,
      "loss": 0.469,
      "step": 492
    },
    {
      "epoch": 1.7429577464788732,
      "grad_norm": 1.6495448350906372,
      "learning_rate": 1.284459498280266e-08,
      "loss": 0.4331,
      "step": 495
    },
    {
      "epoch": 1.7535211267605635,
      "grad_norm": 1.6916940212249756,
      "learning_rate": 2.055283042018408e-09,
      "loss": 0.466,
      "step": 498
    }
  ],
  "logging_steps": 3,
  "max_steps": 500,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 42,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.193006204764553e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|