{
  "best_metric": 11.747422218322754,
  "best_model_checkpoint": "miner_id_24/checkpoint-100",
  "epoch": 0.5037783375314862,
  "eval_steps": 50,
  "global_step": 100,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005037783375314861,
      "grad_norm": 0.11665327101945877,
      "learning_rate": 4e-05,
      "loss": 11.7818,
      "step": 1
    },
    {
      "epoch": 0.005037783375314861,
      "eval_loss": 11.777442932128906,
      "eval_runtime": 1.3844,
      "eval_samples_per_second": 60.678,
      "eval_steps_per_second": 30.339,
      "step": 1
    },
    {
      "epoch": 0.010075566750629723,
      "grad_norm": 0.09411647170782089,
      "learning_rate": 8e-05,
      "loss": 11.7749,
      "step": 2
    },
    {
      "epoch": 0.015113350125944584,
      "grad_norm": 0.11491205543279648,
      "learning_rate": 0.00012,
      "loss": 11.7791,
      "step": 3
    },
    {
      "epoch": 0.020151133501259445,
      "grad_norm": 0.10814717411994934,
      "learning_rate": 0.00016,
      "loss": 11.7724,
      "step": 4
    },
    {
      "epoch": 0.02518891687657431,
      "grad_norm": 0.09395021945238113,
      "learning_rate": 0.0002,
      "loss": 11.7741,
      "step": 5
    },
    {
      "epoch": 0.030226700251889168,
      "grad_norm": 0.08854291588068008,
      "learning_rate": 0.00019994532573409262,
      "loss": 11.7739,
      "step": 6
    },
    {
      "epoch": 0.03526448362720403,
      "grad_norm": 0.09998143464326859,
      "learning_rate": 0.00019978136272187747,
      "loss": 11.7719,
      "step": 7
    },
    {
      "epoch": 0.04030226700251889,
      "grad_norm": 0.1092071384191513,
      "learning_rate": 0.00019950829025450114,
      "loss": 11.7766,
      "step": 8
    },
    {
      "epoch": 0.04534005037783375,
      "grad_norm": 0.13437338173389435,
      "learning_rate": 0.00019912640693269752,
      "loss": 11.7695,
      "step": 9
    },
    {
      "epoch": 0.05037783375314862,
      "grad_norm": 0.10191803425550461,
      "learning_rate": 0.00019863613034027224,
      "loss": 11.7726,
      "step": 10
    },
    {
      "epoch": 0.055415617128463476,
      "grad_norm": 0.13112697005271912,
      "learning_rate": 0.00019803799658748094,
      "loss": 11.7715,
      "step": 11
    },
    {
      "epoch": 0.060453400503778336,
      "grad_norm": 0.1708078533411026,
      "learning_rate": 0.0001973326597248006,
      "loss": 11.765,
      "step": 12
    },
    {
      "epoch": 0.0654911838790932,
      "grad_norm": 0.14378079771995544,
      "learning_rate": 0.00019652089102773488,
      "loss": 11.7693,
      "step": 13
    },
    {
      "epoch": 0.07052896725440806,
      "grad_norm": 0.11845815926790237,
      "learning_rate": 0.00019560357815343577,
      "loss": 11.754,
      "step": 14
    },
    {
      "epoch": 0.07556675062972293,
      "grad_norm": 0.13851119577884674,
      "learning_rate": 0.00019458172417006347,
      "loss": 11.7614,
      "step": 15
    },
    {
      "epoch": 0.08060453400503778,
      "grad_norm": 0.14140120148658752,
      "learning_rate": 0.0001934564464599461,
      "loss": 11.7656,
      "step": 16
    },
    {
      "epoch": 0.08564231738035265,
      "grad_norm": 0.21620717644691467,
      "learning_rate": 0.00019222897549773848,
      "loss": 11.7735,
      "step": 17
    },
    {
      "epoch": 0.0906801007556675,
      "grad_norm": 0.2548966407775879,
      "learning_rate": 0.00019090065350491626,
      "loss": 11.775,
      "step": 18
    },
    {
      "epoch": 0.09571788413098237,
      "grad_norm": 0.2564621865749359,
      "learning_rate": 0.00018947293298207635,
      "loss": 11.7763,
      "step": 19
    },
    {
      "epoch": 0.10075566750629723,
      "grad_norm": 0.24637818336486816,
      "learning_rate": 0.0001879473751206489,
      "loss": 11.7768,
      "step": 20
    },
    {
      "epoch": 0.10579345088161209,
      "grad_norm": 0.20252592861652374,
      "learning_rate": 0.00018632564809575742,
      "loss": 11.7709,
      "step": 21
    },
    {
      "epoch": 0.11083123425692695,
      "grad_norm": 0.1991090625524521,
      "learning_rate": 0.00018460952524209355,
      "loss": 11.77,
      "step": 22
    },
    {
      "epoch": 0.11586901763224182,
      "grad_norm": 0.19447946548461914,
      "learning_rate": 0.00018280088311480201,
      "loss": 11.7763,
      "step": 23
    },
    {
      "epoch": 0.12090680100755667,
      "grad_norm": 0.31570008397102356,
      "learning_rate": 0.00018090169943749476,
      "loss": 11.7832,
      "step": 24
    },
    {
      "epoch": 0.12594458438287154,
      "grad_norm": 0.26807549595832825,
      "learning_rate": 0.00017891405093963938,
      "loss": 11.781,
      "step": 25
    },
    {
      "epoch": 0.1309823677581864,
      "grad_norm": 0.20723332464694977,
      "learning_rate": 0.00017684011108568592,
      "loss": 11.7613,
      "step": 26
    },
    {
      "epoch": 0.13602015113350127,
      "grad_norm": 0.16287362575531006,
      "learning_rate": 0.0001746821476984154,
      "loss": 11.7537,
      "step": 27
    },
    {
      "epoch": 0.14105793450881612,
      "grad_norm": 0.19770941138267517,
      "learning_rate": 0.00017244252047910892,
      "loss": 11.7782,
      "step": 28
    },
    {
      "epoch": 0.14609571788413098,
      "grad_norm": 0.20761863887310028,
      "learning_rate": 0.00017012367842724887,
      "loss": 11.758,
      "step": 29
    },
    {
      "epoch": 0.15113350125944586,
      "grad_norm": 0.25872254371643066,
      "learning_rate": 0.00016772815716257412,
      "loss": 11.7536,
      "step": 30
    },
    {
      "epoch": 0.1561712846347607,
      "grad_norm": 0.3000946342945099,
      "learning_rate": 0.00016525857615241687,
      "loss": 11.7583,
      "step": 31
    },
    {
      "epoch": 0.16120906801007556,
      "grad_norm": 0.2385156750679016,
      "learning_rate": 0.0001627176358473537,
      "loss": 11.7501,
      "step": 32
    },
    {
      "epoch": 0.16624685138539042,
      "grad_norm": 0.2373785525560379,
      "learning_rate": 0.00016010811472830252,
      "loss": 11.7478,
      "step": 33
    },
    {
      "epoch": 0.1712846347607053,
      "grad_norm": 0.5123424530029297,
      "learning_rate": 0.00015743286626829437,
      "loss": 11.788,
      "step": 34
    },
    {
      "epoch": 0.17632241813602015,
      "grad_norm": 0.4221782386302948,
      "learning_rate": 0.00015469481581224272,
      "loss": 11.7738,
      "step": 35
    },
    {
      "epoch": 0.181360201511335,
      "grad_norm": 0.3816068768501282,
      "learning_rate": 0.00015189695737812152,
      "loss": 11.771,
      "step": 36
    },
    {
      "epoch": 0.18639798488664988,
      "grad_norm": 0.34051746129989624,
      "learning_rate": 0.00014904235038305083,
      "loss": 11.7577,
      "step": 37
    },
    {
      "epoch": 0.19143576826196473,
      "grad_norm": 0.2573257386684418,
      "learning_rate": 0.0001461341162978688,
      "loss": 11.7447,
      "step": 38
    },
    {
      "epoch": 0.1964735516372796,
      "grad_norm": 0.5065600275993347,
      "learning_rate": 0.00014317543523384928,
      "loss": 11.7931,
      "step": 39
    },
    {
      "epoch": 0.20151133501259447,
      "grad_norm": 0.48842620849609375,
      "learning_rate": 0.00014016954246529696,
      "loss": 11.7782,
      "step": 40
    },
    {
      "epoch": 0.20654911838790932,
      "grad_norm": 0.5999230742454529,
      "learning_rate": 0.00013711972489182208,
      "loss": 11.7796,
      "step": 41
    },
    {
      "epoch": 0.21158690176322417,
      "grad_norm": 0.5456677079200745,
      "learning_rate": 0.00013402931744416433,
      "loss": 11.7846,
      "step": 42
    },
    {
      "epoch": 0.21662468513853905,
      "grad_norm": 0.544049859046936,
      "learning_rate": 0.00013090169943749476,
      "loss": 11.7765,
      "step": 43
    },
    {
      "epoch": 0.2216624685138539,
      "grad_norm": 0.4290430545806885,
      "learning_rate": 0.00012774029087618446,
      "loss": 11.7786,
      "step": 44
    },
    {
      "epoch": 0.22670025188916876,
      "grad_norm": 0.624894380569458,
      "learning_rate": 0.00012454854871407994,
      "loss": 11.7792,
      "step": 45
    },
    {
      "epoch": 0.23173803526448364,
      "grad_norm": 0.6616364121437073,
      "learning_rate": 0.0001213299630743747,
      "loss": 11.7779,
      "step": 46
    },
    {
      "epoch": 0.2367758186397985,
      "grad_norm": 0.6500002145767212,
      "learning_rate": 0.000118088053433211,
      "loss": 11.7756,
      "step": 47
    },
    {
      "epoch": 0.24181360201511334,
      "grad_norm": 0.7946918606758118,
      "learning_rate": 0.0001148263647711842,
      "loss": 11.7614,
      "step": 48
    },
    {
      "epoch": 0.24685138539042822,
      "grad_norm": 0.7269666790962219,
      "learning_rate": 0.00011154846369695863,
      "loss": 11.7358,
      "step": 49
    },
    {
      "epoch": 0.2518891687657431,
      "grad_norm": 0.14651153981685638,
      "learning_rate": 0.00010825793454723325,
      "loss": 11.7679,
      "step": 50
    },
    {
      "epoch": 0.2518891687657431,
      "eval_loss": 11.756505966186523,
      "eval_runtime": 1.3815,
      "eval_samples_per_second": 60.803,
      "eval_steps_per_second": 30.401,
      "step": 50
    },
    {
      "epoch": 0.25692695214105793,
      "grad_norm": 0.12329468876123428,
      "learning_rate": 0.00010495837546732224,
      "loss": 11.7644,
      "step": 51
    },
    {
      "epoch": 0.2619647355163728,
      "grad_norm": 0.19282935559749603,
      "learning_rate": 0.00010165339447663587,
      "loss": 11.7626,
      "step": 52
    },
    {
      "epoch": 0.26700251889168763,
      "grad_norm": 0.13012297451496124,
      "learning_rate": 9.834660552336415e-05,
      "loss": 11.7701,
      "step": 53
    },
    {
      "epoch": 0.27204030226700254,
      "grad_norm": 0.1542385220527649,
      "learning_rate": 9.504162453267777e-05,
      "loss": 11.748,
      "step": 54
    },
    {
      "epoch": 0.2770780856423174,
      "grad_norm": 0.13558229804039001,
      "learning_rate": 9.174206545276677e-05,
      "loss": 11.7566,
      "step": 55
    },
    {
      "epoch": 0.28211586901763225,
      "grad_norm": 0.1677481085062027,
      "learning_rate": 8.845153630304139e-05,
      "loss": 11.7563,
      "step": 56
    },
    {
      "epoch": 0.2871536523929471,
      "grad_norm": 0.20597466826438904,
      "learning_rate": 8.517363522881579e-05,
      "loss": 11.7526,
      "step": 57
    },
    {
      "epoch": 0.29219143576826195,
      "grad_norm": 0.17000000178813934,
      "learning_rate": 8.191194656678904e-05,
      "loss": 11.7474,
      "step": 58
    },
    {
      "epoch": 0.2972292191435768,
      "grad_norm": 0.2849511206150055,
      "learning_rate": 7.867003692562534e-05,
      "loss": 11.7591,
      "step": 59
    },
    {
      "epoch": 0.3022670025188917,
      "grad_norm": 0.19527281820774078,
      "learning_rate": 7.54514512859201e-05,
      "loss": 11.7456,
      "step": 60
    },
    {
      "epoch": 0.30730478589420657,
      "grad_norm": 0.25237226486206055,
      "learning_rate": 7.225970912381556e-05,
      "loss": 11.7602,
      "step": 61
    },
    {
      "epoch": 0.3123425692695214,
      "grad_norm": 0.24200652539730072,
      "learning_rate": 6.909830056250527e-05,
      "loss": 11.7528,
      "step": 62
    },
    {
      "epoch": 0.31738035264483627,
      "grad_norm": 0.19608649611473083,
      "learning_rate": 6.59706825558357e-05,
      "loss": 11.7477,
      "step": 63
    },
    {
      "epoch": 0.3224181360201511,
      "grad_norm": 0.2160205841064453,
      "learning_rate": 6.28802751081779e-05,
      "loss": 11.7498,
      "step": 64
    },
    {
      "epoch": 0.327455919395466,
      "grad_norm": 0.3006030321121216,
      "learning_rate": 5.983045753470308e-05,
      "loss": 11.7526,
      "step": 65
    },
    {
      "epoch": 0.33249370277078083,
      "grad_norm": 0.2874875068664551,
      "learning_rate": 5.6824564766150726e-05,
      "loss": 11.7516,
      "step": 66
    },
    {
      "epoch": 0.33753148614609574,
      "grad_norm": 0.34401634335517883,
      "learning_rate": 5.386588370213124e-05,
      "loss": 11.7539,
      "step": 67
    },
    {
      "epoch": 0.3425692695214106,
      "grad_norm": 0.33066168427467346,
      "learning_rate": 5.095764961694922e-05,
      "loss": 11.752,
      "step": 68
    },
    {
      "epoch": 0.34760705289672544,
      "grad_norm": 0.29201266169548035,
      "learning_rate": 4.810304262187852e-05,
      "loss": 11.743,
      "step": 69
    },
    {
      "epoch": 0.3526448362720403,
      "grad_norm": 0.34607893228530884,
      "learning_rate": 4.530518418775733e-05,
      "loss": 11.7538,
      "step": 70
    },
    {
      "epoch": 0.35768261964735515,
      "grad_norm": 0.2541449964046478,
      "learning_rate": 4.256713373170564e-05,
      "loss": 11.7628,
      "step": 71
    },
    {
      "epoch": 0.36272040302267,
      "grad_norm": 0.29604771733283997,
      "learning_rate": 3.9891885271697496e-05,
      "loss": 11.7578,
      "step": 72
    },
    {
      "epoch": 0.3677581863979849,
      "grad_norm": 0.3188517093658447,
      "learning_rate": 3.7282364152646297e-05,
      "loss": 11.7621,
      "step": 73
    },
    {
      "epoch": 0.37279596977329976,
      "grad_norm": 0.24045149981975555,
      "learning_rate": 3.4741423847583134e-05,
      "loss": 11.7587,
      "step": 74
    },
    {
      "epoch": 0.3778337531486146,
      "grad_norm": 0.27738526463508606,
      "learning_rate": 3.227184283742591e-05,
      "loss": 11.7587,
      "step": 75
    },
    {
      "epoch": 0.38287153652392947,
      "grad_norm": 0.20490190386772156,
      "learning_rate": 2.9876321572751144e-05,
      "loss": 11.736,
      "step": 76
    },
    {
      "epoch": 0.3879093198992443,
      "grad_norm": 0.34930336475372314,
      "learning_rate": 2.7557479520891104e-05,
      "loss": 11.7466,
      "step": 77
    },
    {
      "epoch": 0.3929471032745592,
      "grad_norm": 0.3548698425292969,
      "learning_rate": 2.5317852301584643e-05,
      "loss": 11.748,
      "step": 78
    },
    {
      "epoch": 0.3979848866498741,
      "grad_norm": 0.22995570302009583,
      "learning_rate": 2.315988891431412e-05,
      "loss": 11.7384,
      "step": 79
    },
    {
      "epoch": 0.40302267002518893,
      "grad_norm": 0.39114826917648315,
      "learning_rate": 2.1085949060360654e-05,
      "loss": 11.7427,
      "step": 80
    },
    {
      "epoch": 0.4080604534005038,
      "grad_norm": 0.3221098482608795,
      "learning_rate": 1.9098300562505266e-05,
      "loss": 11.7438,
      "step": 81
    },
    {
      "epoch": 0.41309823677581864,
      "grad_norm": 0.5230813026428223,
      "learning_rate": 1.7199116885197995e-05,
      "loss": 11.7491,
      "step": 82
    },
    {
      "epoch": 0.4181360201511335,
      "grad_norm": 0.5868245363235474,
      "learning_rate": 1.5390474757906446e-05,
      "loss": 11.7492,
      "step": 83
    },
    {
      "epoch": 0.42317380352644834,
      "grad_norm": 0.7091190218925476,
      "learning_rate": 1.3674351904242611e-05,
      "loss": 11.7395,
      "step": 84
    },
    {
      "epoch": 0.4282115869017632,
      "grad_norm": 0.34904906153678894,
      "learning_rate": 1.2052624879351104e-05,
      "loss": 11.7469,
      "step": 85
    },
    {
      "epoch": 0.4332493702770781,
      "grad_norm": 0.497840017080307,
      "learning_rate": 1.0527067017923654e-05,
      "loss": 11.7541,
      "step": 86
    },
    {
      "epoch": 0.43828715365239296,
      "grad_norm": 0.6534344553947449,
      "learning_rate": 9.09934649508375e-06,
      "loss": 11.7602,
      "step": 87
    },
    {
      "epoch": 0.4433249370277078,
      "grad_norm": 0.6424288749694824,
      "learning_rate": 7.771024502261526e-06,
      "loss": 11.7518,
      "step": 88
    },
    {
      "epoch": 0.44836272040302266,
      "grad_norm": 0.6290802955627441,
      "learning_rate": 6.543553540053926e-06,
      "loss": 11.7363,
      "step": 89
    },
    {
      "epoch": 0.4534005037783375,
      "grad_norm": 0.6020405292510986,
      "learning_rate": 5.418275829936537e-06,
      "loss": 11.7478,
      "step": 90
    },
    {
      "epoch": 0.45843828715365237,
      "grad_norm": 0.5933429002761841,
      "learning_rate": 4.3964218465642355e-06,
      "loss": 11.7431,
      "step": 91
    },
    {
      "epoch": 0.4634760705289673,
      "grad_norm": 0.7595274448394775,
      "learning_rate": 3.4791089722651436e-06,
      "loss": 11.746,
      "step": 92
    },
    {
      "epoch": 0.46851385390428213,
      "grad_norm": 0.7738293409347534,
      "learning_rate": 2.667340275199426e-06,
      "loss": 11.7478,
      "step": 93
    },
    {
      "epoch": 0.473551637279597,
      "grad_norm": 0.6566546559333801,
      "learning_rate": 1.9620034125190644e-06,
      "loss": 11.7331,
      "step": 94
    },
    {
      "epoch": 0.47858942065491183,
      "grad_norm": 0.790342390537262,
      "learning_rate": 1.3638696597277679e-06,
      "loss": 11.7377,
      "step": 95
    },
    {
      "epoch": 0.4836272040302267,
      "grad_norm": 0.7581157684326172,
      "learning_rate": 8.735930673024806e-07,
      "loss": 11.7515,
      "step": 96
    },
    {
      "epoch": 0.48866498740554154,
      "grad_norm": 0.8844296932220459,
      "learning_rate": 4.917097454988584e-07,
      "loss": 11.715,
      "step": 97
    },
    {
      "epoch": 0.49370277078085645,
      "grad_norm": 0.9089491963386536,
      "learning_rate": 2.1863727812254653e-07,
      "loss": 11.7192,
      "step": 98
    },
    {
      "epoch": 0.4987405541561713,
      "grad_norm": 0.14338494837284088,
      "learning_rate": 5.467426590739511e-08,
      "loss": 11.7708,
      "step": 99
    },
    {
      "epoch": 0.5037783375314862,
      "grad_norm": 0.16467101871967316,
      "learning_rate": 0.0,
      "loss": 11.7612,
      "step": 100
    },
    {
      "epoch": 0.5037783375314862,
      "eval_loss": 11.747422218322754,
      "eval_runtime": 1.3803,
      "eval_samples_per_second": 60.857,
      "eval_steps_per_second": 30.429,
      "step": 100
    }
  ],
  "logging_steps": 1,
  "max_steps": 100,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 1,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 40740008755200.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}