|
{"current_steps": 5, "total_steps": 792, "loss": 4.6192, "learning_rate": 4.9995083170283816e-05, "epoch": 0.03770028275212064, "percentage": 0.63, "elapsed_time": "0:00:33", "remaining_time": "1:27:03", "throughput": 1518.75, "total_tokens": 50400} |
|
{"current_steps": 10, "total_steps": 792, "loss": 3.9149, "learning_rate": 4.998033461515242e-05, "epoch": 0.07540056550424128, "percentage": 1.26, "elapsed_time": "0:01:08", "remaining_time": "1:28:40", "throughput": 1528.78, "total_tokens": 104016} |
|
{"current_steps": 15, "total_steps": 792, "loss": 3.6912, "learning_rate": 4.9955760135896534e-05, "epoch": 0.11310084825636192, "percentage": 1.89, "elapsed_time": "0:01:41", "remaining_time": "1:27:55", "throughput": 1527.62, "total_tokens": 155584} |
|
{"current_steps": 20, "total_steps": 792, "loss": 3.5556, "learning_rate": 4.992136939879856e-05, "epoch": 0.15080113100848255, "percentage": 2.53, "elapsed_time": "0:02:13", "remaining_time": "1:25:35", "throughput": 1523.21, "total_tokens": 202672} |
|
{"current_steps": 25, "total_steps": 792, "loss": 3.4256, "learning_rate": 4.9877175931330346e-05, "epoch": 0.1885014137606032, "percentage": 3.16, "elapsed_time": "0:02:48", "remaining_time": "1:26:04", "throughput": 1513.8, "total_tokens": 254800} |
|
{"current_steps": 30, "total_steps": 792, "loss": 3.3128, "learning_rate": 4.982319711683221e-05, "epoch": 0.22620169651272384, "percentage": 3.79, "elapsed_time": "0:03:23", "remaining_time": "1:25:58", "throughput": 1508.32, "total_tokens": 306352} |
|
{"current_steps": 35, "total_steps": 792, "loss": 3.2688, "learning_rate": 4.975945418767529e-05, "epoch": 0.2639019792648445, "percentage": 4.42, "elapsed_time": "0:03:54", "remaining_time": "1:24:34", "throughput": 1518.88, "total_tokens": 356352} |
|
{"current_steps": 40, "total_steps": 792, "loss": 3.297, "learning_rate": 4.968597221690986e-05, "epoch": 0.3016022620169651, "percentage": 5.05, "elapsed_time": "0:04:27", "remaining_time": "1:23:45", "throughput": 1521.48, "total_tokens": 406672} |
|
{"current_steps": 45, "total_steps": 792, "loss": 3.232, "learning_rate": 4.96027801084029e-05, "epoch": 0.3393025447690858, "percentage": 5.68, "elapsed_time": "0:04:59", "remaining_time": "1:22:43", "throughput": 1525.5, "total_tokens": 456160} |
|
{"current_steps": 50, "total_steps": 792, "loss": 3.267, "learning_rate": 4.950991058546893e-05, "epoch": 0.3770028275212064, "percentage": 6.31, "elapsed_time": "0:05:34", "remaining_time": "1:22:37", "throughput": 1525.56, "total_tokens": 509680} |
|
{"current_steps": 55, "total_steps": 792, "loss": 3.2148, "learning_rate": 4.940740017799833e-05, "epoch": 0.41470311027332707, "percentage": 6.94, "elapsed_time": "0:06:07", "remaining_time": "1:22:00", "throughput": 1524.82, "total_tokens": 559968} |
|
{"current_steps": 60, "total_steps": 792, "loss": 3.1403, "learning_rate": 4.929528920808854e-05, "epoch": 0.4524033930254477, "percentage": 7.58, "elapsed_time": "0:06:39", "remaining_time": "1:21:09", "throughput": 1528.24, "total_tokens": 610000} |
|
{"current_steps": 65, "total_steps": 792, "loss": 3.1515, "learning_rate": 4.917362177418342e-05, "epoch": 0.49010367577756836, "percentage": 8.21, "elapsed_time": "0:07:13", "remaining_time": "1:20:43", "throughput": 1527.04, "total_tokens": 661280} |
|
{"current_steps": 70, "total_steps": 792, "loss": 3.1468, "learning_rate": 4.904244573372733e-05, "epoch": 0.527803958529689, "percentage": 8.84, "elapsed_time": "0:07:47", "remaining_time": "1:20:20", "throughput": 1526.19, "total_tokens": 713264} |
|
{"current_steps": 75, "total_steps": 792, "loss": 3.196, "learning_rate": 4.8901812684340564e-05, "epoch": 0.5655042412818096, "percentage": 9.47, "elapsed_time": "0:08:18", "remaining_time": "1:19:27", "throughput": 1528.99, "total_tokens": 762576} |
|
{"current_steps": 80, "total_steps": 792, "loss": 3.0593, "learning_rate": 4.8751777943523634e-05, "epoch": 0.6032045240339302, "percentage": 10.1, "elapsed_time": "0:08:52", "remaining_time": "1:18:57", "throughput": 1528.02, "total_tokens": 813392} |
|
{"current_steps": 85, "total_steps": 792, "loss": 3.0676, "learning_rate": 4.8592400526898314e-05, "epoch": 0.6409048067860509, "percentage": 10.73, "elapsed_time": "0:09:24", "remaining_time": "1:18:17", "throughput": 1523.96, "total_tokens": 860608} |
|
{"current_steps": 90, "total_steps": 792, "loss": 3.1061, "learning_rate": 4.842374312499405e-05, "epoch": 0.6786050895381716, "percentage": 11.36, "elapsed_time": "0:09:56", "remaining_time": "1:17:29", "throughput": 1525.19, "total_tokens": 909104} |
|
{"current_steps": 95, "total_steps": 792, "loss": 2.9847, "learning_rate": 4.824587207858888e-05, "epoch": 0.7163053722902922, "percentage": 11.99, "elapsed_time": "0:10:30", "remaining_time": "1:17:06", "throughput": 1521.9, "total_tokens": 959600} |
|
{"current_steps": 100, "total_steps": 792, "loss": 3.0289, "learning_rate": 4.805885735261454e-05, "epoch": 0.7540056550424128, "percentage": 12.63, "elapsed_time": "0:11:05", "remaining_time": "1:16:45", "throughput": 1522.94, "total_tokens": 1013648} |
|
{"current_steps": 105, "total_steps": 792, "loss": 2.9474, "learning_rate": 4.786277250863599e-05, "epoch": 0.7917059377945335, "percentage": 13.26, "elapsed_time": "0:11:40", "remaining_time": "1:16:22", "throughput": 1520.69, "total_tokens": 1065120} |
|
{"current_steps": 110, "total_steps": 792, "loss": 2.9713, "learning_rate": 4.765769467591625e-05, "epoch": 0.8294062205466541, "percentage": 13.89, "elapsed_time": "0:12:14", "remaining_time": "1:15:55", "throughput": 1523.69, "total_tokens": 1119424} |
|
{"current_steps": 115, "total_steps": 792, "loss": 3.0012, "learning_rate": 4.744370452107789e-05, "epoch": 0.8671065032987747, "percentage": 14.52, "elapsed_time": "0:12:47", "remaining_time": "1:15:18", "throughput": 1524.28, "total_tokens": 1169888} |
|
{"current_steps": 120, "total_steps": 792, "loss": 3.0399, "learning_rate": 4.722088621637309e-05, "epoch": 0.9048067860508954, "percentage": 15.15, "elapsed_time": "0:13:18", "remaining_time": "1:14:30", "throughput": 1526.84, "total_tokens": 1218944} |
|
{"current_steps": 125, "total_steps": 792, "loss": 2.9156, "learning_rate": 4.698932740657479e-05, "epoch": 0.942507068803016, "percentage": 15.78, "elapsed_time": "0:13:53", "remaining_time": "1:14:07", "throughput": 1523.71, "total_tokens": 1269920} |
|
{"current_steps": 130, "total_steps": 792, "loss": 3.0288, "learning_rate": 4.6749119174501975e-05, "epoch": 0.9802073515551367, "percentage": 16.41, "elapsed_time": "0:14:23", "remaining_time": "1:13:15", "throughput": 1524.09, "total_tokens": 1315536} |
|
{"current_steps": 135, "total_steps": 792, "loss": 2.8911, "learning_rate": 4.6500356005192514e-05, "epoch": 1.0179076343072573, "percentage": 17.05, "elapsed_time": "0:14:54", "remaining_time": "1:12:31", "throughput": 1521.83, "total_tokens": 1360736} |
|
{"current_steps": 140, "total_steps": 792, "loss": 2.9148, "learning_rate": 4.6243135748737864e-05, "epoch": 1.055607917059378, "percentage": 17.68, "elapsed_time": "0:15:24", "remaining_time": "1:11:47", "throughput": 1524.16, "total_tokens": 1409808} |
|
{"current_steps": 145, "total_steps": 792, "loss": 2.868, "learning_rate": 4.597755958179406e-05, "epoch": 1.0933081998114986, "percentage": 18.31, "elapsed_time": "0:15:57", "remaining_time": "1:11:12", "throughput": 1525.54, "total_tokens": 1460864} |
|
{"current_steps": 150, "total_steps": 792, "loss": 2.7477, "learning_rate": 4.570373196778427e-05, "epoch": 1.1310084825636193, "percentage": 18.94, "elapsed_time": "0:16:32", "remaining_time": "1:10:48", "throughput": 1523.68, "total_tokens": 1512640} |
|
{"current_steps": 155, "total_steps": 792, "loss": 2.932, "learning_rate": 4.5421760615808474e-05, "epoch": 1.1687087653157398, "percentage": 19.57, "elapsed_time": "0:17:02", "remaining_time": "1:10:00", "throughput": 1522.5, "total_tokens": 1556048} |
|
{"current_steps": 160, "total_steps": 792, "loss": 2.8219, "learning_rate": 4.513175643827647e-05, "epoch": 1.2064090480678604, "percentage": 20.2, "elapsed_time": "0:17:35", "remaining_time": "1:09:29", "throughput": 1522.58, "total_tokens": 1607232} |
|
{"current_steps": 165, "total_steps": 792, "loss": 2.8453, "learning_rate": 4.4833833507280884e-05, "epoch": 1.244109330819981, "percentage": 20.83, "elapsed_time": "0:18:05", "remaining_time": "1:08:44", "throughput": 1523.48, "total_tokens": 1653520} |
|
{"current_steps": 170, "total_steps": 792, "loss": 2.7362, "learning_rate": 4.4528109009727336e-05, "epoch": 1.2818096135721018, "percentage": 21.46, "elapsed_time": "0:18:37", "remaining_time": "1:08:10", "throughput": 1523.88, "total_tokens": 1703568} |
|
{"current_steps": 175, "total_steps": 792, "loss": 2.9197, "learning_rate": 4.42147032012394e-05, "epoch": 1.3195098963242224, "percentage": 22.1, "elapsed_time": "0:19:09", "remaining_time": "1:07:32", "throughput": 1525.1, "total_tokens": 1752944} |
|
{"current_steps": 180, "total_steps": 792, "loss": 2.8897, "learning_rate": 4.389373935885646e-05, "epoch": 1.3572101790763431, "percentage": 22.73, "elapsed_time": "0:19:44", "remaining_time": "1:07:07", "throughput": 1524.15, "total_tokens": 1805600} |
|
{"current_steps": 185, "total_steps": 792, "loss": 2.7946, "learning_rate": 4.356534373254316e-05, "epoch": 1.3949104618284638, "percentage": 23.36, "elapsed_time": "0:20:22", "remaining_time": "1:06:51", "throughput": 1522.01, "total_tokens": 1860688} |
|
{"current_steps": 190, "total_steps": 792, "loss": 2.8149, "learning_rate": 4.322964549552943e-05, "epoch": 1.4326107445805842, "percentage": 23.99, "elapsed_time": "0:20:56", "remaining_time": "1:06:20", "throughput": 1522.68, "total_tokens": 1913056} |
|
{"current_steps": 195, "total_steps": 792, "loss": 2.7811, "learning_rate": 4.288677669350066e-05, "epoch": 1.4703110273327051, "percentage": 24.62, "elapsed_time": "0:21:29", "remaining_time": "1:05:46", "throughput": 1521.72, "total_tokens": 1961744} |
|
{"current_steps": 200, "total_steps": 792, "loss": 2.8564, "learning_rate": 4.2536872192658036e-05, "epoch": 1.5080113100848256, "percentage": 25.25, "elapsed_time": "0:22:01", "remaining_time": "1:05:11", "throughput": 1521.86, "total_tokens": 2011248} |
|
{"current_steps": 205, "total_steps": 792, "loss": 2.7966, "learning_rate": 4.218006962666934e-05, "epoch": 1.5457115928369463, "percentage": 25.88, "elapsed_time": "0:22:35", "remaining_time": "1:04:42", "throughput": 1519.75, "total_tokens": 2060640} |
|
{"current_steps": 210, "total_steps": 792, "loss": 2.7674, "learning_rate": 4.181650934253132e-05, "epoch": 1.583411875589067, "percentage": 26.52, "elapsed_time": "0:23:09", "remaining_time": "1:04:10", "throughput": 1521.48, "total_tokens": 2113904} |
|
{"current_steps": 215, "total_steps": 792, "loss": 2.7607, "learning_rate": 4.144633434536467e-05, "epoch": 1.6211121583411876, "percentage": 27.15, "elapsed_time": "0:23:41", "remaining_time": "1:03:35", "throughput": 1521.17, "total_tokens": 2162608} |
|
{"current_steps": 220, "total_steps": 792, "loss": 2.8402, "learning_rate": 4.1069690242163484e-05, "epoch": 1.6588124410933083, "percentage": 27.78, "elapsed_time": "0:24:13", "remaining_time": "1:03:00", "throughput": 1521.12, "total_tokens": 2211616} |
|
{"current_steps": 225, "total_steps": 792, "loss": 2.8019, "learning_rate": 4.06867251845213e-05, "epoch": 1.6965127238454287, "percentage": 28.41, "elapsed_time": "0:24:53", "remaining_time": "1:02:43", "throughput": 1519.42, "total_tokens": 2269440} |
|
{"current_steps": 230, "total_steps": 792, "loss": 2.8311, "learning_rate": 4.0297589810356165e-05, "epoch": 1.7342130065975496, "percentage": 29.04, "elapsed_time": "0:25:29", "remaining_time": "1:02:16", "throughput": 1518.26, "total_tokens": 2321936} |
|
{"current_steps": 235, "total_steps": 792, "loss": 2.7626, "learning_rate": 3.9902437184657784e-05, "epoch": 1.77191328934967, "percentage": 29.67, "elapsed_time": "0:26:05", "remaining_time": "1:01:51", "throughput": 1517.85, "total_tokens": 2376720} |
|
{"current_steps": 240, "total_steps": 792, "loss": 2.8052, "learning_rate": 3.9501422739279956e-05, "epoch": 1.8096135721017907, "percentage": 30.3, "elapsed_time": "0:26:41", "remaining_time": "1:01:22", "throughput": 1517.76, "total_tokens": 2429952} |
|
{"current_steps": 245, "total_steps": 792, "loss": 2.767, "learning_rate": 3.909470421180201e-05, "epoch": 1.8473138548539114, "percentage": 30.93, "elapsed_time": "0:27:14", "remaining_time": "1:00:49", "throughput": 1518.09, "total_tokens": 2481488} |
|
{"current_steps": 250, "total_steps": 792, "loss": 2.7651, "learning_rate": 3.8682441583483314e-05, "epoch": 1.885014137606032, "percentage": 31.57, "elapsed_time": "0:27:45", "remaining_time": "1:00:10", "throughput": 1519.58, "total_tokens": 2530768} |
|
{"current_steps": 255, "total_steps": 792, "loss": 2.8097, "learning_rate": 3.8264797016335205e-05, "epoch": 1.9227144203581528, "percentage": 32.2, "elapsed_time": "0:28:18", "remaining_time": "0:59:36", "throughput": 1520.77, "total_tokens": 2583088} |
|
{"current_steps": 260, "total_steps": 792, "loss": 2.7269, "learning_rate": 3.7841934789335164e-05, "epoch": 1.9604147031102732, "percentage": 32.83, "elapsed_time": "0:28:51", "remaining_time": "0:59:02", "throughput": 1520.08, "total_tokens": 2631456} |
|
{"current_steps": 265, "total_steps": 792, "loss": 2.8586, "learning_rate": 3.741402123380828e-05, "epoch": 1.998114985862394, "percentage": 33.46, "elapsed_time": "0:29:25", "remaining_time": "0:58:31", "throughput": 1520.65, "total_tokens": 2684848} |
|
{"current_steps": 270, "total_steps": 792, "loss": 2.6131, "learning_rate": 3.6981224668001424e-05, "epoch": 2.0358152686145146, "percentage": 34.09, "elapsed_time": "0:29:58", "remaining_time": "0:57:56", "throughput": 1519.94, "total_tokens": 2733408} |
|
{"current_steps": 275, "total_steps": 792, "loss": 2.4891, "learning_rate": 3.654371533087586e-05, "epoch": 2.0735155513666355, "percentage": 34.72, "elapsed_time": "0:30:34", "remaining_time": "0:57:29", "throughput": 1518.93, "total_tokens": 2786832} |
|
{"current_steps": 280, "total_steps": 792, "loss": 2.5783, "learning_rate": 3.610166531514436e-05, "epoch": 2.111215834118756, "percentage": 35.35, "elapsed_time": "0:31:02", "remaining_time": "0:56:44", "throughput": 1519.0, "total_tokens": 2828464} |
|
{"current_steps": 285, "total_steps": 792, "loss": 2.59, "learning_rate": 3.565524849957921e-05, "epoch": 2.1489161168708764, "percentage": 35.98, "elapsed_time": "0:31:35", "remaining_time": "0:56:12", "throughput": 1518.36, "total_tokens": 2878192} |
|
{"current_steps": 290, "total_steps": 792, "loss": 2.5839, "learning_rate": 3.520464048061758e-05, "epoch": 2.1866163996229973, "percentage": 36.62, "elapsed_time": "0:32:06", "remaining_time": "0:55:35", "throughput": 1519.8, "total_tokens": 2928304} |
|
{"current_steps": 295, "total_steps": 792, "loss": 2.567, "learning_rate": 3.47500185032913e-05, "epoch": 2.2243166823751177, "percentage": 37.25, "elapsed_time": "0:32:37", "remaining_time": "0:54:57", "throughput": 1521.62, "total_tokens": 2978144} |
|
{"current_steps": 300, "total_steps": 792, "loss": 2.5694, "learning_rate": 3.4291561391508185e-05, "epoch": 2.2620169651272386, "percentage": 37.88, "elapsed_time": "0:33:11", "remaining_time": "0:54:26", "throughput": 1520.54, "total_tokens": 3028240} |
|
{"current_steps": 305, "total_steps": 792, "loss": 2.4965, "learning_rate": 3.3829449477712324e-05, "epoch": 2.299717247879359, "percentage": 38.51, "elapsed_time": "0:33:50", "remaining_time": "0:54:02", "throughput": 1518.52, "total_tokens": 3083328} |
|
{"current_steps": 310, "total_steps": 792, "loss": 2.599, "learning_rate": 3.336386453195088e-05, "epoch": 2.3374175306314795, "percentage": 39.14, "elapsed_time": "0:34:25", "remaining_time": "0:53:32", "throughput": 1518.53, "total_tokens": 3137072} |
|
{"current_steps": 315, "total_steps": 792, "loss": 2.524, "learning_rate": 3.2894989690375626e-05, "epoch": 2.3751178133836004, "percentage": 39.77, "elapsed_time": "0:35:00", "remaining_time": "0:53:00", "throughput": 1519.54, "total_tokens": 3191136} |
|
{"current_steps": 320, "total_steps": 792, "loss": 2.5338, "learning_rate": 3.2423009383206876e-05, "epoch": 2.412818096135721, "percentage": 40.4, "elapsed_time": "0:35:31", "remaining_time": "0:52:23", "throughput": 1520.03, "total_tokens": 3239952} |
|
{"current_steps": 325, "total_steps": 792, "loss": 2.5096, "learning_rate": 3.194810926218861e-05, "epoch": 2.4505183788878417, "percentage": 41.04, "elapsed_time": "0:36:05", "remaining_time": "0:51:51", "throughput": 1519.97, "total_tokens": 3291104} |
|
{"current_steps": 330, "total_steps": 792, "loss": 2.473, "learning_rate": 3.147047612756302e-05, "epoch": 2.488218661639962, "percentage": 41.67, "elapsed_time": "0:36:38", "remaining_time": "0:51:18", "throughput": 1519.35, "total_tokens": 3340592} |
|
{"current_steps": 335, "total_steps": 792, "loss": 2.5778, "learning_rate": 3.099029785459328e-05, "epoch": 2.525918944392083, "percentage": 42.3, "elapsed_time": "0:37:10", "remaining_time": "0:50:43", "throughput": 1518.86, "total_tokens": 3388224} |
|
{"current_steps": 340, "total_steps": 792, "loss": 2.5684, "learning_rate": 3.0507763319663517e-05, "epoch": 2.5636192271442035, "percentage": 42.93, "elapsed_time": "0:37:44", "remaining_time": "0:50:10", "throughput": 1519.11, "total_tokens": 3440512} |
|
{"current_steps": 345, "total_steps": 792, "loss": 2.4923, "learning_rate": 3.002306232598497e-05, "epoch": 2.6013195098963244, "percentage": 43.56, "elapsed_time": "0:38:18", "remaining_time": "0:49:38", "throughput": 1519.13, "total_tokens": 3491744} |
|
{"current_steps": 350, "total_steps": 792, "loss": 2.4633, "learning_rate": 2.9536385528937567e-05, "epoch": 2.639019792648445, "percentage": 44.19, "elapsed_time": "0:38:52", "remaining_time": "0:49:06", "throughput": 1518.45, "total_tokens": 3542368} |
|
{"current_steps": 355, "total_steps": 792, "loss": 2.5703, "learning_rate": 2.9047924361076345e-05, "epoch": 2.6767200754005653, "percentage": 44.82, "elapsed_time": "0:39:28", "remaining_time": "0:48:35", "throughput": 1518.15, "total_tokens": 3595360} |
|
{"current_steps": 360, "total_steps": 792, "loss": 2.4087, "learning_rate": 2.8557870956832132e-05, "epoch": 2.7144203581526862, "percentage": 45.45, "elapsed_time": "0:39:57", "remaining_time": "0:47:57", "throughput": 1518.58, "total_tokens": 3640912} |
|
{"current_steps": 365, "total_steps": 792, "loss": 2.5007, "learning_rate": 2.8066418076936167e-05, "epoch": 2.7521206409048067, "percentage": 46.09, "elapsed_time": "0:40:31", "remaining_time": "0:47:24", "throughput": 1517.51, "total_tokens": 3690048} |
|
{"current_steps": 370, "total_steps": 792, "loss": 2.5312, "learning_rate": 2.7573759032598366e-05, "epoch": 2.7898209236569276, "percentage": 46.72, "elapsed_time": "0:41:07", "remaining_time": "0:46:54", "throughput": 1517.71, "total_tokens": 3745104} |
|
{"current_steps": 375, "total_steps": 792, "loss": 2.5333, "learning_rate": 2.7080087609469062e-05, "epoch": 2.827521206409048, "percentage": 47.35, "elapsed_time": "0:41:40", "remaining_time": "0:46:20", "throughput": 1517.41, "total_tokens": 3794160} |
|
{"current_steps": 380, "total_steps": 792, "loss": 2.4185, "learning_rate": 2.6585597991414114e-05, "epoch": 2.8652214891611685, "percentage": 47.98, "elapsed_time": "0:42:14", "remaining_time": "0:45:48", "throughput": 1517.61, "total_tokens": 3846576} |
|
{"current_steps": 385, "total_steps": 792, "loss": 2.4913, "learning_rate": 2.6090484684133404e-05, "epoch": 2.9029217719132894, "percentage": 48.61, "elapsed_time": "0:42:43", "remaining_time": "0:45:09", "throughput": 1518.25, "total_tokens": 3891744} |
|
{"current_steps": 390, "total_steps": 792, "loss": 2.5319, "learning_rate": 2.5594942438652688e-05, "epoch": 2.9406220546654103, "percentage": 49.24, "elapsed_time": "0:43:20", "remaining_time": "0:44:40", "throughput": 1519.01, "total_tokens": 3949568} |
|
{"current_steps": 395, "total_steps": 792, "loss": 2.6441, "learning_rate": 2.509916617471903e-05, "epoch": 2.9783223374175307, "percentage": 49.87, "elapsed_time": "0:43:56", "remaining_time": "0:44:09", "throughput": 1518.07, "total_tokens": 4002384} |
|
{"current_steps": 400, "total_steps": 792, "loss": 2.3576, "learning_rate": 2.46033509041298e-05, "epoch": 3.016022620169651, "percentage": 50.51, "elapsed_time": "0:44:29", "remaining_time": "0:43:36", "throughput": 1517.94, "total_tokens": 4052688} |
|
{"current_steps": 405, "total_steps": 792, "loss": 2.3032, "learning_rate": 2.410769165402549e-05, "epoch": 3.053722902921772, "percentage": 51.14, "elapsed_time": "0:45:06", "remaining_time": "0:43:06", "throughput": 1517.37, "total_tokens": 4107392} |
|
{"current_steps": 410, "total_steps": 792, "loss": 2.2542, "learning_rate": 2.3612383390176503e-05, "epoch": 3.0914231856738925, "percentage": 51.77, "elapsed_time": "0:45:39", "remaining_time": "0:42:32", "throughput": 1517.78, "total_tokens": 4157984} |
|
{"current_steps": 415, "total_steps": 792, "loss": 2.2882, "learning_rate": 2.3117620940294048e-05, "epoch": 3.1291234684260134, "percentage": 52.4, "elapsed_time": "0:46:16", "remaining_time": "0:42:02", "throughput": 1517.57, "total_tokens": 4213280} |
|
{"current_steps": 420, "total_steps": 792, "loss": 2.2314, "learning_rate": 2.2623598917395438e-05, "epoch": 3.166823751178134, "percentage": 53.03, "elapsed_time": "0:46:50", "remaining_time": "0:41:29", "throughput": 1517.89, "total_tokens": 4265792} |
|
{"current_steps": 425, "total_steps": 792, "loss": 2.2679, "learning_rate": 2.213051164325366e-05, "epoch": 3.2045240339302543, "percentage": 53.66, "elapsed_time": "0:47:19", "remaining_time": "0:40:52", "throughput": 1518.1, "total_tokens": 4310832} |
|
{"current_steps": 430, "total_steps": 792, "loss": 2.2521, "learning_rate": 2.1638553071961708e-05, "epoch": 3.242224316682375, "percentage": 54.29, "elapsed_time": "0:47:48", "remaining_time": "0:40:14", "throughput": 1517.79, "total_tokens": 4353488} |
|
{"current_steps": 435, "total_steps": 792, "loss": 2.2071, "learning_rate": 2.1147916713641367e-05, "epoch": 3.2799245994344957, "percentage": 54.92, "elapsed_time": "0:48:21", "remaining_time": "0:39:41", "throughput": 1517.71, "total_tokens": 4404384} |
|
{"current_steps": 440, "total_steps": 792, "loss": 2.2525, "learning_rate": 2.0658795558326743e-05, "epoch": 3.3176248821866166, "percentage": 55.56, "elapsed_time": "0:48:53", "remaining_time": "0:39:07", "throughput": 1517.82, "total_tokens": 4453232} |
|
{"current_steps": 445, "total_steps": 792, "loss": 2.2431, "learning_rate": 2.017138200005236e-05, "epoch": 3.355325164938737, "percentage": 56.19, "elapsed_time": "0:49:29", "remaining_time": "0:38:35", "throughput": 1518.13, "total_tokens": 4508640} |
|
{"current_steps": 450, "total_steps": 792, "loss": 2.3357, "learning_rate": 1.9685867761175584e-05, "epoch": 3.3930254476908575, "percentage": 56.82, "elapsed_time": "0:50:03", "remaining_time": "0:38:02", "throughput": 1518.04, "total_tokens": 4559360} |
|
{"current_steps": 455, "total_steps": 792, "loss": 2.2875, "learning_rate": 1.9202443816963425e-05, "epoch": 3.4307257304429783, "percentage": 57.45, "elapsed_time": "0:50:37", "remaining_time": "0:37:29", "throughput": 1517.67, "total_tokens": 4609584} |
|
{"current_steps": 460, "total_steps": 792, "loss": 2.2136, "learning_rate": 1.872130032047302e-05, "epoch": 3.468426013195099, "percentage": 58.08, "elapsed_time": "0:51:13", "remaining_time": "0:36:58", "throughput": 1518.07, "total_tokens": 4665472} |
|
{"current_steps": 465, "total_steps": 792, "loss": 2.294, "learning_rate": 1.824262652775568e-05, "epoch": 3.5061262959472197, "percentage": 58.71, "elapsed_time": "0:51:46", "remaining_time": "0:36:24", "throughput": 1518.95, "total_tokens": 4719360} |
|
{"current_steps": 470, "total_steps": 792, "loss": 2.2146, "learning_rate": 1.7766610723413684e-05, "epoch": 3.54382657869934, "percentage": 59.34, "elapsed_time": "0:52:23", "remaining_time": "0:35:53", "throughput": 1518.13, "total_tokens": 4771504} |
|
{"current_steps": 475, "total_steps": 792, "loss": 2.3166, "learning_rate": 1.7293440146539196e-05, "epoch": 3.581526861451461, "percentage": 59.97, "elapsed_time": "0:52:55", "remaining_time": "0:35:18", "throughput": 1518.21, "total_tokens": 4820432} |
|
{"current_steps": 480, "total_steps": 792, "loss": 2.2775, "learning_rate": 1.682330091706446e-05, "epoch": 3.6192271442035815, "percentage": 60.61, "elapsed_time": "0:53:33", "remaining_time": "0:34:48", "throughput": 1517.93, "total_tokens": 4877984} |
|
{"current_steps": 485, "total_steps": 792, "loss": 2.2442, "learning_rate": 1.6356377962552238e-05, "epoch": 3.6569274269557024, "percentage": 61.24, "elapsed_time": "0:54:06", "remaining_time": "0:34:15", "throughput": 1517.77, "total_tokens": 4927712} |
|
{"current_steps": 490, "total_steps": 792, "loss": 2.2499, "learning_rate": 1.589285494545514e-05, "epoch": 3.694627709707823, "percentage": 61.87, "elapsed_time": "0:54:41", "remaining_time": "0:33:42", "throughput": 1517.37, "total_tokens": 4979520} |
|
{"current_steps": 495, "total_steps": 792, "loss": 2.214, "learning_rate": 1.5432914190872757e-05, "epoch": 3.7323279924599433, "percentage": 62.5, "elapsed_time": "0:55:14", "remaining_time": "0:33:08", "throughput": 1517.77, "total_tokens": 5030720} |
|
{"current_steps": 500, "total_steps": 792, "loss": 2.1646, "learning_rate": 1.4976736614834664e-05, "epoch": 3.770028275212064, "percentage": 63.13, "elapsed_time": "0:55:47", "remaining_time": "0:32:34", "throughput": 1517.98, "total_tokens": 5081376} |
|
{"current_steps": 505, "total_steps": 792, "loss": 2.3151, "learning_rate": 1.4524501653137787e-05, "epoch": 3.8077285579641846, "percentage": 63.76, "elapsed_time": "0:56:19", "remaining_time": "0:32:00", "throughput": 1517.49, "total_tokens": 5127888} |
|
{"current_steps": 510, "total_steps": 792, "loss": 2.2602, "learning_rate": 1.4076387190766017e-05, "epoch": 3.8454288407163055, "percentage": 64.39, "elapsed_time": "0:56:52", "remaining_time": "0:31:26", "throughput": 1517.54, "total_tokens": 5178720} |
|
{"current_steps": 515, "total_steps": 792, "loss": 2.1839, "learning_rate": 1.363256949191972e-05, "epoch": 3.883129123468426, "percentage": 65.03, "elapsed_time": "0:57:25", "remaining_time": "0:30:53", "throughput": 1516.96, "total_tokens": 5227120} |
|
{"current_steps": 520, "total_steps": 792, "loss": 2.2833, "learning_rate": 1.3193223130682936e-05, "epoch": 3.9208294062205464, "percentage": 65.66, "elapsed_time": "0:57:57", "remaining_time": "0:30:19", "throughput": 1516.97, "total_tokens": 5275760} |
|
{"current_steps": 525, "total_steps": 792, "loss": 2.1802, "learning_rate": 1.2758520922355226e-05, "epoch": 3.9585296889726673, "percentage": 66.29, "elapsed_time": "0:58:26", "remaining_time": "0:29:43", "throughput": 1517.09, "total_tokens": 5319632} |
|
{"current_steps": 530, "total_steps": 792, "loss": 2.2383, "learning_rate": 1.2328633855475429e-05, "epoch": 3.9962299717247878, "percentage": 66.92, "elapsed_time": "0:59:00", "remaining_time": "0:29:10", "throughput": 1516.88, "total_tokens": 5369936} |
|
{"current_steps": 535, "total_steps": 792, "loss": 2.007, "learning_rate": 1.1903731024563966e-05, "epoch": 4.033930254476909, "percentage": 67.55, "elapsed_time": "0:59:32", "remaining_time": "0:28:36", "throughput": 1517.52, "total_tokens": 5421440} |
|
{"current_steps": 540, "total_steps": 792, "loss": 2.0286, "learning_rate": 1.148397956361007e-05, "epoch": 4.071630537229029, "percentage": 68.18, "elapsed_time": "1:00:08", "remaining_time": "0:28:04", "throughput": 1517.66, "total_tokens": 5476736} |
|
{"current_steps": 545, "total_steps": 792, "loss": 2.0398, "learning_rate": 1.106954458033026e-05, "epoch": 4.10933081998115, "percentage": 68.81, "elapsed_time": "1:00:45", "remaining_time": "0:27:32", "throughput": 1517.35, "total_tokens": 5531328} |
|
{"current_steps": 550, "total_steps": 792, "loss": 2.1157, "learning_rate": 1.0660589091223855e-05, "epoch": 4.147031102733271, "percentage": 69.44, "elapsed_time": "1:01:16", "remaining_time": "0:26:57", "throughput": 1517.68, "total_tokens": 5579216} |
|
{"current_steps": 555, "total_steps": 792, "loss": 2.094, "learning_rate": 1.025727395745095e-05, "epoch": 4.184731385485391, "percentage": 70.08, "elapsed_time": "1:01:46", "remaining_time": "0:26:22", "throughput": 1517.83, "total_tokens": 5626208} |
|
{"current_steps": 560, "total_steps": 792, "loss": 2.0531, "learning_rate": 9.859757821558337e-06, "epoch": 4.222431668237512, "percentage": 70.71, "elapsed_time": "1:02:21", "remaining_time": "0:25:50", "throughput": 1517.83, "total_tokens": 5679360} |
|
{"current_steps": 565, "total_steps": 792, "loss": 1.9652, "learning_rate": 9.468197045077976e-06, "epoch": 4.260131950989632, "percentage": 71.34, "elapsed_time": "1:02:51", "remaining_time": "0:25:15", "throughput": 1517.87, "total_tokens": 5724608} |
|
{"current_steps": 570, "total_steps": 792, "loss": 2.0592, "learning_rate": 9.082745647022797e-06, "epoch": 4.297832233741753, "percentage": 71.97, "elapsed_time": "1:03:28", "remaining_time": "0:24:43", "throughput": 1517.79, "total_tokens": 5779904} |
|
{"current_steps": 575, "total_steps": 792, "loss": 2.0418, "learning_rate": 8.703555243303835e-06, "epoch": 4.335532516493874, "percentage": 72.6, "elapsed_time": "1:03:59", "remaining_time": "0:24:08", "throughput": 1517.79, "total_tokens": 5826880} |
|
{"current_steps": 580, "total_steps": 792, "loss": 1.991, "learning_rate": 8.330774987092712e-06, "epoch": 4.3732327992459945, "percentage": 73.23, "elapsed_time": "1:04:30", "remaining_time": "0:23:34", "throughput": 1517.97, "total_tokens": 5875440} |
|
{"current_steps": 585, "total_steps": 792, "loss": 2.0726, "learning_rate": 7.96455151015272e-06, "epoch": 4.410933081998115, "percentage": 73.86, "elapsed_time": "1:05:03", "remaining_time": "0:23:01", "throughput": 1517.74, "total_tokens": 5924960} |
|
{"current_steps": 590, "total_steps": 792, "loss": 2.069, "learning_rate": 7.605028865161809e-06, "epoch": 4.448633364750235, "percentage": 74.49, "elapsed_time": "1:05:38", "remaining_time": "0:22:28", "throughput": 1517.53, "total_tokens": 5976416}
{"current_steps": 595, "total_steps": 792, "loss": 2.052, "learning_rate": 7.25234846904993e-06, "epoch": 4.486333647502356, "percentage": 75.13, "elapsed_time": "1:06:11", "remaining_time": "0:21:54", "throughput": 1517.75, "total_tokens": 6027088}
{"current_steps": 600, "total_steps": 792, "loss": 2.0651, "learning_rate": 6.906649047373246e-06, "epoch": 4.524033930254477, "percentage": 75.76, "elapsed_time": "1:06:45", "remaining_time": "0:21:21", "throughput": 1518.02, "total_tokens": 6080528}
{"current_steps": 605, "total_steps": 792, "loss": 2.0546, "learning_rate": 6.568066579746901e-06, "epoch": 4.561734213006598, "percentage": 76.39, "elapsed_time": "1:07:16", "remaining_time": "0:20:47", "throughput": 1517.81, "total_tokens": 6125904}
{"current_steps": 610, "total_steps": 792, "loss": 2.081, "learning_rate": 6.2367342463579475e-06, "epoch": 4.599434495758718, "percentage": 77.02, "elapsed_time": "1:07:49", "remaining_time": "0:20:14", "throughput": 1517.22, "total_tokens": 6173744}
{"current_steps": 615, "total_steps": 792, "loss": 2.0395, "learning_rate": 5.912782375579412e-06, "epoch": 4.6371347785108386, "percentage": 77.65, "elapsed_time": "1:08:22", "remaining_time": "0:19:40", "throughput": 1516.71, "total_tokens": 6222560}
{"current_steps": 620, "total_steps": 792, "loss": 2.0659, "learning_rate": 5.596338392706077e-06, "epoch": 4.674835061262959, "percentage": 78.28, "elapsed_time": "1:08:54", "remaining_time": "0:19:07", "throughput": 1517.06, "total_tokens": 6272544}
{"current_steps": 625, "total_steps": 792, "loss": 2.0247, "learning_rate": 5.2875267698322325e-06, "epoch": 4.71253534401508, "percentage": 78.91, "elapsed_time": "1:09:27", "remaining_time": "0:18:33", "throughput": 1517.29, "total_tokens": 6323024}
{"current_steps": 630, "total_steps": 792, "loss": 2.0485, "learning_rate": 4.986468976890993e-06, "epoch": 4.750235626767201, "percentage": 79.55, "elapsed_time": "1:10:00", "remaining_time": "0:18:00", "throughput": 1517.64, "total_tokens": 6374608}
{"current_steps": 635, "total_steps": 792, "loss": 2.0561, "learning_rate": 4.693283433874565e-06, "epoch": 4.787935909519321, "percentage": 80.18, "elapsed_time": "1:10:30", "remaining_time": "0:17:26", "throughput": 1517.9, "total_tokens": 6422208}
{"current_steps": 640, "total_steps": 792, "loss": 2.1047, "learning_rate": 4.408085464254183e-06, "epoch": 4.825636192271442, "percentage": 80.81, "elapsed_time": "1:11:01", "remaining_time": "0:16:52", "throughput": 1517.88, "total_tokens": 6468912}
{"current_steps": 645, "total_steps": 792, "loss": 2.0481, "learning_rate": 4.130987249617993e-06, "epoch": 4.863336475023563, "percentage": 81.44, "elapsed_time": "1:11:36", "remaining_time": "0:16:19", "throughput": 1518.15, "total_tokens": 6522848}
{"current_steps": 650, "total_steps": 792, "loss": 2.0637, "learning_rate": 3.8620977855448935e-06, "epoch": 4.9010367577756835, "percentage": 82.07, "elapsed_time": "1:12:12", "remaining_time": "0:15:46", "throughput": 1518.35, "total_tokens": 6578768}
{"current_steps": 655, "total_steps": 792, "loss": 2.0429, "learning_rate": 3.601522838731461e-06, "epoch": 4.938737040527804, "percentage": 82.7, "elapsed_time": "1:12:47", "remaining_time": "0:15:13", "throughput": 1518.35, "total_tokens": 6631936}
{"current_steps": 660, "total_steps": 792, "loss": 2.1212, "learning_rate": 3.3493649053890326e-06, "epoch": 4.976437323279924, "percentage": 83.33, "elapsed_time": "1:13:22", "remaining_time": "0:14:40", "throughput": 1517.95, "total_tokens": 6682992}
{"current_steps": 665, "total_steps": 792, "loss": 2.0205, "learning_rate": 3.1057231709272077e-06, "epoch": 5.014137606032045, "percentage": 83.96, "elapsed_time": "1:13:59", "remaining_time": "0:14:07", "throughput": 1517.21, "total_tokens": 6735056}
{"current_steps": 670, "total_steps": 792, "loss": 1.9942, "learning_rate": 2.8706934709395892e-06, "epoch": 5.051837888784166, "percentage": 84.6, "elapsed_time": "1:14:31", "remaining_time": "0:13:34", "throughput": 1517.06, "total_tokens": 6784224}
{"current_steps": 675, "total_steps": 792, "loss": 1.868, "learning_rate": 2.6443682535072177e-06, "epoch": 5.089538171536287, "percentage": 85.23, "elapsed_time": "1:15:02", "remaining_time": "0:13:00", "throughput": 1517.24, "total_tokens": 6831040}
{"current_steps": 680, "total_steps": 792, "loss": 1.9132, "learning_rate": 2.4268365428344736e-06, "epoch": 5.127238454288407, "percentage": 85.86, "elapsed_time": "1:15:37", "remaining_time": "0:12:27", "throughput": 1516.96, "total_tokens": 6883552}
{"current_steps": 685, "total_steps": 792, "loss": 1.8698, "learning_rate": 2.21818390423168e-06, "epoch": 5.1649387370405275, "percentage": 86.49, "elapsed_time": "1:16:08", "remaining_time": "0:11:53", "throughput": 1516.56, "total_tokens": 6928272}
{"current_steps": 690, "total_steps": 792, "loss": 1.8974, "learning_rate": 2.0184924104583613e-06, "epoch": 5.202639019792649, "percentage": 87.12, "elapsed_time": "1:16:38", "remaining_time": "0:11:19", "throughput": 1516.26, "total_tokens": 6972496}
{"current_steps": 695, "total_steps": 792, "loss": 1.9096, "learning_rate": 1.8278406094401623e-06, "epoch": 5.240339302544769, "percentage": 87.75, "elapsed_time": "1:17:08", "remaining_time": "0:10:46", "throughput": 1516.21, "total_tokens": 7018496}
{"current_steps": 700, "total_steps": 792, "loss": 2.0098, "learning_rate": 1.6463034933723337e-06, "epoch": 5.27803958529689, "percentage": 88.38, "elapsed_time": "1:17:40", "remaining_time": "0:10:12", "throughput": 1516.27, "total_tokens": 7066400}
{"current_steps": 705, "total_steps": 792, "loss": 1.9554, "learning_rate": 1.4739524692218314e-06, "epoch": 5.31573986804901, "percentage": 89.02, "elapsed_time": "1:18:13", "remaining_time": "0:09:39", "throughput": 1515.51, "total_tokens": 7113744}
{"current_steps": 710, "total_steps": 792, "loss": 2.0233, "learning_rate": 1.3108553306396265e-06, "epoch": 5.353440150801131, "percentage": 89.65, "elapsed_time": "1:18:48", "remaining_time": "0:09:06", "throughput": 1515.74, "total_tokens": 7166848}
{"current_steps": 715, "total_steps": 792, "loss": 1.9629, "learning_rate": 1.1570762312943295e-06, "epoch": 5.391140433553252, "percentage": 90.28, "elapsed_time": "1:19:23", "remaining_time": "0:08:32", "throughput": 1515.8, "total_tokens": 7220048}
{"current_steps": 720, "total_steps": 792, "loss": 1.9036, "learning_rate": 1.0126756596375686e-06, "epoch": 5.4288407163053725, "percentage": 90.91, "elapsed_time": "1:19:55", "remaining_time": "0:07:59", "throughput": 1515.7, "total_tokens": 7268064}
{"current_steps": 725, "total_steps": 792, "loss": 1.988, "learning_rate": 8.777104151110826e-07, "epoch": 5.466540999057493, "percentage": 91.54, "elapsed_time": "1:20:33", "remaining_time": "0:07:26", "throughput": 1515.83, "total_tokens": 7326512}
{"current_steps": 730, "total_steps": 792, "loss": 1.9844, "learning_rate": 7.522335858048707e-07, "epoch": 5.504241281809613, "percentage": 92.17, "elapsed_time": "1:21:10", "remaining_time": "0:06:53", "throughput": 1515.78, "total_tokens": 7382288}
{"current_steps": 735, "total_steps": 792, "loss": 1.9743, "learning_rate": 6.362945275751736e-07, "epoch": 5.541941564561734, "percentage": 92.8, "elapsed_time": "1:21:40", "remaining_time": "0:06:20", "throughput": 1516.23, "total_tokens": 7430544}
{"current_steps": 740, "total_steps": 792, "loss": 2.007, "learning_rate": 5.299388446305343e-07, "epoch": 5.579641847313855, "percentage": 93.43, "elapsed_time": "1:22:12", "remaining_time": "0:05:46", "throughput": 1516.44, "total_tokens": 7479488}
{"current_steps": 745, "total_steps": 792, "loss": 1.9413, "learning_rate": 4.3320837159353813e-07, "epoch": 5.617342130065976, "percentage": 94.07, "elapsed_time": "1:22:48", "remaining_time": "0:05:13", "throughput": 1516.32, "total_tokens": 7533536}
{"current_steps": 750, "total_steps": 792, "loss": 1.902, "learning_rate": 3.4614115704533767e-07, "epoch": 5.655042412818096, "percentage": 94.7, "elapsed_time": "1:23:24", "remaining_time": "0:04:40", "throughput": 1516.39, "total_tokens": 7589200}
{"current_steps": 755, "total_steps": 792, "loss": 2.0091, "learning_rate": 2.687714485593462e-07, "epoch": 5.6927426955702165, "percentage": 95.33, "elapsed_time": "1:23:56", "remaining_time": "0:04:06", "throughput": 1516.65, "total_tokens": 7638928}
{"current_steps": 760, "total_steps": 792, "loss": 2.0389, "learning_rate": 2.011296792301165e-07, "epoch": 5.730442978322337, "percentage": 95.96, "elapsed_time": "1:24:33", "remaining_time": "0:03:33", "throughput": 1516.57, "total_tokens": 7693680}
{"current_steps": 765, "total_steps": 792, "loss": 2.0012, "learning_rate": 1.4324245570256633e-07, "epoch": 5.768143261074458, "percentage": 96.59, "elapsed_time": "1:25:05", "remaining_time": "0:03:00", "throughput": 1516.87, "total_tokens": 7743904}
{"current_steps": 770, "total_steps": 792, "loss": 2.0127, "learning_rate": 9.513254770636137e-08, "epoch": 5.805843543826579, "percentage": 97.22, "elapsed_time": "1:25:36", "remaining_time": "0:02:26", "throughput": 1516.8, "total_tokens": 7790992}
{"current_steps": 775, "total_steps": 792, "loss": 2.0237, "learning_rate": 5.681887909952388e-08, "epoch": 5.843543826578699, "percentage": 97.85, "elapsed_time": "1:26:11", "remaining_time": "0:01:53", "throughput": 1516.79, "total_tokens": 7843600}
{"current_steps": 780, "total_steps": 792, "loss": 1.9273, "learning_rate": 2.831652042480093e-08, "epoch": 5.88124410933082, "percentage": 98.48, "elapsed_time": "1:26:43", "remaining_time": "0:01:20", "throughput": 1517.02, "total_tokens": 7893968}
{"current_steps": 785, "total_steps": 792, "loss": 1.9827, "learning_rate": 9.636682981720158e-09, "epoch": 5.918944392082941, "percentage": 99.12, "elapsed_time": "1:27:16", "remaining_time": "0:00:46", "throughput": 1517.49, "total_tokens": 7945856}
{"current_steps": 790, "total_steps": 792, "loss": 1.9642, "learning_rate": 7.867144166728846e-10, "epoch": 5.956644674835061, "percentage": 99.75, "elapsed_time": "1:27:50", "remaining_time": "0:00:13", "throughput": 1517.51, "total_tokens": 7998560}
{"current_steps": 792, "total_steps": 792, "epoch": 5.971724787935909, "percentage": 100.0, "elapsed_time": "1:28:03", "remaining_time": "0:00:00", "throughput": 1517.55, "total_tokens": 8017392}