{
  "best_metric": 2.176323175430298,
  "best_model_checkpoint": "../../saves/Llama3-8B-Chinese-Chat/lora/sft/checkpoint-2400",
  "epoch": 9.481481481481481,
  "eval_steps": 400,
  "global_step": 2400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.04, "grad_norm": 0.6577214002609253, "learning_rate": 2.5e-05, "loss": 2.8395, "step": 10 },
    { "epoch": 0.08, "grad_norm": 0.9053373336791992, "learning_rate": 5e-05, "loss": 2.7212, "step": 20 },
    { "epoch": 0.12, "grad_norm": 0.8605895042419434, "learning_rate": 4.9998041801805566e-05, "loss": 2.6919, "step": 30 },
    { "epoch": 0.16, "grad_norm": 0.9140876531600952, "learning_rate": 4.999293114538139e-05, "loss": 2.615, "step": 40 },
    { "epoch": 0.2, "grad_norm": 0.8557806015014648, "learning_rate": 4.998353314622318e-05, "loss": 2.6138, "step": 50 },
    { "epoch": 0.24, "grad_norm": 0.9230502843856812, "learning_rate": 4.997022133030516e-05, "loss": 2.5346, "step": 60 },
    { "epoch": 0.28, "grad_norm": 1.0639443397521973, "learning_rate": 4.9952997783001254e-05, "loss": 2.4718, "step": 70 },
    { "epoch": 0.32, "grad_norm": 1.0025442838668823, "learning_rate": 4.9931865202480996e-05, "loss": 2.5162, "step": 80 },
    { "epoch": 0.36, "grad_norm": 2.0188956260681152, "learning_rate": 4.990682689928687e-05, "loss": 2.4471, "step": 90 },
    { "epoch": 0.4, "grad_norm": 1.122934341430664, "learning_rate": 4.9877886795815685e-05, "loss": 2.5208, "step": 100 },
    { "epoch": 0.43, "grad_norm": 1.2770124673843384, "learning_rate": 4.98450494257041e-05, "loss": 2.4132, "step": 110 },
    { "epoch": 0.47, "grad_norm": 1.5353667736053467, "learning_rate": 4.980831993311844e-05, "loss": 2.4551, "step": 120 },
    { "epoch": 0.51, "grad_norm": 1.3800839185714722, "learning_rate": 4.976770407194877e-05, "loss": 2.4097, "step": 130 },
    { "epoch": 0.55, "grad_norm": 1.532073736190796, "learning_rate": 4.972320820490759e-05, "loss": 2.4367, "step": 140 },
    { "epoch": 0.59, "grad_norm": 1.7717530727386475, "learning_rate": 4.967483930253302e-05, "loss": 2.4128, "step": 150 },
    { "epoch": 0.63, "grad_norm": 1.92471444606781, "learning_rate": 4.962260494209683e-05, "loss": 2.4274, "step": 160 },
    { "epoch": 0.67, "grad_norm": 2.059626340866089, "learning_rate": 4.9566513306417444e-05, "loss": 2.4485, "step": 170 },
    { "epoch": 0.71, "grad_norm": 1.4040436744689941, "learning_rate": 4.950657318257805e-05, "loss": 2.4313, "step": 180 },
    { "epoch": 0.75, "grad_norm": 6.483478546142578, "learning_rate": 4.944279396055003e-05, "loss": 2.4243, "step": 190 },
    { "epoch": 0.79, "grad_norm": 2.516174554824829, "learning_rate": 4.937518563172196e-05, "loss": 2.3765, "step": 200 },
    { "epoch": 0.83, "grad_norm": 1.4100685119628906, "learning_rate": 4.930375878733445e-05, "loss": 2.4035, "step": 210 },
    { "epoch": 0.87, "grad_norm": 1.6589595079421997, "learning_rate": 4.922852461682093e-05, "loss": 2.328, "step": 220 },
    { "epoch": 0.91, "grad_norm": 1.4953701496124268, "learning_rate": 4.9149494906054716e-05, "loss": 2.3684, "step": 230 },
    { "epoch": 0.95, "grad_norm": 1.4060596227645874, "learning_rate": 4.906668203550279e-05, "loss": 2.3472, "step": 240 },
    { "epoch": 0.99, "grad_norm": 1.474617838859558, "learning_rate": 4.8980098978286215e-05, "loss": 2.3998, "step": 250 },
    { "epoch": 1.03, "grad_norm": 1.42411208152771, "learning_rate": 4.888975929814792e-05, "loss": 2.3639, "step": 260 },
    { "epoch": 1.07, "grad_norm": 1.62990140914917, "learning_rate": 4.880525335812728e-05, "loss": 2.3325, "step": 270 },
    { "epoch": 1.11, "grad_norm": 1.5688358545303345, "learning_rate": 4.8707815568466236e-05, "loss": 2.3001, "step": 280 },
    { "epoch": 1.15, "grad_norm": 3.7384893894195557, "learning_rate": 4.860666381067398e-05, "loss": 2.3522, "step": 290 },
    { "epoch": 1.19, "grad_norm": 1.44722318649292, "learning_rate": 4.850181393076568e-05, "loss": 2.3296, "step": 300 },
    { "epoch": 1.22, "grad_norm": 1.6258642673492432, "learning_rate": 4.839328235408895e-05, "loss": 2.3827, "step": 310 },
    { "epoch": 1.26, "grad_norm": 1.72386634349823, "learning_rate": 4.8281086082750825e-05, "loss": 2.2641, "step": 320 },
    { "epoch": 1.3, "grad_norm": 1.783467411994934, "learning_rate": 4.816524269295416e-05, "loss": 2.278, "step": 330 },
    { "epoch": 1.34, "grad_norm": 4.662048816680908, "learning_rate": 4.804577033224429e-05, "loss": 2.264, "step": 340 },
    { "epoch": 1.38, "grad_norm": 3.12662410736084, "learning_rate": 4.7922687716666105e-05, "loss": 2.2712, "step": 350 },
    { "epoch": 1.42, "grad_norm": 2.0605392456054688, "learning_rate": 4.779601412783206e-05, "loss": 2.2865, "step": 360 },
    { "epoch": 1.46, "grad_norm": 1.6488828659057617, "learning_rate": 4.766576940990157e-05, "loss": 2.334, "step": 370 },
    { "epoch": 1.5, "grad_norm": 1.8102229833602905, "learning_rate": 4.7531973966472376e-05, "loss": 2.3421, "step": 380 },
    { "epoch": 1.54, "grad_norm": 2.01405668258667, "learning_rate": 4.7394648757384125e-05, "loss": 2.3526, "step": 390 },
    { "epoch": 1.58, "grad_norm": 1.5997238159179688, "learning_rate": 4.7253815295434934e-05, "loss": 2.2611, "step": 400 },
    { "epoch": 1.58, "eval_loss": 2.21992826461792, "eval_runtime": 142.6668, "eval_samples_per_second": 6.308, "eval_steps_per_second": 3.154, "step": 400 },
    { "epoch": 1.62, "grad_norm": 2.2114603519439697, "learning_rate": 4.710949564301128e-05, "loss": 2.4253, "step": 410 },
    { "epoch": 1.66, "grad_norm": 2.8670096397399902, "learning_rate": 4.6961712408631795e-05, "loss": 2.3267, "step": 420 },
    { "epoch": 1.7, "grad_norm": 1.991642951965332, "learning_rate": 4.681048874340548e-05, "loss": 2.3256, "step": 430 },
    { "epoch": 1.74, "grad_norm": 4.117753505706787, "learning_rate": 4.6655848337405e-05, "loss": 2.2686, "step": 440 },
    { "epoch": 1.78, "grad_norm": 1.650153636932373, "learning_rate": 4.649781541595547e-05, "loss": 2.2634, "step": 450 },
    { "epoch": 1.82, "grad_norm": 1.9907420873641968, "learning_rate": 4.633641473583939e-05, "loss": 2.3402, "step": 460 },
    { "epoch": 1.86, "grad_norm": 1.8381316661834717, "learning_rate": 4.6171671581418395e-05, "loss": 2.381, "step": 470 },
    { "epoch": 1.9, "grad_norm": 1.8253530263900757, "learning_rate": 4.600361176067229e-05, "loss": 2.2451, "step": 480 },
    { "epoch": 1.94, "grad_norm": 2.263423442840576, "learning_rate": 4.583226160115608e-05, "loss": 2.32, "step": 490 },
    { "epoch": 1.98, "grad_norm": 3.391731023788452, "learning_rate": 4.56576479458756e-05, "loss": 2.2602, "step": 500 },
    { "epoch": 2.01, "grad_norm": 2.6076712608337402, "learning_rate": 4.5479798149082406e-05, "loss": 2.242, "step": 510 },
    { "epoch": 2.05, "grad_norm": 1.9152929782867432, "learning_rate": 4.529874007198859e-05, "loss": 2.259, "step": 520 },
    { "epoch": 2.09, "grad_norm": 2.9343013763427734, "learning_rate": 4.5114502078402103e-05, "loss": 2.2679, "step": 530 },
    { "epoch": 2.13, "grad_norm": 2.068617105484009, "learning_rate": 4.4927113030283475e-05, "loss": 2.2661, "step": 540 },
    { "epoch": 2.17, "grad_norm": 7.2981953620910645, "learning_rate": 4.473660228322434e-05, "loss": 2.3253, "step": 550 },
    { "epoch": 2.21, "grad_norm": 1.60201096534729, "learning_rate": 4.454299968184879e-05, "loss": 2.3142, "step": 560 },
    { "epoch": 2.25, "grad_norm": 3.731210231781006, "learning_rate": 4.4346335555137975e-05, "loss": 2.3323, "step": 570 },
    { "epoch": 2.29, "grad_norm": 3.097869634628296, "learning_rate": 4.414664071167892e-05, "loss": 2.2346, "step": 580 },
    { "epoch": 2.33, "grad_norm": 1.9533767700195312, "learning_rate": 4.394394643483818e-05, "loss": 2.3276, "step": 590 },
    { "epoch": 2.37, "grad_norm": 4.110910892486572, "learning_rate": 4.373828447786111e-05, "loss": 2.2844, "step": 600 },
    { "epoch": 2.41, "grad_norm": 2.3068130016326904, "learning_rate": 4.352968705889753e-05, "loss": 2.3205, "step": 610 },
    { "epoch": 2.45, "grad_norm": 1.739564061164856, "learning_rate": 4.331818685595458e-05, "loss": 2.2218, "step": 620 },
    { "epoch": 2.49, "grad_norm": 1.999505877494812, "learning_rate": 4.3103817001777494e-05, "loss": 2.2117, "step": 630 },
    { "epoch": 2.53, "grad_norm": 1.6982237100601196, "learning_rate": 4.2886611078659187e-05, "loss": 2.174, "step": 640 },
    { "epoch": 2.57, "grad_norm": 1.7287157773971558, "learning_rate": 4.266660311317938e-05, "loss": 2.2478, "step": 650 },
    { "epoch": 2.61, "grad_norm": 2.1112897396087646, "learning_rate": 4.244382757087413e-05, "loss": 2.2961, "step": 660 },
    { "epoch": 2.65, "grad_norm": 1.901487946510315, "learning_rate": 4.221831935083662e-05, "loss": 2.1506, "step": 670 },
    { "epoch": 2.69, "grad_norm": 1.8849854469299316, "learning_rate": 4.199011378024998e-05, "loss": 2.3471, "step": 680 },
    { "epoch": 2.73, "grad_norm": 1.8044487237930298, "learning_rate": 4.175924660885314e-05, "loss": 2.2516, "step": 690 },
    { "epoch": 2.77, "grad_norm": 6.139608383178711, "learning_rate": 4.152575400334032e-05, "loss": 2.2227, "step": 700 },
    { "epoch": 2.8, "grad_norm": 1.8950961828231812, "learning_rate": 4.1289672541695434e-05, "loss": 2.2474, "step": 710 },
    { "epoch": 2.84, "grad_norm": 3.646942615509033, "learning_rate": 4.105103920746182e-05, "loss": 2.2239, "step": 720 },
    { "epoch": 2.88, "grad_norm": 1.5586037635803223, "learning_rate": 4.0809891383948616e-05, "loss": 2.3488, "step": 730 },
    { "epoch": 2.92, "grad_norm": 1.7886706590652466, "learning_rate": 4.056626684837443e-05, "loss": 2.2088, "step": 740 },
    { "epoch": 2.96, "grad_norm": 1.7146466970443726, "learning_rate": 4.0320203765949325e-05, "loss": 2.2672, "step": 750 },
    { "epoch": 3.0, "grad_norm": 1.8052889108657837, "learning_rate": 4.007174068389599e-05, "loss": 2.2723, "step": 760 },
    { "epoch": 3.04, "grad_norm": 3.6868667602539062, "learning_rate": 3.982091652541111e-05, "loss": 2.261, "step": 770 },
    { "epoch": 3.08, "grad_norm": 1.8512885570526123, "learning_rate": 3.956777058356785e-05, "loss": 2.1709, "step": 780 },
    { "epoch": 3.12, "grad_norm": 2.124035596847534, "learning_rate": 3.931234251516029e-05, "loss": 2.1759, "step": 790 },
    { "epoch": 3.16, "grad_norm": 2.077178478240967, "learning_rate": 3.905467233449103e-05, "loss": 2.3188, "step": 800 },
    { "epoch": 3.16, "eval_loss": 2.1877169609069824, "eval_runtime": 142.5615, "eval_samples_per_second": 6.313, "eval_steps_per_second": 3.157, "step": 800 },
    { "epoch": 3.2, "grad_norm": 1.8644123077392578, "learning_rate": 3.879480040710266e-05, "loss": 2.2546, "step": 810 },
    { "epoch": 3.24, "grad_norm": 2.230581521987915, "learning_rate": 3.8532767443454316e-05, "loss": 2.3114, "step": 820 },
    { "epoch": 3.28, "grad_norm": 2.7963311672210693, "learning_rate": 3.826861449254409e-05, "loss": 2.1624, "step": 830 },
    { "epoch": 3.32, "grad_norm": 2.0492663383483887, "learning_rate": 3.8002382935478506e-05, "loss": 2.2222, "step": 840 },
    { "epoch": 3.36, "grad_norm": 2.477332592010498, "learning_rate": 3.773411447898992e-05, "loss": 2.2765, "step": 850 },
    { "epoch": 3.4, "grad_norm": 2.9290037155151367, "learning_rate": 3.7463851148902895e-05, "loss": 2.1336, "step": 860 },
    { "epoch": 3.44, "grad_norm": 2.215397357940674, "learning_rate": 3.7191635283550636e-05, "loss": 2.2759, "step": 870 },
    { "epoch": 3.48, "grad_norm": 2.4170403480529785, "learning_rate": 3.691750952714242e-05, "loss": 2.2636, "step": 880 },
    { "epoch": 3.52, "grad_norm": 3.395965337753296, "learning_rate": 3.664151682308315e-05, "loss": 2.2049, "step": 890 },
    { "epoch": 3.56, "grad_norm": 15.315768241882324, "learning_rate": 3.636370040724599e-05, "loss": 2.1917, "step": 900 },
    { "epoch": 3.6, "grad_norm": 2.1479156017303467, "learning_rate": 3.608410380119927e-05, "loss": 2.2352, "step": 910 },
    { "epoch": 3.63, "grad_norm": 1.9286900758743286, "learning_rate": 3.580277080538853e-05, "loss": 2.2011, "step": 920 },
    { "epoch": 3.67, "grad_norm": 2.303708791732788, "learning_rate": 3.55197454922749e-05, "loss": 2.1426, "step": 930 },
    { "epoch": 3.71, "grad_norm": 2.3887522220611572, "learning_rate": 3.523507219943096e-05, "loss": 2.158, "step": 940 },
    { "epoch": 3.75, "grad_norm": 6.511980056762695, "learning_rate": 3.494879552259496e-05, "loss": 2.305, "step": 950 },
    { "epoch": 3.79, "grad_norm": 2.579639434814453, "learning_rate": 3.466096030868462e-05, "loss": 2.3205, "step": 960 },
    { "epoch": 3.83, "grad_norm": 1.789423942565918, "learning_rate": 3.437161164877165e-05, "loss": 2.1844, "step": 970 },
    { "epoch": 3.87, "grad_norm": 1.7820065021514893, "learning_rate": 3.408079487101791e-05, "loss": 2.2388, "step": 980 },
    { "epoch": 3.91, "grad_norm": 2.107856512069702, "learning_rate": 3.3788555533574535e-05, "loss": 2.266, "step": 990 },
    { "epoch": 3.95, "grad_norm": 2.2462568283081055, "learning_rate": 3.349493941744496e-05, "loss": 2.2603, "step": 1000 },
    { "epoch": 3.99, "grad_norm": 1.9653023481369019, "learning_rate": 3.319999251931305e-05, "loss": 2.2055, "step": 1010 },
    { "epoch": 4.03, "grad_norm": 2.0476274490356445, "learning_rate": 3.29037610443375e-05, "loss": 2.2843, "step": 1020 },
    { "epoch": 4.07, "grad_norm": 2.2384870052337646, "learning_rate": 3.260629139891344e-05, "loss": 2.1942, "step": 1030 },
    { "epoch": 4.11, "grad_norm": 1.923712134361267, "learning_rate": 3.230763018340269e-05, "loss": 2.2639, "step": 1040 },
    { "epoch": 4.15, "grad_norm": 2.2221322059631348, "learning_rate": 3.200782418483348e-05, "loss": 2.1726, "step": 1050 },
    { "epoch": 4.19, "grad_norm": 2.22153377532959, "learning_rate": 3.1706920369571006e-05, "loss": 2.2761, "step": 1060 },
    { "epoch": 4.23, "grad_norm": 4.4850616455078125, "learning_rate": 3.1404965875959914e-05, "loss": 2.1937, "step": 1070 },
    { "epoch": 4.27, "grad_norm": 2.2232580184936523, "learning_rate": 3.110200800693972e-05, "loss": 2.2333, "step": 1080 },
    { "epoch": 4.31, "grad_norm": 2.1202492713928223, "learning_rate": 3.0798094222634597e-05, "loss": 2.2239, "step": 1090 },
    { "epoch": 4.35, "grad_norm": 2.625195264816284, "learning_rate": 3.0493272132918445e-05, "loss": 2.1311, "step": 1100 },
    { "epoch": 4.39, "grad_norm": 2.0367236137390137, "learning_rate": 3.0187589489956503e-05, "loss": 2.2407, "step": 1110 },
    { "epoch": 4.42, "grad_norm": 2.249363422393799, "learning_rate": 2.9881094180724757e-05, "loss": 2.1846, "step": 1120 },
    { "epoch": 4.46, "grad_norm": 2.561105489730835, "learning_rate": 2.9573834219508084e-05, "loss": 2.0067, "step": 1130 },
    { "epoch": 4.5, "grad_norm": 2.33273983001709, "learning_rate": 2.9265857740378576e-05, "loss": 2.2424, "step": 1140 },
    { "epoch": 4.54, "grad_norm": 2.4719011783599854, "learning_rate": 2.8957212989655092e-05, "loss": 2.1779, "step": 1150 },
    { "epoch": 4.58, "grad_norm": 2.366910457611084, "learning_rate": 2.8647948318345097e-05, "loss": 2.1714, "step": 1160 },
    { "epoch": 4.62, "grad_norm": 2.1899473667144775, "learning_rate": 2.8338112174570263e-05, "loss": 2.1499, "step": 1170 },
    { "epoch": 4.66, "grad_norm": 2.3744349479675293, "learning_rate": 2.8027753095976793e-05, "loss": 2.2543, "step": 1180 },
    { "epoch": 4.7, "grad_norm": 2.3728389739990234, "learning_rate": 2.7716919702131662e-05, "loss": 2.2539, "step": 1190 },
    { "epoch": 4.74, "grad_norm": 2.3791847229003906, "learning_rate": 2.740566068690612e-05, "loss": 2.1576, "step": 1200 },
    { "epoch": 4.74, "eval_loss": 2.1767776012420654, "eval_runtime": 143.8644, "eval_samples_per_second": 6.256, "eval_steps_per_second": 3.128, "step": 1200 },
    { "epoch": 4.78, "grad_norm": 1.933295726776123, "learning_rate": 2.70940248108475e-05, "loss": 2.1457, "step": 1210 },
    { "epoch": 4.82, "grad_norm": 1.918013095855713, "learning_rate": 2.6782060893540578e-05, "loss": 2.1832, "step": 1220 },
    { "epoch": 4.86, "grad_norm": 2.6071360111236572, "learning_rate": 2.646981780595973e-05, "loss": 2.2199, "step": 1230 },
    { "epoch": 4.9, "grad_norm": 2.3134567737579346, "learning_rate": 2.6157344462812976e-05, "loss": 2.2627, "step": 1240 },
    { "epoch": 4.94, "grad_norm": 2.3015966415405273, "learning_rate": 2.5844689814879234e-05, "loss": 2.2163, "step": 1250 },
    { "epoch": 4.98, "grad_norm": 2.3684730529785156, "learning_rate": 2.5531902841339867e-05, "loss": 2.1699, "step": 1260 },
    { "epoch": 5.02, "grad_norm": 1.8125172853469849, "learning_rate": 2.5219032542105807e-05, "loss": 2.125, "step": 1270 },
    { "epoch": 5.06, "grad_norm": 1.954872965812683, "learning_rate": 2.4906127930141478e-05, "loss": 2.2746, "step": 1280 },
    { "epoch": 5.1, "grad_norm": 2.147181749343872, "learning_rate": 2.459323802378656e-05, "loss": 2.2056, "step": 1290 },
    { "epoch": 5.14, "grad_norm": 2.522922992706299, "learning_rate": 2.428041183907704e-05, "loss": 2.2883, "step": 1300 },
    { "epoch": 5.18, "grad_norm": 2.107692003250122, "learning_rate": 2.396769838206651e-05, "loss": 2.1534, "step": 1310 },
    { "epoch": 5.21, "grad_norm": 2.043048620223999, "learning_rate": 2.3655146641149127e-05, "loss": 2.1946, "step": 1320 },
    { "epoch": 5.25, "grad_norm": 3.606001615524292, "learning_rate": 2.3342805579385278e-05, "loss": 2.2133, "step": 1330 },
    { "epoch": 5.29, "grad_norm": 1.9504977464675903, "learning_rate": 2.3030724126831202e-05, "loss": 2.2014, "step": 1340 },
    { "epoch": 5.33, "grad_norm": 2.8629162311553955, "learning_rate": 2.2718951172873868e-05, "loss": 2.1473, "step": 1350 },
    { "epoch": 5.37, "grad_norm": 2.781360387802124, "learning_rate": 2.24075355585721e-05, "loss": 2.2206, "step": 1360 },
    { "epoch": 5.41, "grad_norm": 2.6987454891204834, "learning_rate": 2.209652606900541e-05, "loss": 2.1345, "step": 1370 },
    { "epoch": 5.45, "grad_norm": 3.2475154399871826, "learning_rate": 2.1785971425631446e-05, "loss": 2.1578, "step": 1380 },
    { "epoch": 5.49, "grad_norm": 2.428025007247925, "learning_rate": 2.1475920278653592e-05, "loss": 2.0979, "step": 1390 },
    { "epoch": 5.53, "grad_norm": 2.3817710876464844, "learning_rate": 2.116642119939952e-05, "loss": 2.1287, "step": 1400 },
    { "epoch": 5.57, "grad_norm": 3.956568479537964, "learning_rate": 2.0857522672712298e-05, "loss": 2.2171, "step": 1410 },
    { "epoch": 5.61, "grad_norm": 2.3348941802978516, "learning_rate": 2.054927308935489e-05, "loss": 2.307, "step": 1420 },
    { "epoch": 5.65, "grad_norm": 2.4065921306610107, "learning_rate": 2.024172073842952e-05, "loss": 2.174, "step": 1430 },
    { "epoch": 5.69, "grad_norm": 2.090249538421631, "learning_rate": 1.993491379981284e-05, "loss": 2.2008, "step": 1440 },
    { "epoch": 5.73, "grad_norm": 4.224242687225342, "learning_rate": 1.96289003366083e-05, "loss": 2.2197, "step": 1450 },
    { "epoch": 5.77, "grad_norm": 2.2841567993164062, "learning_rate": 1.9323728287616806e-05, "loss": 2.2154, "step": 1460 },
    { "epoch": 5.81, "grad_norm": 2.359734296798706, "learning_rate": 1.901944545982677e-05, "loss": 2.1683, "step": 1470 },
    { "epoch": 5.85, "grad_norm": 2.155123233795166, "learning_rate": 1.8716099520924923e-05, "loss": 2.2639, "step": 1480 },
    { "epoch": 5.89, "grad_norm": 2.440293550491333, "learning_rate": 1.8413737991828843e-05, "loss": 2.2023, "step": 1490 },
    { "epoch": 5.93, "grad_norm": 2.8041791915893555, "learning_rate": 1.811240823924256e-05, "loss": 2.1118, "step": 1500 },
    { "epoch": 5.97, "grad_norm": 2.284633159637451, "learning_rate": 1.7812157468236274e-05, "loss": 2.1564, "step": 1510 },
    { "epoch": 6.0, "grad_norm": 2.427250623703003, "learning_rate": 1.7513032714851386e-05, "loss": 2.1653, "step": 1520 },
    { "epoch": 6.04, "grad_norm": 2.3097784519195557, "learning_rate": 1.721508083873207e-05, "loss": 2.2382, "step": 1530 },
    { "epoch": 6.08, "grad_norm": 2.3014960289001465, "learning_rate": 1.6918348515784387e-05, "loss": 2.2634, "step": 1540 },
    { "epoch": 6.12, "grad_norm": 2.0459794998168945, "learning_rate": 1.6622882230864255e-05, "loss": 2.1997, "step": 1550 },
    { "epoch": 6.16, "grad_norm": 1.938412070274353, "learning_rate": 1.632872827049533e-05, "loss": 2.2056, "step": 1560 },
    { "epoch": 6.2, "grad_norm": 2.71195650100708, "learning_rate": 1.603593271561794e-05, "loss": 2.141, "step": 1570 },
    { "epoch": 6.24, "grad_norm": 2.2068488597869873, "learning_rate": 1.5744541434370235e-05, "loss": 2.1678, "step": 1580 },
    { "epoch": 6.28, "grad_norm": 2.839308023452759, "learning_rate": 1.545460007490268e-05, "loss": 2.1978, "step": 1590 },
    { "epoch": 6.32, "grad_norm": 2.6756937503814697, "learning_rate": 1.5166154058227003e-05, "loss": 2.1811, "step": 1600 },
    { "epoch": 6.32, "eval_loss": 2.1764707565307617, "eval_runtime": 143.5803, "eval_samples_per_second": 6.268, "eval_steps_per_second": 3.134, "step": 1600 },
    { "epoch": 6.36, "grad_norm": 2.5529654026031494, "learning_rate": 1.4879248571100729e-05, "loss": 2.1933, "step": 1610 },
    { "epoch": 6.4, "grad_norm": 2.277837038040161, "learning_rate": 1.4593928558948403e-05, "loss": 2.1276, "step": 1620 },
    { "epoch": 6.44, "grad_norm": 2.087127208709717, "learning_rate": 1.4310238718820638e-05, "loss": 2.1158, "step": 1630 },
    { "epoch": 6.48, "grad_norm": 2.717027187347412, "learning_rate": 1.4028223492392062e-05, "loss": 2.1657, "step": 1640 },
    { "epoch": 6.52, "grad_norm": 5.161482334136963, "learning_rate": 1.3747927058999228e-05, "loss": 2.1421, "step": 1650 },
    { "epoch": 6.56, "grad_norm": 3.2789719104766846, "learning_rate": 1.3469393328719732e-05, "loss": 2.1197, "step": 1660 },
    { "epoch": 6.6, "grad_norm": 2.523096799850464, "learning_rate": 1.3192665935493401e-05, "loss": 2.1886, "step": 1670 },
    { "epoch": 6.64, "grad_norm": 2.650481939315796, "learning_rate": 1.2917788230286753e-05, "loss": 2.1621, "step": 1680 },
    { "epoch": 6.68, "grad_norm": 2.2410335540771484, "learning_rate": 1.2644803274301879e-05, "loss": 2.1615, "step": 1690 },
    { "epoch": 6.72, "grad_norm": 2.0118298530578613, "learning_rate": 1.2373753832230631e-05, "loss": 2.1762, "step": 1700 },
    { "epoch": 6.76, "grad_norm": 2.5051441192626953, "learning_rate": 1.210468236555523e-05, "loss": 2.1252, "step": 1710 },
    { "epoch": 6.8, "grad_norm": 1.9113433361053467, "learning_rate": 1.1837631025896506e-05, "loss": 2.0933, "step": 1720 },
    { "epoch": 6.83, "grad_norm": 2.689561367034912, "learning_rate": 1.1572641648410554e-05, "loss": 2.1595, "step": 1730 },
    { "epoch": 6.87, "grad_norm": 2.5091662406921387, "learning_rate": 1.1309755745235005e-05, "loss": 2.1441, "step": 1740 },
    { "epoch": 6.91, "grad_norm": 3.028869152069092, "learning_rate": 1.1049014498985932e-05, "loss": 2.186, "step": 1750 },
    { "epoch": 6.95, "grad_norm": 2.106482744216919, "learning_rate": 1.0790458756306357e-05, "loss": 2.1507, "step": 1760 },
    { "epoch": 6.99, "grad_norm": 3.192108154296875, "learning_rate": 1.0534129021467363e-05, "loss": 2.187, "step": 1770 },
    { "epoch": 7.03, "grad_norm": 2.394237518310547, "learning_rate": 1.0280065450022844e-05, "loss": 2.1725, "step": 1780 },
    { "epoch": 7.07, "grad_norm": 2.58278226852417, "learning_rate": 1.002830784251896e-05, "loss": 2.1623, "step": 1790 },
    { "epoch": 7.11, "grad_norm": 2.267213821411133, "learning_rate": 9.77889563825912e-06, "loss": 2.1668, "step": 1800 },
    { "epoch": 7.15, "grad_norm": 2.4960286617279053, "learning_rate": 9.531867909125544e-06, "loss": 2.2438, "step": 1810 },
    { "epoch": 7.19, "grad_norm": 2.337390899658203, "learning_rate": 9.287263353458501e-06, "loss": 2.2111, "step": 1820 },
    { "epoch": 7.23, "grad_norm": 3.2264187335968018, "learning_rate": 9.045120289993936e-06, "loss": 2.1189, "step": 1830 },
    { "epoch": 7.27, "grad_norm": 2.872370719909668, "learning_rate": 8.805476651860607e-06, "loss": 2.1599, "step": 1840 },
    { "epoch": 7.31, "grad_norm": 2.6654868125915527, "learning_rate": 8.568369980637678e-06, "loss": 2.1976, "step": 1850 },
    { "epoch": 7.35, "grad_norm": 2.957482099533081, "learning_rate": 8.333837420473592e-06, "loss": 2.1056, "step": 1860 },
    { "epoch": 7.39, "grad_norm": 2.7675461769104004, "learning_rate": 8.101915712267228e-06, "loss": 2.1528, "step": 1870 },
    { "epoch": 7.43, "grad_norm": 2.2371811866760254, "learning_rate": 7.872641187912183e-06, "loss": 2.1607, "step": 1880 },
    { "epoch": 7.47, "grad_norm": 2.760326862335205, "learning_rate": 7.64604976460524e-06, "loss": 2.0584, "step": 1890 },
    { "epoch": 7.51, "grad_norm": 2.8428192138671875, "learning_rate": 7.422176939219682e-06, "loss": 2.1451, "step": 1900 },
    { "epoch": 7.55, "grad_norm": 2.6749818325042725, "learning_rate": 7.2010577827444945e-06, "loss": 2.1713, "step": 1910 },
    { "epoch": 7.59, "grad_norm": 2.2493717670440674, "learning_rate": 6.9827269347903204e-06, "loss": 2.195, "step": 1920 },
    { "epoch": 7.62, "grad_norm": 2.8834621906280518, "learning_rate": 6.767218598162947e-06, "loss": 2.1771, "step": 1930 },
    { "epoch": 7.66, "grad_norm": 2.361966371536255, "learning_rate": 6.554566533505213e-06, "loss": 2.2726, "step": 1940 },
    { "epoch": 7.7, "grad_norm": 2.7109503746032715, "learning_rate": 6.344804054008252e-06, "loss": 2.1248, "step": 1950 },
    { "epoch": 7.74, "grad_norm": 2.7265536785125732, "learning_rate": 6.137964020192749e-06, "loss": 2.1833, "step": 1960 },
    { "epoch": 7.78, "grad_norm": 2.4823639392852783, "learning_rate": 5.934078834761176e-06, "loss": 2.1631, "step": 1970 },
    { "epoch": 7.82, "grad_norm": 2.896096706390381, "learning_rate": 5.733180437521676e-06, "loss": 2.1357, "step": 1980 },
    { "epoch": 7.86, "grad_norm": 3.116410732269287, "learning_rate": 5.535300300384552e-06, "loss": 2.1195, "step": 1990 },
    { "epoch": 7.9, "grad_norm": 2.8223111629486084, "learning_rate": 5.34046942243199e-06, "loss": 2.1649, "step": 2000 },
    { "epoch": 7.9, "eval_loss": 2.1766035556793213, "eval_runtime": 142.6761, "eval_samples_per_second": 6.308, "eval_steps_per_second": 3.154, "step": 2000 },
    { "epoch": 7.94, "grad_norm": 2.767136812210083, "learning_rate": 5.148718325061858e-06, "loss": 2.1394, "step": 2010 },
    { "epoch": 7.98, "grad_norm": 2.9371230602264404, "learning_rate": 4.960077047206374e-06, "loss": 2.1314, "step": 2020 },
    { "epoch": 8.02, "grad_norm": 2.5136184692382812, "learning_rate": 4.7745751406263165e-06, "loss": 2.127, "step": 2030 },
    { "epoch": 8.06, "grad_norm": 2.1684181690216064, "learning_rate": 4.592241665281555e-06, "loss": 2.0934, "step": 2040 },
    { "epoch": 8.1, "grad_norm": 2.427680730819702, "learning_rate": 4.4131051847786735e-06, "loss": 2.1284, "step": 2050 },
    { "epoch": 8.14, "grad_norm": 2.8133230209350586, "learning_rate": 4.237193761896291e-06, "loss": 2.2072, "step": 2060 },
    { "epoch": 8.18, "grad_norm": 2.575359344482422, "learning_rate": 4.06453495418885e-06, "loss": 2.1865, "step": 2070 },
    { "epoch": 8.22, "grad_norm": 2.287165403366089, "learning_rate": 3.895155809669599e-06, "loss": 2.1804, "step": 2080 },
    { "epoch": 8.26, "grad_norm": 2.5095739364624023, "learning_rate": 3.72908286257333e-06, "loss": 2.0919, "step": 2090 },
    { "epoch": 8.3, "grad_norm": 2.8196492195129395, "learning_rate": 3.566342129199665e-06, "loss": 2.0937, "step": 2100 },
    { "epoch": 8.34, "grad_norm": 2.5309383869171143, "learning_rate": 3.406959103837412e-06, "loss": 2.2098, "step": 2110 },
    { "epoch": 8.38, "grad_norm": 2.6270852088928223, "learning_rate": 3.250958754770775e-06, "loss": 2.1873, "step": 2120 },
    { "epoch": 8.41, "grad_norm": 2.9431707859039307, "learning_rate": 3.09836552036791e-06, "loss": 2.126, "step": 2130 },
    { "epoch": 8.45, "grad_norm": 2.5240731239318848, "learning_rate": 2.949203305252496e-06, "loss": 2.0116, "step": 2140 },
    { "epoch": 8.49, "grad_norm": 2.583218812942505, "learning_rate": 2.8034954765589566e-06, "loss": 2.0481, "step": 2150 },
    { "epoch": 8.53, "grad_norm": 2.5131754875183105, "learning_rate": 2.66126486027187e-06, "loss": 2.1438, "step": 2160 },
    { "epoch": 8.57, "grad_norm": 3.5266616344451904, "learning_rate": 2.5225337376501015e-06, "loss": 2.1933, "step": 2170 },
    { "epoch": 8.61, "grad_norm": 2.9746336936950684, "learning_rate": 2.3873238417363634e-06, "loss": 2.1236, "step": 2180 },
    { "epoch": 8.65, "grad_norm": 2.598071813583374, "learning_rate": 2.2556563539525815e-06, "loss": 2.0947, "step": 2190 },
    { "epoch": 8.69, "grad_norm": 3.164978265762329, "learning_rate": 2.1275519007817044e-06, "loss": 2.1925, "step": 2200 },
    { "epoch": 8.73, "grad_norm": 2.4741063117980957, "learning_rate": 2.003030550536439e-06, "loss": 2.1531, "step": 2210 },
    { "epoch": 8.77, "grad_norm": 2.4534201622009277, "learning_rate": 1.882111810215445e-06, "loss": 2.1345, "step": 2220 },
    { "epoch": 8.81, "grad_norm": 2.8546104431152344, "learning_rate": 1.7648146224474444e-06, "loss": 2.145, "step": 2230 },
    { "epoch": 8.85, "grad_norm": 2.3898508548736572, "learning_rate": 1.651157362523731e-06, "loss": 2.1689, "step": 2240 },
    { "epoch": 8.89, "grad_norm": 2.7107295989990234, "learning_rate": 1.541157835519605e-06, "loss": 2.232, "step": 2250 },
    { "epoch": 8.93, "grad_norm": 2.8263099193573, "learning_rate": 1.434833273505079e-06, "loss": 2.1338, "step": 2260 },
    { "epoch": 8.97, "grad_norm": 2.6851024627685547, "learning_rate": 1.3322003328453874e-06, "loss": 2.2601, "step": 2270 },
    { "epoch": 9.01, "grad_norm": 2.2520766258239746, "learning_rate": 1.2332750915916458e-06, "loss": 2.1185, "step": 2280 },
    { "epoch": 9.05, "grad_norm": 2.532531499862671, "learning_rate": 1.1380730469621653e-06, "loss": 2.0739, "step": 2290 },
    { "epoch": 9.09, "grad_norm": 2.496253490447998, "learning_rate": 1.046609112914701e-06, "loss": 2.1072, "step": 2300 },
    { "epoch": 9.13, "grad_norm": 2.455324649810791, "learning_rate": 9.588976178100866e-07, "loss": 2.1398, "step": 2310 },
    { "epoch": 9.17, "grad_norm": 2.132638692855835, "learning_rate": 8.749523021676309e-07, "loss": 2.1913, "step": 2320 },
    { "epoch": 9.2, "grad_norm": 3.008934497833252, "learning_rate": 7.94786316512583e-07, "loss": 2.1616, "step": 2330 },
    { "epoch": 9.24, "grad_norm": 3.200282096862793, "learning_rate": 7.184122193160032e-07, "loss": 2.1086, "step": 2340 },
    { "epoch": 9.28, "grad_norm": 2.8957929611206055, "learning_rate": 6.458419750274336e-07, "loss": 2.26, "step": 2350 },
    { "epoch": 9.32, "grad_norm": 2.499394178390503, "learning_rate": 5.770869522005839e-07, "loss": 2.052, "step": 2360 },
    { "epoch": 9.36, "grad_norm": 2.7533364295959473, "learning_rate": 5.121579217123751e-07, "loss": 2.0698, "step": 2370 },
    { "epoch": 9.4, "grad_norm": 2.28757643699646, "learning_rate": 4.510650550756279e-07, "loss": 2.1057, "step": 2380 },
    { "epoch": 9.44, "grad_norm": 2.910390853881836, "learning_rate": 3.938179228456379e-07, "loss": 2.1283, "step": 2390 },
    { "epoch": 9.48, "grad_norm": 2.4329330921173096, "learning_rate": 3.4042549312087726e-07, "loss": 2.1422, "step": 2400 },
    { "epoch": 9.48, "eval_loss": 2.176323175430298, "eval_runtime": 142.2743, "eval_samples_per_second": 6.326, "eval_steps_per_second": 3.163, "step": 2400 }
  ],
  "logging_steps": 10,
  "max_steps": 2530,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 10,
  "save_steps": 400,
  "total_flos": 3.0804047954968576e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}