|
{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 1400,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.014285714285714285,
      "grad_norm": 2.0159542560577393,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 2.6872,
      "step": 20
    },
    {
      "epoch": 0.02857142857142857,
      "grad_norm": 1.690909743309021,
      "learning_rate": 2.857142857142857e-05,
      "loss": 2.281,
      "step": 40
    },
    {
      "epoch": 0.04285714285714286,
      "grad_norm": 1.8601027727127075,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 1.7851,
      "step": 60
    },
    {
      "epoch": 0.05714285714285714,
      "grad_norm": 1.4562734365463257,
      "learning_rate": 5.714285714285714e-05,
      "loss": 1.3021,
      "step": 80
    },
    {
      "epoch": 0.07142857142857142,
      "grad_norm": 1.4902580976486206,
      "learning_rate": 7.142857142857143e-05,
      "loss": 1.0692,
      "step": 100
    },
    {
      "epoch": 0.08571428571428572,
      "grad_norm": 1.4558128118515015,
      "learning_rate": 8.571428571428571e-05,
      "loss": 0.9234,
      "step": 120
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.373028039932251,
      "learning_rate": 0.0001,
      "loss": 0.8799,
      "step": 140
    },
    {
      "epoch": 0.11428571428571428,
      "grad_norm": 1.6163955926895142,
      "learning_rate": 9.841269841269841e-05,
      "loss": 0.857,
      "step": 160
    },
    {
      "epoch": 0.12857142857142856,
      "grad_norm": 1.4480136632919312,
      "learning_rate": 9.682539682539682e-05,
      "loss": 0.7656,
      "step": 180
    },
    {
      "epoch": 0.14285714285714285,
      "grad_norm": 1.6265840530395508,
      "learning_rate": 9.523809523809524e-05,
      "loss": 0.7326,
      "step": 200
    },
    {
      "epoch": 0.15714285714285714,
      "grad_norm": 1.2702994346618652,
      "learning_rate": 9.365079365079366e-05,
      "loss": 0.639,
      "step": 220
    },
    {
      "epoch": 0.17142857142857143,
      "grad_norm": 1.4225071668624878,
      "learning_rate": 9.206349206349206e-05,
      "loss": 0.6401,
      "step": 240
    },
    {
      "epoch": 0.18571428571428572,
      "grad_norm": 1.622852087020874,
      "learning_rate": 9.047619047619048e-05,
      "loss": 0.624,
      "step": 260
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.6511507034301758,
      "learning_rate": 8.888888888888889e-05,
      "loss": 0.6543,
      "step": 280
    },
    {
      "epoch": 0.21428571428571427,
      "grad_norm": 1.5115859508514404,
      "learning_rate": 8.730158730158731e-05,
      "loss": 0.6238,
      "step": 300
    },
    {
      "epoch": 0.22857142857142856,
      "grad_norm": 1.534798502922058,
      "learning_rate": 8.571428571428571e-05,
      "loss": 0.6277,
      "step": 320
    },
    {
      "epoch": 0.24285714285714285,
      "grad_norm": 1.3327252864837646,
      "learning_rate": 8.412698412698413e-05,
      "loss": 0.5918,
      "step": 340
    },
    {
      "epoch": 0.2571428571428571,
      "grad_norm": 1.4269055128097534,
      "learning_rate": 8.253968253968255e-05,
      "loss": 0.5968,
      "step": 360
    },
    {
      "epoch": 0.2714285714285714,
      "grad_norm": 1.2712342739105225,
      "learning_rate": 8.095238095238096e-05,
      "loss": 0.6326,
      "step": 380
    },
    {
      "epoch": 0.2857142857142857,
      "grad_norm": 1.500617265701294,
      "learning_rate": 7.936507936507937e-05,
      "loss": 0.6162,
      "step": 400
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.5671484470367432,
      "learning_rate": 7.777777777777778e-05,
      "loss": 0.6206,
      "step": 420
    },
    {
      "epoch": 0.3142857142857143,
      "grad_norm": 1.8047889471054077,
      "learning_rate": 7.619047619047618e-05,
      "loss": 0.612,
      "step": 440
    },
    {
      "epoch": 0.32857142857142857,
      "grad_norm": 1.4521819353103638,
      "learning_rate": 7.460317460317461e-05,
      "loss": 0.5748,
      "step": 460
    },
    {
      "epoch": 0.34285714285714286,
      "grad_norm": 1.6426695585250854,
      "learning_rate": 7.301587301587302e-05,
      "loss": 0.5811,
      "step": 480
    },
    {
      "epoch": 0.35714285714285715,
      "grad_norm": 1.6789945363998413,
      "learning_rate": 7.142857142857143e-05,
      "loss": 0.5636,
      "step": 500
    },
    {
      "epoch": 0.37142857142857144,
      "grad_norm": 1.5078628063201904,
      "learning_rate": 6.984126984126984e-05,
      "loss": 0.5618,
      "step": 520
    },
    {
      "epoch": 0.38571428571428573,
      "grad_norm": 1.5368173122406006,
      "learning_rate": 6.825396825396825e-05,
      "loss": 0.5381,
      "step": 540
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.1794755458831787,
      "learning_rate": 6.666666666666667e-05,
      "loss": 0.528,
      "step": 560
    },
    {
      "epoch": 0.4142857142857143,
      "grad_norm": 1.31809401512146,
      "learning_rate": 6.507936507936509e-05,
      "loss": 0.5508,
      "step": 580
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 1.398769736289978,
      "learning_rate": 6.349206349206349e-05,
      "loss": 0.5435,
      "step": 600
    },
    {
      "epoch": 0.44285714285714284,
      "grad_norm": 1.49237859249115,
      "learning_rate": 6.19047619047619e-05,
      "loss": 0.5469,
      "step": 620
    },
    {
      "epoch": 0.45714285714285713,
      "grad_norm": 1.3688076734542847,
      "learning_rate": 6.0317460317460316e-05,
      "loss": 0.5257,
      "step": 640
    },
    {
      "epoch": 0.4714285714285714,
      "grad_norm": 1.427947998046875,
      "learning_rate": 5.873015873015873e-05,
      "loss": 0.5247,
      "step": 660
    },
    {
      "epoch": 0.4857142857142857,
      "grad_norm": 1.4817345142364502,
      "learning_rate": 5.714285714285714e-05,
      "loss": 0.5223,
      "step": 680
    },
    {
      "epoch": 0.5,
      "grad_norm": 1.3692938089370728,
      "learning_rate": 5.555555555555556e-05,
      "loss": 0.5003,
      "step": 700
    },
    {
      "epoch": 0.5142857142857142,
      "grad_norm": 1.4122618436813354,
      "learning_rate": 5.396825396825397e-05,
      "loss": 0.5535,
      "step": 720
    },
    {
      "epoch": 0.5285714285714286,
      "grad_norm": 1.4148932695388794,
      "learning_rate": 5.2380952380952384e-05,
      "loss": 0.5262,
      "step": 740
    },
    {
      "epoch": 0.5428571428571428,
      "grad_norm": 1.3666108846664429,
      "learning_rate": 5.0793650793650794e-05,
      "loss": 0.5345,
      "step": 760
    },
    {
      "epoch": 0.5571428571428572,
      "grad_norm": 1.4528700113296509,
      "learning_rate": 4.9206349206349204e-05,
      "loss": 0.5136,
      "step": 780
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 1.563751459121704,
      "learning_rate": 4.761904761904762e-05,
      "loss": 0.5196,
      "step": 800
    },
    {
      "epoch": 0.5857142857142857,
      "grad_norm": 1.4110444784164429,
      "learning_rate": 4.603174603174603e-05,
      "loss": 0.5281,
      "step": 820
    },
    {
      "epoch": 0.6,
      "grad_norm": 1.769435167312622,
      "learning_rate": 4.4444444444444447e-05,
      "loss": 0.5482,
      "step": 840
    },
    {
      "epoch": 0.6142857142857143,
      "grad_norm": 1.4131271839141846,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 0.5366,
      "step": 860
    },
    {
      "epoch": 0.6285714285714286,
      "grad_norm": 1.459429144859314,
      "learning_rate": 4.126984126984127e-05,
      "loss": 0.5013,
      "step": 880
    },
    {
      "epoch": 0.6428571428571429,
      "grad_norm": 1.6418342590332031,
      "learning_rate": 3.968253968253968e-05,
      "loss": 0.5086,
      "step": 900
    },
    {
      "epoch": 0.6571428571428571,
      "grad_norm": 1.6576707363128662,
      "learning_rate": 3.809523809523809e-05,
      "loss": 0.5244,
      "step": 920
    },
    {
      "epoch": 0.6714285714285714,
      "grad_norm": 1.6548312902450562,
      "learning_rate": 3.650793650793651e-05,
      "loss": 0.4733,
      "step": 940
    },
    {
      "epoch": 0.6857142857142857,
      "grad_norm": 1.6226400136947632,
      "learning_rate": 3.492063492063492e-05,
      "loss": 0.5237,
      "step": 960
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.4631938934326172,
      "learning_rate": 3.3333333333333335e-05,
      "loss": 0.5316,
      "step": 980
    },
    {
      "epoch": 0.7142857142857143,
      "grad_norm": 1.3383113145828247,
      "learning_rate": 3.1746031746031745e-05,
      "loss": 0.501,
      "step": 1000
    },
    {
      "epoch": 0.7285714285714285,
      "grad_norm": 1.4526844024658203,
      "learning_rate": 3.0158730158730158e-05,
      "loss": 0.5204,
      "step": 1020
    },
    {
      "epoch": 0.7428571428571429,
      "grad_norm": 1.2494806051254272,
      "learning_rate": 2.857142857142857e-05,
      "loss": 0.4956,
      "step": 1040
    },
    {
      "epoch": 0.7571428571428571,
      "grad_norm": 1.5174272060394287,
      "learning_rate": 2.6984126984126984e-05,
      "loss": 0.4821,
      "step": 1060
    },
    {
      "epoch": 0.7714285714285715,
      "grad_norm": 1.4890474081039429,
      "learning_rate": 2.5396825396825397e-05,
      "loss": 0.482,
      "step": 1080
    },
    {
      "epoch": 0.7857142857142857,
      "grad_norm": 1.7602328062057495,
      "learning_rate": 2.380952380952381e-05,
      "loss": 0.4809,
      "step": 1100
    },
    {
      "epoch": 0.8,
      "grad_norm": 1.3647661209106445,
      "learning_rate": 2.2222222222222223e-05,
      "loss": 0.4983,
      "step": 1120
    },
    {
      "epoch": 0.8142857142857143,
      "grad_norm": 1.386948585510254,
      "learning_rate": 2.0634920634920636e-05,
      "loss": 0.4851,
      "step": 1140
    },
    {
      "epoch": 0.8285714285714286,
      "grad_norm": 1.5154744386672974,
      "learning_rate": 1.9047619047619046e-05,
      "loss": 0.4645,
      "step": 1160
    },
    {
      "epoch": 0.8428571428571429,
      "grad_norm": 1.5008195638656616,
      "learning_rate": 1.746031746031746e-05,
      "loss": 0.4867,
      "step": 1180
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 1.5143955945968628,
      "learning_rate": 1.5873015873015872e-05,
      "loss": 0.4727,
      "step": 1200
    },
    {
      "epoch": 0.8714285714285714,
      "grad_norm": 1.4292445182800293,
      "learning_rate": 1.4285714285714285e-05,
      "loss": 0.477,
      "step": 1220
    },
    {
      "epoch": 0.8857142857142857,
      "grad_norm": 1.619745135307312,
      "learning_rate": 1.2698412698412699e-05,
      "loss": 0.4788,
      "step": 1240
    },
    {
      "epoch": 0.9,
      "grad_norm": 1.4151842594146729,
      "learning_rate": 1.1111111111111112e-05,
      "loss": 0.4824,
      "step": 1260
    },
    {
      "epoch": 0.9142857142857143,
      "grad_norm": 1.4590322971343994,
      "learning_rate": 9.523809523809523e-06,
      "loss": 0.476,
      "step": 1280
    },
    {
      "epoch": 0.9285714285714286,
      "grad_norm": 1.579836130142212,
      "learning_rate": 7.936507936507936e-06,
      "loss": 0.458,
      "step": 1300
    },
    {
      "epoch": 0.9428571428571428,
      "grad_norm": 1.5796328783035278,
      "learning_rate": 6.349206349206349e-06,
      "loss": 0.4921,
      "step": 1320
    },
    {
      "epoch": 0.9571428571428572,
      "grad_norm": 1.5328384637832642,
      "learning_rate": 4.7619047619047615e-06,
      "loss": 0.4796,
      "step": 1340
    },
    {
      "epoch": 0.9714285714285714,
      "grad_norm": 1.5657676458358765,
      "learning_rate": 3.1746031746031746e-06,
      "loss": 0.494,
      "step": 1360
    },
    {
      "epoch": 0.9857142857142858,
      "grad_norm": 1.5250962972640991,
      "learning_rate": 1.5873015873015873e-06,
      "loss": 0.4833,
      "step": 1380
    },
    {
      "epoch": 1.0,
      "grad_norm": 1.6834768056869507,
      "learning_rate": 0.0,
      "loss": 0.4744,
      "step": 1400
    }
  ],
  "logging_steps": 20,
  "max_steps": 1400,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 8419093040332800.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}