|
{ |
|
"best_metric": 1.2004581689834595, |
|
"best_model_checkpoint": "distilbert_base_train_book_v2/checkpoint-350000", |
|
"epoch": 25.0, |
|
"eval_steps": 10000, |
|
"global_step": 355850, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0351271603203597, |
|
"grad_norm": 1.9653347730636597, |
|
"learning_rate": 5e-06, |
|
"loss": 8.8875, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.0702543206407194, |
|
"grad_norm": 0.7303386926651001, |
|
"learning_rate": 1e-05, |
|
"loss": 6.5828, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 0.1053814809610791, |
|
"grad_norm": 0.6638388633728027, |
|
"learning_rate": 1.5e-05, |
|
"loss": 6.0119, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 0.1405086412814388, |
|
"grad_norm": 0.693476140499115, |
|
"learning_rate": 2e-05, |
|
"loss": 5.8991, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 0.1756358016017985, |
|
"grad_norm": 0.6988317370414734, |
|
"learning_rate": 2.5e-05, |
|
"loss": 5.8401, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 0.2107629619221582, |
|
"grad_norm": 0.5958765149116516, |
|
"learning_rate": 3e-05, |
|
"loss": 5.7955, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 0.2458901222425179, |
|
"grad_norm": 0.4958369731903076, |
|
"learning_rate": 3.5e-05, |
|
"loss": 5.7665, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 0.2810172825628776, |
|
"grad_norm": 0.4899258613586426, |
|
"learning_rate": 4e-05, |
|
"loss": 5.7418, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 0.31614444288323734, |
|
"grad_norm": 0.5407448410987854, |
|
"learning_rate": 4.5e-05, |
|
"loss": 5.7212, |
|
"step": 4500 |
|
}, |
|
{ |
|
"epoch": 0.351271603203597, |
|
"grad_norm": 0.5828524231910706, |
|
"learning_rate": 5e-05, |
|
"loss": 5.7049, |
|
"step": 5000 |
|
}, |
|
{ |
|
"epoch": 0.38639876352395675, |
|
"grad_norm": 0.5220353007316589, |
|
"learning_rate": 5.500000000000001e-05, |
|
"loss": 5.6856, |
|
"step": 5500 |
|
}, |
|
{ |
|
"epoch": 0.4215259238443164, |
|
"grad_norm": 0.4556717574596405, |
|
"learning_rate": 6e-05, |
|
"loss": 5.6762, |
|
"step": 6000 |
|
}, |
|
{ |
|
"epoch": 0.45665308416467615, |
|
"grad_norm": 0.49513307213783264, |
|
"learning_rate": 6.500000000000001e-05, |
|
"loss": 5.6635, |
|
"step": 6500 |
|
}, |
|
{ |
|
"epoch": 0.4917802444850358, |
|
"grad_norm": 0.45069605112075806, |
|
"learning_rate": 7e-05, |
|
"loss": 5.6566, |
|
"step": 7000 |
|
}, |
|
{ |
|
"epoch": 0.5269074048053956, |
|
"grad_norm": 0.5079524517059326, |
|
"learning_rate": 7.500000000000001e-05, |
|
"loss": 5.6468, |
|
"step": 7500 |
|
}, |
|
{ |
|
"epoch": 0.5620345651257552, |
|
"grad_norm": 0.5765870809555054, |
|
"learning_rate": 8e-05, |
|
"loss": 5.6365, |
|
"step": 8000 |
|
}, |
|
{ |
|
"epoch": 0.5971617254461149, |
|
"grad_norm": 0.47896265983581543, |
|
"learning_rate": 8.5e-05, |
|
"loss": 5.627, |
|
"step": 8500 |
|
}, |
|
{ |
|
"epoch": 0.6322888857664747, |
|
"grad_norm": 0.6472557783126831, |
|
"learning_rate": 9e-05, |
|
"loss": 5.6161, |
|
"step": 9000 |
|
}, |
|
{ |
|
"epoch": 0.6674160460868344, |
|
"grad_norm": 0.43458178639411926, |
|
"learning_rate": 9.5e-05, |
|
"loss": 5.6111, |
|
"step": 9500 |
|
}, |
|
{ |
|
"epoch": 0.702543206407194, |
|
"grad_norm": 0.4662564694881439, |
|
"learning_rate": 0.0001, |
|
"loss": 5.604, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.702543206407194, |
|
"eval_accuracy": 0.16497299638082222, |
|
"eval_loss": 5.45039176940918, |
|
"eval_runtime": 143.2482, |
|
"eval_samples_per_second": 842.635, |
|
"eval_steps_per_second": 5.271, |
|
"step": 10000 |
|
}, |
|
{ |
|
"epoch": 0.7376703667275537, |
|
"grad_norm": 0.5193693041801453, |
|
"learning_rate": 9.985542865404077e-05, |
|
"loss": 5.5963, |
|
"step": 10500 |
|
}, |
|
{ |
|
"epoch": 0.7727975270479135, |
|
"grad_norm": 0.41712281107902527, |
|
"learning_rate": 9.971085730808155e-05, |
|
"loss": 5.5891, |
|
"step": 11000 |
|
}, |
|
{ |
|
"epoch": 0.8079246873682732, |
|
"grad_norm": 0.4692609906196594, |
|
"learning_rate": 9.956628596212231e-05, |
|
"loss": 5.5854, |
|
"step": 11500 |
|
}, |
|
{ |
|
"epoch": 0.8430518476886328, |
|
"grad_norm": 0.4751366376876831, |
|
"learning_rate": 9.942171461616309e-05, |
|
"loss": 5.5763, |
|
"step": 12000 |
|
}, |
|
{ |
|
"epoch": 0.8781790080089925, |
|
"grad_norm": 0.4744866192340851, |
|
"learning_rate": 9.927714327020385e-05, |
|
"loss": 5.5709, |
|
"step": 12500 |
|
}, |
|
{ |
|
"epoch": 0.9133061683293523, |
|
"grad_norm": 0.4810524582862854, |
|
"learning_rate": 9.913257192424462e-05, |
|
"loss": 5.5608, |
|
"step": 13000 |
|
}, |
|
{ |
|
"epoch": 0.948433328649712, |
|
"grad_norm": 0.5423319935798645, |
|
"learning_rate": 9.89880005782854e-05, |
|
"loss": 5.5541, |
|
"step": 13500 |
|
}, |
|
{ |
|
"epoch": 0.9835604889700716, |
|
"grad_norm": 0.4694032371044159, |
|
"learning_rate": 9.884342923232616e-05, |
|
"loss": 5.5498, |
|
"step": 14000 |
|
}, |
|
{ |
|
"epoch": 1.0186876492904313, |
|
"grad_norm": 0.44750478863716125, |
|
"learning_rate": 9.869885788636692e-05, |
|
"loss": 5.5414, |
|
"step": 14500 |
|
}, |
|
{ |
|
"epoch": 1.053814809610791, |
|
"grad_norm": 0.4432881772518158, |
|
"learning_rate": 9.855428654040769e-05, |
|
"loss": 5.5374, |
|
"step": 15000 |
|
}, |
|
{ |
|
"epoch": 1.0889419699311507, |
|
"grad_norm": 0.4728132486343384, |
|
"learning_rate": 9.840971519444846e-05, |
|
"loss": 5.5297, |
|
"step": 15500 |
|
}, |
|
{ |
|
"epoch": 1.1240691302515105, |
|
"grad_norm": 0.48674553632736206, |
|
"learning_rate": 9.826514384848924e-05, |
|
"loss": 5.5254, |
|
"step": 16000 |
|
}, |
|
{ |
|
"epoch": 1.1591962905718702, |
|
"grad_norm": 0.5000836849212646, |
|
"learning_rate": 9.812057250253e-05, |
|
"loss": 5.52, |
|
"step": 16500 |
|
}, |
|
{ |
|
"epoch": 1.1943234508922298, |
|
"grad_norm": 0.43097734451293945, |
|
"learning_rate": 9.797600115657077e-05, |
|
"loss": 5.5166, |
|
"step": 17000 |
|
}, |
|
{ |
|
"epoch": 1.2294506112125896, |
|
"grad_norm": 0.4628123342990875, |
|
"learning_rate": 9.783142981061155e-05, |
|
"loss": 5.5085, |
|
"step": 17500 |
|
}, |
|
{ |
|
"epoch": 1.2645777715329491, |
|
"grad_norm": 0.46364983916282654, |
|
"learning_rate": 9.768685846465231e-05, |
|
"loss": 5.507, |
|
"step": 18000 |
|
}, |
|
{ |
|
"epoch": 1.299704931853309, |
|
"grad_norm": 0.6745333671569824, |
|
"learning_rate": 9.754228711869307e-05, |
|
"loss": 5.4862, |
|
"step": 18500 |
|
}, |
|
{ |
|
"epoch": 1.3348320921736687, |
|
"grad_norm": 0.9819153547286987, |
|
"learning_rate": 9.739771577273384e-05, |
|
"loss": 5.3901, |
|
"step": 19000 |
|
}, |
|
{ |
|
"epoch": 1.3699592524940285, |
|
"grad_norm": 2.0434741973876953, |
|
"learning_rate": 9.725314442677463e-05, |
|
"loss": 5.1978, |
|
"step": 19500 |
|
}, |
|
{ |
|
"epoch": 1.405086412814388, |
|
"grad_norm": 2.197896957397461, |
|
"learning_rate": 9.71085730808154e-05, |
|
"loss": 4.6179, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 1.405086412814388, |
|
"eval_accuracy": 0.3758464833297413, |
|
"eval_loss": 3.827747344970703, |
|
"eval_runtime": 146.1003, |
|
"eval_samples_per_second": 826.186, |
|
"eval_steps_per_second": 5.168, |
|
"step": 20000 |
|
}, |
|
{ |
|
"epoch": 1.4402135731347478, |
|
"grad_norm": 1.750232219696045, |
|
"learning_rate": 9.696400173485616e-05, |
|
"loss": 4.0652, |
|
"step": 20500 |
|
}, |
|
{ |
|
"epoch": 1.4753407334551074, |
|
"grad_norm": 1.4147313833236694, |
|
"learning_rate": 9.681943038889692e-05, |
|
"loss": 3.6557, |
|
"step": 21000 |
|
}, |
|
{ |
|
"epoch": 1.5104678937754672, |
|
"grad_norm": 1.1749171018600464, |
|
"learning_rate": 9.66748590429377e-05, |
|
"loss": 3.3421, |
|
"step": 21500 |
|
}, |
|
{ |
|
"epoch": 1.545595054095827, |
|
"grad_norm": 1.1297008991241455, |
|
"learning_rate": 9.653028769697846e-05, |
|
"loss": 3.1555, |
|
"step": 22000 |
|
}, |
|
{ |
|
"epoch": 1.5807222144161865, |
|
"grad_norm": 1.0993692874908447, |
|
"learning_rate": 9.638571635101923e-05, |
|
"loss": 3.0326, |
|
"step": 22500 |
|
}, |
|
{ |
|
"epoch": 1.6158493747365463, |
|
"grad_norm": 1.1023796796798706, |
|
"learning_rate": 9.624114500506e-05, |
|
"loss": 2.9348, |
|
"step": 23000 |
|
}, |
|
{ |
|
"epoch": 1.650976535056906, |
|
"grad_norm": 1.036067008972168, |
|
"learning_rate": 9.609657365910077e-05, |
|
"loss": 2.8555, |
|
"step": 23500 |
|
}, |
|
{ |
|
"epoch": 1.6861036953772657, |
|
"grad_norm": 1.0167666673660278, |
|
"learning_rate": 9.595200231314155e-05, |
|
"loss": 2.7872, |
|
"step": 24000 |
|
}, |
|
{ |
|
"epoch": 1.7212308556976255, |
|
"grad_norm": 1.0188016891479492, |
|
"learning_rate": 9.580743096718231e-05, |
|
"loss": 2.7329, |
|
"step": 24500 |
|
}, |
|
{ |
|
"epoch": 1.7563580160179852, |
|
"grad_norm": 1.0286072492599487, |
|
"learning_rate": 9.566285962122307e-05, |
|
"loss": 2.6825, |
|
"step": 25000 |
|
}, |
|
{ |
|
"epoch": 1.7914851763383448, |
|
"grad_norm": 0.9929981827735901, |
|
"learning_rate": 9.551828827526385e-05, |
|
"loss": 2.6381, |
|
"step": 25500 |
|
}, |
|
{ |
|
"epoch": 1.8266123366587044, |
|
"grad_norm": 0.9288387894630432, |
|
"learning_rate": 9.537371692930462e-05, |
|
"loss": 2.5937, |
|
"step": 26000 |
|
}, |
|
{ |
|
"epoch": 1.8617394969790642, |
|
"grad_norm": 0.9797139763832092, |
|
"learning_rate": 9.522914558334538e-05, |
|
"loss": 2.5524, |
|
"step": 26500 |
|
}, |
|
{ |
|
"epoch": 1.896866657299424, |
|
"grad_norm": 0.9662619233131409, |
|
"learning_rate": 9.508457423738616e-05, |
|
"loss": 2.519, |
|
"step": 27000 |
|
}, |
|
{ |
|
"epoch": 1.9319938176197837, |
|
"grad_norm": 0.9594979286193848, |
|
"learning_rate": 9.494000289142692e-05, |
|
"loss": 2.4854, |
|
"step": 27500 |
|
}, |
|
{ |
|
"epoch": 1.9671209779401433, |
|
"grad_norm": 0.9515316486358643, |
|
"learning_rate": 9.47954315454677e-05, |
|
"loss": 2.4515, |
|
"step": 28000 |
|
}, |
|
{ |
|
"epoch": 2.002248138260503, |
|
"grad_norm": 0.9741261601448059, |
|
"learning_rate": 9.465086019950846e-05, |
|
"loss": 2.4314, |
|
"step": 28500 |
|
}, |
|
{ |
|
"epoch": 2.0373752985808626, |
|
"grad_norm": 0.9476631283760071, |
|
"learning_rate": 9.450628885354923e-05, |
|
"loss": 2.3975, |
|
"step": 29000 |
|
}, |
|
{ |
|
"epoch": 2.0725024589012224, |
|
"grad_norm": 0.9616317749023438, |
|
"learning_rate": 9.436171750758999e-05, |
|
"loss": 2.3707, |
|
"step": 29500 |
|
}, |
|
{ |
|
"epoch": 2.107629619221582, |
|
"grad_norm": 0.9059496521949768, |
|
"learning_rate": 9.421714616163077e-05, |
|
"loss": 2.353, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 2.107629619221582, |
|
"eval_accuracy": 0.5933188090938791, |
|
"eval_loss": 2.040786027908325, |
|
"eval_runtime": 146.0422, |
|
"eval_samples_per_second": 826.515, |
|
"eval_steps_per_second": 5.17, |
|
"step": 30000 |
|
}, |
|
{ |
|
"epoch": 2.142756779541942, |
|
"grad_norm": 0.9514384865760803, |
|
"learning_rate": 9.407257481567155e-05, |
|
"loss": 2.3312, |
|
"step": 30500 |
|
}, |
|
{ |
|
"epoch": 2.1778839398623013, |
|
"grad_norm": 0.9169853925704956, |
|
"learning_rate": 9.392800346971231e-05, |
|
"loss": 2.3091, |
|
"step": 31000 |
|
}, |
|
{ |
|
"epoch": 2.213011100182661, |
|
"grad_norm": 0.9202592968940735, |
|
"learning_rate": 9.378343212375307e-05, |
|
"loss": 2.2937, |
|
"step": 31500 |
|
}, |
|
{ |
|
"epoch": 2.248138260503021, |
|
"grad_norm": 0.9011738300323486, |
|
"learning_rate": 9.363886077779385e-05, |
|
"loss": 2.2759, |
|
"step": 32000 |
|
}, |
|
{ |
|
"epoch": 2.2832654208233807, |
|
"grad_norm": 0.8867260217666626, |
|
"learning_rate": 9.349428943183462e-05, |
|
"loss": 2.2593, |
|
"step": 32500 |
|
}, |
|
{ |
|
"epoch": 2.3183925811437405, |
|
"grad_norm": 0.9071447253227234, |
|
"learning_rate": 9.334971808587538e-05, |
|
"loss": 2.2402, |
|
"step": 33000 |
|
}, |
|
{ |
|
"epoch": 2.3535197414641003, |
|
"grad_norm": 0.9013161063194275, |
|
"learning_rate": 9.320514673991614e-05, |
|
"loss": 2.2255, |
|
"step": 33500 |
|
}, |
|
{ |
|
"epoch": 2.3886469017844596, |
|
"grad_norm": 0.8916395306587219, |
|
"learning_rate": 9.306057539395692e-05, |
|
"loss": 2.2123, |
|
"step": 34000 |
|
}, |
|
{ |
|
"epoch": 2.4237740621048194, |
|
"grad_norm": 0.8660832047462463, |
|
"learning_rate": 9.29160040479977e-05, |
|
"loss": 2.1955, |
|
"step": 34500 |
|
}, |
|
{ |
|
"epoch": 2.458901222425179, |
|
"grad_norm": 0.8876006603240967, |
|
"learning_rate": 9.277143270203846e-05, |
|
"loss": 2.1837, |
|
"step": 35000 |
|
}, |
|
{ |
|
"epoch": 2.494028382745539, |
|
"grad_norm": 0.8954652547836304, |
|
"learning_rate": 9.262686135607923e-05, |
|
"loss": 2.1732, |
|
"step": 35500 |
|
}, |
|
{ |
|
"epoch": 2.5291555430658983, |
|
"grad_norm": 0.9105496406555176, |
|
"learning_rate": 9.248229001012e-05, |
|
"loss": 2.1599, |
|
"step": 36000 |
|
}, |
|
{ |
|
"epoch": 2.564282703386258, |
|
"grad_norm": 0.8876945972442627, |
|
"learning_rate": 9.233771866416077e-05, |
|
"loss": 2.1472, |
|
"step": 36500 |
|
}, |
|
{ |
|
"epoch": 2.599409863706618, |
|
"grad_norm": 0.8663467168807983, |
|
"learning_rate": 9.219314731820153e-05, |
|
"loss": 2.1345, |
|
"step": 37000 |
|
}, |
|
{ |
|
"epoch": 2.6345370240269776, |
|
"grad_norm": 0.9176456928253174, |
|
"learning_rate": 9.20485759722423e-05, |
|
"loss": 2.1258, |
|
"step": 37500 |
|
}, |
|
{ |
|
"epoch": 2.6696641843473374, |
|
"grad_norm": 0.9134395718574524, |
|
"learning_rate": 9.190400462628307e-05, |
|
"loss": 2.1125, |
|
"step": 38000 |
|
}, |
|
{ |
|
"epoch": 2.704791344667697, |
|
"grad_norm": 0.8617592453956604, |
|
"learning_rate": 9.175943328032385e-05, |
|
"loss": 2.1041, |
|
"step": 38500 |
|
}, |
|
{ |
|
"epoch": 2.739918504988057, |
|
"grad_norm": 0.8791117668151855, |
|
"learning_rate": 9.161486193436461e-05, |
|
"loss": 2.0931, |
|
"step": 39000 |
|
}, |
|
{ |
|
"epoch": 2.7750456653084163, |
|
"grad_norm": 0.8784831762313843, |
|
"learning_rate": 9.147029058840538e-05, |
|
"loss": 2.0854, |
|
"step": 39500 |
|
}, |
|
{ |
|
"epoch": 2.810172825628776, |
|
"grad_norm": 0.8679407238960266, |
|
"learning_rate": 9.132571924244616e-05, |
|
"loss": 2.0739, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 2.810172825628776, |
|
"eval_accuracy": 0.6315697031283101, |
|
"eval_loss": 1.7932097911834717, |
|
"eval_runtime": 144.409, |
|
"eval_samples_per_second": 835.862, |
|
"eval_steps_per_second": 5.228, |
|
"step": 40000 |
|
}, |
|
{ |
|
"epoch": 2.845299985949136, |
|
"grad_norm": 0.8974806070327759, |
|
"learning_rate": 9.118114789648692e-05, |
|
"loss": 2.0627, |
|
"step": 40500 |
|
}, |
|
{ |
|
"epoch": 2.8804271462694957, |
|
"grad_norm": 0.8850300312042236, |
|
"learning_rate": 9.103657655052768e-05, |
|
"loss": 2.0544, |
|
"step": 41000 |
|
}, |
|
{ |
|
"epoch": 2.915554306589855, |
|
"grad_norm": 0.8605426549911499, |
|
"learning_rate": 9.089200520456845e-05, |
|
"loss": 2.0464, |
|
"step": 41500 |
|
}, |
|
{ |
|
"epoch": 2.950681466910215, |
|
"grad_norm": 0.8664257526397705, |
|
"learning_rate": 9.074743385860923e-05, |
|
"loss": 2.0404, |
|
"step": 42000 |
|
}, |
|
{ |
|
"epoch": 2.9858086272305746, |
|
"grad_norm": 0.8507780432701111, |
|
"learning_rate": 9.060286251265e-05, |
|
"loss": 2.0325, |
|
"step": 42500 |
|
}, |
|
{ |
|
"epoch": 3.0209357875509344, |
|
"grad_norm": 0.8595208525657654, |
|
"learning_rate": 9.045829116669077e-05, |
|
"loss": 2.018, |
|
"step": 43000 |
|
}, |
|
{ |
|
"epoch": 3.056062947871294, |
|
"grad_norm": 0.8866372108459473, |
|
"learning_rate": 9.031371982073153e-05, |
|
"loss": 2.0135, |
|
"step": 43500 |
|
}, |
|
{ |
|
"epoch": 3.091190108191654, |
|
"grad_norm": 0.8739855289459229, |
|
"learning_rate": 9.016914847477231e-05, |
|
"loss": 2.0101, |
|
"step": 44000 |
|
}, |
|
{ |
|
"epoch": 3.1263172685120133, |
|
"grad_norm": 0.8769083619117737, |
|
"learning_rate": 9.002457712881307e-05, |
|
"loss": 2.0, |
|
"step": 44500 |
|
}, |
|
{ |
|
"epoch": 3.161444428832373, |
|
"grad_norm": 0.8757010102272034, |
|
"learning_rate": 8.988000578285384e-05, |
|
"loss": 1.9949, |
|
"step": 45000 |
|
}, |
|
{ |
|
"epoch": 3.196571589152733, |
|
"grad_norm": 0.8755298256874084, |
|
"learning_rate": 8.973543443689461e-05, |
|
"loss": 1.9849, |
|
"step": 45500 |
|
}, |
|
{ |
|
"epoch": 3.2316987494730927, |
|
"grad_norm": 0.8611502647399902, |
|
"learning_rate": 8.959086309093538e-05, |
|
"loss": 1.9821, |
|
"step": 46000 |
|
}, |
|
{ |
|
"epoch": 3.2668259097934524, |
|
"grad_norm": 0.8552160859107971, |
|
"learning_rate": 8.944629174497616e-05, |
|
"loss": 1.9738, |
|
"step": 46500 |
|
}, |
|
{ |
|
"epoch": 3.301953070113812, |
|
"grad_norm": 0.8645509481430054, |
|
"learning_rate": 8.930172039901692e-05, |
|
"loss": 1.9678, |
|
"step": 47000 |
|
}, |
|
{ |
|
"epoch": 3.3370802304341716, |
|
"grad_norm": 0.8320329189300537, |
|
"learning_rate": 8.915714905305768e-05, |
|
"loss": 1.9614, |
|
"step": 47500 |
|
}, |
|
{ |
|
"epoch": 3.3722073907545314, |
|
"grad_norm": 0.8820323944091797, |
|
"learning_rate": 8.901257770709845e-05, |
|
"loss": 1.9578, |
|
"step": 48000 |
|
}, |
|
{ |
|
"epoch": 3.407334551074891, |
|
"grad_norm": 0.847457230091095, |
|
"learning_rate": 8.886800636113923e-05, |
|
"loss": 1.9494, |
|
"step": 48500 |
|
}, |
|
{ |
|
"epoch": 3.442461711395251, |
|
"grad_norm": 0.858734667301178, |
|
"learning_rate": 8.872343501518e-05, |
|
"loss": 1.942, |
|
"step": 49000 |
|
}, |
|
{ |
|
"epoch": 3.4775888717156107, |
|
"grad_norm": 0.8552329540252686, |
|
"learning_rate": 8.857886366922077e-05, |
|
"loss": 1.9367, |
|
"step": 49500 |
|
}, |
|
{ |
|
"epoch": 3.51271603203597, |
|
"grad_norm": 0.8536255359649658, |
|
"learning_rate": 8.843429232326153e-05, |
|
"loss": 1.9345, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 3.51271603203597, |
|
"eval_accuracy": 0.652718528822783, |
|
"eval_loss": 1.6618030071258545, |
|
"eval_runtime": 145.7638, |
|
"eval_samples_per_second": 828.093, |
|
"eval_steps_per_second": 5.18, |
|
"step": 50000 |
|
}, |
|
{ |
|
"epoch": 3.54784319235633, |
|
"grad_norm": 0.8829108476638794, |
|
"learning_rate": 8.828972097730231e-05, |
|
"loss": 1.9303, |
|
"step": 50500 |
|
}, |
|
{ |
|
"epoch": 3.5829703526766896, |
|
"grad_norm": 0.8787323236465454, |
|
"learning_rate": 8.814514963134307e-05, |
|
"loss": 1.9231, |
|
"step": 51000 |
|
}, |
|
{ |
|
"epoch": 3.6180975129970494, |
|
"grad_norm": 0.8392846584320068, |
|
"learning_rate": 8.800057828538384e-05, |
|
"loss": 1.9205, |
|
"step": 51500 |
|
}, |
|
{ |
|
"epoch": 3.6532246733174087, |
|
"grad_norm": 0.8578284382820129, |
|
"learning_rate": 8.78560069394246e-05, |
|
"loss": 1.9147, |
|
"step": 52000 |
|
}, |
|
{ |
|
"epoch": 3.6883518336377685, |
|
"grad_norm": 0.8548659086227417, |
|
"learning_rate": 8.771143559346538e-05, |
|
"loss": 1.9079, |
|
"step": 52500 |
|
}, |
|
{ |
|
"epoch": 3.7234789939581283, |
|
"grad_norm": 0.8397179245948792, |
|
"learning_rate": 8.756686424750616e-05, |
|
"loss": 1.9064, |
|
"step": 53000 |
|
}, |
|
{ |
|
"epoch": 3.758606154278488, |
|
"grad_norm": 0.886605978012085, |
|
"learning_rate": 8.742229290154692e-05, |
|
"loss": 1.9006, |
|
"step": 53500 |
|
}, |
|
{ |
|
"epoch": 3.793733314598848, |
|
"grad_norm": 0.8749421834945679, |
|
"learning_rate": 8.727772155558768e-05, |
|
"loss": 1.894, |
|
"step": 54000 |
|
}, |
|
{ |
|
"epoch": 3.8288604749192077, |
|
"grad_norm": 0.8433464765548706, |
|
"learning_rate": 8.713315020962846e-05, |
|
"loss": 1.8917, |
|
"step": 54500 |
|
}, |
|
{ |
|
"epoch": 3.8639876352395675, |
|
"grad_norm": 0.8542115092277527, |
|
"learning_rate": 8.698857886366923e-05, |
|
"loss": 1.8846, |
|
"step": 55000 |
|
}, |
|
{ |
|
"epoch": 3.899114795559927, |
|
"grad_norm": 0.8477907776832581, |
|
"learning_rate": 8.684400751770999e-05, |
|
"loss": 1.88, |
|
"step": 55500 |
|
}, |
|
{ |
|
"epoch": 3.9342419558802866, |
|
"grad_norm": 0.8515335321426392, |
|
"learning_rate": 8.669943617175075e-05, |
|
"loss": 1.8768, |
|
"step": 56000 |
|
}, |
|
{ |
|
"epoch": 3.9693691162006464, |
|
"grad_norm": 0.8547754287719727, |
|
"learning_rate": 8.655486482579153e-05, |
|
"loss": 1.8742, |
|
"step": 56500 |
|
}, |
|
{ |
|
"epoch": 4.004496276521006, |
|
"grad_norm": 0.8512959480285645, |
|
"learning_rate": 8.641029347983231e-05, |
|
"loss": 1.868, |
|
"step": 57000 |
|
}, |
|
{ |
|
"epoch": 4.0396234368413655, |
|
"grad_norm": 0.8635633587837219, |
|
"learning_rate": 8.626572213387307e-05, |
|
"loss": 1.8603, |
|
"step": 57500 |
|
}, |
|
{ |
|
"epoch": 4.074750597161725, |
|
"grad_norm": 0.8565309643745422, |
|
"learning_rate": 8.612115078791384e-05, |
|
"loss": 1.858, |
|
"step": 58000 |
|
}, |
|
{ |
|
"epoch": 4.109877757482085, |
|
"grad_norm": 0.8473580479621887, |
|
"learning_rate": 8.597657944195461e-05, |
|
"loss": 1.8543, |
|
"step": 58500 |
|
}, |
|
{ |
|
"epoch": 4.145004917802445, |
|
"grad_norm": 0.8602383732795715, |
|
"learning_rate": 8.583200809599538e-05, |
|
"loss": 1.8505, |
|
"step": 59000 |
|
}, |
|
{ |
|
"epoch": 4.180132078122805, |
|
"grad_norm": 0.8420585989952087, |
|
"learning_rate": 8.568743675003614e-05, |
|
"loss": 1.8507, |
|
"step": 59500 |
|
}, |
|
{ |
|
"epoch": 4.215259238443164, |
|
"grad_norm": 0.8763858079910278, |
|
"learning_rate": 8.55428654040769e-05, |
|
"loss": 1.844, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 4.215259238443164, |
|
"eval_accuracy": 0.6653352124865598, |
|
"eval_loss": 1.5828626155853271, |
|
"eval_runtime": 145.5436, |
|
"eval_samples_per_second": 829.346, |
|
"eval_steps_per_second": 5.187, |
|
"step": 60000 |
|
}, |
|
{ |
|
"epoch": 4.250386398763524, |
|
"grad_norm": 0.8648141026496887, |
|
"learning_rate": 8.539829405811768e-05, |
|
"loss": 1.8439, |
|
"step": 60500 |
|
}, |
|
{ |
|
"epoch": 4.285513559083884, |
|
"grad_norm": 0.8564465045928955, |
|
"learning_rate": 8.525372271215846e-05, |
|
"loss": 1.8372, |
|
"step": 61000 |
|
}, |
|
{ |
|
"epoch": 4.320640719404244, |
|
"grad_norm": 0.8347111344337463, |
|
"learning_rate": 8.510915136619923e-05, |
|
"loss": 1.8322, |
|
"step": 61500 |
|
}, |
|
{ |
|
"epoch": 4.355767879724603, |
|
"grad_norm": 0.8819388747215271, |
|
"learning_rate": 8.496458002023999e-05, |
|
"loss": 1.8299, |
|
"step": 62000 |
|
}, |
|
{ |
|
"epoch": 4.3908950400449624, |
|
"grad_norm": 0.8561258316040039, |
|
"learning_rate": 8.482000867428075e-05, |
|
"loss": 1.8319, |
|
"step": 62500 |
|
}, |
|
{ |
|
"epoch": 4.426022200365322, |
|
"grad_norm": 0.8755387663841248, |
|
"learning_rate": 8.467543732832153e-05, |
|
"loss": 1.827, |
|
"step": 63000 |
|
}, |
|
{ |
|
"epoch": 4.461149360685682, |
|
"grad_norm": 0.8702017068862915, |
|
"learning_rate": 8.45308659823623e-05, |
|
"loss": 1.8213, |
|
"step": 63500 |
|
}, |
|
{ |
|
"epoch": 4.496276521006042, |
|
"grad_norm": 0.8973068594932556, |
|
"learning_rate": 8.438629463640307e-05, |
|
"loss": 1.819, |
|
"step": 64000 |
|
}, |
|
{ |
|
"epoch": 4.531403681326402, |
|
"grad_norm": 0.8871073126792908, |
|
"learning_rate": 8.424172329044384e-05, |
|
"loss": 1.8166, |
|
"step": 64500 |
|
}, |
|
{ |
|
"epoch": 4.566530841646761, |
|
"grad_norm": 0.8684224486351013, |
|
"learning_rate": 8.409715194448461e-05, |
|
"loss": 1.8129, |
|
"step": 65000 |
|
}, |
|
{ |
|
"epoch": 4.601658001967121, |
|
"grad_norm": 0.8582248687744141, |
|
"learning_rate": 8.395258059852538e-05, |
|
"loss": 1.8113, |
|
"step": 65500 |
|
}, |
|
{ |
|
"epoch": 4.636785162287481, |
|
"grad_norm": 0.843012809753418, |
|
"learning_rate": 8.380800925256614e-05, |
|
"loss": 1.807, |
|
"step": 66000 |
|
}, |
|
{ |
|
"epoch": 4.671912322607841, |
|
"grad_norm": 0.8517187237739563, |
|
"learning_rate": 8.36634379066069e-05, |
|
"loss": 1.8064, |
|
"step": 66500 |
|
}, |
|
{ |
|
"epoch": 4.7070394829282005, |
|
"grad_norm": 0.8724709153175354, |
|
"learning_rate": 8.351886656064768e-05, |
|
"loss": 1.8003, |
|
"step": 67000 |
|
}, |
|
{ |
|
"epoch": 4.742166643248559, |
|
"grad_norm": 0.8867350220680237, |
|
"learning_rate": 8.337429521468846e-05, |
|
"loss": 1.799, |
|
"step": 67500 |
|
}, |
|
{ |
|
"epoch": 4.777293803568919, |
|
"grad_norm": 0.8441566824913025, |
|
"learning_rate": 8.322972386872923e-05, |
|
"loss": 1.7979, |
|
"step": 68000 |
|
}, |
|
{ |
|
"epoch": 4.812420963889279, |
|
"grad_norm": 0.8734049201011658, |
|
"learning_rate": 8.308515252276999e-05, |
|
"loss": 1.7957, |
|
"step": 68500 |
|
}, |
|
{ |
|
"epoch": 4.847548124209639, |
|
"grad_norm": 0.8671762347221375, |
|
"learning_rate": 8.294058117681077e-05, |
|
"loss": 1.789, |
|
"step": 69000 |
|
}, |
|
{ |
|
"epoch": 4.8826752845299985, |
|
"grad_norm": 0.8785460591316223, |
|
"learning_rate": 8.279600983085153e-05, |
|
"loss": 1.7892, |
|
"step": 69500 |
|
}, |
|
{ |
|
"epoch": 4.917802444850358, |
|
"grad_norm": 0.8827086687088013, |
|
"learning_rate": 8.26514384848923e-05, |
|
"loss": 1.7874, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 4.917802444850358, |
|
"eval_accuracy": 0.6750020166957967, |
|
"eval_loss": 1.5247992277145386, |
|
"eval_runtime": 144.6205, |
|
"eval_samples_per_second": 834.639, |
|
"eval_steps_per_second": 5.221, |
|
"step": 70000 |
|
}, |
|
{ |
|
"epoch": 4.952929605170718, |
|
"grad_norm": 0.8440523743629456, |
|
"learning_rate": 8.250686713893306e-05, |
|
"loss": 1.7851, |
|
"step": 70500 |
|
}, |
|
{ |
|
"epoch": 4.988056765491078, |
|
"grad_norm": 0.8763037919998169, |
|
"learning_rate": 8.236229579297384e-05, |
|
"loss": 1.7801, |
|
"step": 71000 |
|
}, |
|
{ |
|
"epoch": 5.023183925811438, |
|
"grad_norm": 0.88693767786026, |
|
"learning_rate": 8.221772444701461e-05, |
|
"loss": 1.7789, |
|
"step": 71500 |
|
}, |
|
{ |
|
"epoch": 5.0583110861317975, |
|
"grad_norm": 0.866883397102356, |
|
"learning_rate": 8.207315310105538e-05, |
|
"loss": 1.7714, |
|
"step": 72000 |
|
}, |
|
{ |
|
"epoch": 5.093438246452157, |
|
"grad_norm": 0.8699513077735901, |
|
"learning_rate": 8.192858175509614e-05, |
|
"loss": 1.7682, |
|
"step": 72500 |
|
}, |
|
{ |
|
"epoch": 5.128565406772516, |
|
"grad_norm": 0.8703712821006775, |
|
"learning_rate": 8.178401040913692e-05, |
|
"loss": 1.7662, |
|
"step": 73000 |
|
}, |
|
{ |
|
"epoch": 5.163692567092876, |
|
"grad_norm": 0.854120135307312, |
|
"learning_rate": 8.163943906317768e-05, |
|
"loss": 1.7647, |
|
"step": 73500 |
|
}, |
|
{ |
|
"epoch": 5.198819727413236, |
|
"grad_norm": 0.8597338795661926, |
|
"learning_rate": 8.149486771721845e-05, |
|
"loss": 1.7613, |
|
"step": 74000 |
|
}, |
|
{ |
|
"epoch": 5.2339468877335955, |
|
"grad_norm": 0.8737318515777588, |
|
"learning_rate": 8.135029637125921e-05, |
|
"loss": 1.7627, |
|
"step": 74500 |
|
}, |
|
{ |
|
"epoch": 5.269074048053955, |
|
"grad_norm": 0.8847633600234985, |
|
"learning_rate": 8.120572502529999e-05, |
|
"loss": 1.7594, |
|
"step": 75000 |
|
}, |
|
{ |
|
"epoch": 5.304201208374315, |
|
"grad_norm": 0.8676943778991699, |
|
"learning_rate": 8.106115367934077e-05, |
|
"loss": 1.7574, |
|
"step": 75500 |
|
}, |
|
{ |
|
"epoch": 5.339328368694675, |
|
"grad_norm": 0.8717545866966248, |
|
"learning_rate": 8.091658233338153e-05, |
|
"loss": 1.7551, |
|
"step": 76000 |
|
}, |
|
{ |
|
"epoch": 5.374455529015035, |
|
"grad_norm": 0.8879154920578003, |
|
"learning_rate": 8.07720109874223e-05, |
|
"loss": 1.752, |
|
"step": 76500 |
|
}, |
|
{ |
|
"epoch": 5.409582689335394, |
|
"grad_norm": 0.861165463924408, |
|
"learning_rate": 8.062743964146306e-05, |
|
"loss": 1.748, |
|
"step": 77000 |
|
}, |
|
{ |
|
"epoch": 5.444709849655754, |
|
"grad_norm": 0.885162889957428, |
|
"learning_rate": 8.048286829550384e-05, |
|
"loss": 1.7444, |
|
"step": 77500 |
|
}, |
|
{ |
|
"epoch": 5.479837009976113, |
|
"grad_norm": 0.8831413388252258, |
|
"learning_rate": 8.03382969495446e-05, |
|
"loss": 1.7457, |
|
"step": 78000 |
|
}, |
|
{ |
|
"epoch": 5.514964170296473, |
|
"grad_norm": 0.8693949580192566, |
|
"learning_rate": 8.019372560358536e-05, |
|
"loss": 1.7439, |
|
"step": 78500 |
|
}, |
|
{ |
|
"epoch": 5.550091330616833, |
|
"grad_norm": 0.8950614333152771, |
|
"learning_rate": 8.004915425762614e-05, |
|
"loss": 1.7414, |
|
"step": 79000 |
|
}, |
|
{ |
|
"epoch": 5.5852184909371925, |
|
"grad_norm": 0.8772277235984802, |
|
"learning_rate": 7.990458291166692e-05, |
|
"loss": 1.7397, |
|
"step": 79500 |
|
}, |
|
{ |
|
"epoch": 5.620345651257552, |
|
"grad_norm": 0.8824251890182495, |
|
"learning_rate": 7.976001156570768e-05, |
|
"loss": 1.737, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 5.620345651257552, |
|
"eval_accuracy": 0.6819064890501978, |
|
"eval_loss": 1.4823895692825317, |
|
"eval_runtime": 145.2164, |
|
"eval_samples_per_second": 831.215, |
|
"eval_steps_per_second": 5.199, |
|
"step": 80000 |
|
}, |
|
{ |
|
"epoch": 5.655472811577912, |
|
"grad_norm": 0.9014139175415039, |
|
"learning_rate": 7.961544021974845e-05, |
|
"loss": 1.7339, |
|
"step": 80500 |
|
}, |
|
{ |
|
"epoch": 5.690599971898272, |
|
"grad_norm": 0.8738614320755005, |
|
"learning_rate": 7.947086887378921e-05, |
|
"loss": 1.7395, |
|
"step": 81000 |
|
}, |
|
{ |
|
"epoch": 5.725727132218632, |
|
"grad_norm": 0.8764089941978455, |
|
"learning_rate": 7.932629752782999e-05, |
|
"loss": 1.7336, |
|
"step": 81500 |
|
}, |
|
{ |
|
"epoch": 5.760854292538991, |
|
"grad_norm": 0.9202150106430054, |
|
"learning_rate": 7.918172618187075e-05, |
|
"loss": 1.7291, |
|
"step": 82000 |
|
}, |
|
{ |
|
"epoch": 5.795981452859351, |
|
"grad_norm": 0.8928285837173462, |
|
"learning_rate": 7.903715483591153e-05, |
|
"loss": 1.7302, |
|
"step": 82500 |
|
}, |
|
{ |
|
"epoch": 5.83110861317971, |
|
"grad_norm": 0.8655526041984558, |
|
"learning_rate": 7.88925834899523e-05, |
|
"loss": 1.7287, |
|
"step": 83000 |
|
}, |
|
{ |
|
"epoch": 5.866235773500071, |
|
"grad_norm": 0.8782069087028503, |
|
"learning_rate": 7.874801214399307e-05, |
|
"loss": 1.7261, |
|
"step": 83500 |
|
}, |
|
{ |
|
"epoch": 5.90136293382043, |
|
"grad_norm": 0.8890528678894043, |
|
"learning_rate": 7.860344079803384e-05, |
|
"loss": 1.7237, |
|
"step": 84000 |
|
}, |
|
{ |
|
"epoch": 5.936490094140789, |
|
"grad_norm": 0.8649280667304993, |
|
"learning_rate": 7.84588694520746e-05, |
|
"loss": 1.7213, |
|
"step": 84500 |
|
}, |
|
{ |
|
"epoch": 5.971617254461149, |
|
"grad_norm": 0.9018586277961731, |
|
"learning_rate": 7.831429810611536e-05, |
|
"loss": 1.7194, |
|
"step": 85000 |
|
}, |
|
{ |
|
"epoch": 6.006744414781509, |
|
"grad_norm": 0.8934980034828186, |
|
"learning_rate": 7.816972676015614e-05, |
|
"loss": 1.7184, |
|
"step": 85500 |
|
}, |
|
{ |
|
"epoch": 6.041871575101869, |
|
"grad_norm": 0.8839065432548523, |
|
"learning_rate": 7.802515541419692e-05, |
|
"loss": 1.7127, |
|
"step": 86000 |
|
}, |
|
{ |
|
"epoch": 6.076998735422229, |
|
"grad_norm": 0.8806888461112976, |
|
"learning_rate": 7.788058406823768e-05, |
|
"loss": 1.7144, |
|
"step": 86500 |
|
}, |
|
{ |
|
"epoch": 6.112125895742588, |
|
"grad_norm": 0.8924936652183533, |
|
"learning_rate": 7.773601272227845e-05, |
|
"loss": 1.7084, |
|
"step": 87000 |
|
}, |
|
{ |
|
"epoch": 6.147253056062948, |
|
"grad_norm": 0.9053957462310791, |
|
"learning_rate": 7.759144137631922e-05, |
|
"loss": 1.7076, |
|
"step": 87500 |
|
}, |
|
{ |
|
"epoch": 6.182380216383308, |
|
"grad_norm": 0.8794803023338318, |
|
"learning_rate": 7.744687003035999e-05, |
|
"loss": 1.7069, |
|
"step": 88000 |
|
}, |
|
{ |
|
"epoch": 6.217507376703667, |
|
"grad_norm": 0.9028088450431824, |
|
"learning_rate": 7.730229868440075e-05, |
|
"loss": 1.7067, |
|
"step": 88500 |
|
}, |
|
{ |
|
"epoch": 6.252634537024027, |
|
"grad_norm": 0.8775391578674316, |
|
"learning_rate": 7.715772733844152e-05, |
|
"loss": 1.7021, |
|
"step": 89000 |
|
}, |
|
{ |
|
"epoch": 6.287761697344386, |
|
"grad_norm": 0.9188543558120728, |
|
"learning_rate": 7.70131559924823e-05, |
|
"loss": 1.7019, |
|
"step": 89500 |
|
}, |
|
{ |
|
"epoch": 6.322888857664746, |
|
"grad_norm": 0.8761645555496216, |
|
"learning_rate": 7.686858464652307e-05, |
|
"loss": 1.7017, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 6.322888857664746, |
|
"eval_accuracy": 0.6875799717584923, |
|
"eval_loss": 1.45062255859375, |
|
"eval_runtime": 146.6571, |
|
"eval_samples_per_second": 823.049, |
|
"eval_steps_per_second": 5.148, |
|
"step": 90000 |
|
}, |
|
{ |
|
"epoch": 6.358016017985106, |
|
"grad_norm": 0.8915168642997742, |
|
"learning_rate": 7.672401330056384e-05, |
|
"loss": 1.7011, |
|
"step": 90500 |
|
}, |
|
{ |
|
"epoch": 6.393143178305466, |
|
"grad_norm": 0.8964366912841797, |
|
"learning_rate": 7.65794419546046e-05, |
|
"loss": 1.6985, |
|
"step": 91000 |
|
}, |
|
{ |
|
"epoch": 6.4282703386258255, |
|
"grad_norm": 0.8956195712089539, |
|
"learning_rate": 7.643487060864536e-05, |
|
"loss": 1.6956, |
|
"step": 91500 |
|
}, |
|
{ |
|
"epoch": 6.463397498946185, |
|
"grad_norm": 0.9271071553230286, |
|
"learning_rate": 7.629029926268614e-05, |
|
"loss": 1.695, |
|
"step": 92000 |
|
}, |
|
{ |
|
"epoch": 6.498524659266545, |
|
"grad_norm": 0.882736086845398, |
|
"learning_rate": 7.61457279167269e-05, |
|
"loss": 1.6941, |
|
"step": 92500 |
|
}, |
|
{ |
|
"epoch": 6.533651819586905, |
|
"grad_norm": 0.866625964641571, |
|
"learning_rate": 7.600115657076767e-05, |
|
"loss": 1.6942, |
|
"step": 93000 |
|
}, |
|
{ |
|
"epoch": 6.568778979907265, |
|
"grad_norm": 0.9276591539382935, |
|
"learning_rate": 7.585658522480845e-05, |
|
"loss": 1.6915, |
|
"step": 93500 |
|
}, |
|
{ |
|
"epoch": 6.603906140227624, |
|
"grad_norm": 0.8902279138565063, |
|
"learning_rate": 7.571201387884922e-05, |
|
"loss": 1.6874, |
|
"step": 94000 |
|
}, |
|
{ |
|
"epoch": 6.639033300547983, |
|
"grad_norm": 0.8612838983535767, |
|
"learning_rate": 7.556744253288999e-05, |
|
"loss": 1.6864, |
|
"step": 94500 |
|
}, |
|
{ |
|
"epoch": 6.674160460868343, |
|
"grad_norm": 0.8922297358512878, |
|
"learning_rate": 7.542287118693075e-05, |
|
"loss": 1.6851, |
|
"step": 95000 |
|
}, |
|
{ |
|
"epoch": 6.709287621188703, |
|
"grad_norm": 0.9003462791442871, |
|
"learning_rate": 7.527829984097152e-05, |
|
"loss": 1.6843, |
|
"step": 95500 |
|
}, |
|
{ |
|
"epoch": 6.744414781509063, |
|
"grad_norm": 0.8967103958129883, |
|
"learning_rate": 7.51337284950123e-05, |
|
"loss": 1.6888, |
|
"step": 96000 |
|
}, |
|
{ |
|
"epoch": 6.7795419418294225, |
|
"grad_norm": 0.8979239463806152, |
|
"learning_rate": 7.498915714905306e-05, |
|
"loss": 1.6813, |
|
"step": 96500 |
|
}, |
|
{ |
|
"epoch": 6.814669102149782, |
|
"grad_norm": 0.8968941569328308, |
|
"learning_rate": 7.484458580309382e-05, |
|
"loss": 1.6822, |
|
"step": 97000 |
|
}, |
|
{ |
|
"epoch": 6.849796262470142, |
|
"grad_norm": 0.8832077383995056, |
|
"learning_rate": 7.47000144571346e-05, |
|
"loss": 1.676, |
|
"step": 97500 |
|
}, |
|
{ |
|
"epoch": 6.884923422790502, |
|
"grad_norm": 0.8957480192184448, |
|
"learning_rate": 7.455544311117538e-05, |
|
"loss": 1.6772, |
|
"step": 98000 |
|
}, |
|
{ |
|
"epoch": 6.920050583110862, |
|
"grad_norm": 0.911115825176239, |
|
"learning_rate": 7.441087176521614e-05, |
|
"loss": 1.674, |
|
"step": 98500 |
|
}, |
|
{ |
|
"epoch": 6.955177743431221, |
|
"grad_norm": 0.9081796407699585, |
|
"learning_rate": 7.42663004192569e-05, |
|
"loss": 1.6746, |
|
"step": 99000 |
|
}, |
|
{ |
|
"epoch": 6.99030490375158, |
|
"grad_norm": 0.9057807922363281, |
|
"learning_rate": 7.412172907329767e-05, |
|
"loss": 1.6702, |
|
"step": 99500 |
|
}, |
|
{ |
|
"epoch": 7.02543206407194, |
|
"grad_norm": 0.9082474708557129, |
|
"learning_rate": 7.397715772733845e-05, |
|
"loss": 1.6703, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 7.02543206407194, |
|
"eval_accuracy": 0.6921466981210505, |
|
"eval_loss": 1.420421838760376, |
|
"eval_runtime": 146.0221, |
|
"eval_samples_per_second": 826.628, |
|
"eval_steps_per_second": 5.17, |
|
"step": 100000 |
|
}, |
|
{ |
|
"epoch": 7.0605592243923, |
|
"grad_norm": 0.9067661166191101, |
|
"learning_rate": 7.383258638137921e-05, |
|
"loss": 1.6691, |
|
"step": 100500 |
|
}, |
|
{ |
|
"epoch": 7.09568638471266, |
|
"grad_norm": 0.9043187499046326, |
|
"learning_rate": 7.368801503541999e-05, |
|
"loss": 1.669, |
|
"step": 101000 |
|
}, |
|
{ |
|
"epoch": 7.1308135450330195, |
|
"grad_norm": 0.8964481949806213, |
|
"learning_rate": 7.354344368946075e-05, |
|
"loss": 1.667, |
|
"step": 101500 |
|
}, |
|
{ |
|
"epoch": 7.165940705353379, |
|
"grad_norm": 0.9258145689964294, |
|
"learning_rate": 7.339887234350153e-05, |
|
"loss": 1.6653, |
|
"step": 102000 |
|
}, |
|
{ |
|
"epoch": 7.201067865673739, |
|
"grad_norm": 0.8769683241844177, |
|
"learning_rate": 7.32543009975423e-05, |
|
"loss": 1.664, |
|
"step": 102500 |
|
}, |
|
{ |
|
"epoch": 7.236195025994099, |
|
"grad_norm": 0.910929262638092, |
|
"learning_rate": 7.310972965158306e-05, |
|
"loss": 1.6624, |
|
"step": 103000 |
|
}, |
|
{ |
|
"epoch": 7.271322186314459, |
|
"grad_norm": 0.8838810324668884, |
|
"learning_rate": 7.296515830562382e-05, |
|
"loss": 1.6628, |
|
"step": 103500 |
|
}, |
|
{ |
|
"epoch": 7.306449346634818, |
|
"grad_norm": 0.8857362270355225, |
|
"learning_rate": 7.28205869596646e-05, |
|
"loss": 1.658, |
|
"step": 104000 |
|
}, |
|
{ |
|
"epoch": 7.341576506955178, |
|
"grad_norm": 0.8846690654754639, |
|
"learning_rate": 7.267601561370536e-05, |
|
"loss": 1.6543, |
|
"step": 104500 |
|
}, |
|
{ |
|
"epoch": 7.376703667275537, |
|
"grad_norm": 0.8960644006729126, |
|
"learning_rate": 7.253144426774614e-05, |
|
"loss": 1.6574, |
|
"step": 105000 |
|
}, |
|
{ |
|
"epoch": 7.411830827595897, |
|
"grad_norm": 0.9093925356864929, |
|
"learning_rate": 7.23868729217869e-05, |
|
"loss": 1.6602, |
|
"step": 105500 |
|
}, |
|
{ |
|
"epoch": 7.446957987916257, |
|
"grad_norm": 0.8878596425056458, |
|
"learning_rate": 7.224230157582768e-05, |
|
"loss": 1.6583, |
|
"step": 106000 |
|
}, |
|
{ |
|
"epoch": 7.482085148236616, |
|
"grad_norm": 0.8982939124107361, |
|
"learning_rate": 7.209773022986845e-05, |
|
"loss": 1.6542, |
|
"step": 106500 |
|
}, |
|
{ |
|
"epoch": 7.517212308556976, |
|
"grad_norm": 0.931877613067627, |
|
"learning_rate": 7.195315888390921e-05, |
|
"loss": 1.6513, |
|
"step": 107000 |
|
}, |
|
{ |
|
"epoch": 7.552339468877336, |
|
"grad_norm": 0.9102097749710083, |
|
"learning_rate": 7.180858753794997e-05, |
|
"loss": 1.6505, |
|
"step": 107500 |
|
}, |
|
{ |
|
"epoch": 7.587466629197696, |
|
"grad_norm": 0.8833571076393127, |
|
"learning_rate": 7.166401619199075e-05, |
|
"loss": 1.6506, |
|
"step": 108000 |
|
}, |
|
{ |
|
"epoch": 7.6225937895180556, |
|
"grad_norm": 0.8974440693855286, |
|
"learning_rate": 7.151944484603153e-05, |
|
"loss": 1.6529, |
|
"step": 108500 |
|
}, |
|
{ |
|
"epoch": 7.657720949838415, |
|
"grad_norm": 0.8859068155288696, |
|
"learning_rate": 7.13748735000723e-05, |
|
"loss": 1.6483, |
|
"step": 109000 |
|
}, |
|
{ |
|
"epoch": 7.692848110158775, |
|
"grad_norm": 0.9290302991867065, |
|
"learning_rate": 7.123030215411306e-05, |
|
"loss": 1.649, |
|
"step": 109500 |
|
}, |
|
{ |
|
"epoch": 7.727975270479135, |
|
"grad_norm": 0.912346601486206, |
|
"learning_rate": 7.108573080815382e-05, |
|
"loss": 1.6497, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 7.727975270479135, |
|
"eval_accuracy": 0.696107465901776, |
|
"eval_loss": 1.3987531661987305, |
|
"eval_runtime": 145.5941, |
|
"eval_samples_per_second": 829.058, |
|
"eval_steps_per_second": 5.186, |
|
"step": 110000 |
|
}, |
|
{ |
|
"epoch": 7.763102430799494, |
|
"grad_norm": 0.9191212058067322, |
|
"learning_rate": 7.09411594621946e-05, |
|
"loss": 1.6456, |
|
"step": 110500 |
|
}, |
|
{ |
|
"epoch": 7.798229591119854, |
|
"grad_norm": 0.9030919075012207, |
|
"learning_rate": 7.079658811623536e-05, |
|
"loss": 1.6428, |
|
"step": 111000 |
|
}, |
|
{ |
|
"epoch": 7.833356751440213, |
|
"grad_norm": 0.9317150115966797, |
|
"learning_rate": 7.065201677027613e-05, |
|
"loss": 1.6428, |
|
"step": 111500 |
|
}, |
|
{ |
|
"epoch": 7.868483911760573, |
|
"grad_norm": 0.9374363422393799, |
|
"learning_rate": 7.05074454243169e-05, |
|
"loss": 1.6444, |
|
"step": 112000 |
|
}, |
|
{ |
|
"epoch": 7.903611072080933, |
|
"grad_norm": 0.8871532678604126, |
|
"learning_rate": 7.036287407835768e-05, |
|
"loss": 1.6428, |
|
"step": 112500 |
|
}, |
|
{ |
|
"epoch": 7.938738232401293, |
|
"grad_norm": 0.947810709476471, |
|
"learning_rate": 7.021830273239845e-05, |
|
"loss": 1.6366, |
|
"step": 113000 |
|
}, |
|
{ |
|
"epoch": 7.9738653927216525, |
|
"grad_norm": 0.8954718708992004, |
|
"learning_rate": 7.007373138643921e-05, |
|
"loss": 1.6379, |
|
"step": 113500 |
|
}, |
|
{ |
|
"epoch": 8.008992553042011, |
|
"grad_norm": 0.8930262327194214, |
|
"learning_rate": 6.992916004047997e-05, |
|
"loss": 1.6387, |
|
"step": 114000 |
|
}, |
|
{ |
|
"epoch": 8.044119713362372, |
|
"grad_norm": 0.9441146850585938, |
|
"learning_rate": 6.978458869452075e-05, |
|
"loss": 1.6332, |
|
"step": 114500 |
|
}, |
|
{ |
|
"epoch": 8.079246873682731, |
|
"grad_norm": 0.9339563846588135, |
|
"learning_rate": 6.964001734856152e-05, |
|
"loss": 1.6326, |
|
"step": 115000 |
|
}, |
|
{ |
|
"epoch": 8.114374034003092, |
|
"grad_norm": 0.917434811592102, |
|
"learning_rate": 6.949544600260228e-05, |
|
"loss": 1.6311, |
|
"step": 115500 |
|
}, |
|
{ |
|
"epoch": 8.14950119432345, |
|
"grad_norm": 0.9269524216651917, |
|
"learning_rate": 6.935087465664306e-05, |
|
"loss": 1.6327, |
|
"step": 116000 |
|
}, |
|
{ |
|
"epoch": 8.184628354643811, |
|
"grad_norm": 0.9198341965675354, |
|
"learning_rate": 6.920630331068383e-05, |
|
"loss": 1.6323, |
|
"step": 116500 |
|
}, |
|
{ |
|
"epoch": 8.21975551496417, |
|
"grad_norm": 0.9190070629119873, |
|
"learning_rate": 6.90617319647246e-05, |
|
"loss": 1.6286, |
|
"step": 117000 |
|
}, |
|
{ |
|
"epoch": 8.25488267528453, |
|
"grad_norm": 0.9343723654747009, |
|
"learning_rate": 6.891716061876536e-05, |
|
"loss": 1.6291, |
|
"step": 117500 |
|
}, |
|
{ |
|
"epoch": 8.29000983560489, |
|
"grad_norm": 0.9313236474990845, |
|
"learning_rate": 6.877258927280613e-05, |
|
"loss": 1.6253, |
|
"step": 118000 |
|
}, |
|
{ |
|
"epoch": 8.325136995925249, |
|
"grad_norm": 0.9170568585395813, |
|
"learning_rate": 6.86280179268469e-05, |
|
"loss": 1.6244, |
|
"step": 118500 |
|
}, |
|
{ |
|
"epoch": 8.36026415624561, |
|
"grad_norm": 0.9194092154502869, |
|
"learning_rate": 6.848344658088767e-05, |
|
"loss": 1.6251, |
|
"step": 119000 |
|
}, |
|
{ |
|
"epoch": 8.395391316565968, |
|
"grad_norm": 0.903550922870636, |
|
"learning_rate": 6.833887523492845e-05, |
|
"loss": 1.6245, |
|
"step": 119500 |
|
}, |
|
{ |
|
"epoch": 8.430518476886329, |
|
"grad_norm": 0.9155687689781189, |
|
"learning_rate": 6.819430388896921e-05, |
|
"loss": 1.6245, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 8.430518476886329, |
|
"eval_accuracy": 0.699622936687572, |
|
"eval_loss": 1.376625895500183, |
|
"eval_runtime": 146.4286, |
|
"eval_samples_per_second": 824.334, |
|
"eval_steps_per_second": 5.156, |
|
"step": 120000 |
|
}, |
|
{ |
|
"epoch": 8.465645637206688, |
|
"grad_norm": 0.9170736074447632, |
|
"learning_rate": 6.804973254300999e-05, |
|
"loss": 1.6241, |
|
"step": 120500 |
|
}, |
|
{ |
|
"epoch": 8.500772797527048, |
|
"grad_norm": 0.918855607509613, |
|
"learning_rate": 6.790516119705075e-05, |
|
"loss": 1.6238, |
|
"step": 121000 |
|
}, |
|
{ |
|
"epoch": 8.535899957847407, |
|
"grad_norm": 0.9043271541595459, |
|
"learning_rate": 6.776058985109152e-05, |
|
"loss": 1.6253, |
|
"step": 121500 |
|
}, |
|
{ |
|
"epoch": 8.571027118167768, |
|
"grad_norm": 0.9378098845481873, |
|
"learning_rate": 6.761601850513228e-05, |
|
"loss": 1.6207, |
|
"step": 122000 |
|
}, |
|
{ |
|
"epoch": 8.606154278488127, |
|
"grad_norm": 0.9023655652999878, |
|
"learning_rate": 6.747144715917306e-05, |
|
"loss": 1.6192, |
|
"step": 122500 |
|
}, |
|
{ |
|
"epoch": 8.641281438808488, |
|
"grad_norm": 0.90032958984375, |
|
"learning_rate": 6.732687581321382e-05, |
|
"loss": 1.6188, |
|
"step": 123000 |
|
}, |
|
{ |
|
"epoch": 8.676408599128846, |
|
"grad_norm": 0.9296461939811707, |
|
"learning_rate": 6.71823044672546e-05, |
|
"loss": 1.6147, |
|
"step": 123500 |
|
}, |
|
{ |
|
"epoch": 8.711535759449205, |
|
"grad_norm": 0.917468249797821, |
|
"learning_rate": 6.703773312129536e-05, |
|
"loss": 1.614, |
|
"step": 124000 |
|
}, |
|
{ |
|
"epoch": 8.746662919769566, |
|
"grad_norm": 0.9283198714256287, |
|
"learning_rate": 6.689316177533613e-05, |
|
"loss": 1.6088, |
|
"step": 124500 |
|
}, |
|
{ |
|
"epoch": 8.781790080089925, |
|
"grad_norm": 0.9109067916870117, |
|
"learning_rate": 6.67485904293769e-05, |
|
"loss": 1.6148, |
|
"step": 125000 |
|
}, |
|
{ |
|
"epoch": 8.816917240410286, |
|
"grad_norm": 0.9425482153892517, |
|
"learning_rate": 6.660401908341767e-05, |
|
"loss": 1.6125, |
|
"step": 125500 |
|
}, |
|
{ |
|
"epoch": 8.852044400730644, |
|
"grad_norm": 0.9108548164367676, |
|
"learning_rate": 6.645944773745843e-05, |
|
"loss": 1.6118, |
|
"step": 126000 |
|
}, |
|
{ |
|
"epoch": 8.887171561051005, |
|
"grad_norm": 0.9401180744171143, |
|
"learning_rate": 6.631487639149921e-05, |
|
"loss": 1.6121, |
|
"step": 126500 |
|
}, |
|
{ |
|
"epoch": 8.922298721371364, |
|
"grad_norm": 0.9434899091720581, |
|
"learning_rate": 6.617030504553999e-05, |
|
"loss": 1.6099, |
|
"step": 127000 |
|
}, |
|
{ |
|
"epoch": 8.957425881691725, |
|
"grad_norm": 0.9705188870429993, |
|
"learning_rate": 6.602573369958075e-05, |
|
"loss": 1.6143, |
|
"step": 127500 |
|
}, |
|
{ |
|
"epoch": 8.992553042012084, |
|
"grad_norm": 0.9429416060447693, |
|
"learning_rate": 6.588116235362152e-05, |
|
"loss": 1.6131, |
|
"step": 128000 |
|
}, |
|
{ |
|
"epoch": 9.027680202332444, |
|
"grad_norm": 0.9325101971626282, |
|
"learning_rate": 6.573659100766228e-05, |
|
"loss": 1.6054, |
|
"step": 128500 |
|
}, |
|
{ |
|
"epoch": 9.062807362652803, |
|
"grad_norm": 0.9009438157081604, |
|
"learning_rate": 6.559201966170306e-05, |
|
"loss": 1.6049, |
|
"step": 129000 |
|
}, |
|
{ |
|
"epoch": 9.097934522973162, |
|
"grad_norm": 0.9559112191200256, |
|
"learning_rate": 6.544744831574382e-05, |
|
"loss": 1.6065, |
|
"step": 129500 |
|
}, |
|
{ |
|
"epoch": 9.133061683293523, |
|
"grad_norm": 0.9075998067855835, |
|
"learning_rate": 6.530287696978458e-05, |
|
"loss": 1.6015, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 9.133061683293523, |
|
"eval_accuracy": 0.701941754455614, |
|
"eval_loss": 1.362774133682251, |
|
"eval_runtime": 146.6899, |
|
"eval_samples_per_second": 822.865, |
|
"eval_steps_per_second": 5.147, |
|
"step": 130000 |
|
}, |
|
{ |
|
"epoch": 9.168188843613882, |
|
"grad_norm": 0.9487557411193848, |
|
"learning_rate": 6.515830562382536e-05, |
|
"loss": 1.603, |
|
"step": 130500 |
|
}, |
|
{ |
|
"epoch": 9.203316003934242, |
|
"grad_norm": 0.9400453567504883, |
|
"learning_rate": 6.501373427786614e-05, |
|
"loss": 1.6042, |
|
"step": 131000 |
|
}, |
|
{ |
|
"epoch": 9.238443164254601, |
|
"grad_norm": 0.9347612261772156, |
|
"learning_rate": 6.48691629319069e-05, |
|
"loss": 1.6033, |
|
"step": 131500 |
|
}, |
|
{ |
|
"epoch": 9.273570324574962, |
|
"grad_norm": 0.9069722890853882, |
|
"learning_rate": 6.472459158594767e-05, |
|
"loss": 1.6038, |
|
"step": 132000 |
|
}, |
|
{ |
|
"epoch": 9.30869748489532, |
|
"grad_norm": 0.9513418674468994, |
|
"learning_rate": 6.458002023998843e-05, |
|
"loss": 1.6023, |
|
"step": 132500 |
|
}, |
|
{ |
|
"epoch": 9.343824645215681, |
|
"grad_norm": 0.9263851642608643, |
|
"learning_rate": 6.443544889402921e-05, |
|
"loss": 1.5997, |
|
"step": 133000 |
|
}, |
|
{ |
|
"epoch": 9.37895180553604, |
|
"grad_norm": 0.9571240544319153, |
|
"learning_rate": 6.429087754806997e-05, |
|
"loss": 1.5987, |
|
"step": 133500 |
|
}, |
|
{ |
|
"epoch": 9.414078965856401, |
|
"grad_norm": 0.9037933349609375, |
|
"learning_rate": 6.414630620211074e-05, |
|
"loss": 1.5979, |
|
"step": 134000 |
|
}, |
|
{ |
|
"epoch": 9.44920612617676, |
|
"grad_norm": 0.9799155592918396, |
|
"learning_rate": 6.400173485615151e-05, |
|
"loss": 1.5952, |
|
"step": 134500 |
|
}, |
|
{ |
|
"epoch": 9.484333286497119, |
|
"grad_norm": 0.9275627732276917, |
|
"learning_rate": 6.385716351019229e-05, |
|
"loss": 1.595, |
|
"step": 135000 |
|
}, |
|
{ |
|
"epoch": 9.51946044681748, |
|
"grad_norm": 0.9277747273445129, |
|
"learning_rate": 6.371259216423306e-05, |
|
"loss": 1.5961, |
|
"step": 135500 |
|
}, |
|
{ |
|
"epoch": 9.554587607137838, |
|
"grad_norm": 0.9405021071434021, |
|
"learning_rate": 6.356802081827382e-05, |
|
"loss": 1.5955, |
|
"step": 136000 |
|
}, |
|
{ |
|
"epoch": 9.589714767458199, |
|
"grad_norm": 0.9006831049919128, |
|
"learning_rate": 6.342344947231458e-05, |
|
"loss": 1.5967, |
|
"step": 136500 |
|
}, |
|
{ |
|
"epoch": 9.624841927778558, |
|
"grad_norm": 0.9479206800460815, |
|
"learning_rate": 6.327887812635536e-05, |
|
"loss": 1.5923, |
|
"step": 137000 |
|
}, |
|
{ |
|
"epoch": 9.659969088098919, |
|
"grad_norm": 0.9280245304107666, |
|
"learning_rate": 6.313430678039613e-05, |
|
"loss": 1.5932, |
|
"step": 137500 |
|
}, |
|
{ |
|
"epoch": 9.695096248419278, |
|
"grad_norm": 0.9124751091003418, |
|
"learning_rate": 6.29897354344369e-05, |
|
"loss": 1.5882, |
|
"step": 138000 |
|
}, |
|
{ |
|
"epoch": 9.730223408739638, |
|
"grad_norm": 0.9291515946388245, |
|
"learning_rate": 6.284516408847767e-05, |
|
"loss": 1.5948, |
|
"step": 138500 |
|
}, |
|
{ |
|
"epoch": 9.765350569059997, |
|
"grad_norm": 0.9335107207298279, |
|
"learning_rate": 6.270059274251843e-05, |
|
"loss": 1.5896, |
|
"step": 139000 |
|
}, |
|
{ |
|
"epoch": 9.800477729380358, |
|
"grad_norm": 0.9176722764968872, |
|
"learning_rate": 6.255602139655921e-05, |
|
"loss": 1.5901, |
|
"step": 139500 |
|
}, |
|
{ |
|
"epoch": 9.835604889700717, |
|
"grad_norm": 0.9138041138648987, |
|
"learning_rate": 6.241145005059997e-05, |
|
"loss": 1.5882, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 9.835604889700717, |
|
"eval_accuracy": 0.7051619721997767, |
|
"eval_loss": 1.3451346158981323, |
|
"eval_runtime": 145.0544, |
|
"eval_samples_per_second": 832.143, |
|
"eval_steps_per_second": 5.205, |
|
"step": 140000 |
|
}, |
|
{ |
|
"epoch": 9.870732050021076, |
|
"grad_norm": 0.9771543741226196, |
|
"learning_rate": 6.226687870464074e-05, |
|
"loss": 1.5882, |
|
"step": 140500 |
|
}, |
|
{ |
|
"epoch": 9.905859210341436, |
|
"grad_norm": 0.9482930302619934, |
|
"learning_rate": 6.212230735868151e-05, |
|
"loss": 1.5866, |
|
"step": 141000 |
|
}, |
|
{ |
|
"epoch": 9.940986370661795, |
|
"grad_norm": 0.9545276761054993, |
|
"learning_rate": 6.197773601272228e-05, |
|
"loss": 1.5868, |
|
"step": 141500 |
|
}, |
|
{ |
|
"epoch": 9.976113530982156, |
|
"grad_norm": 0.962986171245575, |
|
"learning_rate": 6.183316466676306e-05, |
|
"loss": 1.5833, |
|
"step": 142000 |
|
}, |
|
{ |
|
"epoch": 10.011240691302515, |
|
"grad_norm": 0.9559778571128845, |
|
"learning_rate": 6.168859332080382e-05, |
|
"loss": 1.5858, |
|
"step": 142500 |
|
}, |
|
{ |
|
"epoch": 10.046367851622875, |
|
"grad_norm": 0.9056123495101929, |
|
"learning_rate": 6.154402197484458e-05, |
|
"loss": 1.5855, |
|
"step": 143000 |
|
}, |
|
{ |
|
"epoch": 10.081495011943234, |
|
"grad_norm": 0.9433762431144714, |
|
"learning_rate": 6.139945062888536e-05, |
|
"loss": 1.581, |
|
"step": 143500 |
|
}, |
|
{ |
|
"epoch": 10.116622172263595, |
|
"grad_norm": 0.9369404315948486, |
|
"learning_rate": 6.125487928292613e-05, |
|
"loss": 1.5795, |
|
"step": 144000 |
|
}, |
|
{ |
|
"epoch": 10.151749332583954, |
|
"grad_norm": 1.009413480758667, |
|
"learning_rate": 6.111030793696689e-05, |
|
"loss": 1.5793, |
|
"step": 144500 |
|
}, |
|
{ |
|
"epoch": 10.186876492904315, |
|
"grad_norm": 0.9554888606071472, |
|
"learning_rate": 6.096573659100766e-05, |
|
"loss": 1.5797, |
|
"step": 145000 |
|
}, |
|
{ |
|
"epoch": 10.222003653224673, |
|
"grad_norm": 0.9255361557006836, |
|
"learning_rate": 6.082116524504844e-05, |
|
"loss": 1.5809, |
|
"step": 145500 |
|
}, |
|
{ |
|
"epoch": 10.257130813545032, |
|
"grad_norm": 0.9112059473991394, |
|
"learning_rate": 6.067659389908921e-05, |
|
"loss": 1.5813, |
|
"step": 146000 |
|
}, |
|
{ |
|
"epoch": 10.292257973865393, |
|
"grad_norm": 0.9442374110221863, |
|
"learning_rate": 6.053202255312997e-05, |
|
"loss": 1.5757, |
|
"step": 146500 |
|
}, |
|
{ |
|
"epoch": 10.327385134185752, |
|
"grad_norm": 0.9561610221862793, |
|
"learning_rate": 6.0387451207170744e-05, |
|
"loss": 1.5765, |
|
"step": 147000 |
|
}, |
|
{ |
|
"epoch": 10.362512294506113, |
|
"grad_norm": 0.9480382800102234, |
|
"learning_rate": 6.024287986121151e-05, |
|
"loss": 1.5769, |
|
"step": 147500 |
|
}, |
|
{ |
|
"epoch": 10.397639454826471, |
|
"grad_norm": 0.9375914931297302, |
|
"learning_rate": 6.009830851525228e-05, |
|
"loss": 1.5744, |
|
"step": 148000 |
|
}, |
|
{ |
|
"epoch": 10.432766615146832, |
|
"grad_norm": 0.9849138259887695, |
|
"learning_rate": 5.995373716929304e-05, |
|
"loss": 1.5778, |
|
"step": 148500 |
|
}, |
|
{ |
|
"epoch": 10.467893775467191, |
|
"grad_norm": 0.9578582048416138, |
|
"learning_rate": 5.980916582333381e-05, |
|
"loss": 1.5729, |
|
"step": 149000 |
|
}, |
|
{ |
|
"epoch": 10.503020935787552, |
|
"grad_norm": 0.9367204308509827, |
|
"learning_rate": 5.966459447737459e-05, |
|
"loss": 1.5714, |
|
"step": 149500 |
|
}, |
|
{ |
|
"epoch": 10.53814809610791, |
|
"grad_norm": 0.9462071061134338, |
|
"learning_rate": 5.952002313141536e-05, |
|
"loss": 1.5738, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 10.53814809610791, |
|
"eval_accuracy": 0.7075515620989917, |
|
"eval_loss": 1.331017255783081, |
|
"eval_runtime": 146.1625, |
|
"eval_samples_per_second": 825.834, |
|
"eval_steps_per_second": 5.165, |
|
"step": 150000 |
|
}, |
|
{ |
|
"epoch": 10.573275256428271, |
|
"grad_norm": 0.9778929948806763, |
|
"learning_rate": 5.9375451785456126e-05, |
|
"loss": 1.5759, |
|
"step": 150500 |
|
}, |
|
{ |
|
"epoch": 10.60840241674863, |
|
"grad_norm": 0.9483457803726196, |
|
"learning_rate": 5.9230880439496896e-05, |
|
"loss": 1.5711, |
|
"step": 151000 |
|
}, |
|
{ |
|
"epoch": 10.643529577068989, |
|
"grad_norm": 0.9661710262298584, |
|
"learning_rate": 5.908630909353766e-05, |
|
"loss": 1.5713, |
|
"step": 151500 |
|
}, |
|
{ |
|
"epoch": 10.67865673738935, |
|
"grad_norm": 0.926715612411499, |
|
"learning_rate": 5.894173774757843e-05, |
|
"loss": 1.5706, |
|
"step": 152000 |
|
}, |
|
{ |
|
"epoch": 10.713783897709709, |
|
"grad_norm": 0.9615681767463684, |
|
"learning_rate": 5.8797166401619195e-05, |
|
"loss": 1.5685, |
|
"step": 152500 |
|
}, |
|
{ |
|
"epoch": 10.74891105803007, |
|
"grad_norm": 0.9441356658935547, |
|
"learning_rate": 5.865259505565997e-05, |
|
"loss": 1.5689, |
|
"step": 153000 |
|
}, |
|
{ |
|
"epoch": 10.784038218350428, |
|
"grad_norm": 0.9553456902503967, |
|
"learning_rate": 5.8508023709700743e-05, |
|
"loss": 1.5708, |
|
"step": 153500 |
|
}, |
|
{ |
|
"epoch": 10.819165378670789, |
|
"grad_norm": 0.9203099012374878, |
|
"learning_rate": 5.8363452363741514e-05, |
|
"loss": 1.5691, |
|
"step": 154000 |
|
}, |
|
{ |
|
"epoch": 10.854292538991148, |
|
"grad_norm": 0.9613307118415833, |
|
"learning_rate": 5.821888101778228e-05, |
|
"loss": 1.5691, |
|
"step": 154500 |
|
}, |
|
{ |
|
"epoch": 10.889419699311508, |
|
"grad_norm": 0.93576580286026, |
|
"learning_rate": 5.807430967182305e-05, |
|
"loss": 1.5683, |
|
"step": 155000 |
|
}, |
|
{ |
|
"epoch": 10.924546859631867, |
|
"grad_norm": 0.959653913974762, |
|
"learning_rate": 5.792973832586381e-05, |
|
"loss": 1.5685, |
|
"step": 155500 |
|
}, |
|
{ |
|
"epoch": 10.959674019952226, |
|
"grad_norm": 0.9653539657592773, |
|
"learning_rate": 5.7785166979904584e-05, |
|
"loss": 1.5662, |
|
"step": 156000 |
|
}, |
|
{ |
|
"epoch": 10.994801180272587, |
|
"grad_norm": 0.9286296367645264, |
|
"learning_rate": 5.764059563394536e-05, |
|
"loss": 1.5656, |
|
"step": 156500 |
|
}, |
|
{ |
|
"epoch": 11.029928340592946, |
|
"grad_norm": 0.9834336042404175, |
|
"learning_rate": 5.7496024287986125e-05, |
|
"loss": 1.5588, |
|
"step": 157000 |
|
}, |
|
{ |
|
"epoch": 11.065055500913306, |
|
"grad_norm": 0.9424375295639038, |
|
"learning_rate": 5.7351452942026896e-05, |
|
"loss": 1.562, |
|
"step": 157500 |
|
}, |
|
{ |
|
"epoch": 11.100182661233665, |
|
"grad_norm": 0.9713384509086609, |
|
"learning_rate": 5.720688159606766e-05, |
|
"loss": 1.562, |
|
"step": 158000 |
|
}, |
|
{ |
|
"epoch": 11.135309821554026, |
|
"grad_norm": 0.9827730655670166, |
|
"learning_rate": 5.706231025010843e-05, |
|
"loss": 1.5595, |
|
"step": 158500 |
|
}, |
|
{ |
|
"epoch": 11.170436981874385, |
|
"grad_norm": 0.969085693359375, |
|
"learning_rate": 5.6917738904149195e-05, |
|
"loss": 1.5606, |
|
"step": 159000 |
|
}, |
|
{ |
|
"epoch": 11.205564142194746, |
|
"grad_norm": 0.9821890592575073, |
|
"learning_rate": 5.6773167558189966e-05, |
|
"loss": 1.5607, |
|
"step": 159500 |
|
}, |
|
{ |
|
"epoch": 11.240691302515105, |
|
"grad_norm": 0.9560614824295044, |
|
"learning_rate": 5.6628596212230736e-05, |
|
"loss": 1.563, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 11.240691302515105, |
|
"eval_accuracy": 0.7090796716396742, |
|
"eval_loss": 1.3214398622512817, |
|
"eval_runtime": 147.4505, |
|
"eval_samples_per_second": 818.621, |
|
"eval_steps_per_second": 5.12, |
|
"step": 160000 |
|
}, |
|
{ |
|
"epoch": 11.275818462835465, |
|
"grad_norm": 0.9682839512825012, |
|
"learning_rate": 5.6484024866271514e-05, |
|
"loss": 1.558, |
|
"step": 160500 |
|
}, |
|
{ |
|
"epoch": 11.310945623155824, |
|
"grad_norm": 0.958651065826416, |
|
"learning_rate": 5.633945352031228e-05, |
|
"loss": 1.5618, |
|
"step": 161000 |
|
}, |
|
{ |
|
"epoch": 11.346072783476183, |
|
"grad_norm": 0.9705203771591187, |
|
"learning_rate": 5.619488217435305e-05, |
|
"loss": 1.5576, |
|
"step": 161500 |
|
}, |
|
{ |
|
"epoch": 11.381199943796544, |
|
"grad_norm": 0.9865082502365112, |
|
"learning_rate": 5.605031082839381e-05, |
|
"loss": 1.5583, |
|
"step": 162000 |
|
}, |
|
{ |
|
"epoch": 11.416327104116903, |
|
"grad_norm": 0.9645934700965881, |
|
"learning_rate": 5.5905739482434584e-05, |
|
"loss": 1.5551, |
|
"step": 162500 |
|
}, |
|
{ |
|
"epoch": 11.451454264437263, |
|
"grad_norm": 0.9389051795005798, |
|
"learning_rate": 5.576116813647535e-05, |
|
"loss": 1.5521, |
|
"step": 163000 |
|
}, |
|
{ |
|
"epoch": 11.486581424757622, |
|
"grad_norm": 0.9918972849845886, |
|
"learning_rate": 5.561659679051612e-05, |
|
"loss": 1.5551, |
|
"step": 163500 |
|
}, |
|
{ |
|
"epoch": 11.521708585077983, |
|
"grad_norm": 0.96946120262146, |
|
"learning_rate": 5.5472025444556896e-05, |
|
"loss": 1.5554, |
|
"step": 164000 |
|
}, |
|
{ |
|
"epoch": 11.556835745398342, |
|
"grad_norm": 0.9327898621559143, |
|
"learning_rate": 5.532745409859767e-05, |
|
"loss": 1.5536, |
|
"step": 164500 |
|
}, |
|
{ |
|
"epoch": 11.591962905718702, |
|
"grad_norm": 0.9520469307899475, |
|
"learning_rate": 5.518288275263843e-05, |
|
"loss": 1.554, |
|
"step": 165000 |
|
}, |
|
{ |
|
"epoch": 11.627090066039061, |
|
"grad_norm": 0.9468493461608887, |
|
"learning_rate": 5.50383114066792e-05, |
|
"loss": 1.5541, |
|
"step": 165500 |
|
}, |
|
{ |
|
"epoch": 11.662217226359422, |
|
"grad_norm": 0.9519227147102356, |
|
"learning_rate": 5.4893740060719965e-05, |
|
"loss": 1.551, |
|
"step": 166000 |
|
}, |
|
{ |
|
"epoch": 11.69734438667978, |
|
"grad_norm": 0.9576557278633118, |
|
"learning_rate": 5.4749168714760736e-05, |
|
"loss": 1.5519, |
|
"step": 166500 |
|
}, |
|
{ |
|
"epoch": 11.73247154700014, |
|
"grad_norm": 0.9346196055412292, |
|
"learning_rate": 5.46045973688015e-05, |
|
"loss": 1.549, |
|
"step": 167000 |
|
}, |
|
{ |
|
"epoch": 11.7675987073205, |
|
"grad_norm": 0.964485764503479, |
|
"learning_rate": 5.446002602284227e-05, |
|
"loss": 1.5494, |
|
"step": 167500 |
|
}, |
|
{ |
|
"epoch": 11.80272586764086, |
|
"grad_norm": 0.9761068820953369, |
|
"learning_rate": 5.431545467688305e-05, |
|
"loss": 1.551, |
|
"step": 168000 |
|
}, |
|
{ |
|
"epoch": 11.83785302796122, |
|
"grad_norm": 0.9633313417434692, |
|
"learning_rate": 5.417088333092382e-05, |
|
"loss": 1.5475, |
|
"step": 168500 |
|
}, |
|
{ |
|
"epoch": 11.872980188281579, |
|
"grad_norm": 0.9491254091262817, |
|
"learning_rate": 5.402631198496458e-05, |
|
"loss": 1.5468, |
|
"step": 169000 |
|
}, |
|
{ |
|
"epoch": 11.90810734860194, |
|
"grad_norm": 0.9572640061378479, |
|
"learning_rate": 5.3881740639005354e-05, |
|
"loss": 1.5463, |
|
"step": 169500 |
|
}, |
|
{ |
|
"epoch": 11.943234508922298, |
|
"grad_norm": 0.9447980523109436, |
|
"learning_rate": 5.373716929304612e-05, |
|
"loss": 1.5473, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 11.943234508922298, |
|
"eval_accuracy": 0.7113065905004013, |
|
"eval_loss": 1.308741807937622, |
|
"eval_runtime": 147.0083, |
|
"eval_samples_per_second": 821.083, |
|
"eval_steps_per_second": 5.136, |
|
"step": 170000 |
|
}, |
|
{ |
|
"epoch": 11.978361669242659, |
|
"grad_norm": 0.9711387753486633, |
|
"learning_rate": 5.359259794708689e-05, |
|
"loss": 1.5469, |
|
"step": 170500 |
|
}, |
|
{ |
|
"epoch": 12.013488829563018, |
|
"grad_norm": 0.9664705395698547, |
|
"learning_rate": 5.344802660112765e-05, |
|
"loss": 1.547, |
|
"step": 171000 |
|
}, |
|
{ |
|
"epoch": 12.048615989883379, |
|
"grad_norm": 0.9575341939926147, |
|
"learning_rate": 5.330345525516843e-05, |
|
"loss": 1.5439, |
|
"step": 171500 |
|
}, |
|
{ |
|
"epoch": 12.083743150203738, |
|
"grad_norm": 0.9776381850242615, |
|
"learning_rate": 5.31588839092092e-05, |
|
"loss": 1.5416, |
|
"step": 172000 |
|
}, |
|
{ |
|
"epoch": 12.118870310524096, |
|
"grad_norm": 0.9849351048469543, |
|
"learning_rate": 5.3014312563249965e-05, |
|
"loss": 1.544, |
|
"step": 172500 |
|
}, |
|
{ |
|
"epoch": 12.153997470844457, |
|
"grad_norm": 1.0078338384628296, |
|
"learning_rate": 5.2869741217290736e-05, |
|
"loss": 1.5463, |
|
"step": 173000 |
|
}, |
|
{ |
|
"epoch": 12.189124631164816, |
|
"grad_norm": 0.9730892777442932, |
|
"learning_rate": 5.272516987133151e-05, |
|
"loss": 1.5424, |
|
"step": 173500 |
|
}, |
|
{ |
|
"epoch": 12.224251791485177, |
|
"grad_norm": 1.0085258483886719, |
|
"learning_rate": 5.258059852537227e-05, |
|
"loss": 1.5384, |
|
"step": 174000 |
|
}, |
|
{ |
|
"epoch": 12.259378951805536, |
|
"grad_norm": 0.9556441307067871, |
|
"learning_rate": 5.243602717941304e-05, |
|
"loss": 1.5427, |
|
"step": 174500 |
|
}, |
|
{ |
|
"epoch": 12.294506112125896, |
|
"grad_norm": 0.9524454474449158, |
|
"learning_rate": 5.229145583345382e-05, |
|
"loss": 1.5373, |
|
"step": 175000 |
|
}, |
|
{ |
|
"epoch": 12.329633272446255, |
|
"grad_norm": 0.9806123375892639, |
|
"learning_rate": 5.214688448749458e-05, |
|
"loss": 1.5425, |
|
"step": 175500 |
|
}, |
|
{ |
|
"epoch": 12.364760432766616, |
|
"grad_norm": 0.9411491751670837, |
|
"learning_rate": 5.2002313141535354e-05, |
|
"loss": 1.5394, |
|
"step": 176000 |
|
}, |
|
{ |
|
"epoch": 12.399887593086975, |
|
"grad_norm": 1.0032098293304443, |
|
"learning_rate": 5.185774179557612e-05, |
|
"loss": 1.5388, |
|
"step": 176500 |
|
}, |
|
{ |
|
"epoch": 12.435014753407334, |
|
"grad_norm": 0.9663287401199341, |
|
"learning_rate": 5.171317044961689e-05, |
|
"loss": 1.5389, |
|
"step": 177000 |
|
}, |
|
{ |
|
"epoch": 12.470141913727694, |
|
"grad_norm": 0.9497302770614624, |
|
"learning_rate": 5.156859910365765e-05, |
|
"loss": 1.537, |
|
"step": 177500 |
|
}, |
|
{ |
|
"epoch": 12.505269074048053, |
|
"grad_norm": 0.9728620648384094, |
|
"learning_rate": 5.1424027757698423e-05, |
|
"loss": 1.5397, |
|
"step": 178000 |
|
}, |
|
{ |
|
"epoch": 12.540396234368414, |
|
"grad_norm": 0.9789900779724121, |
|
"learning_rate": 5.127945641173919e-05, |
|
"loss": 1.5405, |
|
"step": 178500 |
|
}, |
|
{ |
|
"epoch": 12.575523394688773, |
|
"grad_norm": 0.9774718284606934, |
|
"learning_rate": 5.113488506577997e-05, |
|
"loss": 1.5368, |
|
"step": 179000 |
|
}, |
|
{ |
|
"epoch": 12.610650555009133, |
|
"grad_norm": 0.9627470970153809, |
|
"learning_rate": 5.0990313719820736e-05, |
|
"loss": 1.5365, |
|
"step": 179500 |
|
}, |
|
{ |
|
"epoch": 12.645777715329492, |
|
"grad_norm": 0.9805966019630432, |
|
"learning_rate": 5.0845742373861507e-05, |
|
"loss": 1.5364, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 12.645777715329492, |
|
"eval_accuracy": 0.7135348973126568, |
|
"eval_loss": 1.2944310903549194, |
|
"eval_runtime": 147.0212, |
|
"eval_samples_per_second": 821.011, |
|
"eval_steps_per_second": 5.135, |
|
"step": 180000 |
|
}, |
|
{ |
|
"epoch": 12.680904875649853, |
|
"grad_norm": 1.0078229904174805, |
|
"learning_rate": 5.070117102790227e-05, |
|
"loss": 1.5344, |
|
"step": 180500 |
|
}, |
|
{ |
|
"epoch": 12.716032035970212, |
|
"grad_norm": 1.0073761940002441, |
|
"learning_rate": 5.055659968194304e-05, |
|
"loss": 1.5357, |
|
"step": 181000 |
|
}, |
|
{ |
|
"epoch": 12.751159196290573, |
|
"grad_norm": 0.9848378300666809, |
|
"learning_rate": 5.0412028335983805e-05, |
|
"loss": 1.5352, |
|
"step": 181500 |
|
}, |
|
{ |
|
"epoch": 12.786286356610931, |
|
"grad_norm": 0.9649278521537781, |
|
"learning_rate": 5.0267456990024576e-05, |
|
"loss": 1.5356, |
|
"step": 182000 |
|
}, |
|
{ |
|
"epoch": 12.82141351693129, |
|
"grad_norm": 0.9573400616645813, |
|
"learning_rate": 5.0122885644065354e-05, |
|
"loss": 1.5363, |
|
"step": 182500 |
|
}, |
|
{ |
|
"epoch": 12.856540677251651, |
|
"grad_norm": 0.9978417754173279, |
|
"learning_rate": 4.997831429810612e-05, |
|
"loss": 1.5338, |
|
"step": 183000 |
|
}, |
|
{ |
|
"epoch": 12.89166783757201, |
|
"grad_norm": 0.99289870262146, |
|
"learning_rate": 4.983374295214689e-05, |
|
"loss": 1.5328, |
|
"step": 183500 |
|
}, |
|
{ |
|
"epoch": 12.92679499789237, |
|
"grad_norm": 0.974976122379303, |
|
"learning_rate": 4.968917160618766e-05, |
|
"loss": 1.5298, |
|
"step": 184000 |
|
}, |
|
{ |
|
"epoch": 12.96192215821273, |
|
"grad_norm": 0.9644068479537964, |
|
"learning_rate": 4.954460026022842e-05, |
|
"loss": 1.5341, |
|
"step": 184500 |
|
}, |
|
{ |
|
"epoch": 12.99704931853309, |
|
"grad_norm": 0.9787358641624451, |
|
"learning_rate": 4.9400028914269194e-05, |
|
"loss": 1.532, |
|
"step": 185000 |
|
}, |
|
{ |
|
"epoch": 13.032176478853449, |
|
"grad_norm": 0.9854114651679993, |
|
"learning_rate": 4.9255457568309965e-05, |
|
"loss": 1.5277, |
|
"step": 185500 |
|
}, |
|
{ |
|
"epoch": 13.06730363917381, |
|
"grad_norm": 0.9944697618484497, |
|
"learning_rate": 4.9110886222350736e-05, |
|
"loss": 1.5284, |
|
"step": 186000 |
|
}, |
|
{ |
|
"epoch": 13.102430799494169, |
|
"grad_norm": 0.9880611300468445, |
|
"learning_rate": 4.89663148763915e-05, |
|
"loss": 1.5284, |
|
"step": 186500 |
|
}, |
|
{ |
|
"epoch": 13.13755795981453, |
|
"grad_norm": 0.9830440282821655, |
|
"learning_rate": 4.882174353043227e-05, |
|
"loss": 1.5251, |
|
"step": 187000 |
|
}, |
|
{ |
|
"epoch": 13.172685120134888, |
|
"grad_norm": 0.992473304271698, |
|
"learning_rate": 4.867717218447304e-05, |
|
"loss": 1.5256, |
|
"step": 187500 |
|
}, |
|
{ |
|
"epoch": 13.207812280455247, |
|
"grad_norm": 0.948597252368927, |
|
"learning_rate": 4.853260083851381e-05, |
|
"loss": 1.5271, |
|
"step": 188000 |
|
}, |
|
{ |
|
"epoch": 13.242939440775608, |
|
"grad_norm": 0.9829763174057007, |
|
"learning_rate": 4.8388029492554576e-05, |
|
"loss": 1.5259, |
|
"step": 188500 |
|
}, |
|
{ |
|
"epoch": 13.278066601095967, |
|
"grad_norm": 0.9461808800697327, |
|
"learning_rate": 4.824345814659535e-05, |
|
"loss": 1.525, |
|
"step": 189000 |
|
}, |
|
{ |
|
"epoch": 13.313193761416327, |
|
"grad_norm": 0.9874758720397949, |
|
"learning_rate": 4.809888680063612e-05, |
|
"loss": 1.5286, |
|
"step": 189500 |
|
}, |
|
{ |
|
"epoch": 13.348320921736686, |
|
"grad_norm": 0.9801004528999329, |
|
"learning_rate": 4.795431545467688e-05, |
|
"loss": 1.5257, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 13.348320921736686, |
|
"eval_accuracy": 0.7146395658479376, |
|
"eval_loss": 1.2904942035675049, |
|
"eval_runtime": 148.7015, |
|
"eval_samples_per_second": 811.734, |
|
"eval_steps_per_second": 5.077, |
|
"step": 190000 |
|
}, |
|
{ |
|
"epoch": 13.383448082057047, |
|
"grad_norm": 1.0048809051513672, |
|
"learning_rate": 4.780974410871765e-05, |
|
"loss": 1.5222, |
|
"step": 190500 |
|
}, |
|
{ |
|
"epoch": 13.418575242377406, |
|
"grad_norm": 0.9752638339996338, |
|
"learning_rate": 4.766517276275842e-05, |
|
"loss": 1.5242, |
|
"step": 191000 |
|
}, |
|
{ |
|
"epoch": 13.453702402697767, |
|
"grad_norm": 0.9492470622062683, |
|
"learning_rate": 4.7520601416799194e-05, |
|
"loss": 1.5219, |
|
"step": 191500 |
|
}, |
|
{ |
|
"epoch": 13.488829563018125, |
|
"grad_norm": 0.973146915435791, |
|
"learning_rate": 4.737603007083996e-05, |
|
"loss": 1.5234, |
|
"step": 192000 |
|
}, |
|
{ |
|
"epoch": 13.523956723338486, |
|
"grad_norm": 0.9545627236366272, |
|
"learning_rate": 4.723145872488073e-05, |
|
"loss": 1.5252, |
|
"step": 192500 |
|
}, |
|
{ |
|
"epoch": 13.559083883658845, |
|
"grad_norm": 0.9910150170326233, |
|
"learning_rate": 4.70868873789215e-05, |
|
"loss": 1.5229, |
|
"step": 193000 |
|
}, |
|
{ |
|
"epoch": 13.594211043979204, |
|
"grad_norm": 1.0169930458068848, |
|
"learning_rate": 4.694231603296227e-05, |
|
"loss": 1.5233, |
|
"step": 193500 |
|
}, |
|
{ |
|
"epoch": 13.629338204299565, |
|
"grad_norm": 1.0121934413909912, |
|
"learning_rate": 4.6797744687003034e-05, |
|
"loss": 1.5201, |
|
"step": 194000 |
|
}, |
|
{ |
|
"epoch": 13.664465364619923, |
|
"grad_norm": 0.9987900853157043, |
|
"learning_rate": 4.665317334104381e-05, |
|
"loss": 1.5187, |
|
"step": 194500 |
|
}, |
|
{ |
|
"epoch": 13.699592524940284, |
|
"grad_norm": 1.0241477489471436, |
|
"learning_rate": 4.6508601995084576e-05, |
|
"loss": 1.5206, |
|
"step": 195000 |
|
}, |
|
{ |
|
"epoch": 13.734719685260643, |
|
"grad_norm": 0.9895103573799133, |
|
"learning_rate": 4.6364030649125346e-05, |
|
"loss": 1.5201, |
|
"step": 195500 |
|
}, |
|
{ |
|
"epoch": 13.769846845581004, |
|
"grad_norm": 0.9821479916572571, |
|
"learning_rate": 4.621945930316611e-05, |
|
"loss": 1.5201, |
|
"step": 196000 |
|
}, |
|
{ |
|
"epoch": 13.804974005901363, |
|
"grad_norm": 1.005090594291687, |
|
"learning_rate": 4.607488795720689e-05, |
|
"loss": 1.5182, |
|
"step": 196500 |
|
}, |
|
{ |
|
"epoch": 13.840101166221723, |
|
"grad_norm": 0.9969967603683472, |
|
"learning_rate": 4.593031661124765e-05, |
|
"loss": 1.5195, |
|
"step": 197000 |
|
}, |
|
{ |
|
"epoch": 13.875228326542082, |
|
"grad_norm": 0.9817072153091431, |
|
"learning_rate": 4.578574526528842e-05, |
|
"loss": 1.5175, |
|
"step": 197500 |
|
}, |
|
{ |
|
"epoch": 13.910355486862443, |
|
"grad_norm": 0.9905675053596497, |
|
"learning_rate": 4.564117391932919e-05, |
|
"loss": 1.5182, |
|
"step": 198000 |
|
}, |
|
{ |
|
"epoch": 13.945482647182802, |
|
"grad_norm": 0.9947062134742737, |
|
"learning_rate": 4.5496602573369964e-05, |
|
"loss": 1.5162, |
|
"step": 198500 |
|
}, |
|
{ |
|
"epoch": 13.98060980750316, |
|
"grad_norm": 0.97999107837677, |
|
"learning_rate": 4.535203122741073e-05, |
|
"loss": 1.5216, |
|
"step": 199000 |
|
}, |
|
{ |
|
"epoch": 14.015736967823521, |
|
"grad_norm": 0.9755523204803467, |
|
"learning_rate": 4.52074598814515e-05, |
|
"loss": 1.5161, |
|
"step": 199500 |
|
}, |
|
{ |
|
"epoch": 14.05086412814388, |
|
"grad_norm": 0.9752248525619507, |
|
"learning_rate": 4.506288853549227e-05, |
|
"loss": 1.5164, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 14.05086412814388, |
|
"eval_accuracy": 0.716148456990617, |
|
"eval_loss": 1.2788549661636353, |
|
"eval_runtime": 145.5914, |
|
"eval_samples_per_second": 829.074, |
|
"eval_steps_per_second": 5.186, |
|
"step": 200000 |
|
}, |
|
{ |
|
"epoch": 14.08599128846424, |
|
"grad_norm": 0.984459638595581, |
|
"learning_rate": 4.491831718953304e-05, |
|
"loss": 1.5128, |
|
"step": 200500 |
|
}, |
|
{ |
|
"epoch": 14.1211184487846, |
|
"grad_norm": 0.9917986392974854, |
|
"learning_rate": 4.4773745843573805e-05, |
|
"loss": 1.5142, |
|
"step": 201000 |
|
}, |
|
{ |
|
"epoch": 14.15624560910496, |
|
"grad_norm": 0.9588636159896851, |
|
"learning_rate": 4.4629174497614575e-05, |
|
"loss": 1.5122, |
|
"step": 201500 |
|
}, |
|
{ |
|
"epoch": 14.19137276942532, |
|
"grad_norm": 0.9958001971244812, |
|
"learning_rate": 4.4484603151655346e-05, |
|
"loss": 1.5105, |
|
"step": 202000 |
|
}, |
|
{ |
|
"epoch": 14.22649992974568, |
|
"grad_norm": 1.012403130531311, |
|
"learning_rate": 4.434003180569612e-05, |
|
"loss": 1.5129, |
|
"step": 202500 |
|
}, |
|
{ |
|
"epoch": 14.261627090066039, |
|
"grad_norm": 1.0305534601211548, |
|
"learning_rate": 4.419546045973688e-05, |
|
"loss": 1.5133, |
|
"step": 203000 |
|
}, |
|
{ |
|
"epoch": 14.2967542503864, |
|
"grad_norm": 0.9992120265960693, |
|
"learning_rate": 4.405088911377765e-05, |
|
"loss": 1.5114, |
|
"step": 203500 |
|
}, |
|
{ |
|
"epoch": 14.331881410706758, |
|
"grad_norm": 1.008920669555664, |
|
"learning_rate": 4.390631776781842e-05, |
|
"loss": 1.5139, |
|
"step": 204000 |
|
}, |
|
{ |
|
"epoch": 14.367008571027117, |
|
"grad_norm": 1.0051392316818237, |
|
"learning_rate": 4.3761746421859187e-05, |
|
"loss": 1.5112, |
|
"step": 204500 |
|
}, |
|
{ |
|
"epoch": 14.402135731347478, |
|
"grad_norm": 0.9850152730941772, |
|
"learning_rate": 4.361717507589996e-05, |
|
"loss": 1.5111, |
|
"step": 205000 |
|
}, |
|
{ |
|
"epoch": 14.437262891667837, |
|
"grad_norm": 0.9894864559173584, |
|
"learning_rate": 4.347260372994073e-05, |
|
"loss": 1.5119, |
|
"step": 205500 |
|
}, |
|
{ |
|
"epoch": 14.472390051988198, |
|
"grad_norm": 1.0255316495895386, |
|
"learning_rate": 4.33280323839815e-05, |
|
"loss": 1.51, |
|
"step": 206000 |
|
}, |
|
{ |
|
"epoch": 14.507517212308557, |
|
"grad_norm": 1.044765830039978, |
|
"learning_rate": 4.318346103802226e-05, |
|
"loss": 1.5082, |
|
"step": 206500 |
|
}, |
|
{ |
|
"epoch": 14.542644372628917, |
|
"grad_norm": 1.0084095001220703, |
|
"learning_rate": 4.3038889692063034e-05, |
|
"loss": 1.5073, |
|
"step": 207000 |
|
}, |
|
{ |
|
"epoch": 14.577771532949276, |
|
"grad_norm": 0.9992097616195679, |
|
"learning_rate": 4.2894318346103804e-05, |
|
"loss": 1.5085, |
|
"step": 207500 |
|
}, |
|
{ |
|
"epoch": 14.612898693269637, |
|
"grad_norm": 1.016776204109192, |
|
"learning_rate": 4.2749747000144575e-05, |
|
"loss": 1.5092, |
|
"step": 208000 |
|
}, |
|
{ |
|
"epoch": 14.648025853589996, |
|
"grad_norm": 1.0093616247177124, |
|
"learning_rate": 4.260517565418534e-05, |
|
"loss": 1.5074, |
|
"step": 208500 |
|
}, |
|
{ |
|
"epoch": 14.683153013910356, |
|
"grad_norm": 1.0291818380355835, |
|
"learning_rate": 4.246060430822611e-05, |
|
"loss": 1.5079, |
|
"step": 209000 |
|
}, |
|
{ |
|
"epoch": 14.718280174230715, |
|
"grad_norm": 0.9756091833114624, |
|
"learning_rate": 4.231603296226688e-05, |
|
"loss": 1.5071, |
|
"step": 209500 |
|
}, |
|
{ |
|
"epoch": 14.753407334551074, |
|
"grad_norm": 1.0051275491714478, |
|
"learning_rate": 4.217146161630765e-05, |
|
"loss": 1.5071, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 14.753407334551074, |
|
"eval_accuracy": 0.7175581891473035, |
|
"eval_loss": 1.2701668739318848, |
|
"eval_runtime": 145.5115, |
|
"eval_samples_per_second": 829.529, |
|
"eval_steps_per_second": 5.189, |
|
"step": 210000 |
|
}, |
|
{ |
|
"epoch": 14.788534494871435, |
|
"grad_norm": 1.011240005493164, |
|
"learning_rate": 4.2026890270348416e-05, |
|
"loss": 1.5067, |
|
"step": 210500 |
|
}, |
|
{ |
|
"epoch": 14.823661655191794, |
|
"grad_norm": 1.0316245555877686, |
|
"learning_rate": 4.1882318924389186e-05, |
|
"loss": 1.5059, |
|
"step": 211000 |
|
}, |
|
{ |
|
"epoch": 14.858788815512154, |
|
"grad_norm": 0.9921131730079651, |
|
"learning_rate": 4.173774757842996e-05, |
|
"loss": 1.5065, |
|
"step": 211500 |
|
}, |
|
{ |
|
"epoch": 14.893915975832513, |
|
"grad_norm": 1.0233694314956665, |
|
"learning_rate": 4.159317623247073e-05, |
|
"loss": 1.5039, |
|
"step": 212000 |
|
}, |
|
{ |
|
"epoch": 14.929043136152874, |
|
"grad_norm": 1.0409730672836304, |
|
"learning_rate": 4.144860488651149e-05, |
|
"loss": 1.507, |
|
"step": 212500 |
|
}, |
|
{ |
|
"epoch": 14.964170296473233, |
|
"grad_norm": 1.0045446157455444, |
|
"learning_rate": 4.130403354055227e-05, |
|
"loss": 1.5061, |
|
"step": 213000 |
|
}, |
|
{ |
|
"epoch": 14.999297456793593, |
|
"grad_norm": 0.9876782298088074, |
|
"learning_rate": 4.1159462194593033e-05, |
|
"loss": 1.5067, |
|
"step": 213500 |
|
}, |
|
{ |
|
"epoch": 15.034424617113952, |
|
"grad_norm": 1.0133213996887207, |
|
"learning_rate": 4.1014890848633804e-05, |
|
"loss": 1.5002, |
|
"step": 214000 |
|
}, |
|
{ |
|
"epoch": 15.069551777434313, |
|
"grad_norm": 0.9735503792762756, |
|
"learning_rate": 4.087031950267457e-05, |
|
"loss": 1.5016, |
|
"step": 214500 |
|
}, |
|
{ |
|
"epoch": 15.104678937754672, |
|
"grad_norm": 0.985035240650177, |
|
"learning_rate": 4.0725748156715346e-05, |
|
"loss": 1.5021, |
|
"step": 215000 |
|
}, |
|
{ |
|
"epoch": 15.13980609807503, |
|
"grad_norm": 1.0019500255584717, |
|
"learning_rate": 4.058117681075611e-05, |
|
"loss": 1.4975, |
|
"step": 215500 |
|
}, |
|
{ |
|
"epoch": 15.174933258395392, |
|
"grad_norm": 1.0004901885986328, |
|
"learning_rate": 4.043660546479688e-05, |
|
"loss": 1.5017, |
|
"step": 216000 |
|
}, |
|
{ |
|
"epoch": 15.21006041871575, |
|
"grad_norm": 0.9935667514801025, |
|
"learning_rate": 4.0292034118837645e-05, |
|
"loss": 1.5015, |
|
"step": 216500 |
|
}, |
|
{ |
|
"epoch": 15.245187579036111, |
|
"grad_norm": 1.0111830234527588, |
|
"learning_rate": 4.014746277287842e-05, |
|
"loss": 1.5004, |
|
"step": 217000 |
|
}, |
|
{ |
|
"epoch": 15.28031473935647, |
|
"grad_norm": 1.0003619194030762, |
|
"learning_rate": 4.0002891426919186e-05, |
|
"loss": 1.4991, |
|
"step": 217500 |
|
}, |
|
{ |
|
"epoch": 15.31544189967683, |
|
"grad_norm": 1.0029890537261963, |
|
"learning_rate": 3.985832008095996e-05, |
|
"loss": 1.4992, |
|
"step": 218000 |
|
}, |
|
{ |
|
"epoch": 15.35056905999719, |
|
"grad_norm": 1.0191558599472046, |
|
"learning_rate": 3.971374873500072e-05, |
|
"loss": 1.5008, |
|
"step": 218500 |
|
}, |
|
{ |
|
"epoch": 15.38569622031755, |
|
"grad_norm": 1.01618492603302, |
|
"learning_rate": 3.95691773890415e-05, |
|
"loss": 1.4989, |
|
"step": 219000 |
|
}, |
|
{ |
|
"epoch": 15.42082338063791, |
|
"grad_norm": 1.020931601524353, |
|
"learning_rate": 3.942460604308226e-05, |
|
"loss": 1.4985, |
|
"step": 219500 |
|
}, |
|
{ |
|
"epoch": 15.45595054095827, |
|
"grad_norm": 1.0255357027053833, |
|
"learning_rate": 3.928003469712303e-05, |
|
"loss": 1.4972, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 15.45595054095827, |
|
"eval_accuracy": 0.7193424504315581, |
|
"eval_loss": 1.2618342638015747, |
|
"eval_runtime": 145.885, |
|
"eval_samples_per_second": 827.405, |
|
"eval_steps_per_second": 5.175, |
|
"step": 220000 |
|
}, |
|
{ |
|
"epoch": 15.491077701278629, |
|
"grad_norm": 1.0047690868377686, |
|
"learning_rate": 3.9135463351163804e-05, |
|
"loss": 1.4989, |
|
"step": 220500 |
|
}, |
|
{ |
|
"epoch": 15.526204861598988, |
|
"grad_norm": 1.040840983390808, |
|
"learning_rate": 3.899089200520457e-05, |
|
"loss": 1.5001, |
|
"step": 221000 |
|
}, |
|
{ |
|
"epoch": 15.561332021919348, |
|
"grad_norm": 0.9770060777664185, |
|
"learning_rate": 3.884632065924534e-05, |
|
"loss": 1.4976, |
|
"step": 221500 |
|
}, |
|
{ |
|
"epoch": 15.596459182239707, |
|
"grad_norm": 0.9793146848678589, |
|
"learning_rate": 3.870174931328611e-05, |
|
"loss": 1.4986, |
|
"step": 222000 |
|
}, |
|
{ |
|
"epoch": 15.631586342560068, |
|
"grad_norm": 0.9713142514228821, |
|
"learning_rate": 3.855717796732688e-05, |
|
"loss": 1.4947, |
|
"step": 222500 |
|
}, |
|
{ |
|
"epoch": 15.666713502880427, |
|
"grad_norm": 1.0131899118423462, |
|
"learning_rate": 3.8412606621367644e-05, |
|
"loss": 1.4965, |
|
"step": 223000 |
|
}, |
|
{ |
|
"epoch": 15.701840663200787, |
|
"grad_norm": 1.0238277912139893, |
|
"learning_rate": 3.8268035275408415e-05, |
|
"loss": 1.4961, |
|
"step": 223500 |
|
}, |
|
{ |
|
"epoch": 15.736967823521146, |
|
"grad_norm": 1.0393719673156738, |
|
"learning_rate": 3.812346392944918e-05, |
|
"loss": 1.4944, |
|
"step": 224000 |
|
}, |
|
{ |
|
"epoch": 15.772094983841507, |
|
"grad_norm": 0.993442952632904, |
|
"learning_rate": 3.797889258348996e-05, |
|
"loss": 1.4932, |
|
"step": 224500 |
|
}, |
|
{ |
|
"epoch": 15.807222144161866, |
|
"grad_norm": 1.010707974433899, |
|
"learning_rate": 3.783432123753072e-05, |
|
"loss": 1.4919, |
|
"step": 225000 |
|
}, |
|
{ |
|
"epoch": 15.842349304482227, |
|
"grad_norm": 1.0285263061523438, |
|
"learning_rate": 3.768974989157149e-05, |
|
"loss": 1.4922, |
|
"step": 225500 |
|
}, |
|
{ |
|
"epoch": 15.877476464802585, |
|
"grad_norm": 0.9992517232894897, |
|
"learning_rate": 3.754517854561226e-05, |
|
"loss": 1.496, |
|
"step": 226000 |
|
}, |
|
{ |
|
"epoch": 15.912603625122944, |
|
"grad_norm": 1.0487091541290283, |
|
"learning_rate": 3.740060719965303e-05, |
|
"loss": 1.496, |
|
"step": 226500 |
|
}, |
|
{ |
|
"epoch": 15.947730785443305, |
|
"grad_norm": 0.9960684776306152, |
|
"learning_rate": 3.72560358536938e-05, |
|
"loss": 1.4916, |
|
"step": 227000 |
|
}, |
|
{ |
|
"epoch": 15.982857945763664, |
|
"grad_norm": 1.0461766719818115, |
|
"learning_rate": 3.711146450773457e-05, |
|
"loss": 1.4945, |
|
"step": 227500 |
|
}, |
|
{ |
|
"epoch": 16.017985106084023, |
|
"grad_norm": 1.058475375175476, |
|
"learning_rate": 3.696689316177534e-05, |
|
"loss": 1.4899, |
|
"step": 228000 |
|
}, |
|
{ |
|
"epoch": 16.053112266404383, |
|
"grad_norm": 1.0232276916503906, |
|
"learning_rate": 3.682232181581611e-05, |
|
"loss": 1.4884, |
|
"step": 228500 |
|
}, |
|
{ |
|
"epoch": 16.088239426724744, |
|
"grad_norm": 1.0691484212875366, |
|
"learning_rate": 3.667775046985687e-05, |
|
"loss": 1.4886, |
|
"step": 229000 |
|
}, |
|
{ |
|
"epoch": 16.123366587045105, |
|
"grad_norm": 1.003165602684021, |
|
"learning_rate": 3.6533179123897644e-05, |
|
"loss": 1.4894, |
|
"step": 229500 |
|
}, |
|
{ |
|
"epoch": 16.158493747365462, |
|
"grad_norm": 1.010514736175537, |
|
"learning_rate": 3.6388607777938415e-05, |
|
"loss": 1.4915, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 16.158493747365462, |
|
"eval_accuracy": 0.7201344726261764, |
|
"eval_loss": 1.2572591304779053, |
|
"eval_runtime": 146.9469, |
|
"eval_samples_per_second": 821.426, |
|
"eval_steps_per_second": 5.138, |
|
"step": 230000 |
|
}, |
|
{ |
|
"epoch": 16.193620907685823, |
|
"grad_norm": 1.0277786254882812, |
|
"learning_rate": 3.6244036431979186e-05, |
|
"loss": 1.4867, |
|
"step": 230500 |
|
}, |
|
{ |
|
"epoch": 16.228748068006183, |
|
"grad_norm": 1.0123547315597534, |
|
"learning_rate": 3.609946508601995e-05, |
|
"loss": 1.4901, |
|
"step": 231000 |
|
}, |
|
{ |
|
"epoch": 16.26387522832654, |
|
"grad_norm": 1.0431201457977295, |
|
"learning_rate": 3.595489374006073e-05, |
|
"loss": 1.485, |
|
"step": 231500 |
|
}, |
|
{ |
|
"epoch": 16.2990023886469, |
|
"grad_norm": 0.9797715544700623, |
|
"learning_rate": 3.581032239410149e-05, |
|
"loss": 1.4898, |
|
"step": 232000 |
|
}, |
|
{ |
|
"epoch": 16.33412954896726, |
|
"grad_norm": 1.0554850101470947, |
|
"learning_rate": 3.566575104814226e-05, |
|
"loss": 1.4867, |
|
"step": 232500 |
|
}, |
|
{ |
|
"epoch": 16.369256709287622, |
|
"grad_norm": 1.0041229724884033, |
|
"learning_rate": 3.5521179702183026e-05, |
|
"loss": 1.4876, |
|
"step": 233000 |
|
}, |
|
{ |
|
"epoch": 16.40438386960798, |
|
"grad_norm": 1.004367470741272, |
|
"learning_rate": 3.5376608356223804e-05, |
|
"loss": 1.4866, |
|
"step": 233500 |
|
}, |
|
{ |
|
"epoch": 16.43951102992834, |
|
"grad_norm": 0.9991381764411926, |
|
"learning_rate": 3.523203701026457e-05, |
|
"loss": 1.4866, |
|
"step": 234000 |
|
}, |
|
{ |
|
"epoch": 16.4746381902487, |
|
"grad_norm": 1.0062518119812012, |
|
"learning_rate": 3.508746566430534e-05, |
|
"loss": 1.4869, |
|
"step": 234500 |
|
}, |
|
{ |
|
"epoch": 16.50976535056906, |
|
"grad_norm": 1.0116221904754639, |
|
"learning_rate": 3.49428943183461e-05, |
|
"loss": 1.4867, |
|
"step": 235000 |
|
}, |
|
{ |
|
"epoch": 16.54489251088942, |
|
"grad_norm": 1.0057295560836792, |
|
"learning_rate": 3.479832297238687e-05, |
|
"loss": 1.4836, |
|
"step": 235500 |
|
}, |
|
{ |
|
"epoch": 16.58001967120978, |
|
"grad_norm": 0.9946209788322449, |
|
"learning_rate": 3.4653751626427644e-05, |
|
"loss": 1.4871, |
|
"step": 236000 |
|
}, |
|
{ |
|
"epoch": 16.61514683153014, |
|
"grad_norm": 1.052956223487854, |
|
"learning_rate": 3.4509180280468415e-05, |
|
"loss": 1.4836, |
|
"step": 236500 |
|
}, |
|
{ |
|
"epoch": 16.650273991850497, |
|
"grad_norm": 1.0504807233810425, |
|
"learning_rate": 3.436460893450918e-05, |
|
"loss": 1.4898, |
|
"step": 237000 |
|
}, |
|
{ |
|
"epoch": 16.685401152170858, |
|
"grad_norm": 1.0513533353805542, |
|
"learning_rate": 3.422003758854995e-05, |
|
"loss": 1.4857, |
|
"step": 237500 |
|
}, |
|
{ |
|
"epoch": 16.72052831249122, |
|
"grad_norm": 1.0254963636398315, |
|
"learning_rate": 3.407546624259072e-05, |
|
"loss": 1.484, |
|
"step": 238000 |
|
}, |
|
{ |
|
"epoch": 16.75565547281158, |
|
"grad_norm": 1.0302786827087402, |
|
"learning_rate": 3.393089489663149e-05, |
|
"loss": 1.4862, |
|
"step": 238500 |
|
}, |
|
{ |
|
"epoch": 16.790782633131936, |
|
"grad_norm": 1.0228885412216187, |
|
"learning_rate": 3.378632355067226e-05, |
|
"loss": 1.4836, |
|
"step": 239000 |
|
}, |
|
{ |
|
"epoch": 16.825909793452297, |
|
"grad_norm": 1.018209457397461, |
|
"learning_rate": 3.3641752204713026e-05, |
|
"loss": 1.4827, |
|
"step": 239500 |
|
}, |
|
{ |
|
"epoch": 16.861036953772658, |
|
"grad_norm": 0.989748477935791, |
|
"learning_rate": 3.3497180858753797e-05, |
|
"loss": 1.4824, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 16.861036953772658, |
|
"eval_accuracy": 0.7210892050293545, |
|
"eval_loss": 1.2514859437942505, |
|
"eval_runtime": 145.8765, |
|
"eval_samples_per_second": 827.453, |
|
"eval_steps_per_second": 5.176, |
|
"step": 240000 |
|
}, |
|
{ |
|
"epoch": 16.89616411409302, |
|
"grad_norm": 1.0294135808944702, |
|
"learning_rate": 3.335260951279456e-05, |
|
"loss": 1.4826, |
|
"step": 240500 |
|
}, |
|
{ |
|
"epoch": 16.931291274413375, |
|
"grad_norm": 1.06317138671875, |
|
"learning_rate": 3.320803816683534e-05, |
|
"loss": 1.4851, |
|
"step": 241000 |
|
}, |
|
{ |
|
"epoch": 16.966418434733736, |
|
"grad_norm": 1.072068452835083, |
|
"learning_rate": 3.30634668208761e-05, |
|
"loss": 1.4808, |
|
"step": 241500 |
|
}, |
|
{ |
|
"epoch": 17.001545595054097, |
|
"grad_norm": 1.0590496063232422, |
|
"learning_rate": 3.291889547491687e-05, |
|
"loss": 1.4849, |
|
"step": 242000 |
|
}, |
|
{ |
|
"epoch": 17.036672755374454, |
|
"grad_norm": 1.0390037298202515, |
|
"learning_rate": 3.277432412895764e-05, |
|
"loss": 1.4826, |
|
"step": 242500 |
|
}, |
|
{ |
|
"epoch": 17.071799915694815, |
|
"grad_norm": 1.0264649391174316, |
|
"learning_rate": 3.2629752782998414e-05, |
|
"loss": 1.4784, |
|
"step": 243000 |
|
}, |
|
{ |
|
"epoch": 17.106927076015175, |
|
"grad_norm": 1.0615930557250977, |
|
"learning_rate": 3.248518143703918e-05, |
|
"loss": 1.4799, |
|
"step": 243500 |
|
}, |
|
{ |
|
"epoch": 17.142054236335536, |
|
"grad_norm": 1.0263277292251587, |
|
"learning_rate": 3.234061009107995e-05, |
|
"loss": 1.4773, |
|
"step": 244000 |
|
}, |
|
{ |
|
"epoch": 17.177181396655893, |
|
"grad_norm": 1.016964077949524, |
|
"learning_rate": 3.219603874512072e-05, |
|
"loss": 1.4776, |
|
"step": 244500 |
|
}, |
|
{ |
|
"epoch": 17.212308556976254, |
|
"grad_norm": 1.0330182313919067, |
|
"learning_rate": 3.205146739916149e-05, |
|
"loss": 1.48, |
|
"step": 245000 |
|
}, |
|
{ |
|
"epoch": 17.247435717296614, |
|
"grad_norm": 1.0279370546340942, |
|
"learning_rate": 3.1906896053202255e-05, |
|
"loss": 1.4767, |
|
"step": 245500 |
|
}, |
|
{ |
|
"epoch": 17.282562877616975, |
|
"grad_norm": 1.053545594215393, |
|
"learning_rate": 3.1762324707243026e-05, |
|
"loss": 1.4807, |
|
"step": 246000 |
|
}, |
|
{ |
|
"epoch": 17.317690037937332, |
|
"grad_norm": 1.0184204578399658, |
|
"learning_rate": 3.1617753361283796e-05, |
|
"loss": 1.4776, |
|
"step": 246500 |
|
}, |
|
{ |
|
"epoch": 17.352817198257693, |
|
"grad_norm": 1.0180692672729492, |
|
"learning_rate": 3.147318201532457e-05, |
|
"loss": 1.4765, |
|
"step": 247000 |
|
}, |
|
{ |
|
"epoch": 17.387944358578054, |
|
"grad_norm": 1.03151535987854, |
|
"learning_rate": 3.132861066936533e-05, |
|
"loss": 1.4784, |
|
"step": 247500 |
|
}, |
|
{ |
|
"epoch": 17.42307151889841, |
|
"grad_norm": 1.0360081195831299, |
|
"learning_rate": 3.11840393234061e-05, |
|
"loss": 1.4725, |
|
"step": 248000 |
|
}, |
|
{ |
|
"epoch": 17.45819867921877, |
|
"grad_norm": 1.0606461763381958, |
|
"learning_rate": 3.103946797744687e-05, |
|
"loss": 1.4753, |
|
"step": 248500 |
|
}, |
|
{ |
|
"epoch": 17.493325839539132, |
|
"grad_norm": 1.0544891357421875, |
|
"learning_rate": 3.0894896631487643e-05, |
|
"loss": 1.474, |
|
"step": 249000 |
|
}, |
|
{ |
|
"epoch": 17.528452999859493, |
|
"grad_norm": 1.0830744504928589, |
|
"learning_rate": 3.075032528552841e-05, |
|
"loss": 1.4728, |
|
"step": 249500 |
|
}, |
|
{ |
|
"epoch": 17.56358016017985, |
|
"grad_norm": 1.0082223415374756, |
|
"learning_rate": 3.060575393956918e-05, |
|
"loss": 1.4748, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 17.56358016017985, |
|
"eval_accuracy": 0.7222705826647003, |
|
"eval_loss": 1.245027780532837, |
|
"eval_runtime": 145.2394, |
|
"eval_samples_per_second": 831.083, |
|
"eval_steps_per_second": 5.198, |
|
"step": 250000 |
|
}, |
|
{ |
|
"epoch": 17.59870732050021, |
|
"grad_norm": 1.0504813194274902, |
|
"learning_rate": 3.046118259360995e-05, |
|
"loss": 1.4783, |
|
"step": 250500 |
|
}, |
|
{ |
|
"epoch": 17.63383448082057, |
|
"grad_norm": 1.0163605213165283, |
|
"learning_rate": 3.0316611247650716e-05, |
|
"loss": 1.4719, |
|
"step": 251000 |
|
}, |
|
{ |
|
"epoch": 17.668961641140932, |
|
"grad_norm": 1.0471776723861694, |
|
"learning_rate": 3.0172039901691484e-05, |
|
"loss": 1.4745, |
|
"step": 251500 |
|
}, |
|
{ |
|
"epoch": 17.70408880146129, |
|
"grad_norm": 1.050743818283081, |
|
"learning_rate": 3.0027468555732258e-05, |
|
"loss": 1.4754, |
|
"step": 252000 |
|
}, |
|
{ |
|
"epoch": 17.73921596178165, |
|
"grad_norm": 1.0446287393569946, |
|
"learning_rate": 2.9882897209773025e-05, |
|
"loss": 1.4742, |
|
"step": 252500 |
|
}, |
|
{ |
|
"epoch": 17.77434312210201, |
|
"grad_norm": 1.0279436111450195, |
|
"learning_rate": 2.9738325863813793e-05, |
|
"loss": 1.4728, |
|
"step": 253000 |
|
}, |
|
{ |
|
"epoch": 17.809470282422367, |
|
"grad_norm": 1.0320532321929932, |
|
"learning_rate": 2.959375451785456e-05, |
|
"loss": 1.4751, |
|
"step": 253500 |
|
}, |
|
{ |
|
"epoch": 17.844597442742728, |
|
"grad_norm": 1.0268456935882568, |
|
"learning_rate": 2.9449183171895334e-05, |
|
"loss": 1.472, |
|
"step": 254000 |
|
}, |
|
{ |
|
"epoch": 17.87972460306309, |
|
"grad_norm": 1.0411797761917114, |
|
"learning_rate": 2.93046118259361e-05, |
|
"loss": 1.4757, |
|
"step": 254500 |
|
}, |
|
{ |
|
"epoch": 17.91485176338345, |
|
"grad_norm": 1.0576931238174438, |
|
"learning_rate": 2.916004047997687e-05, |
|
"loss": 1.4738, |
|
"step": 255000 |
|
}, |
|
{ |
|
"epoch": 17.949978923703807, |
|
"grad_norm": 1.0698353052139282, |
|
"learning_rate": 2.9015469134017636e-05, |
|
"loss": 1.4749, |
|
"step": 255500 |
|
}, |
|
{ |
|
"epoch": 17.985106084024167, |
|
"grad_norm": 1.0517598390579224, |
|
"learning_rate": 2.887089778805841e-05, |
|
"loss": 1.4719, |
|
"step": 256000 |
|
}, |
|
{ |
|
"epoch": 18.020233244344528, |
|
"grad_norm": 1.030639886856079, |
|
"learning_rate": 2.8726326442099178e-05, |
|
"loss": 1.4722, |
|
"step": 256500 |
|
}, |
|
{ |
|
"epoch": 18.05536040466489, |
|
"grad_norm": 1.038255214691162, |
|
"learning_rate": 2.8581755096139945e-05, |
|
"loss": 1.4713, |
|
"step": 257000 |
|
}, |
|
{ |
|
"epoch": 18.090487564985246, |
|
"grad_norm": 1.0348975658416748, |
|
"learning_rate": 2.843718375018072e-05, |
|
"loss": 1.4678, |
|
"step": 257500 |
|
}, |
|
{ |
|
"epoch": 18.125614725305606, |
|
"grad_norm": 1.0541919469833374, |
|
"learning_rate": 2.8292612404221487e-05, |
|
"loss": 1.4711, |
|
"step": 258000 |
|
}, |
|
{ |
|
"epoch": 18.160741885625967, |
|
"grad_norm": 1.0226538181304932, |
|
"learning_rate": 2.8148041058262254e-05, |
|
"loss": 1.4693, |
|
"step": 258500 |
|
}, |
|
{ |
|
"epoch": 18.195869045946324, |
|
"grad_norm": 1.0200276374816895, |
|
"learning_rate": 2.800346971230302e-05, |
|
"loss": 1.4705, |
|
"step": 259000 |
|
}, |
|
{ |
|
"epoch": 18.230996206266685, |
|
"grad_norm": 1.0429800748825073, |
|
"learning_rate": 2.7858898366343792e-05, |
|
"loss": 1.4704, |
|
"step": 259500 |
|
}, |
|
{ |
|
"epoch": 18.266123366587045, |
|
"grad_norm": 1.0760138034820557, |
|
"learning_rate": 2.7714327020384563e-05, |
|
"loss": 1.4686, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 18.266123366587045, |
|
"eval_accuracy": 0.7233516160401716, |
|
"eval_loss": 1.238907814025879, |
|
"eval_runtime": 142.7876, |
|
"eval_samples_per_second": 845.353, |
|
"eval_steps_per_second": 5.288, |
|
"step": 260000 |
|
}, |
|
{ |
|
"epoch": 18.301250526907406, |
|
"grad_norm": 1.0434221029281616, |
|
"learning_rate": 2.756975567442533e-05, |
|
"loss": 1.468, |
|
"step": 260500 |
|
}, |
|
{ |
|
"epoch": 18.336377687227763, |
|
"grad_norm": 1.0621415376663208, |
|
"learning_rate": 2.7425184328466098e-05, |
|
"loss": 1.4665, |
|
"step": 261000 |
|
}, |
|
{ |
|
"epoch": 18.371504847548124, |
|
"grad_norm": 1.0416877269744873, |
|
"learning_rate": 2.728061298250687e-05, |
|
"loss": 1.4703, |
|
"step": 261500 |
|
}, |
|
{ |
|
"epoch": 18.406632007868485, |
|
"grad_norm": 1.0364477634429932, |
|
"learning_rate": 2.7136041636547636e-05, |
|
"loss": 1.4679, |
|
"step": 262000 |
|
}, |
|
{ |
|
"epoch": 18.441759168188845, |
|
"grad_norm": 1.0509207248687744, |
|
"learning_rate": 2.6991470290588407e-05, |
|
"loss": 1.4645, |
|
"step": 262500 |
|
}, |
|
{ |
|
"epoch": 18.476886328509202, |
|
"grad_norm": 1.0550258159637451, |
|
"learning_rate": 2.6846898944629178e-05, |
|
"loss": 1.465, |
|
"step": 263000 |
|
}, |
|
{ |
|
"epoch": 18.512013488829563, |
|
"grad_norm": 1.045780897140503, |
|
"learning_rate": 2.6702327598669945e-05, |
|
"loss": 1.4645, |
|
"step": 263500 |
|
}, |
|
{ |
|
"epoch": 18.547140649149924, |
|
"grad_norm": 1.033547043800354, |
|
"learning_rate": 2.6557756252710713e-05, |
|
"loss": 1.464, |
|
"step": 264000 |
|
}, |
|
{ |
|
"epoch": 18.58226780947028, |
|
"grad_norm": 1.0473381280899048, |
|
"learning_rate": 2.641318490675148e-05, |
|
"loss": 1.4647, |
|
"step": 264500 |
|
}, |
|
{ |
|
"epoch": 18.61739496979064, |
|
"grad_norm": 1.0651229619979858, |
|
"learning_rate": 2.6268613560792254e-05, |
|
"loss": 1.4683, |
|
"step": 265000 |
|
}, |
|
{ |
|
"epoch": 18.652522130111002, |
|
"grad_norm": 1.0227559804916382, |
|
"learning_rate": 2.612404221483302e-05, |
|
"loss": 1.4653, |
|
"step": 265500 |
|
}, |
|
{ |
|
"epoch": 18.687649290431363, |
|
"grad_norm": 1.0518014430999756, |
|
"learning_rate": 2.597947086887379e-05, |
|
"loss": 1.4634, |
|
"step": 266000 |
|
}, |
|
{ |
|
"epoch": 18.72277645075172, |
|
"grad_norm": 1.0625571012496948, |
|
"learning_rate": 2.5834899522914556e-05, |
|
"loss": 1.4663, |
|
"step": 266500 |
|
}, |
|
{ |
|
"epoch": 18.75790361107208, |
|
"grad_norm": 1.0320320129394531, |
|
"learning_rate": 2.569032817695533e-05, |
|
"loss": 1.4658, |
|
"step": 267000 |
|
}, |
|
{ |
|
"epoch": 18.79303077139244, |
|
"grad_norm": 1.0507465600967407, |
|
"learning_rate": 2.5545756830996098e-05, |
|
"loss": 1.4667, |
|
"step": 267500 |
|
}, |
|
{ |
|
"epoch": 18.828157931712802, |
|
"grad_norm": 1.0624668598175049, |
|
"learning_rate": 2.5401185485036865e-05, |
|
"loss": 1.465, |
|
"step": 268000 |
|
}, |
|
{ |
|
"epoch": 18.86328509203316, |
|
"grad_norm": 1.1198619604110718, |
|
"learning_rate": 2.5256614139077633e-05, |
|
"loss": 1.4639, |
|
"step": 268500 |
|
}, |
|
{ |
|
"epoch": 18.89841225235352, |
|
"grad_norm": 1.0421173572540283, |
|
"learning_rate": 2.5112042793118407e-05, |
|
"loss": 1.4641, |
|
"step": 269000 |
|
}, |
|
{ |
|
"epoch": 18.93353941267388, |
|
"grad_norm": 1.0857394933700562, |
|
"learning_rate": 2.4967471447159174e-05, |
|
"loss": 1.4633, |
|
"step": 269500 |
|
}, |
|
{ |
|
"epoch": 18.968666572994238, |
|
"grad_norm": 1.0285334587097168, |
|
"learning_rate": 2.4822900101199945e-05, |
|
"loss": 1.4649, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 18.968666572994238, |
|
"eval_accuracy": 0.7242654194082216, |
|
"eval_loss": 1.2332816123962402, |
|
"eval_runtime": 141.3716, |
|
"eval_samples_per_second": 853.821, |
|
"eval_steps_per_second": 5.341, |
|
"step": 270000 |
|
}, |
|
{ |
|
"epoch": 19.0037937333146, |
|
"grad_norm": 1.0661959648132324, |
|
"learning_rate": 2.4678328755240712e-05, |
|
"loss": 1.464, |
|
"step": 270500 |
|
}, |
|
{ |
|
"epoch": 19.03892089363496, |
|
"grad_norm": 1.0679501295089722, |
|
"learning_rate": 2.4533757409281483e-05, |
|
"loss": 1.4604, |
|
"step": 271000 |
|
}, |
|
{ |
|
"epoch": 19.07404805395532, |
|
"grad_norm": 1.0748372077941895, |
|
"learning_rate": 2.438918606332225e-05, |
|
"loss": 1.4611, |
|
"step": 271500 |
|
}, |
|
{ |
|
"epoch": 19.109175214275677, |
|
"grad_norm": 1.0354524850845337, |
|
"learning_rate": 2.424461471736302e-05, |
|
"loss": 1.4604, |
|
"step": 272000 |
|
}, |
|
{ |
|
"epoch": 19.144302374596037, |
|
"grad_norm": 1.0497136116027832, |
|
"learning_rate": 2.410004337140379e-05, |
|
"loss": 1.4616, |
|
"step": 272500 |
|
}, |
|
{ |
|
"epoch": 19.179429534916398, |
|
"grad_norm": 1.0558326244354248, |
|
"learning_rate": 2.395547202544456e-05, |
|
"loss": 1.4611, |
|
"step": 273000 |
|
}, |
|
{ |
|
"epoch": 19.21455669523676, |
|
"grad_norm": 1.0470489263534546, |
|
"learning_rate": 2.3810900679485327e-05, |
|
"loss": 1.4575, |
|
"step": 273500 |
|
}, |
|
{ |
|
"epoch": 19.249683855557116, |
|
"grad_norm": 1.0182609558105469, |
|
"learning_rate": 2.3666329333526098e-05, |
|
"loss": 1.4594, |
|
"step": 274000 |
|
}, |
|
{ |
|
"epoch": 19.284811015877477, |
|
"grad_norm": 1.0594130754470825, |
|
"learning_rate": 2.3521757987566865e-05, |
|
"loss": 1.4582, |
|
"step": 274500 |
|
}, |
|
{ |
|
"epoch": 19.319938176197837, |
|
"grad_norm": 1.0967395305633545, |
|
"learning_rate": 2.3377186641607636e-05, |
|
"loss": 1.4607, |
|
"step": 275000 |
|
}, |
|
{ |
|
"epoch": 19.355065336518194, |
|
"grad_norm": 1.0475187301635742, |
|
"learning_rate": 2.3232615295648403e-05, |
|
"loss": 1.4617, |
|
"step": 275500 |
|
}, |
|
{ |
|
"epoch": 19.390192496838555, |
|
"grad_norm": 1.0457967519760132, |
|
"learning_rate": 2.3088043949689174e-05, |
|
"loss": 1.4569, |
|
"step": 276000 |
|
}, |
|
{ |
|
"epoch": 19.425319657158916, |
|
"grad_norm": 1.0595502853393555, |
|
"learning_rate": 2.2943472603729945e-05, |
|
"loss": 1.4609, |
|
"step": 276500 |
|
}, |
|
{ |
|
"epoch": 19.460446817479276, |
|
"grad_norm": 1.0646470785140991, |
|
"learning_rate": 2.2798901257770712e-05, |
|
"loss": 1.4572, |
|
"step": 277000 |
|
}, |
|
{ |
|
"epoch": 19.495573977799634, |
|
"grad_norm": 1.0434619188308716, |
|
"learning_rate": 2.265432991181148e-05, |
|
"loss": 1.4597, |
|
"step": 277500 |
|
}, |
|
{ |
|
"epoch": 19.530701138119994, |
|
"grad_norm": 1.039019227027893, |
|
"learning_rate": 2.250975856585225e-05, |
|
"loss": 1.4576, |
|
"step": 278000 |
|
}, |
|
{ |
|
"epoch": 19.565828298440355, |
|
"grad_norm": 1.0945369005203247, |
|
"learning_rate": 2.2365187219893018e-05, |
|
"loss": 1.459, |
|
"step": 278500 |
|
}, |
|
{ |
|
"epoch": 19.600955458760716, |
|
"grad_norm": 1.0497277975082397, |
|
"learning_rate": 2.2220615873933785e-05, |
|
"loss": 1.4555, |
|
"step": 279000 |
|
}, |
|
{ |
|
"epoch": 19.636082619081073, |
|
"grad_norm": 1.0733200311660767, |
|
"learning_rate": 2.2076044527974556e-05, |
|
"loss": 1.4586, |
|
"step": 279500 |
|
}, |
|
{ |
|
"epoch": 19.671209779401433, |
|
"grad_norm": 1.0626968145370483, |
|
"learning_rate": 2.1931473182015323e-05, |
|
"loss": 1.4566, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 19.671209779401433, |
|
"eval_accuracy": 0.7253028435871818, |
|
"eval_loss": 1.2285022735595703, |
|
"eval_runtime": 141.7716, |
|
"eval_samples_per_second": 851.412, |
|
"eval_steps_per_second": 5.325, |
|
"step": 280000 |
|
}, |
|
{ |
|
"epoch": 19.706336939721794, |
|
"grad_norm": 1.0224528312683105, |
|
"learning_rate": 2.1786901836056094e-05, |
|
"loss": 1.4583, |
|
"step": 280500 |
|
}, |
|
{ |
|
"epoch": 19.74146410004215, |
|
"grad_norm": 1.0466312170028687, |
|
"learning_rate": 2.164233049009686e-05, |
|
"loss": 1.457, |
|
"step": 281000 |
|
}, |
|
{ |
|
"epoch": 19.776591260362512, |
|
"grad_norm": 1.0637938976287842, |
|
"learning_rate": 2.1497759144137632e-05, |
|
"loss": 1.4577, |
|
"step": 281500 |
|
}, |
|
{ |
|
"epoch": 19.811718420682872, |
|
"grad_norm": 1.0693141222000122, |
|
"learning_rate": 2.1353187798178403e-05, |
|
"loss": 1.4566, |
|
"step": 282000 |
|
}, |
|
{ |
|
"epoch": 19.846845581003233, |
|
"grad_norm": 1.0720393657684326, |
|
"learning_rate": 2.120861645221917e-05, |
|
"loss": 1.456, |
|
"step": 282500 |
|
}, |
|
{ |
|
"epoch": 19.88197274132359, |
|
"grad_norm": 1.071040153503418, |
|
"learning_rate": 2.106404510625994e-05, |
|
"loss": 1.4546, |
|
"step": 283000 |
|
}, |
|
{ |
|
"epoch": 19.91709990164395, |
|
"grad_norm": 1.0675835609436035, |
|
"learning_rate": 2.091947376030071e-05, |
|
"loss": 1.4582, |
|
"step": 283500 |
|
}, |
|
{ |
|
"epoch": 19.95222706196431, |
|
"grad_norm": 1.0603220462799072, |
|
"learning_rate": 2.077490241434148e-05, |
|
"loss": 1.4547, |
|
"step": 284000 |
|
}, |
|
{ |
|
"epoch": 19.987354222284672, |
|
"grad_norm": 1.0802963972091675, |
|
"learning_rate": 2.0630331068382247e-05, |
|
"loss": 1.4546, |
|
"step": 284500 |
|
}, |
|
{ |
|
"epoch": 20.02248138260503, |
|
"grad_norm": 1.0588197708129883, |
|
"learning_rate": 2.0485759722423017e-05, |
|
"loss": 1.4575, |
|
"step": 285000 |
|
}, |
|
{ |
|
"epoch": 20.05760854292539, |
|
"grad_norm": 1.0602234601974487, |
|
"learning_rate": 2.0341188376463785e-05, |
|
"loss": 1.4521, |
|
"step": 285500 |
|
}, |
|
{ |
|
"epoch": 20.09273570324575, |
|
"grad_norm": 1.1232300996780396, |
|
"learning_rate": 2.0196617030504556e-05, |
|
"loss": 1.4517, |
|
"step": 286000 |
|
}, |
|
{ |
|
"epoch": 20.127862863566108, |
|
"grad_norm": 1.0660282373428345, |
|
"learning_rate": 2.0052045684545323e-05, |
|
"loss": 1.4523, |
|
"step": 286500 |
|
}, |
|
{ |
|
"epoch": 20.16299002388647, |
|
"grad_norm": 1.0167709589004517, |
|
"learning_rate": 1.9907474338586094e-05, |
|
"loss": 1.4519, |
|
"step": 287000 |
|
}, |
|
{ |
|
"epoch": 20.19811718420683, |
|
"grad_norm": 1.047339677810669, |
|
"learning_rate": 1.976290299262686e-05, |
|
"loss": 1.4502, |
|
"step": 287500 |
|
}, |
|
{ |
|
"epoch": 20.23324434452719, |
|
"grad_norm": 1.0285905599594116, |
|
"learning_rate": 1.9618331646667632e-05, |
|
"loss": 1.4514, |
|
"step": 288000 |
|
}, |
|
{ |
|
"epoch": 20.268371504847547, |
|
"grad_norm": 1.05581533908844, |
|
"learning_rate": 1.9473760300708403e-05, |
|
"loss": 1.449, |
|
"step": 288500 |
|
}, |
|
{ |
|
"epoch": 20.303498665167908, |
|
"grad_norm": 1.0714962482452393, |
|
"learning_rate": 1.932918895474917e-05, |
|
"loss": 1.4521, |
|
"step": 289000 |
|
}, |
|
{ |
|
"epoch": 20.33862582548827, |
|
"grad_norm": 1.0655773878097534, |
|
"learning_rate": 1.918461760878994e-05, |
|
"loss": 1.4508, |
|
"step": 289500 |
|
}, |
|
{ |
|
"epoch": 20.37375298580863, |
|
"grad_norm": 1.0571376085281372, |
|
"learning_rate": 1.9040046262830708e-05, |
|
"loss": 1.4529, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 20.37375298580863, |
|
"eval_accuracy": 0.7261482132790039, |
|
"eval_loss": 1.223044753074646, |
|
"eval_runtime": 141.5948, |
|
"eval_samples_per_second": 852.475, |
|
"eval_steps_per_second": 5.332, |
|
"step": 290000 |
|
}, |
|
{ |
|
"epoch": 20.408880146128986, |
|
"grad_norm": 1.0565483570098877, |
|
"learning_rate": 1.889547491687148e-05, |
|
"loss": 1.4504, |
|
"step": 290500 |
|
}, |
|
{ |
|
"epoch": 20.444007306449347, |
|
"grad_norm": 1.0659668445587158, |
|
"learning_rate": 1.8750903570912246e-05, |
|
"loss": 1.4525, |
|
"step": 291000 |
|
}, |
|
{ |
|
"epoch": 20.479134466769708, |
|
"grad_norm": 1.122076153755188, |
|
"learning_rate": 1.8606332224953017e-05, |
|
"loss": 1.4485, |
|
"step": 291500 |
|
}, |
|
{ |
|
"epoch": 20.514261627090065, |
|
"grad_norm": 1.0631004571914673, |
|
"learning_rate": 1.8461760878993785e-05, |
|
"loss": 1.4513, |
|
"step": 292000 |
|
}, |
|
{ |
|
"epoch": 20.549388787410425, |
|
"grad_norm": 1.0630178451538086, |
|
"learning_rate": 1.8317189533034555e-05, |
|
"loss": 1.4486, |
|
"step": 292500 |
|
}, |
|
{ |
|
"epoch": 20.584515947730786, |
|
"grad_norm": 1.0705844163894653, |
|
"learning_rate": 1.8172618187075323e-05, |
|
"loss": 1.4503, |
|
"step": 293000 |
|
}, |
|
{ |
|
"epoch": 20.619643108051147, |
|
"grad_norm": 1.0725988149642944, |
|
"learning_rate": 1.8028046841116093e-05, |
|
"loss": 1.4515, |
|
"step": 293500 |
|
}, |
|
{ |
|
"epoch": 20.654770268371504, |
|
"grad_norm": 1.0657289028167725, |
|
"learning_rate": 1.788347549515686e-05, |
|
"loss": 1.4475, |
|
"step": 294000 |
|
}, |
|
{ |
|
"epoch": 20.689897428691864, |
|
"grad_norm": 1.062153697013855, |
|
"learning_rate": 1.7738904149197628e-05, |
|
"loss": 1.4503, |
|
"step": 294500 |
|
}, |
|
{ |
|
"epoch": 20.725024589012225, |
|
"grad_norm": 1.0483520030975342, |
|
"learning_rate": 1.75943328032384e-05, |
|
"loss": 1.4492, |
|
"step": 295000 |
|
}, |
|
{ |
|
"epoch": 20.760151749332586, |
|
"grad_norm": 1.07576322555542, |
|
"learning_rate": 1.7449761457279166e-05, |
|
"loss": 1.4514, |
|
"step": 295500 |
|
}, |
|
{ |
|
"epoch": 20.795278909652943, |
|
"grad_norm": 1.0874416828155518, |
|
"learning_rate": 1.7305190111319937e-05, |
|
"loss": 1.4463, |
|
"step": 296000 |
|
}, |
|
{ |
|
"epoch": 20.830406069973304, |
|
"grad_norm": 1.0873123407363892, |
|
"learning_rate": 1.7160618765360705e-05, |
|
"loss": 1.4483, |
|
"step": 296500 |
|
}, |
|
{ |
|
"epoch": 20.865533230293664, |
|
"grad_norm": 1.0750373601913452, |
|
"learning_rate": 1.7016047419401475e-05, |
|
"loss": 1.4461, |
|
"step": 297000 |
|
}, |
|
{ |
|
"epoch": 20.90066039061402, |
|
"grad_norm": 1.0832915306091309, |
|
"learning_rate": 1.6871476073442243e-05, |
|
"loss": 1.45, |
|
"step": 297500 |
|
}, |
|
{ |
|
"epoch": 20.935787550934382, |
|
"grad_norm": 1.0408779382705688, |
|
"learning_rate": 1.6726904727483014e-05, |
|
"loss": 1.4483, |
|
"step": 298000 |
|
}, |
|
{ |
|
"epoch": 20.970914711254743, |
|
"grad_norm": 1.0541013479232788, |
|
"learning_rate": 1.658233338152378e-05, |
|
"loss": 1.4492, |
|
"step": 298500 |
|
}, |
|
{ |
|
"epoch": 21.006041871575103, |
|
"grad_norm": 1.0725327730178833, |
|
"learning_rate": 1.6437762035564552e-05, |
|
"loss": 1.4475, |
|
"step": 299000 |
|
}, |
|
{ |
|
"epoch": 21.04116903189546, |
|
"grad_norm": 1.0820538997650146, |
|
"learning_rate": 1.629319068960532e-05, |
|
"loss": 1.4481, |
|
"step": 299500 |
|
}, |
|
{ |
|
"epoch": 21.07629619221582, |
|
"grad_norm": 1.0279037952423096, |
|
"learning_rate": 1.614861934364609e-05, |
|
"loss": 1.4451, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 21.07629619221582, |
|
"eval_accuracy": 0.7268696778575053, |
|
"eval_loss": 1.2188650369644165, |
|
"eval_runtime": 142.0191, |
|
"eval_samples_per_second": 849.928, |
|
"eval_steps_per_second": 5.316, |
|
"step": 300000 |
|
}, |
|
{ |
|
"epoch": 21.111423352536182, |
|
"grad_norm": 1.034382700920105, |
|
"learning_rate": 1.6004047997686857e-05, |
|
"loss": 1.4447, |
|
"step": 300500 |
|
}, |
|
{ |
|
"epoch": 21.14655051285654, |
|
"grad_norm": 1.0651057958602905, |
|
"learning_rate": 1.5859476651727628e-05, |
|
"loss": 1.4467, |
|
"step": 301000 |
|
}, |
|
{ |
|
"epoch": 21.1816776731769, |
|
"grad_norm": 1.086431622505188, |
|
"learning_rate": 1.57149053057684e-05, |
|
"loss": 1.4452, |
|
"step": 301500 |
|
}, |
|
{ |
|
"epoch": 21.21680483349726, |
|
"grad_norm": 1.0573840141296387, |
|
"learning_rate": 1.5570333959809166e-05, |
|
"loss": 1.4487, |
|
"step": 302000 |
|
}, |
|
{ |
|
"epoch": 21.25193199381762, |
|
"grad_norm": 1.0548052787780762, |
|
"learning_rate": 1.5425762613849937e-05, |
|
"loss": 1.4424, |
|
"step": 302500 |
|
}, |
|
{ |
|
"epoch": 21.287059154137978, |
|
"grad_norm": 1.0701725482940674, |
|
"learning_rate": 1.5281191267890704e-05, |
|
"loss": 1.4441, |
|
"step": 303000 |
|
}, |
|
{ |
|
"epoch": 21.32218631445834, |
|
"grad_norm": 1.0664572715759277, |
|
"learning_rate": 1.5136619921931475e-05, |
|
"loss": 1.4449, |
|
"step": 303500 |
|
}, |
|
{ |
|
"epoch": 21.3573134747787, |
|
"grad_norm": 1.0336108207702637, |
|
"learning_rate": 1.4992048575972243e-05, |
|
"loss": 1.4415, |
|
"step": 304000 |
|
}, |
|
{ |
|
"epoch": 21.39244063509906, |
|
"grad_norm": 1.098109245300293, |
|
"learning_rate": 1.4847477230013013e-05, |
|
"loss": 1.4453, |
|
"step": 304500 |
|
}, |
|
{ |
|
"epoch": 21.427567795419417, |
|
"grad_norm": 1.0706530809402466, |
|
"learning_rate": 1.470290588405378e-05, |
|
"loss": 1.4433, |
|
"step": 305000 |
|
}, |
|
{ |
|
"epoch": 21.462694955739778, |
|
"grad_norm": 1.086916208267212, |
|
"learning_rate": 1.4558334538094551e-05, |
|
"loss": 1.4441, |
|
"step": 305500 |
|
}, |
|
{ |
|
"epoch": 21.49782211606014, |
|
"grad_norm": 1.1005213260650635, |
|
"learning_rate": 1.4413763192135319e-05, |
|
"loss": 1.4429, |
|
"step": 306000 |
|
}, |
|
{ |
|
"epoch": 21.532949276380496, |
|
"grad_norm": 1.1273337602615356, |
|
"learning_rate": 1.4269191846176088e-05, |
|
"loss": 1.4429, |
|
"step": 306500 |
|
}, |
|
{ |
|
"epoch": 21.568076436700856, |
|
"grad_norm": 1.092872142791748, |
|
"learning_rate": 1.4124620500216857e-05, |
|
"loss": 1.4417, |
|
"step": 307000 |
|
}, |
|
{ |
|
"epoch": 21.603203597021217, |
|
"grad_norm": 1.0761197805404663, |
|
"learning_rate": 1.3980049154257626e-05, |
|
"loss": 1.4427, |
|
"step": 307500 |
|
}, |
|
{ |
|
"epoch": 21.638330757341578, |
|
"grad_norm": 1.1086857318878174, |
|
"learning_rate": 1.3835477808298397e-05, |
|
"loss": 1.447, |
|
"step": 308000 |
|
}, |
|
{ |
|
"epoch": 21.673457917661935, |
|
"grad_norm": 1.091773271560669, |
|
"learning_rate": 1.3690906462339164e-05, |
|
"loss": 1.4437, |
|
"step": 308500 |
|
}, |
|
{ |
|
"epoch": 21.708585077982296, |
|
"grad_norm": 1.0596261024475098, |
|
"learning_rate": 1.3546335116379935e-05, |
|
"loss": 1.4421, |
|
"step": 309000 |
|
}, |
|
{ |
|
"epoch": 21.743712238302656, |
|
"grad_norm": 1.0684653520584106, |
|
"learning_rate": 1.3401763770420702e-05, |
|
"loss": 1.4407, |
|
"step": 309500 |
|
}, |
|
{ |
|
"epoch": 21.778839398623017, |
|
"grad_norm": 1.0676227807998657, |
|
"learning_rate": 1.3257192424461473e-05, |
|
"loss": 1.443, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 21.778839398623017, |
|
"eval_accuracy": 0.7277766831400472, |
|
"eval_loss": 1.2135990858078003, |
|
"eval_runtime": 142.0609, |
|
"eval_samples_per_second": 849.678, |
|
"eval_steps_per_second": 5.315, |
|
"step": 310000 |
|
}, |
|
{ |
|
"epoch": 21.813966558943374, |
|
"grad_norm": 1.0764987468719482, |
|
"learning_rate": 1.311262107850224e-05, |
|
"loss": 1.4392, |
|
"step": 310500 |
|
}, |
|
{ |
|
"epoch": 21.849093719263735, |
|
"grad_norm": 1.075104832649231, |
|
"learning_rate": 1.2968049732543011e-05, |
|
"loss": 1.439, |
|
"step": 311000 |
|
}, |
|
{ |
|
"epoch": 21.884220879584095, |
|
"grad_norm": 1.0967333316802979, |
|
"learning_rate": 1.2823478386583779e-05, |
|
"loss": 1.4428, |
|
"step": 311500 |
|
}, |
|
{ |
|
"epoch": 21.919348039904452, |
|
"grad_norm": 1.0619230270385742, |
|
"learning_rate": 1.267890704062455e-05, |
|
"loss": 1.4394, |
|
"step": 312000 |
|
}, |
|
{ |
|
"epoch": 21.954475200224813, |
|
"grad_norm": 1.074622631072998, |
|
"learning_rate": 1.2534335694665317e-05, |
|
"loss": 1.4416, |
|
"step": 312500 |
|
}, |
|
{ |
|
"epoch": 21.989602360545174, |
|
"grad_norm": 1.1134517192840576, |
|
"learning_rate": 1.2389764348706088e-05, |
|
"loss": 1.4424, |
|
"step": 313000 |
|
}, |
|
{ |
|
"epoch": 22.024729520865534, |
|
"grad_norm": 1.0811127424240112, |
|
"learning_rate": 1.2245193002746857e-05, |
|
"loss": 1.4396, |
|
"step": 313500 |
|
}, |
|
{ |
|
"epoch": 22.05985668118589, |
|
"grad_norm": 1.0637929439544678, |
|
"learning_rate": 1.2100621656787626e-05, |
|
"loss": 1.4368, |
|
"step": 314000 |
|
}, |
|
{ |
|
"epoch": 22.094983841506252, |
|
"grad_norm": 1.0761417150497437, |
|
"learning_rate": 1.1956050310828395e-05, |
|
"loss": 1.4403, |
|
"step": 314500 |
|
}, |
|
{ |
|
"epoch": 22.130111001826613, |
|
"grad_norm": 1.099725604057312, |
|
"learning_rate": 1.1811478964869162e-05, |
|
"loss": 1.4378, |
|
"step": 315000 |
|
}, |
|
{ |
|
"epoch": 22.165238162146974, |
|
"grad_norm": 1.0871621370315552, |
|
"learning_rate": 1.1666907618909931e-05, |
|
"loss": 1.4362, |
|
"step": 315500 |
|
}, |
|
{ |
|
"epoch": 22.20036532246733, |
|
"grad_norm": 1.057110071182251, |
|
"learning_rate": 1.15223362729507e-05, |
|
"loss": 1.4365, |
|
"step": 316000 |
|
}, |
|
{ |
|
"epoch": 22.23549248278769, |
|
"grad_norm": 1.1010171175003052, |
|
"learning_rate": 1.137776492699147e-05, |
|
"loss": 1.4387, |
|
"step": 316500 |
|
}, |
|
{ |
|
"epoch": 22.270619643108052, |
|
"grad_norm": 1.058934211730957, |
|
"learning_rate": 1.123319358103224e-05, |
|
"loss": 1.4394, |
|
"step": 317000 |
|
}, |
|
{ |
|
"epoch": 22.30574680342841, |
|
"grad_norm": 1.0834625959396362, |
|
"learning_rate": 1.108862223507301e-05, |
|
"loss": 1.4376, |
|
"step": 317500 |
|
}, |
|
{ |
|
"epoch": 22.34087396374877, |
|
"grad_norm": 1.0423885583877563, |
|
"learning_rate": 1.0944050889113779e-05, |
|
"loss": 1.4372, |
|
"step": 318000 |
|
}, |
|
{ |
|
"epoch": 22.37600112406913, |
|
"grad_norm": 1.0966408252716064, |
|
"learning_rate": 1.0799479543154548e-05, |
|
"loss": 1.4394, |
|
"step": 318500 |
|
}, |
|
{ |
|
"epoch": 22.41112828438949, |
|
"grad_norm": 1.0695067644119263, |
|
"learning_rate": 1.0654908197195317e-05, |
|
"loss": 1.4362, |
|
"step": 319000 |
|
}, |
|
{ |
|
"epoch": 22.44625544470985, |
|
"grad_norm": 1.134260892868042, |
|
"learning_rate": 1.0510336851236086e-05, |
|
"loss": 1.435, |
|
"step": 319500 |
|
}, |
|
{ |
|
"epoch": 22.48138260503021, |
|
"grad_norm": 1.1000449657440186, |
|
"learning_rate": 1.0365765505276855e-05, |
|
"loss": 1.4357, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 22.48138260503021, |
|
"eval_accuracy": 0.7284053903950559, |
|
"eval_loss": 1.210018277168274, |
|
"eval_runtime": 142.872, |
|
"eval_samples_per_second": 844.854, |
|
"eval_steps_per_second": 5.284, |
|
"step": 320000 |
|
}, |
|
{ |
|
"epoch": 22.51650976535057, |
|
"grad_norm": 1.0945639610290527, |
|
"learning_rate": 1.0221194159317624e-05, |
|
"loss": 1.4366, |
|
"step": 320500 |
|
}, |
|
{ |
|
"epoch": 22.55163692567093, |
|
"grad_norm": 1.1207478046417236, |
|
"learning_rate": 1.0076622813358393e-05, |
|
"loss": 1.4399, |
|
"step": 321000 |
|
}, |
|
{ |
|
"epoch": 22.586764085991287, |
|
"grad_norm": 1.0734148025512695, |
|
"learning_rate": 9.932051467399162e-06, |
|
"loss": 1.4385, |
|
"step": 321500 |
|
}, |
|
{ |
|
"epoch": 22.621891246311648, |
|
"grad_norm": 1.0629650354385376, |
|
"learning_rate": 9.787480121439931e-06, |
|
"loss": 1.4352, |
|
"step": 322000 |
|
}, |
|
{ |
|
"epoch": 22.65701840663201, |
|
"grad_norm": 1.0973799228668213, |
|
"learning_rate": 9.6429087754807e-06, |
|
"loss": 1.4377, |
|
"step": 322500 |
|
}, |
|
{ |
|
"epoch": 22.692145566952366, |
|
"grad_norm": 1.0496876239776611, |
|
"learning_rate": 9.49833742952147e-06, |
|
"loss": 1.4372, |
|
"step": 323000 |
|
}, |
|
{ |
|
"epoch": 22.727272727272727, |
|
"grad_norm": 1.0563302040100098, |
|
"learning_rate": 9.353766083562238e-06, |
|
"loss": 1.4352, |
|
"step": 323500 |
|
}, |
|
{ |
|
"epoch": 22.762399887593087, |
|
"grad_norm": 1.1020653247833252, |
|
"learning_rate": 9.209194737603008e-06, |
|
"loss": 1.4331, |
|
"step": 324000 |
|
}, |
|
{ |
|
"epoch": 22.797527047913448, |
|
"grad_norm": 1.0854381322860718, |
|
"learning_rate": 9.064623391643777e-06, |
|
"loss": 1.4371, |
|
"step": 324500 |
|
}, |
|
{ |
|
"epoch": 22.832654208233805, |
|
"grad_norm": 1.0770697593688965, |
|
"learning_rate": 8.920052045684546e-06, |
|
"loss": 1.4361, |
|
"step": 325000 |
|
}, |
|
{ |
|
"epoch": 22.867781368554166, |
|
"grad_norm": 1.0986183881759644, |
|
"learning_rate": 8.775480699725315e-06, |
|
"loss": 1.4354, |
|
"step": 325500 |
|
}, |
|
{ |
|
"epoch": 22.902908528874526, |
|
"grad_norm": 1.1258665323257446, |
|
"learning_rate": 8.630909353766084e-06, |
|
"loss": 1.4376, |
|
"step": 326000 |
|
}, |
|
{ |
|
"epoch": 22.938035689194887, |
|
"grad_norm": 1.0938217639923096, |
|
"learning_rate": 8.486338007806853e-06, |
|
"loss": 1.4346, |
|
"step": 326500 |
|
}, |
|
{ |
|
"epoch": 22.973162849515244, |
|
"grad_norm": 1.1074005365371704, |
|
"learning_rate": 8.341766661847622e-06, |
|
"loss": 1.4306, |
|
"step": 327000 |
|
}, |
|
{ |
|
"epoch": 23.008290009835605, |
|
"grad_norm": 1.0893038511276245, |
|
"learning_rate": 8.197195315888391e-06, |
|
"loss": 1.4346, |
|
"step": 327500 |
|
}, |
|
{ |
|
"epoch": 23.043417170155966, |
|
"grad_norm": 1.1042568683624268, |
|
"learning_rate": 8.05262396992916e-06, |
|
"loss": 1.4335, |
|
"step": 328000 |
|
}, |
|
{ |
|
"epoch": 23.078544330476323, |
|
"grad_norm": 1.0980304479599, |
|
"learning_rate": 7.90805262396993e-06, |
|
"loss": 1.43, |
|
"step": 328500 |
|
}, |
|
{ |
|
"epoch": 23.113671490796683, |
|
"grad_norm": 1.0918561220169067, |
|
"learning_rate": 7.763481278010698e-06, |
|
"loss": 1.4333, |
|
"step": 329000 |
|
}, |
|
{ |
|
"epoch": 23.148798651117044, |
|
"grad_norm": 1.1151840686798096, |
|
"learning_rate": 7.618909932051468e-06, |
|
"loss": 1.4346, |
|
"step": 329500 |
|
}, |
|
{ |
|
"epoch": 23.183925811437405, |
|
"grad_norm": 1.0970309972763062, |
|
"learning_rate": 7.474338586092237e-06, |
|
"loss": 1.4327, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 23.183925811437405, |
|
"eval_accuracy": 0.7289820707456471, |
|
"eval_loss": 1.2067663669586182, |
|
"eval_runtime": 141.4523, |
|
"eval_samples_per_second": 853.333, |
|
"eval_steps_per_second": 5.337, |
|
"step": 330000 |
|
}, |
|
{ |
|
"epoch": 23.219052971757762, |
|
"grad_norm": 1.1229480504989624, |
|
"learning_rate": 7.3297672401330065e-06, |
|
"loss": 1.429, |
|
"step": 330500 |
|
}, |
|
{ |
|
"epoch": 23.254180132078123, |
|
"grad_norm": 1.103352665901184, |
|
"learning_rate": 7.1851958941737756e-06, |
|
"loss": 1.4332, |
|
"step": 331000 |
|
}, |
|
{ |
|
"epoch": 23.289307292398483, |
|
"grad_norm": 1.0836817026138306, |
|
"learning_rate": 7.040624548214545e-06, |
|
"loss": 1.4331, |
|
"step": 331500 |
|
}, |
|
{ |
|
"epoch": 23.324434452718844, |
|
"grad_norm": 1.07485830783844, |
|
"learning_rate": 6.896053202255314e-06, |
|
"loss": 1.4351, |
|
"step": 332000 |
|
}, |
|
{ |
|
"epoch": 23.3595616130392, |
|
"grad_norm": 1.0870476961135864, |
|
"learning_rate": 6.751481856296083e-06, |
|
"loss": 1.4328, |
|
"step": 332500 |
|
}, |
|
{ |
|
"epoch": 23.39468877335956, |
|
"grad_norm": 1.0797423124313354, |
|
"learning_rate": 6.606910510336851e-06, |
|
"loss": 1.4297, |
|
"step": 333000 |
|
}, |
|
{ |
|
"epoch": 23.429815933679922, |
|
"grad_norm": 1.072199821472168, |
|
"learning_rate": 6.46233916437762e-06, |
|
"loss": 1.431, |
|
"step": 333500 |
|
}, |
|
{ |
|
"epoch": 23.46494309400028, |
|
"grad_norm": 1.0913589000701904, |
|
"learning_rate": 6.317767818418389e-06, |
|
"loss": 1.4317, |
|
"step": 334000 |
|
}, |
|
{ |
|
"epoch": 23.50007025432064, |
|
"grad_norm": 1.1012929677963257, |
|
"learning_rate": 6.173196472459159e-06, |
|
"loss": 1.4333, |
|
"step": 334500 |
|
}, |
|
{ |
|
"epoch": 23.535197414641, |
|
"grad_norm": 1.083678960800171, |
|
"learning_rate": 6.028625126499928e-06, |
|
"loss": 1.4331, |
|
"step": 335000 |
|
}, |
|
{ |
|
"epoch": 23.57032457496136, |
|
"grad_norm": 1.1238749027252197, |
|
"learning_rate": 5.884053780540697e-06, |
|
"loss": 1.4321, |
|
"step": 335500 |
|
}, |
|
{ |
|
"epoch": 23.60545173528172, |
|
"grad_norm": 1.0786747932434082, |
|
"learning_rate": 5.739482434581466e-06, |
|
"loss": 1.4292, |
|
"step": 336000 |
|
}, |
|
{ |
|
"epoch": 23.64057889560208, |
|
"grad_norm": 1.1055099964141846, |
|
"learning_rate": 5.5949110886222355e-06, |
|
"loss": 1.4271, |
|
"step": 336500 |
|
}, |
|
{ |
|
"epoch": 23.67570605592244, |
|
"grad_norm": 1.1266461610794067, |
|
"learning_rate": 5.4503397426630046e-06, |
|
"loss": 1.4317, |
|
"step": 337000 |
|
}, |
|
{ |
|
"epoch": 23.7108332162428, |
|
"grad_norm": 1.0842081308364868, |
|
"learning_rate": 5.305768396703774e-06, |
|
"loss": 1.4254, |
|
"step": 337500 |
|
}, |
|
{ |
|
"epoch": 23.745960376563158, |
|
"grad_norm": 1.0907618999481201, |
|
"learning_rate": 5.161197050744543e-06, |
|
"loss": 1.4276, |
|
"step": 338000 |
|
}, |
|
{ |
|
"epoch": 23.78108753688352, |
|
"grad_norm": 1.0893789529800415, |
|
"learning_rate": 5.016625704785312e-06, |
|
"loss": 1.4274, |
|
"step": 338500 |
|
}, |
|
{ |
|
"epoch": 23.81621469720388, |
|
"grad_norm": 1.108290195465088, |
|
"learning_rate": 4.872054358826081e-06, |
|
"loss": 1.4303, |
|
"step": 339000 |
|
}, |
|
{ |
|
"epoch": 23.851341857524236, |
|
"grad_norm": 1.1075944900512695, |
|
"learning_rate": 4.72748301286685e-06, |
|
"loss": 1.43, |
|
"step": 339500 |
|
}, |
|
{ |
|
"epoch": 23.886469017844597, |
|
"grad_norm": 1.0970890522003174, |
|
"learning_rate": 4.582911666907619e-06, |
|
"loss": 1.4309, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 23.886469017844597, |
|
"eval_accuracy": 0.7294783881654981, |
|
"eval_loss": 1.2039755582809448, |
|
"eval_runtime": 142.3189, |
|
"eval_samples_per_second": 848.137, |
|
"eval_steps_per_second": 5.305, |
|
"step": 340000 |
|
}, |
|
{ |
|
"epoch": 23.921596178164958, |
|
"grad_norm": 1.102753758430481, |
|
"learning_rate": 4.438340320948388e-06, |
|
"loss": 1.4285, |
|
"step": 340500 |
|
}, |
|
{ |
|
"epoch": 23.956723338485318, |
|
"grad_norm": 1.0833710432052612, |
|
"learning_rate": 4.293768974989157e-06, |
|
"loss": 1.4296, |
|
"step": 341000 |
|
}, |
|
{ |
|
"epoch": 23.991850498805675, |
|
"grad_norm": 1.0727189779281616, |
|
"learning_rate": 4.149197629029926e-06, |
|
"loss": 1.4298, |
|
"step": 341500 |
|
}, |
|
{ |
|
"epoch": 24.026977659126036, |
|
"grad_norm": 1.0745258331298828, |
|
"learning_rate": 4.004626283070695e-06, |
|
"loss": 1.4306, |
|
"step": 342000 |
|
}, |
|
{ |
|
"epoch": 24.062104819446397, |
|
"grad_norm": 1.1169886589050293, |
|
"learning_rate": 3.8600549371114645e-06, |
|
"loss": 1.4281, |
|
"step": 342500 |
|
}, |
|
{ |
|
"epoch": 24.097231979766757, |
|
"grad_norm": 1.1066441535949707, |
|
"learning_rate": 3.7154835911522336e-06, |
|
"loss": 1.4271, |
|
"step": 343000 |
|
}, |
|
{ |
|
"epoch": 24.132359140087114, |
|
"grad_norm": 1.0809811353683472, |
|
"learning_rate": 3.5709122451930026e-06, |
|
"loss": 1.4288, |
|
"step": 343500 |
|
}, |
|
{ |
|
"epoch": 24.167486300407475, |
|
"grad_norm": 1.0997700691223145, |
|
"learning_rate": 3.426340899233772e-06, |
|
"loss": 1.4311, |
|
"step": 344000 |
|
}, |
|
{ |
|
"epoch": 24.202613460727836, |
|
"grad_norm": 1.0773580074310303, |
|
"learning_rate": 3.2817695532745412e-06, |
|
"loss": 1.4286, |
|
"step": 344500 |
|
}, |
|
{ |
|
"epoch": 24.237740621048193, |
|
"grad_norm": 1.1262527704238892, |
|
"learning_rate": 3.1371982073153103e-06, |
|
"loss": 1.4309, |
|
"step": 345000 |
|
}, |
|
{ |
|
"epoch": 24.272867781368554, |
|
"grad_norm": 1.123502254486084, |
|
"learning_rate": 2.9926268613560794e-06, |
|
"loss": 1.4282, |
|
"step": 345500 |
|
}, |
|
{ |
|
"epoch": 24.307994941688914, |
|
"grad_norm": 1.1002156734466553, |
|
"learning_rate": 2.8480555153968485e-06, |
|
"loss": 1.4275, |
|
"step": 346000 |
|
}, |
|
{ |
|
"epoch": 24.343122102009275, |
|
"grad_norm": 1.07537043094635, |
|
"learning_rate": 2.7034841694376176e-06, |
|
"loss": 1.4253, |
|
"step": 346500 |
|
}, |
|
{ |
|
"epoch": 24.378249262329632, |
|
"grad_norm": 1.1036957502365112, |
|
"learning_rate": 2.5589128234783866e-06, |
|
"loss": 1.4279, |
|
"step": 347000 |
|
}, |
|
{ |
|
"epoch": 24.413376422649993, |
|
"grad_norm": 1.1081724166870117, |
|
"learning_rate": 2.414341477519156e-06, |
|
"loss": 1.427, |
|
"step": 347500 |
|
}, |
|
{ |
|
"epoch": 24.448503582970353, |
|
"grad_norm": 1.0847169160842896, |
|
"learning_rate": 2.269770131559925e-06, |
|
"loss": 1.4282, |
|
"step": 348000 |
|
}, |
|
{ |
|
"epoch": 24.483630743290714, |
|
"grad_norm": 1.0844043493270874, |
|
"learning_rate": 2.125198785600694e-06, |
|
"loss": 1.4258, |
|
"step": 348500 |
|
}, |
|
{ |
|
"epoch": 24.51875790361107, |
|
"grad_norm": 1.0586968660354614, |
|
"learning_rate": 1.980627439641463e-06, |
|
"loss": 1.4246, |
|
"step": 349000 |
|
}, |
|
{ |
|
"epoch": 24.553885063931432, |
|
"grad_norm": 1.0943015813827515, |
|
"learning_rate": 1.8360560936822323e-06, |
|
"loss": 1.425, |
|
"step": 349500 |
|
}, |
|
{ |
|
"epoch": 24.589012224251793, |
|
"grad_norm": 1.080601453781128, |
|
"learning_rate": 1.6914847477230013e-06, |
|
"loss": 1.4281, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 24.589012224251793, |
|
"eval_accuracy": 0.7300085771105469, |
|
"eval_loss": 1.2004581689834595, |
|
"eval_runtime": 142.3807, |
|
"eval_samples_per_second": 847.769, |
|
"eval_steps_per_second": 5.303, |
|
"step": 350000 |
|
}, |
|
{ |
|
"epoch": 24.62413938457215, |
|
"grad_norm": 1.1096524000167847, |
|
"learning_rate": 1.5469134017637704e-06, |
|
"loss": 1.4259, |
|
"step": 350500 |
|
}, |
|
{ |
|
"epoch": 24.65926654489251, |
|
"grad_norm": 1.1088886260986328, |
|
"learning_rate": 1.4023420558045395e-06, |
|
"loss": 1.4247, |
|
"step": 351000 |
|
}, |
|
{ |
|
"epoch": 24.69439370521287, |
|
"grad_norm": 1.0978398323059082, |
|
"learning_rate": 1.2577707098453088e-06, |
|
"loss": 1.4257, |
|
"step": 351500 |
|
}, |
|
{ |
|
"epoch": 24.72952086553323, |
|
"grad_norm": 1.131113886833191, |
|
"learning_rate": 1.1131993638860779e-06, |
|
"loss": 1.4275, |
|
"step": 352000 |
|
}, |
|
{ |
|
"epoch": 24.76464802585359, |
|
"grad_norm": 1.1421183347702026, |
|
"learning_rate": 9.68628017926847e-07, |
|
"loss": 1.4258, |
|
"step": 352500 |
|
}, |
|
{ |
|
"epoch": 24.79977518617395, |
|
"grad_norm": 1.0884921550750732, |
|
"learning_rate": 8.240566719676161e-07, |
|
"loss": 1.4264, |
|
"step": 353000 |
|
}, |
|
{ |
|
"epoch": 24.83490234649431, |
|
"grad_norm": 1.0992377996444702, |
|
"learning_rate": 6.794853260083851e-07, |
|
"loss": 1.4257, |
|
"step": 353500 |
|
}, |
|
{ |
|
"epoch": 24.870029506814667, |
|
"grad_norm": 1.0836498737335205, |
|
"learning_rate": 5.349139800491543e-07, |
|
"loss": 1.4275, |
|
"step": 354000 |
|
}, |
|
{ |
|
"epoch": 24.905156667135028, |
|
"grad_norm": 1.1038559675216675, |
|
"learning_rate": 3.903426340899234e-07, |
|
"loss": 1.4262, |
|
"step": 354500 |
|
}, |
|
{ |
|
"epoch": 24.94028382745539, |
|
"grad_norm": 1.1141635179519653, |
|
"learning_rate": 2.457712881306925e-07, |
|
"loss": 1.4226, |
|
"step": 355000 |
|
}, |
|
{ |
|
"epoch": 24.97541098777575, |
|
"grad_norm": 1.0651347637176514, |
|
"learning_rate": 1.0119994217146162e-07, |
|
"loss": 1.4269, |
|
"step": 355500 |
|
}, |
|
{ |
|
"epoch": 25.0, |
|
"step": 355850, |
|
"total_flos": 7.547177208649421e+18, |
|
"train_loss": 1.849292092869351, |
|
"train_runtime": 122993.5623, |
|
"train_samples_per_second": 462.899, |
|
"train_steps_per_second": 2.893 |
|
} |
|
], |
|
"logging_steps": 500, |
|
"max_steps": 355850, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 25, |
|
"save_steps": 10000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 7.547177208649421e+18, |
|
"train_batch_size": 160, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |