ELECTRA-Marian-Model-on-DJANGO / trainer_state.json
{
"best_metric": 0.27424004673957825,
"best_model_checkpoint": "./ELECTRA-marian-training1/checkpoint-20000",
"epoch": 10.0,
"global_step": 20000,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.25,
"learning_rate": 4.9900000000000005e-06,
"loss": 0.2687,
"step": 500
},
{
"epoch": 0.5,
"learning_rate": 9.990000000000001e-06,
"loss": 0.277,
"step": 1000
},
{
"epoch": 0.75,
"learning_rate": 9.737368421052633e-06,
"loss": 0.2836,
"step": 1500
},
{
"epoch": 1.0,
"learning_rate": 9.474210526315791e-06,
"loss": 0.2768,
"step": 2000
},
{
"epoch": 1.0,
"eval_BLEU": 57.12999682789548,
"eval_BLEU-Bigram-Precision": 60.86708499714775,
"eval_BLEU-Trigram-Precision": 52.9979629793641,
"eval_BLEU-Unigram-Precision": 69.55014693693015,
"eval_ROUGE-2": 67.63182650261197,
"eval_ROUGE-L": 84.79524146842938,
"eval_Sacre-Bigram-Precision": 59.36794582392776,
"eval_Sacre-Trigram-Precision": 52.62858227367622,
"eval_Sacre-Unigram-Precision": 67.98210576769452,
"eval_SacreBLEU": 56.24327158648058,
"eval_loss": 0.32656434178352356,
"eval_runtime": 631.6153,
"eval_samples_per_second": 1.583,
"eval_steps_per_second": 0.396,
"step": 2000
},
{
"epoch": 1.25,
"learning_rate": 9.211578947368421e-06,
"loss": 0.2726,
"step": 2500
},
{
"epoch": 1.5,
"learning_rate": 8.94842105263158e-06,
"loss": 0.269,
"step": 3000
},
{
"epoch": 1.75,
"learning_rate": 8.685263157894738e-06,
"loss": 0.2435,
"step": 3500
},
{
"epoch": 2.0,
"learning_rate": 8.422105263157896e-06,
"loss": 0.2547,
"step": 4000
},
{
"epoch": 2.0,
"eval_BLEU": 63.708307948566365,
"eval_BLEU-Bigram-Precision": 67.31060268850709,
"eval_BLEU-Trigram-Precision": 59.51428850092656,
"eval_BLEU-Unigram-Precision": 76.10561595683805,
"eval_ROUGE-2": 68.87217612147542,
"eval_ROUGE-L": 85.55610984216905,
"eval_Sacre-Bigram-Precision": 66.05032405642395,
"eval_Sacre-Trigram-Precision": 59.367115222876365,
"eval_Sacre-Unigram-Precision": 74.86947441698572,
"eval_SacreBLEU": 63.05612776143653,
"eval_loss": 0.30988046526908875,
"eval_runtime": 630.4886,
"eval_samples_per_second": 1.586,
"eval_steps_per_second": 0.397,
"step": 4000
},
{
"epoch": 2.25,
"learning_rate": 8.159473684210528e-06,
"loss": 0.2381,
"step": 4500
},
{
"epoch": 2.5,
"learning_rate": 7.896315789473686e-06,
"loss": 0.232,
"step": 5000
},
{
"epoch": 2.75,
"learning_rate": 7.633157894736842e-06,
"loss": 0.2435,
"step": 5500
},
{
"epoch": 3.0,
"learning_rate": 7.370000000000001e-06,
"loss": 0.2296,
"step": 6000
},
{
"epoch": 3.0,
"eval_BLEU": 64.79809534146953,
"eval_BLEU-Bigram-Precision": 68.19514796054385,
"eval_BLEU-Trigram-Precision": 60.96563808040495,
"eval_BLEU-Unigram-Precision": 76.75671264180201,
"eval_ROUGE-2": 69.30039589621857,
"eval_ROUGE-L": 85.78830113401601,
"eval_Sacre-Bigram-Precision": 67.02522608281771,
"eval_Sacre-Trigram-Precision": 60.43044619422572,
"eval_Sacre-Unigram-Precision": 75.53237722729249,
"eval_SacreBLEU": 63.96756911093659,
"eval_loss": 0.29766443371772766,
"eval_runtime": 629.2959,
"eval_samples_per_second": 1.589,
"eval_steps_per_second": 0.397,
"step": 6000
},
{
"epoch": 3.25,
"learning_rate": 7.106842105263159e-06,
"loss": 0.2285,
"step": 6500
},
{
"epoch": 3.5,
"learning_rate": 6.843684210526317e-06,
"loss": 0.2196,
"step": 7000
},
{
"epoch": 3.75,
"learning_rate": 6.580526315789474e-06,
"loss": 0.2231,
"step": 7500
},
{
"epoch": 4.0,
"learning_rate": 6.317894736842106e-06,
"loss": 0.2001,
"step": 8000
},
{
"epoch": 4.0,
"eval_BLEU": 71.96063699911728,
"eval_BLEU-Bigram-Precision": 75.06338989662571,
"eval_BLEU-Trigram-Precision": 68.15829199913738,
"eval_BLEU-Unigram-Precision": 83.48142882530655,
"eval_ROUGE-2": 71.30024878570693,
"eval_ROUGE-L": 87.3933702803602,
"eval_Sacre-Bigram-Precision": 74.10648392198208,
"eval_Sacre-Trigram-Precision": 67.83068783068784,
"eval_Sacre-Unigram-Precision": 82.60371959942775,
"eval_SacreBLEU": 71.36004249317806,
"eval_loss": 0.2898695468902588,
"eval_runtime": 628.5892,
"eval_samples_per_second": 1.591,
"eval_steps_per_second": 0.398,
"step": 8000
},
{
"epoch": 4.25,
"learning_rate": 6.054736842105264e-06,
"loss": 0.2076,
"step": 8500
},
{
"epoch": 4.5,
"learning_rate": 5.791578947368422e-06,
"loss": 0.1981,
"step": 9000
},
{
"epoch": 4.75,
"learning_rate": 5.52842105263158e-06,
"loss": 0.2021,
"step": 9500
},
{
"epoch": 5.0,
"learning_rate": 5.265263157894738e-06,
"loss": 0.2024,
"step": 10000
},
{
"epoch": 5.0,
"eval_BLEU": 66.40826174420631,
"eval_BLEU-Bigram-Precision": 69.87874920229739,
"eval_BLEU-Trigram-Precision": 62.338572429672645,
"eval_BLEU-Unigram-Precision": 78.25215139109366,
"eval_ROUGE-2": 70.57079335392051,
"eval_ROUGE-L": 86.73765256866646,
"eval_Sacre-Bigram-Precision": 68.74877618954376,
"eval_Sacre-Trigram-Precision": 62.37816764132553,
"eval_Sacre-Unigram-Precision": 77.08221865525236,
"eval_SacreBLEU": 65.90571315873414,
"eval_loss": 0.2885148823261261,
"eval_runtime": 627.0,
"eval_samples_per_second": 1.595,
"eval_steps_per_second": 0.399,
"step": 10000
},
{
"epoch": 5.25,
"learning_rate": 5.002105263157895e-06,
"loss": 0.1887,
"step": 10500
},
{
"epoch": 5.5,
"learning_rate": 4.738947368421053e-06,
"loss": 0.1874,
"step": 11000
},
{
"epoch": 5.75,
"learning_rate": 4.475789473684211e-06,
"loss": 0.1992,
"step": 11500
},
{
"epoch": 6.0,
"learning_rate": 4.213157894736842e-06,
"loss": 0.1872,
"step": 12000
},
{
"epoch": 6.0,
"eval_BLEU": 68.61093978961019,
"eval_BLEU-Bigram-Precision": 71.91774448683354,
"eval_BLEU-Trigram-Precision": 64.64625780690079,
"eval_BLEU-Unigram-Precision": 80.26730228994637,
"eval_ROUGE-2": 70.81950260424799,
"eval_ROUGE-L": 86.8879332417248,
"eval_Sacre-Bigram-Precision": 70.91601523351373,
"eval_Sacre-Trigram-Precision": 64.70326739275394,
"eval_Sacre-Unigram-Precision": 79.22208052468574,
"eval_SacreBLEU": 68.17923527991039,
"eval_loss": 0.28207001090049744,
"eval_runtime": 624.7912,
"eval_samples_per_second": 1.601,
"eval_steps_per_second": 0.4,
"step": 12000
},
{
"epoch": 6.25,
"learning_rate": 3.95e-06,
"loss": 0.187,
"step": 12500
},
{
"epoch": 6.5,
"learning_rate": 3.686842105263158e-06,
"loss": 0.1883,
"step": 13000
},
{
"epoch": 6.75,
"learning_rate": 3.4236842105263162e-06,
"loss": 0.1763,
"step": 13500
},
{
"epoch": 7.0,
"learning_rate": 3.1610526315789474e-06,
"loss": 0.1744,
"step": 14000
},
{
"epoch": 7.0,
"eval_BLEU": 69.71037409859504,
"eval_BLEU-Bigram-Precision": 73.0115503803174,
"eval_BLEU-Trigram-Precision": 65.73585686213673,
"eval_BLEU-Unigram-Precision": 80.83097261567517,
"eval_ROUGE-2": 72.23412572401338,
"eval_ROUGE-L": 87.99795170784617,
"eval_Sacre-Bigram-Precision": 71.93000202306291,
"eval_Sacre-Trigram-Precision": 65.9443072086234,
"eval_Sacre-Unigram-Precision": 79.78137056770163,
"eval_SacreBLEU": 69.33649660546098,
"eval_loss": 0.2822955548763275,
"eval_runtime": 624.3473,
"eval_samples_per_second": 1.602,
"eval_steps_per_second": 0.4,
"step": 14000
},
{
"epoch": 7.25,
"learning_rate": 2.8978947368421055e-06,
"loss": 0.1737,
"step": 14500
},
{
"epoch": 7.5,
"learning_rate": 2.6347368421052633e-06,
"loss": 0.172,
"step": 15000
},
{
"epoch": 7.75,
"learning_rate": 2.371578947368421e-06,
"loss": 0.173,
"step": 15500
},
{
"epoch": 8.0,
"learning_rate": 2.108947368421053e-06,
"loss": 0.1723,
"step": 16000
},
{
"epoch": 8.0,
"eval_BLEU": 70.73541701126916,
"eval_BLEU-Bigram-Precision": 73.84659198784888,
"eval_BLEU-Trigram-Precision": 66.86204731002722,
"eval_BLEU-Unigram-Precision": 81.84498005895612,
"eval_ROUGE-2": 72.0862001759618,
"eval_ROUGE-L": 87.92745504885133,
"eval_Sacre-Bigram-Precision": 72.94695883678067,
"eval_Sacre-Trigram-Precision": 67.04985203733212,
"eval_Sacre-Unigram-Precision": 80.88426527958387,
"eval_SacreBLEU": 70.41870207567095,
"eval_loss": 0.27576586604118347,
"eval_runtime": 623.9988,
"eval_samples_per_second": 1.603,
"eval_steps_per_second": 0.401,
"step": 16000
},
{
"epoch": 8.25,
"learning_rate": 1.8457894736842108e-06,
"loss": 0.1719,
"step": 16500
},
{
"epoch": 8.5,
"learning_rate": 1.5831578947368423e-06,
"loss": 0.1689,
"step": 17000
},
{
"epoch": 8.75,
"learning_rate": 1.32e-06,
"loss": 0.1636,
"step": 17500
},
{
"epoch": 9.0,
"learning_rate": 1.0568421052631578e-06,
"loss": 0.1669,
"step": 18000
},
{
"epoch": 9.0,
"eval_BLEU": 70.48783846238173,
"eval_BLEU-Bigram-Precision": 73.69567275826154,
"eval_BLEU-Trigram-Precision": 66.59012629161883,
"eval_BLEU-Unigram-Precision": 81.4548914453767,
"eval_ROUGE-2": 72.0137632559393,
"eval_ROUGE-L": 87.87940419205542,
"eval_Sacre-Bigram-Precision": 72.72819989801121,
"eval_Sacre-Trigram-Precision": 66.90084985835693,
"eval_Sacre-Unigram-Precision": 80.49051365108745,
"eval_SacreBLEU": 70.22585210766724,
"eval_loss": 0.27468475699424744,
"eval_runtime": 623.7859,
"eval_samples_per_second": 1.603,
"eval_steps_per_second": 0.401,
"step": 18000
},
{
"epoch": 9.25,
"learning_rate": 7.936842105263158e-07,
"loss": 0.1676,
"step": 18500
},
{
"epoch": 9.5,
"learning_rate": 5.310526315789474e-07,
"loss": 0.1596,
"step": 19000
},
{
"epoch": 9.75,
"learning_rate": 2.678947368421053e-07,
"loss": 0.1682,
"step": 19500
},
{
"epoch": 10.0,
"learning_rate": 4.736842105263158e-09,
"loss": 0.1575,
"step": 20000
},
{
"epoch": 10.0,
"eval_BLEU": 70.32327217265255,
"eval_BLEU-Bigram-Precision": 73.60204371274483,
"eval_BLEU-Trigram-Precision": 66.35728438836166,
"eval_BLEU-Unigram-Precision": 81.4936468147636,
"eval_ROUGE-2": 71.64453167067346,
"eval_ROUGE-L": 87.60926086741465,
"eval_Sacre-Bigram-Precision": 72.7263458401305,
"eval_Sacre-Trigram-Precision": 66.82147711826008,
"eval_Sacre-Unigram-Precision": 80.53293856402665,
"eval_SacreBLEU": 70.1714765727047,
"eval_loss": 0.27424004673957825,
"eval_runtime": 624.9089,
"eval_samples_per_second": 1.6,
"eval_steps_per_second": 0.4,
"step": 20000
},
{
"epoch": 10.0,
"step": 20000,
"total_flos": 925520363520000.0,
"train_loss": 0.2068538417816162,
"train_runtime": 7846.7767,
"train_samples_per_second": 20.391,
"train_steps_per_second": 2.549
}
],
"max_steps": 20000,
"num_train_epochs": 10,
"total_flos": 925520363520000.0,
"trial_name": null,
"trial_params": null
}
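
For reference, a minimal sketch of how a trainer state like the one above can be inspected programmatically. It assumes the standard Hugging Face Trainer layout shown in this file (a "log_history" list mixing training-loss entries and per-epoch evaluation entries); the local file path is hypothetical.

import json

# Load the trainer state written by the Hugging Face Trainer
# (adjust the path to wherever this file is stored locally).
with open("trainer_state.json") as f:
    state = json.load(f)

# Training-loss entries carry "loss"; evaluation entries carry "eval_BLEU".
train_logs = [e for e in state["log_history"] if "loss" in e]
eval_logs = [e for e in state["log_history"] if "eval_BLEU" in e]

# Print the per-epoch evaluation metrics logged during training.
for e in eval_logs:
    print(f"epoch {e['epoch']:>4}: BLEU={e['eval_BLEU']:.2f}, "
          f"SacreBLEU={e['eval_SacreBLEU']:.2f}, "
          f"eval_loss={e['eval_loss']:.4f}")

# The best checkpoint according to the configured metric (eval_loss here).
print("best metric:", state["best_metric"],
      "at", state["best_model_checkpoint"])

Run against this file, the loop would report the ten evaluation rows (epochs 1 through 10) and the final line would point at checkpoint-20000, matching the "best_metric" and "best_model_checkpoint" fields above.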