lm1-4b2-84b-c4-repetitions/4b284b12bc4/evaluation/generation/slim.4b284b12bc4_GEM-web_nlg_en_PALM_prompt_1.json
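Slim-format results for the 4b284b12bc4 checkpoint evaluated 1-shot on GEM/web_nlg (en) with the PALM_prompt template: BLEU plus ROUGE-1/-2/-L/-Lsum precision, recall, and F-measure, each with a bootstrap standard error.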
{
  "results": [
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "bleu": 0.41914858834195134,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "bleu_stderr": 0.030279335876129
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rouge1_precision": 0.07536633674836868,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rouge1_precision_stderr": 0.001620641410096321
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rouge1_recall": 0.3290768382699901,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rouge1_recall_stderr": 0.00481767508183653
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rouge1_fmeasure": 0.11424698089656772,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rouge1_fmeasure_stderr": 0.001973221738343803
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rouge2_precision": 0.03540467062379218,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rouge2_precision_stderr": 0.001074817084017668
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rouge2_recall": 0.16089821041540717,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rouge2_recall_stderr": 0.0033011630774406127
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rouge2_fmeasure": 0.05368591058094131,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rouge2_fmeasure_stderr": 0.0012551880063213156
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rougeL_precision": 0.07231503158237214,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rougeL_precision_stderr": 0.0015163361416883465
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rougeL_recall": 0.3189205930522712,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rougeL_recall_stderr": 0.004694857387684187
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rougeL_fmeasure": 0.10991123942051419,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rougeL_fmeasure_stderr": 0.0018557651460448018
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rougeLsum_precision": 0.07148579673935408,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rougeLsum_precision_stderr": 0.0015357817111525064
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rougeLsum_recall": 0.3110112645350247,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rougeLsum_recall_stderr": 0.00441643475943137
    },
    {
      "task_name": "GEM/web_nlg_en",
      "prompt_name": "PALM_prompt",
      "rougeLsum_fmeasure": 0.1082043807305256,
      "dataset_path": "GEM/web_nlg",
      "dataset_name": "en",
      "subset": null,
      "rougeLsum_fmeasure_stderr": 0.0018480349337665876
    }
  ],
  "config": {
    "model": "hf-causal",
    "model_args": "pretrained=/pfs/lustrep4/scratch/project_462000119/muennighoff/nov-2022-bettercom/lm1-4b2-84b-c4-repetitions/4b284b12bc4/transformers,use_accelerate=True,tokenizer=/pfs/lustrep4/scratch/project_462000119/muennighoff/nov-2022-bettercom/gpt2,dtype=bfloat16",
    "task_args": "",
    "num_fewshot": 1,
    "batch_size": 16,
    "device": "cuda",
    "use_cache": false,
    "limit": 3000,
    "bootstrap_iters": 10,
    "seed": 1234
  }
}
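Each entry in "results" pairs one metric value with a matching "*_stderr" field. Below is a minimal, self-contained sketch (Python stdlib only) of how such a slim file can be read and each metric reported with a normal-approximation 95% interval; the file name is the one shown above, everything else is generic:

# Sketch: load a slim results file and print each metric with an
# approximate 95% confidence interval derived from its bootstrap stderr.
import json

path = "slim.4b284b12bc4_GEM-web_nlg_en_PALM_prompt_1.json"
metadata_keys = {"task_name", "prompt_name", "dataset_path", "dataset_name", "subset"}

with open(path) as f:
    data = json.load(f)

for entry in data["results"]:
    for key, value in entry.items():
        if key in metadata_keys or key.endswith("_stderr"):
            continue
        stderr = entry.get(f"{key}_stderr")
        if stderr is None:
            print(f"{key}: {value:.4f}")
        else:
            # Normal approximation: value +/- 1.96 * stderr. Note that with
            # only 10 bootstrap iterations (see "config"), the stderr
            # estimates themselves are quite noisy.
            print(f"{key}: {value:.4f} +/- {1.96 * stderr:.4f} (95% CI)")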
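The "config" block records how the run was launched. Its keys line up with the arguments of evaluator.simple_evaluate in the EleutherAI lm-evaluation-harness of that era; these files were produced with a fork that adds prompt-template support, so the exact entry point, the task identifier spelling, and the handling of "seed" and "task_args" below are assumptions rather than a confirmed reproduction:

# Hedged sketch of how the "config" values might map onto an
# lm-evaluation-harness run. Argument names follow the upstream
# evaluator.simple_evaluate API; the fork actually used may differ.
from lm_eval import evaluator

results = evaluator.simple_evaluate(
    model="hf-causal",
    model_args=(
        "pretrained=/pfs/lustrep4/scratch/project_462000119/muennighoff/"
        "nov-2022-bettercom/lm1-4b2-84b-c4-repetitions/4b284b12bc4/transformers,"
        "use_accelerate=True,"
        "tokenizer=/pfs/lustrep4/scratch/project_462000119/muennighoff/"
        "nov-2022-bettercom/gpt2,"
        "dtype=bfloat16"
    ),
    tasks=["GEM/web_nlg_en"],  # assumption: identifier as recorded under "task_name"
    num_fewshot=1,
    batch_size=16,
    device="cuda",
    no_cache=True,     # corresponds to "use_cache": false
    limit=3000,        # evaluate at most 3000 examples per task
    bootstrap_iters=10,  # stderr computed from only 10 bootstrap resamples
    # "seed": 1234 and "task_args" are recorded in the config but are not
    # upstream simple_evaluate arguments; they presumably come from the fork.
)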