```json
{
"model": "meta-llama/Meta-Llama-3-8B-Instruct",
"project_name": "autotrain-llama3-no-robots",
"data_path": "HuggingFaceH4/no_robots",
"train_split": "train",
"valid_split": null,
"add_eos_token": false,
"block_size": 1024,
"model_max_length": 2048,
"padding": "right",
"trainer": "sft",
"use_flash_attention_2": false,
"log": "none",
"disable_gradient_checkpointing": false,
"logging_steps": -1,
"evaluation_strategy": "epoch",
"save_total_limit": 1,
"save_strategy": "epoch",
"auto_find_batch_size": false,
"mixed_precision": "bf16",
"lr": 2e-05,
"epochs": 3,
"batch_size": 4,
"warmup_ratio": 0.1,
"gradient_accumulation": 1,
"optimizer": "adamw_torch",
"scheduler": "linear",
"weight_decay": 0.0,
"max_grad_norm": 1.0,
"seed": 42,
"chat_template": "zephyr",
"quantization": null,
"target_modules": null,
"merge_adapter": false,
"peft": false,
"lora_r": 16,
"lora_alpha": 32,
"lora_dropout": 0.05,
"model_ref": null,
"dpo_beta": 0.1,
"prompt_text_column": "prompt",
"text_column": "messages",
"rejected_text_column": "rejected",
"push_to_hub": true,
"repo_id": "abhishek/autotrain-llama3-no-robots",
"username": "abhishek"
}
```
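
As a minimal sketch, here is how these hyperparameters map onto `transformers.TrainingArguments`, which can be useful for cross-checking the config against a plain Hugging Face training setup. This is an illustration, not AutoTrain's internals: the filename `config.json` is an assumption, and model loading, tokenization with the `zephyr` chat template, and the SFT trainer itself are omitted.

```python
import json

from transformers import TrainingArguments

# Load the config shown above (saved locally as config.json; name assumed).
with open("config.json") as f:
    cfg = json.load(f)

args = TrainingArguments(
    output_dir=cfg["project_name"],
    per_device_train_batch_size=cfg["batch_size"],
    gradient_accumulation_steps=cfg["gradient_accumulation"],
    learning_rate=cfg["lr"],
    num_train_epochs=cfg["epochs"],
    warmup_ratio=cfg["warmup_ratio"],
    optim=cfg["optimizer"],              # "adamw_torch"
    lr_scheduler_type=cfg["scheduler"],  # "linear"
    weight_decay=cfg["weight_decay"],
    max_grad_norm=cfg["max_grad_norm"],
    seed=cfg["seed"],
    bf16=cfg["mixed_precision"] == "bf16",
    # "evaluation_strategy" was renamed to "eval_strategy" in newer
    # transformers releases; also note valid_split is null above, so an
    # actual run would need an eval set for this setting to take effect.
    evaluation_strategy=cfg["evaluation_strategy"],
    save_strategy=cfg["save_strategy"],
    save_total_limit=cfg["save_total_limit"],
    gradient_checkpointing=not cfg["disable_gradient_checkpointing"],
    push_to_hub=cfg["push_to_hub"],
    hub_model_id=cfg["repo_id"],
    report_to="none",  # matches "log": "none"
    # cfg["logging_steps"] is -1, an AutoTrain-level sentinel for "decide
    # automatically"; TrainingArguments needs a positive value, so it is
    # left at its default here.
)
```

Note that with `"trainer": "sft"` and `"peft": false`, the `lora_*` values and the DPO-specific fields (`model_ref`, `dpo_beta`, `rejected_text_column`) are carried in the config but have no effect on this run; setting `"peft": true` would activate the LoRA rank, alpha, and dropout settings.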