{"base_model": "/run/determined/workdir/model/llama-7b", "data_path": "/run/determined/workdir/data/ChatAlpaca/chatalpaca_data_20k_new", "output_dir": "/run/determined/workdir/output/test_eos_chatalpaca_20k", "batch_size": 64, "micro_batch_size": 8, "num_epochs": 3, "learning_rate": 0.0003, "cutoff_len": 512, "lora_r": 16, "lora_alpha": 16, "lora_dropout": 0.05, "lora_target_modules": ["q_proj", "k_proj", "v_proj", "o_proj"], "train_on_inputs": false, "group_by_length": false, "wandb_project": "chatalpaca_eos", "wandb_run_name": "chatalpaca_eos_test", "wandb_watch": "", "wandb_log_model": "", "resume_from_checkpoint": null, "prompt_style": "vicuna", "save_steps": 200, "local_rank": 0} |