Delta-Vector committed on
Commit
ee6333f
·
verified ·
1 Parent(s): 4ae740a

Upload sdprompterv3.yaml

Browse files
Files changed (1) hide show
  1. sdprompterv3.yaml +73 -0
sdprompterv3.yaml ADDED
@@ -0,0 +1,73 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ base_model: Delta-Vector/Holland-4B
2
+ model_type: AutoModelForCausalLM
3
+ tokenizer_type: AutoTokenizer
4
+
5
+ trust_remote_code: true
6
+
7
+ load_in_8bit: false
8
+ load_in_4bit: false
9
+ strict: false
10
+
11
+ datasets:
12
+ - path: NewEden/CivitAI-SD-Prompts
13
+ type: chatml
14
+ conversation: mpt-30b-instruct
15
+ chat_template: chatml
16
+
17
+ dataset_prepared_path:
18
+ val_set_size: 0.02
19
+ output_dir: ./outputs/out2
20
+ sequence_len: 8192
21
+ sample_packing: true
22
+ eval_sample_packing: false
23
+ pad_to_sequence_len: true
24
+
25
+ plugins:
26
+ - axolotl.integrations.liger.LigerPlugin
27
+ liger_rope: true
28
+ liger_rms_norm: true
29
+ liger_swiglu: true
30
+ liger_fused_linear_cross_entropy: true
31
+
32
+ wandb_project: SDprompterV3
33
+ wandb_entity:
34
+ wandb_watch:
35
+ wandb_name: SDprompterV3-attempt1
36
+ wandb_log_model:
37
+
38
+ hub_model_id: NewEden/SDprompterV3
39
+ hub_strategy: "all_checkpoints"
40
+ hf_use_auth_token: true
41
+
42
+ gradient_accumulation_steps: 2
43
+ micro_batch_size: 2
44
+ num_epochs: 4
45
+ optimizer: adamw_torch
46
+ lr_scheduler: cosine
47
+ learning_rate: 0.00001
48
+
49
+ train_on_inputs: false
50
+ group_by_length: false
51
+ bf16: auto
52
+ fp16:
53
+ tf32: true
54
+
55
+ gradient_checkpointing: true
56
+ gradient_checkpointing_kwargs:
57
+ use_reentrant: false
58
+ early_stopping_patience:
59
+ resume_from_checkpoint:
60
+ local_rank:
61
+ logging_steps: 1
62
+ xformers_attention:
63
+ flash_attention: true
64
+
65
+ warmup_ratio: 0.05
66
+ evals_per_epoch: 4
67
+ saves_per_epoch: 1
68
+ debug:
69
+ weight_decay: 0.05
70
+ deepspeed: /workspace/axolotl/deepspeed_configs/zero2.json
71
+ special_tokens:
72
+ pad_token: <|finetune_right_pad_id|>
73
+ eos_token: <|eot_id|>