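# LoRA training configuration (TOML). The section layout and the
# LoraEasyCustomOptimizer scheduler path suggest this file was written by
# LoRA_Easy_Training_Scripts on top of the kohya-ss sd-scripts trainer; the
# comments below assume that tool's semantics.

# Dataset subset: one image folder with per-image .txt captions. Each image is
# repeated 4 times per epoch, caption tags are shuffled every step (no leading
# tokens kept), the whole caption is dropped ~4% of the time, and random
# cropping is enabled. This is a training set, not a regularization set.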
[[subsets]]
num_repeats = 4
keep_tokens = 0
caption_extension = ".txt"
shuffle_caption = true
flip_aug = false
color_aug = false
random_crop = true
is_reg = false
image_dir = "F:/Desktop/taitai"
caption_dropout_rate = 0.04
caption_dropout_every_n_epochs = 0
caption_tag_dropout_rate = 0.0

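# Sample image generation and logging are left unconfigured.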
[sample_args]

[logging_args]

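# Core trainer settings: base checkpoint, bf16 mixed precision, fixed seed,
# clip_skip 2 (common for anime-style SD1.x checkpoints), captions up to 225
# tokens, xformers attention, and a hard cap of 2000 training steps.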
[general_args.args]
pretrained_model_name_or_path = "F:/Desktop/stable diffusion/LoRA/AnimeFullFinal.safetensors"
mixed_precision = "bf16"
seed = 23
clip_skip = 2
max_data_loader_n_workers = 1
persistent_data_loader_workers = true
max_token_length = 225
prior_loss_weight = 1.0
xformers = true
max_train_steps = 2000

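# Base training resolution and batch size; the aspect-ratio buckets configured
# further down are built around this 896-px target.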
[general_args.dataset_args]
resolution = 896
batch_size = 8

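# LoRA network size: rank (network_dim) 4 with alpha 1, trained over the full
# 0-1000 timestep range, with 50% network dropout.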
[network_args.args]
network_dim = 4
network_alpha = 1.0
min_timestep = 0
max_timestep = 1000
network_dropout = 0.5

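# Optimizer and schedule: 8-bit AdamW with a custom cosine-annealing-with-warm-
# restarts scheduler running 4 cycles and a 10% warmup. unet_lr (5e-4) overrides
# the base learning_rate for the UNet; with no text_encoder_lr set, the text
# encoder trains at the base 1e-4. min_snr_gamma enables Min-SNR loss weighting
# and scale_weight_norms caps LoRA weight norms at 5.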
[optimizer_args.args]
optimizer_type = "AdamW8bit"
lr_scheduler = "cosine"
learning_rate = 0.0001
max_grad_norm = 1.0
lr_scheduler_type = "LoraEasyCustomOptimizer.CustomOptimizers.CosineAnnealingWarmupRestarts"
lr_scheduler_num_cycles = 4
unet_lr = 0.0005
warmup_ratio = 0.1
min_snr_gamma = 8
scale_weight_norms = 5.0

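# Output: fp16 .safetensors checkpoints named "taitai", saved every epoch.
# tag_occurrence and save_toml additionally write a tag-frequency list and a
# copy of this config next to the model (Easy Training Scripts options).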
[saving_args.args]
output_dir = "F:/stable_diffusion_models_and_outputs/models/Lora/v1/Styles/taitai"
save_precision = "fp16"
save_model_as = "safetensors"
output_name = "taitai"
tag_occurrence = true
save_toml = true
save_every_n_epochs = 1

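# Aspect-ratio bucketing: images are sorted into buckets between 256 and 1024 px
# in 64-px steps instead of being cropped to a fixed square.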
[bucket_args.dataset_args]
enable_bucket = true
min_bucket_reso = 256
max_bucket_reso = 1024
bucket_reso_steps = 64

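# Multi-resolution (pyramid) noise: 6 iterations with a 0.3 discount factor.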
[noise_args.args]
multires_noise_iterations = 6
multires_noise_discount = 0.3

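# Extra args passed to the network module: convolution layers are trained as
# well (LoCon-style) at dim 16 / alpha 8, with 25% module dropout.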
[network_args.args.network_args]
module_dropout = 0.25
conv_dim = 16
conv_alpha = 8.0

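# Arguments forwarded to the CosineAnnealingWarmupRestarts scheduler: a 1e-6 LR
# floor and, assuming the usual implementation, a gamma of 0.85 that scales the
# peak LR down on each restart cycle.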
[optimizer_args.args.lr_scheduler_args]
min_lr = 1e-6
gamma = 0.85

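# Extra arguments forwarded to AdamW8bit; values are kept as strings because the
# trainer passes them through as key=value optimizer_args.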
[optimizer_args.args.optimizer_args]
weight_decay = "0.1"
betas = "0.9,0.99"