[additional_network_arguments]
unet_lr = 0.0006
text_encoder_lr = 0.0003
network_dim = 128
network_alpha = 1
network_module = "networks.lora"

[optimizer_arguments]
learning_rate = 0.0006
lr_scheduler = "cosine_with_restarts"
lr_scheduler_num_cycles = 3
lr_warmup_steps = 49
optimizer_type = "AdamW8bit"

[training_arguments]
max_train_epochs = 15
save_every_n_epochs = 1
save_last_n_epochs = 15
train_batch_size = 6
clip_skip = 2
min_snr_gamma = 5.0
weighted_captions = false
seed = 42
max_token_length = 225
xformers = true
lowram = true
max_data_loader_n_workers = 8
persistent_data_loader_workers = true
save_precision = "fp16"
mixed_precision = "fp16"
output_dir = "/content/drive/MyDrive/Loras/SON_BITEN_V1/output"
logging_dir = "/content/drive/MyDrive/Loras/_logs"
output_name = "SON_BITEN_V1"
log_prefix = "SON_BITEN_V1"
save_state = false

[model_arguments]
pretrained_model_name_or_path = "/content/AnyLoRA_noVae_fp16-pruned.ckpt"
v2 = false

[saving_arguments]
save_model_as = "safetensors"

[dreambooth_arguments]
prior_loss_weight = 1.0

[dataset_arguments]
cache_latents = true
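A minimal sketch of sanity-checking this config before launching a run, assuming it is saved under a hypothetical name such as SON_BITEN_V1.toml and that Python 3.11+ is available (for the standard-library tomllib). Trainers built on kohya-ss sd-scripts generally accept a TOML like this through a config-file argument, so catching a typo in the section or key names here is cheaper than after several epochs of training.

# sanity_check_config.py -- load the LoRA training TOML and print key settings.
# Assumptions: Python 3.11+ (tomllib), file name "SON_BITEN_V1.toml" (hypothetical).
import tomllib

with open("SON_BITEN_V1.toml", "rb") as f:          # tomllib requires binary mode
    config = tomllib.load(f)

net = config["additional_network_arguments"]
train = config["training_arguments"]

# Confirm the network module and report the learning rates and LoRA size.
assert net["network_module"] == "networks.lora"
print(f"UNet LR: {net['unet_lr']}, text encoder LR: {net['text_encoder_lr']}")
print(f"dim/alpha: {net['network_dim']}/{net['network_alpha']}")
print(f"Epochs: {train['max_train_epochs']}, batch size: {train['train_batch_size']}")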