trainSD21.json
{
"amp": true,
"batch_size": 6,
"ckpt_every_n_minutes": null,
"clip_grad_norm": null,
"clip_skip": 0,
"cond_dropout": 0.01,
"data_root": "/workspace/data/train",
"disable_textenc_training": true,
"disable_xformers": false,
"flip_p": 0.0,
"gpuid": 0,
"gradient_checkpointing": true,
"grad_accum": 1,
"logdir": "logs",
"log_step": 25,
"lowvram": false,
"max_epochs": 5,
"notebook": false,
"optimizer_config": "optimizerSD21.json",
"project_name": "sd2-finetuned-te-laion-pop-6144",
"resolution": 768,
"resume_ckpt": "ckpts/stable-diffusion-2-1-w-laion-pop-6144-te",
"sample_prompts": "sample_prompts.json",
"sample_steps": 300,
"save_ckpt_dir": null,
"save_ckpts_from_n_epochs": 0,
"save_every_n_epochs": 20,
"save_optimizer": false,
"scale_lr": false,
"seed": 555,
"shuffle_tags": false,
"validation_config": "validation_default.json",
"wandb": false,
"write_schedule": false,
"rated_dataset": false,
"rated_dataset_target_dropout_percent": 50,
"zero_frequency_noise_ratio": 0.02,
"enable_zero_terminal_snr": false,
"load_settings_every_epoch": false,
"min_snr_gamma": null,
"ema_decay_rate": null,
"ema_strength_target": null,
"ema_update_interval": null,
"ema_device": null,
"ema_sample_nonema_model": false,
"ema_sample_ema_model": false,
"ema_resume_model" : null,
"no_save_ckpt": true
}
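
For reference, a minimal sketch of how a JSON config like this could be loaded and selectively overridden in Python before being handed to a trainer. This is only an illustration: the load_config helper, the --config flag, and the override keys shown are assumptions made here, not part of any specific trainer's API.

import argparse
import json
from pathlib import Path

def load_config(path: str, **overrides) -> dict:
    """Load a JSON training config and apply keyword overrides (hypothetical helper)."""
    cfg = json.loads(Path(path).read_text())
    for key, value in overrides.items():
        # Reject keys that are not already in the config to catch typos early.
        if key not in cfg:
            raise KeyError(f"unknown config key: {key}")
        cfg[key] = value
    return cfg

if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", default="trainSD21.json")
    args = parser.parse_args()

    # Example override: reduce batch size and disable AMP for a smaller GPU.
    cfg = load_config(args.config, batch_size=2, amp=False)
    print(json.dumps(cfg, indent=2))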