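# LoRA training configuration (TOML layout used by LoRA Easy Training Scripts, kohya-ss sd-scripts backend)
# Training image subset: repeats per image, caption files, and augmentation flags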
[[subsets]]
num_repeats = 15
caption_extension = ".txt"
shuffle_caption = true
flip_aug = false
is_reg = false
image_dir = "E:/Everything artificial intelligence/loradataset/25_ohwx sakiika0513"
keep_tokens = 0
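# Noise, sample-preview, and logging sections are left empty (defaults)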
[noise_args]
[sample_args]
[logging_args]
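# Core training arguments: base model, mixed precision, seed, data loading, and step count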
[general_args.args]
pretrained_model_name_or_path = "E:/Everything artificial intelligence/stable-diffusion-webui/models/Stable-diffusion/animefull-final-pruned-fp16.safetensors"
mixed_precision = "fp16"
seed = 23
max_data_loader_n_workers = 1
persistent_data_loader_workers = true
max_token_length = 225
prior_loss_weight = 1.0
clip_skip = 2
xformers = true
cache_latents = true
max_train_steps = 3500
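# Base training resolution and batch size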
[general_args.dataset_args]
resolution = 768
batch_size = 1
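# LoRA network size: dimension (rank) 32 with alpha 16, trained over the full timestep range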
[network_args.args]
network_dim = 32
network_alpha = 16.0
min_timestep = 0
max_timestep = 1000
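# Optimizer and learning-rate schedule; lr_scheduler_type selects a custom
# cosine-annealing-with-warm-restarts scheduler in place of the plain "cosine" setting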
[optimizer_args.args]
optimizer_type = "AdamW8bit"
lr_scheduler = "cosine"
learning_rate = 0.0001
max_grad_norm = 1.0
lr_scheduler_type = "LoraEasyCustomOptimizer.CustomOptimizers.CosineAnnealingWarmupRestarts"
lr_scheduler_num_cycles = 1
warmup_ratio = 0.05
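# Output directory, save precision and format, plus a saved copy of this TOML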
[saving_args.args]
output_dir = "E:/Everything artificial intelligence/stable-diffusion-webui/models/Lora/sakiika0513"
save_precision = "fp16"
save_model_as = "safetensors"
output_name = "sakiika0513-test2"
save_toml = true
save_toml_location = "E:/Everything artificial intelligence/stable-diffusion-webui/models/Lora/sakiika0513"
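# Aspect-ratio bucketing: resolution range and bucket step size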
[bucket_args.dataset_args]
enable_bucket = true
min_bucket_reso = 256
max_bucket_reso = 1024
bucket_reso_steps = 64
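# Arguments forwarded to the custom scheduler: minimum LR floor and gamma (decay applied at each restart)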
[optimizer_args.args.lr_scheduler_args]
min_lr = 1e-6
gamma = 0.9
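# Arguments forwarded to the AdamW8bit optimizer (weight decay and betas), stored as strings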
[optimizer_args.args.optimizer_args]
weight_decay = "0.1"
betas = "0.9,0.99"