# LoRA training configuration. Key layout (subsets / general_args / network_args /
# optimizer_args / saving_args / bucket_args / noise_args) matches the Kohya
# sd-scripts ecosystem — presumably consumed by LoRA Easy Training Scripts;
# verify key names against the trainer's schema before renaming anything.
#
# NOTE(review): the original file had every header and key on a single line,
# which is invalid TOML (one key/value pair per line is required). Reformatted
# and reordered so each parent table sits directly above its sub-tables.
# All keys and values are unchanged.

# One training image folder. Folder name "10_ohw mokopekko" encodes the
# repeat count and trigger word in the Kohya naming convention — TODO confirm.
[[subsets]]
num_repeats = 10
caption_extension = ".txt"
shuffle_caption = false
flip_aug = false
is_reg = false
# Literal string: same value as the original "...\\10_ohw mokopekko" basic
# string (mixed forward/backslash separators, fine on Windows), no escaping.
image_dir = 'E:/Everything artificial intelligence/loradataset\10_ohw mokopekko'
keep_tokens = 0

# Intentionally empty: no sample generation or logging configured, but the
# sections are kept so the consumer finds them — TODO confirm they are optional.
[sample_args]

[logging_args]

[general_args.args]
pretrained_model_name_or_path = "E:/Everything artificial intelligence/stable-diffusion-webui/models/Stable-diffusion/animefull-final-pruned-fp16.safetensors"
mixed_precision = "fp16"
seed = 23
max_data_loader_n_workers = 1
persistent_data_loader_workers = true
max_token_length = 225
prior_loss_weight = 1.0
# Empty string: no external VAE override — TODO confirm the trainer treats "" as unset.
vae = ""
max_train_epochs = 30
clip_skip = 2
xformers = true
cache_latents = true
gradient_checkpointing = true
gradient_accumulation_steps = 1

[general_args.dataset_args]
resolution = 896
batch_size = 8

[network_args.args]
network_dim = 4
network_alpha = 1.0
min_timestep = 0
max_timestep = 1000
network_dropout = 0.5

# Extra per-module network arguments (conv layers) for this network.
[network_args.args.network_args]
conv_dim = 16
conv_alpha = 8.0
module_dropout = 0.25

[optimizer_args.args]
optimizer_type = "AdamW8bit"
lr_scheduler = "cosine"
learning_rate = 0.0001
max_grad_norm = 1.0
# Custom scheduler class path; overrides lr_scheduler above — TODO confirm precedence.
lr_scheduler_type = "LoraEasyCustomOptimizer.CustomOptimizers.CosineAnnealingWarmupRestarts"
lr_scheduler_num_cycles = 4
unet_lr = 0.0005
warmup_ratio = 0.1
min_snr_gamma = 8
scale_weight_norms = 5.0

# Arguments forwarded to the custom LR scheduler.
[optimizer_args.args.lr_scheduler_args]
min_lr = 1e-6
gamma = 0.85

# Arguments forwarded to the optimizer. Kept as strings — the trainer
# presumably parses "0.1" and "0.9,0.99" itself; do not convert to
# native float/array without confirming the consumer accepts them.
[optimizer_args.args.optimizer_args]
weight_decay = "0.1"
betas = "0.9,0.99"

[saving_args.args]
output_dir = "E:/Everything artificial intelligence/stable-diffusion-webui/models/Lora"
save_precision = "fp16"
save_model_as = "safetensors"
output_name = "mokopekkotest3"
tag_occurrence = true
save_toml = true
save_every_n_epochs = 2

# Aspect-ratio bucketing; resolutions in pixels, stepped by 64.
[bucket_args.dataset_args]
enable_bucket = true
min_bucket_reso = 256
max_bucket_reso = 1024
bucket_reso_steps = 64

[noise_args.args]
multires_noise_iterations = 6
multires_noise_discount = 0.3