---
license: other
license_name: fair-ai-public-license-1.0-sd
license_link: https://freedevproject.org/faipl-1.0-sd/
datasets:
- pls2000/aiart_channel_nai3_geachu
base_model:
- OnomaAIResearch/Illustrious-xl-early-release-v0
tags:
- lora
---

# LoRA Training (`arcain_2411.safetensors`)
LoRA trained on Illustrious-xl v0.1; it can also be applied to other ILXL-based models such as NoobAI-XL.
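As a quick sanity check that the weights load, here is a minimal sketch using the diffusers library; the base-model path, device, and prompt are placeholder assumptions, not part of this repo.

```python
# Minimal loading sketch (assumes diffusers with PEFT support installed).
import torch
from diffusers import StableDiffusionXLPipeline

# Placeholder path: any ILXL-based SDXL checkpoint (e.g. NoobAI-XL) should also work.
pipe = StableDiffusionXLPipeline.from_single_file(
    "Illustrious-XL-v0.1.safetensors", torch_dtype=torch.float16
).to("cuda")

# Load this repo's LoRA weights into the pipeline.
pipe.load_lora_weights(".", weight_name="arcain_2411.safetensors")

image = pipe("1girl, smile", num_inference_steps=28).images[0]  # placeholder prompt
image.save("sample.png")
```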

- Tool: kohya-ss/sd-scripts
- GPUs: 4x RTX 3060
- Dataset: pls2000/aiart_channel_nai3_geachu, plus additional Blue Archive data collected up to 2024-11-14
- Time taken: 50.5 hours (wall time)

#### lora_arcain.sh
```bash
# Multi-GPU launch via accelerate; NCCL P2P/IB are disabled because consumer
# RTX 3060 cards have neither NVLink peer-to-peer nor InfiniBand.
NCCL_P2P_DISABLE=1 NCCL_IB_DISABLE=1 accelerate launch --num_cpu_threads_per_process 4 sdxl_train_network.py \
  --network_train_unet_only \
  --network_module="networks.lora" --network_dim 128 --network_alpha 128 \
  --pretrained_model_name_or_path="/ai/data/sd/models/Stable-diffusion/SDXL/Illustrious-XL-v0.1.safetensors" \
  --dataset_config="arcain.lora.toml" \
  --output_dir="results/lora" --output_name="arcain-`date +%y%m`" \
  --save_model_as="safetensors" \
  --train_batch_size 2 --gradient_accumulation_steps 64 \
  --learning_rate=1e-5 --optimizer_type="Lion8bit" \
  --lr_scheduler="constant_with_warmup" --lr_warmup_steps 100 --optimizer_args "weight_decay=0.01" "betas=0.9,0.95" --min_snr_gamma 5 \
  --sdpa \
  --no_half_vae \
  --cache_latents --cache_latents_to_disk \
  --gradient_checkpointing \
  --full_bf16 --mixed_precision="bf16" --save_precision="fp16" \
  --ddp_timeout=10000000 \
  --max_train_epochs 8 --save_every_n_epochs 1 \
  --log_with wandb --log_tracker_name kohya-ss --wandb_run_name "arcain_`date +%y%m%d-%H%M`" --logging_dir wandb
```
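Note the resulting effective batch size: with `--train_batch_size 2` per GPU, 4 GPUs, and `--gradient_accumulation_steps 64`, each optimizer update sees 2 × 4 × 64 = 512 images.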

#### arcain.lora.toml
```toml
[general]
shuffle_caption = true           # shuffle tag order every epoch
caption_tag_dropout_rate = 0.2   # drop each tag with 20% probability
keep_tokens_separator = "|||"    # tags before "|||" are never shuffled or dropped
caption_extension = ".txt"

[[datasets]]
enable_bucket = true             # aspect-ratio bucketing
min_bucket_reso = 512
max_bucket_reso = 4096
resolution = 1024

[[datasets.subsets]]
image_dir = "/mnt/wd8tb/train/to_train"
num_repeats = 1
```
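To make the caption handling concrete, a hypothetical `.txt` caption (tag names invented for illustration) could look like the line below: everything before `|||` stays verbatim at the front, while the remaining tags are reshuffled each epoch and each dropped with probability 0.2.

```
1girl, arcain ||| blue archive, school uniform, smile, outdoors, looking at viewer
```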