config:
  name: appelsiensam_snowsculpture_v1
  process:
    - datasets:
        - cache_latents_to_disk: true
          caption_dropout_rate: 0.2
          caption_ext: txt
          folder_path: /root/lorahub/appelsiensam_snowsculpture_v1/dataset
          resolution:
            - 512
            - 768
            - 1024
          shuffle_tokens: false
          token_dropout_rate: 0.01
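      # Single-GPU run on cuda:0. The model block below points at FLUX.1-dev
      # and enables quantization with an 8-bit text encoder to reduce VRAM use;
      # key names follow the ai-toolkit-style schema this config resembles.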
      device: cuda:0
      model:
        is_flux: true
        name_or_path: black-forest-labs/FLUX.1-dev
        quantize: true
        text_encoder_bits: 8
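      # LoRA adapter: linear/linear_alpha set rank and alpha to 42, and
      # transformer_only restricts the adapter to the transformer, leaving the
      # text encoders untouched (assuming ai-toolkit network semantics).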
      network:
        linear: 42
        linear_alpha: 42
        transformer_only: true
        type: lora
      performance_log_every: 500
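      # Validation samples every 500 steps; '[trigger]' in each prompt is
      # typically substituted with the trigger_word defined further down.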
      sample:
        guidance_scale: 3.5
        height: 1024
        neg: ''
        prompts:
          - '[trigger] shrek'
          - '[trigger] 1995 Porsche 928 GTS'
          - '[trigger] jezus christ.'
        sample_every: 500
        sample_steps: 25
        sampler: flowmatch
        seed: 42
        walk_seed: true
        width: 1024
      save:
        dtype: float16
        max_step_saves_to_keep: 3
        save_every: 500
        save_format: diffusers
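      # Training hyperparameters: 3000 steps at batch size 1 in bf16, 8-bit
      # AdamW at lr 2e-4, flow-matching noise schedule, EMA decay 0.99;
      # only the LoRA weights train, the text encoder stays frozen.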
      train:
        batch_size: 1
        dtype: bf16
        ema_config:
          ema_decay: 0.99
          use_ema: true
        gradient_accumulation_steps: 1
        gradient_checkpointing: true
        linear_timesteps: true
        loss_type: mse
        lr: 0.0002
        noise_scheduler: flowmatch
        optimizer: adamw8bit
        reg_weight: 1.0
        steps: 3000
        target_noise_multiplier: 1.0
        train_text_encoder: false
        train_unet: true
      training_folder: /root/lorahub
      trigger_word: SNWSCLPTR_PPLSNSM
      type: sd_trainer
job: extension
meta:
  description: ''
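# The layout appears to match the ostris/ai-toolkit "extension" job schema;
# such configs are usually launched with `python run.py <this-config>.yml`
# (the path here is an example, check the toolkit docs). Checkpoints land under
# /root/lorahub in diffusers format; prompt with the trigger word
# SNWSCLPTR_PPLSNSM at inference time.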