SunderAli17 committed
Commit fb33b84 · verified · 1 Parent(s): 67c1a11

Create train_previewer_lora.sh

Files changed (1)
  1. functions/train_previewer_lora.sh +24 -0
functions/train_previewer_lora.sh ADDED
@@ -0,0 +1,24 @@
+ # After DCP training, distill the Previewer with DCP in `train_previewer_lora.py`:
+ accelerate launch --num_processes <num_of_gpus> train_previewer_lora.py \
+ --output_dir <your/output/path> \
+ --train_data_dir <your/data/path> \
+ --logging_dir <your/logging/path> \
+ --pretrained_model_name_or_path <your/sdxl/path> \
+ --feature_extractor_path <your/dinov2/path> \
+ --pretrained_adapter_model_path <your/dcp/path> \
+ --losses_config_path config_files/losses.yaml \
+ --data_config_path config_files/IR_dataset.yaml \
+ --save_only_adapter \
+ --gradient_checkpointing \
+ --num_train_timesteps 1000 \
+ --num_ddim_timesteps 50 \
+ --lora_alpha 1 \
+ --mixed_precision fp16 \
+ --train_batch_size 32 \
+ --vae_encode_batch_size 16 \
+ --gradient_accumulation_steps 1 \
+ --learning_rate 1e-4 \
+ --lr_warmup_steps 1000 \
+ --lr_scheduler cosine \
+ --lr_num_cycles 1 \
+ --resume_from_checkpoint latest
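
For reference, a filled-in invocation might look like the sketch below. Every concrete value (the GPU count, the local paths, and the SDXL/DINOv2 checkpoint IDs) is an illustrative assumption, not part of the committed script:

# Illustrative example only: the GPU count, paths, and model IDs below are
# assumptions; substitute values that match your own environment.
accelerate launch --num_processes 8 train_previewer_lora.py \
    --output_dir ./experiments/previewer_lora \
    --train_data_dir ./data/IR_train \
    --logging_dir ./experiments/previewer_lora/logs \
    --pretrained_model_name_or_path stabilityai/stable-diffusion-xl-base-1.0 \
    --feature_extractor_path facebook/dinov2-large \
    --pretrained_adapter_model_path ./checkpoints/dcp \
    --losses_config_path config_files/losses.yaml \
    --data_config_path config_files/IR_dataset.yaml \
    --save_only_adapter \
    --gradient_checkpointing \
    --num_train_timesteps 1000 \
    --num_ddim_timesteps 50 \
    --lora_alpha 1 \
    --mixed_precision fp16 \
    --train_batch_size 32 \
    --vae_encode_batch_size 16 \
    --gradient_accumulation_steps 1 \
    --learning_rate 1e-4 \
    --lr_warmup_steps 1000 \
    --lr_scheduler cosine \
    --lr_num_cycles 1 \
    --resume_from_checkpoint latest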