feat: upload nbx-r2 lora model
nbx-r2_config/config_file.toml
CHANGED
@@ -29,14 +29,14 @@ vae_batch_size = 4
 
 [training_arguments]
 output_dir = "/content/LoRA/output"
-output_name = "nbx-r2"
+output_name = "nbx-r2-v2"
 save_precision = "fp16"
 save_every_n_epochs = 1
 train_batch_size = 2
 max_token_length = 225
 mem_eff_attn = false
 xformers = true
-max_train_epochs =
+max_train_epochs = 20
 max_data_loader_n_workers = 8
 persistent_data_loader_workers = true
 seed = 42
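
To sanity-check the committed values, here is a minimal sketch that loads the updated config and prints the [training_arguments] table. It assumes Python 3.11+ (for the standard-library tomllib) and the repository-relative path shown above; the training script that actually consumes this file (kohya-ss sd-scripts style arguments) is not part of this commit.

# Minimal sketch: read the updated TOML and inspect [training_arguments].
# Assumes Python 3.11+ (tomllib is stdlib) and the path shown in the diff.
import tomllib
from pathlib import Path

config_path = Path("nbx-r2_config/config_file.toml")

with config_path.open("rb") as f:
    config = tomllib.load(f)

training_args = config["training_arguments"]

# Values introduced by this commit.
assert training_args["output_name"] == "nbx-r2-v2"
assert training_args["max_train_epochs"] == 20

for key, value in training_args.items():
    print(f"{key} = {value!r}")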