end
- checkpoints/checkpoint_epoch=399.ckpt +3 -0
- checkpoints/checkpoint_epoch=499.ckpt +3 -0
- checkpoints/checkpoint_epoch=599.ckpt +3 -0
- checkpoints/checkpoint_epoch=699.ckpt +3 -0
- checkpoints/checkpoint_epoch=799.ckpt +3 -0
- checkpoints/checkpoint_epoch=899.ckpt +3 -0
- checkpoints/last.ckpt +2 -2
- tensorboard/version_0/events.out.tfevents.1729366325.0959f84f9bd7.22.0 +3 -0
- tensorboard/version_0/hparams.yaml +115 -0
- train.log +40 -0
checkpoints/checkpoint_epoch=399.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1151cd7c9ce36258a5e8f125471dd33f32ffddcee137bf11f14b052cd1089fb6
+size 218840327
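The three lines above are a Git LFS pointer, not the checkpoint itself: the ~219 MB file lives in LFS storage and is addressed by the SHA-256 oid in the pointer. A minimal sketch (not part of this commit) of verifying a pulled checkpoint against that oid:

import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    # Hash the file in 1 MiB chunks so large checkpoints are not read into memory at once.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

# oid taken from the pointer file above.
expected_oid = "1151cd7c9ce36258a5e8f125471dd33f32ffddcee137bf11f14b052cd1089fb6"
assert sha256_of("checkpoints/checkpoint_epoch=399.ckpt") == expected_oid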
checkpoints/checkpoint_epoch=499.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:802947c447968f893cfafb4c3361c8641c671f665225ef7fc867cf18b1f2c16f
+size 218840710
checkpoints/checkpoint_epoch=599.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fed2174b0a971098edb110d40815a0013e9c29ab119f7d3cde9cdfb8853d1a61
+size 218841029
checkpoints/checkpoint_epoch=699.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:17a076f2cc33d47a6707c19aba5587c693b878ebb0e0e3d17988f083f9e6d8e2
+size 218841412
checkpoints/checkpoint_epoch=799.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:69d6ac3da37a1e63557ad4dd50e85fe9132d15a2dbe3df705c10f26b0403f488
+size 218841795
checkpoints/checkpoint_epoch=899.ckpt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b68cd1ce3255316a71119ba608db65cce3a3dba1d401ec000ad685025c962720
+size 218842178
checkpoints/last.ckpt
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:adb0b628157cf3e905bbe39cee8de969d52a024fa014fe98e83a5e35a3f06acf
+size 218842178
tensorboard/version_0/events.out.tfevents.1729366325.0959f84f9bd7.22.0
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d476c573f8ba1e26d791499bd0a9472e47c9fe2eab99ad52d0e43ca8d327f369
+size 518352307
tensorboard/version_0/hparams.yaml
ADDED
@@ -0,0 +1,115 @@
+model:
+  _target_: matcha.models.matcha_tts.MatchaTTS
+  n_vocab: 178
+  n_spks: ${data.n_spks}
+  spk_emb_dim: 64
+  n_feats: 80
+  data_statistics: ${data.data_statistics}
+  out_size: null
+  prior_loss: true
+  use_precomputed_durations: ${data.load_durations}
+  encoder:
+    encoder_type: RoPE Encoder
+    encoder_params:
+      n_feats: ${model.n_feats}
+      n_channels: 192
+      filter_channels: 768
+      filter_channels_dp: 256
+      n_heads: 2
+      n_layers: 6
+      kernel_size: 3
+      p_dropout: 0.1
+      spk_emb_dim: 64
+      n_spks: 1
+      prenet: true
+    duration_predictor_params:
+      filter_channels_dp: ${model.encoder.encoder_params.filter_channels_dp}
+      kernel_size: 3
+      p_dropout: ${model.encoder.encoder_params.p_dropout}
+  decoder:
+    channels:
+    - 256
+    - 256
+    dropout: 0.05
+    attention_head_dim: 64
+    n_blocks: 1
+    num_mid_blocks: 2
+    num_heads: 2
+    act_fn: snakebeta
+  cfm:
+    name: CFM
+    solver: euler
+    sigma_min: 0.0001
+  optimizer:
+    _target_: torch.optim.Adam
+    _partial_: true
+    lr: 0.0001
+    weight_decay: 0.0
+model/params/total: 18204193
+model/params/trainable: 18204193
+model/params/non_trainable: 0
+data:
+  _target_: matcha.data.text_mel_datamodule.TextMelDataModule
+  name: ljspeech
+  train_filelist_path: data/LJSpeech-1.1/train.txt
+  valid_filelist_path: data/LJSpeech-1.1/val.txt
+  batch_size: 32
+  num_workers: 20
+  pin_memory: true
+  cleaners:
+  - english_cleaners2
+  add_blank: true
+  n_spks: 1
+  n_fft: 1024
+  n_feats: 80
+  sample_rate: 22050
+  hop_length: 256
+  win_length: 1024
+  f_min: 0
+  f_max: 8000
+  data_statistics:
+    mel_mean: -5.536622
+    mel_std: 2.116101
+  seed: ${seed}
+  load_durations: false
+trainer:
+  _target_: lightning.pytorch.trainer.Trainer
+  default_root_dir: ${paths.output_dir}
+  max_epochs: -1
+  accelerator: gpu
+  devices:
+  - 0
+  precision: 16-mixed
+  check_val_every_n_epoch: 1
+  deterministic: false
+  gradient_clip_val: 5.0
+callbacks:
+  model_checkpoint:
+    _target_: lightning.pytorch.callbacks.ModelCheckpoint
+    dirpath: ${paths.output_dir}/checkpoints
+    filename: checkpoint_{epoch:03d}
+    monitor: epoch
+    verbose: false
+    save_last: true
+    save_top_k: 10
+    mode: max
+    auto_insert_metric_name: true
+    save_weights_only: false
+    every_n_train_steps: null
+    train_time_interval: null
+    every_n_epochs: 100
+    save_on_train_epoch_end: null
+  model_summary:
+    _target_: lightning.pytorch.callbacks.RichModelSummary
+    max_depth: 3
+  rich_progress_bar:
+    _target_: lightning.pytorch.callbacks.RichProgressBar
+extras:
+  ignore_warnings: false
+  enforce_tags: true
+  print_config: true
+task_name: train
+tags:
+- ljspeech
+ckpt_path: null
+seed: 1234
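The hparams.yaml above is the Hydra/Lightning configuration logged for this run: Matcha-TTS on LJSpeech, Adam at lr 1e-4, batch size 32, 16-bit mixed precision, checkpoints every 100 epochs with save_top_k 10. A minimal sketch, assuming the repository layout shown in this diff, of reading it back with OmegaConf:

from omegaconf import OmegaConf

# Path as committed in this repo; adjust if the file lives elsewhere.
cfg = OmegaConf.load("tensorboard/version_0/hparams.yaml")

# Plain values can be read directly; interpolations such as ${paths.output_dir}
# only resolve inside the full Hydra config, so we avoid resolving here.
print(cfg.model["_target_"])                           # matcha.models.matcha_tts.MatchaTTS
print(cfg.data.batch_size)                             # 32
print(cfg.model.encoder.encoder_params.n_layers)       # 6
print(cfg.callbacks.model_checkpoint.every_n_epochs)   # 100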
train.log
CHANGED
@@ -11,3 +11,43 @@
 [2024-10-19 19:32:05,026][__main__][INFO] - Instantiating trainer <lightning.pytorch.trainer.Trainer>
 [2024-10-19 19:32:05,116][__main__][INFO] - Logging hyperparameters!
 [2024-10-19 19:32:05,170][__main__][INFO] - Starting training!
+[2024-10-22 01:31:05,786][matcha.utils.utils][ERROR] -
+Traceback (most recent call last):
+  File "/workspace/Matcha-TTS/matcha/utils/utils.py", line 77, in wrap
+    metric_dict, object_dict = task_func(cfg=cfg)
+  File "/workspace/Matcha-TTS/matcha/train.py", line 79, in train
+    trainer.fit(model=model, datamodule=datamodule, ckpt_path=cfg.get("ckpt_path"))
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/trainer/trainer.py", line 538, in fit
+    call._call_and_handle_interrupt(
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/trainer/call.py", line 47, in _call_and_handle_interrupt
+    return trainer_fn(*args, **kwargs)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/trainer/trainer.py", line 574, in _fit_impl
+    self._run(model, ckpt_path=ckpt_path)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/trainer/trainer.py", line 981, in _run
+    results = self._run_stage()
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/trainer/trainer.py", line 1025, in _run_stage
+    self.fit_loop.run()
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/loops/fit_loop.py", line 206, in run
+    self.on_advance_end()
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/loops/fit_loop.py", line 378, in on_advance_end
+    call._call_callback_hooks(trainer, "on_train_epoch_end", monitoring_callbacks=True)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/trainer/call.py", line 218, in _call_callback_hooks
+    fn(trainer, trainer.lightning_module, *args, **kwargs)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/callbacks/model_checkpoint.py", line 326, in on_train_epoch_end
+    self._save_last_checkpoint(trainer, monitor_candidates)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/callbacks/model_checkpoint.py", line 696, in _save_last_checkpoint
+    self._save_checkpoint(trainer, filepath)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/callbacks/model_checkpoint.py", line 390, in _save_checkpoint
+    trainer.save_checkpoint(filepath, self.save_weights_only)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/trainer/trainer.py", line 1365, in save_checkpoint
+    self.strategy.save_checkpoint(checkpoint, filepath, storage_options=storage_options)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/pytorch/strategies/strategy.py", line 490, in save_checkpoint
+    self.checkpoint_io.save_checkpoint(checkpoint, filepath, storage_options=storage_options)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/fabric/plugins/io/torch_io.py", line 58, in save_checkpoint
+    _atomic_save(checkpoint, path)
+  File "/opt/conda/lib/python3.10/site-packages/lightning/fabric/utilities/cloud_io.py", line 90, in _atomic_save
+    f.write(bytesbuffer.getvalue())
+  File "/opt/conda/lib/python3.10/site-packages/fsspec/implementations/local.py", line 373, in write
+    return self.f.write(*args, **kwargs)
+OSError: [Errno 28] No space left on device
+[2024-10-22 01:31:05,792][matcha.utils.utils][INFO] - Output dir: /workspace/Matcha-TTS/logs/train/ljspeech/runs/2024-10-19_19-32-03
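The traceback shows the run dying inside ModelCheckpoint while writing last.ckpt: the output volume filled up after accumulating ~219 MB checkpoints and a ~518 MB TensorBoard event file. A minimal sketch (an assumption, not code from this repository) of a pre-flight free-space check before launching training:

import shutil

def check_free_space(path: str, required_gb: float) -> None:
    # Fail fast if the output volume cannot hold the checkpoints this run will write.
    free_gb = shutil.disk_usage(path).free / 1e9
    if free_gb < required_gb:
        raise RuntimeError(
            f"Only {free_gb:.1f} GB free at {path}; need at least {required_gb} GB."
        )

# With save_top_k: 10 checkpoints at ~0.22 GB each plus last.ckpt and the
# TensorBoard events file, a few GB of headroom is the practical minimum.
check_free_space("/workspace/Matcha-TTS/logs", required_gb=5.0)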