{
"os": "Linux-3.10.0-957.27.2.el7.x86_64-x86_64-with-glibc2.17",
"python": "3.9.16",
"heartbeatAt": "2023-05-09T08:52:12.559849",
"startedAt": "2023-05-09T08:52:11.473313",
"docker": null,
"cuda": null,
"args": [
"--model_name_or_path=openai/whisper-small",
"--dataset_name=mozilla-foundation/common_voice_11_0",
"--dataset_config_name=es",
"--language=spanish",
"--train_split_name=train+validation",
"--eval_split_name=test",
"--model_index_name=Whisper Small Spanish",
"--max_steps=5000",
"--output_dir=./",
"--per_device_train_batch_size=32",
"--gradient_accumulation_steps=2",
"--per_device_eval_batch_size=32",
"--logging_steps=25",
"--learning_rate=1e-5",
"--warmup_steps=500",
"--evaluation_strategy=steps",
"--eval_steps=1000",
"--save_strategy=steps",
"--save_steps=1000",
"--generation_max_length=225",
"--length_column_name=input_length",
"--max_duration_in_seconds=30",
"--text_column_name=sentence",
"--freeze_feature_encoder=False",
"--report_to=tensorboard",
"--report_to=wandb",
"--metric_for_best_model=wer",
"--greater_is_better=False",
"--load_best_model_at_end",
"--gradient_checkpointing",
"--fp16",
"--overwrite_output_dir",
"--do_train",
"--do_eval",
"--predict_with_generate",
"--do_normalize_eval",
"--streaming",
"--use_auth_token",
"--push_to_hub"
],
"state": "running",
"program": "/home/local/QCRI/dizham/kanari/whisper/whisper-small-es/run_speech_recognition_seq2seq_streaming.py",
"codePath": "run_speech_recognition_seq2seq_streaming.py",
"git": {
"remote": "https://huggingface.co/danielizham/whisper-small-es",
"commit": "18d64585144dfb2b42f36bb96efcf34f21a79aff"
},
"email": "daniel.izham@gmail.com",
"root": "/home/local/QCRI/dizham/kanari/whisper/whisper-small-es",
"host": "crimv3mgpu016",
"username": "dizham",
"executable": "/home/local/QCRI/dizham/miniconda3/envs/whisper/bin/python",
"cpu_count": 28,
"cpu_count_logical": 56,
"cpu_freq": {
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
"cpu_freq_per_core": [
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
},
{
"current": 2201.0,
"min": 1000.0,
"max": 2201.0
}
],
"disk": {
"total": 99.951171875,
"used": 15.493389129638672
},
"gpu": "Tesla V100-SXM2-32GB",
"gpu_count": 2,
"gpu_devices": [
{
"name": "Tesla V100-SXM2-32GB",
"memory_total": 34089730048
},
{
"name": "Tesla V100-SXM2-32GB",
"memory_total": 34089730048
}
],
"memory": {
"total": 251.55353164672852
}
}