{
"architectures": [
"MMLM"
],
"codebook_size": 2048,
"lm_model_name": "voidful/Llama-3.2-8B-Whisper",
"model_type": "mmlm",
"num_heads": 8,
"queue_duration": 3600,
"queue_length": 86400000,
"sampling_rate": 24000,
"speaker_emb_dim": 192,
"step_duration": 0.08,
"step_size": 1920,
"torch_dtype": "bfloat16",
"transformers_version": "4.47.0"
}