import pandas as pd
import gradio as gr
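# Log the installed Gradio version to the Space logs (handy when debugging runtime errors).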
print(gr.__version__)
import torch
import torchaudio
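# Disable TorchScript operator fusion (legacy CPU/GPU fuser, TensorExpr fuser, nvFuser);
# often done to avoid fusion-related errors when running scripted models on CPU-only hosts.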
torch._C._jit_override_can_fuse_on_cpu(False)
torch._C._jit_override_can_fuse_on_gpu(False)
torch._C._jit_set_texpr_fuser_enabled(False)
torch._C._jit_set_nvfuser_enabled(False)
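# Load the scripted audio preprocessing pipeline and the scripted QuartzNet 15x5 (English)
# ASR model, both expected as TorchScript files next to app.py.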
loader = torch.jit.load("audio_loader.pt")
model = torch.jit.load('QuartzNet15x5Base_En_1.pt').eval()
vocab = model.text_transform.vocab.itos
vocab[-1] = ''
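# Transcribe one recording: preprocess the audio file with the scripted loader, then decode with the model.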
def predict(path):
    audio = loader(path)
    return model.predict(audio)
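# Microphone-in, text-out demo. Note: gr.inputs.Audio is the legacy Gradio API;
# on current Gradio this would be gr.Audio(sources=['microphone'], type='filepath').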
gr.Interface(fn=predict, inputs=[gr.inputs.Audio(source='microphone', type='filepath')], outputs='text').launch(debug=True)