from typing import Any, Dict

import torch
from transformers import pipeline
class EndpointHandler:
    def __init__(self, path: str = ""):
        # The Inference Endpoints runtime passes the repository path; it is unused here
        # because the Whisper checkpoint is loaded directly from the Hub.
        # Run on the first GPU if one is available, otherwise fall back to the CPU.
        device = 0 if torch.cuda.is_available() else "cpu"
        self.pipe = pipeline(
            task="automatic-speech-recognition",
            model="openai/whisper-large",
            chunk_length_s=30,
            device=device,
        )
        # Force Whisper to transcribe in Dutch ("nl") instead of auto-detecting the language.
        self.pipe.model.config.forced_decoder_ids = self.pipe.tokenizer.get_decoder_prompt_ids(
            language="nl", task="transcribe"
        )

    def __call__(self, data: Dict[str, Any]) -> str:
        """
        data args:
            inputs (:obj:`bytes` or :obj:`str`): raw audio bytes, a file path, or a URL
        Return:
            A :obj:`str`: the transcription, serialized and returned
        """
        # Log the incoming request for debugging.
        print("request")
        print(data)
        # Extract the audio payload; fall back to the whole dict if no "inputs" key is present.
        inputs = data.pop("inputs", data)
        # Run chunked speech recognition and return only the transcribed text.
        text = self.pipe(inputs)["text"]
        return text
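

# Minimal local smoke test, included as a sketch only: it assumes a hypothetical
# audio file "sample.wav" next to this script and that ffmpeg is installed so the
# pipeline can decode raw bytes, mirroring how the endpoint receives request bodies.
if __name__ == "__main__":
    handler = EndpointHandler()
    with open("sample.wav", "rb") as f:  # "sample.wav" is a placeholder path
        audio_bytes = f.read()
    print(handler({"inputs": audio_bytes}))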