from transformers import Pipeline
import torch
import joblib


class CustomPipeline(Pipeline):
    def __init__(self, model, tokenizer, device=-1, **kwargs):
        super().__init__(model=model, tokenizer=tokenizer, device=device, **kwargs)
        # Mapping from predicted class indices to human-readable labels.
        self.label_mapping = joblib.load("label_mapping.joblib")

    def _sanitize_parameters(self, **kwargs):
        # No extra keyword arguments are routed to preprocess/_forward/postprocess.
        return {}, {}, {}

    def preprocess(self, inputs):
        # Tokenize raw text into model-ready tensors.
        return self.tokenizer(
            inputs,
            return_tensors="pt",
            truncation=True,
            padding=True,
            max_length=512,
        )

    def _forward(self, model_inputs):
        # Run inference without tracking gradients.
        with torch.no_grad():
            outputs = self.model(**model_inputs)
        return outputs

    def postprocess(self, model_outputs):
        logits = model_outputs.logits
        # Pick the highest-scoring class and report its softmax probability as the confidence.
        predicted_class = torch.argmax(logits, dim=1).item()
        predicted_label = self.label_mapping[predicted_class]
        confidence = torch.softmax(logits, dim=1)[0][predicted_class].item()
        return {"label": predicted_label, "score": confidence}
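

# A minimal usage sketch, not part of the original class: it assumes a fine-tuned
# sequence-classification checkpoint is available locally. The checkpoint path
# "path/to/finetuned-model" and the example sentence are placeholders, and
# "label_mapping.joblib" must exist in the working directory for __init__ to succeed.
if __name__ == "__main__":
    from transformers import AutoModelForSequenceClassification, AutoTokenizer

    model = AutoModelForSequenceClassification.from_pretrained("path/to/finetuned-model")
    tokenizer = AutoTokenizer.from_pretrained("path/to/finetuned-model")

    pipe = CustomPipeline(model=model, tokenizer=tokenizer)
    result = pipe("This product exceeded my expectations.")
    print(result)  # e.g. {"label": <mapped label>, "score": <softmax probability>}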