from typing import Dict, List, Any
from PIL import Image
import os
import json
import numpy as np
import torch
from fastai.learner import load_learner, Metric


class OrdinalRegressionMetric(Metric):
    """Normalized ordinal-distance metric: ratio of summed target ranges to summed absolute errors."""

    def __init__(self):
        super().__init__()
        self.total = 0
        self.count = 0

    def accumulate(self, learn):
        # Get predictions and targets from the learner
        preds, targs = learn.pred, learn.y

        # Convert predictions and targets to numeric class indices
        preds_numeric = torch.argmax(preds, dim=1)
        targs_numeric = targs

        # Sum of absolute differences over the batch
        # (the sqrt of a squared difference is the absolute difference)
        squared_diff = torch.sum(torch.sqrt((preds_numeric - targs_numeric).float() ** 2))
        # Normalize by the maximum possible difference within the batch targets
        max_diff = torch.sqrt((torch.max(targs_numeric) - torch.min(targs_numeric)).float() ** 2)

        # Update the running totals
        self.total += squared_diff
        self.count += max_diff

    @property
    def value(self):
        if self.count == 0:
            return 0.0  # or handle this case appropriately
        # Invert the normalized error so that higher values mean better predictions
        metric_value = 1 / (self.total / self.count)
        return metric_value


class PreTrainedPipeline:
    def __init__(self, path=""):
        # Preload everything needed at inference (model, metric, label map).
        # This function is only called once, so do all the heavy processing I/O here.
        self.metric = OrdinalRegressionMetric()
        self.model = load_learner(os.path.join(path, "textfile3-2.pk1"))
        with open(os.path.join(path, "config.json")) as config_file:
            config = json.load(config_file)
        self.id2label = config["id2label"]

    def __call__(self, inputs: "Image.Image") -> List[Dict[str, Any]]:
        """
        Args:
            inputs (:obj:`PIL.Image`):
                The raw image as a PIL Image. No transformation has been applied
                to the input, so make all necessary transformations here.
        Return:
            A :obj:`list` of dicts such as {"label": "XXX", "score": 0.82},
            preferably in decreasing `score` order.
        """
        # fastai expects a numpy array, not a PIL Image.
        _, _, preds = self.model.predict(np.array(inputs))
        preds = preds.tolist()
        # config.json maps class indices ("0", "1", ...) to human-readable labels.
        labels = [
            {"label": str(self.id2label[str(i)]), "score": preds[i]}
            for i in range(len(preds))
        ]
        # Return in decreasing score order, as the docstring prefers.
        labels.sort(key=lambda item: item["score"], reverse=True)
        return labels
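

# --- Usage sketch (illustrative, not part of the served pipeline) -----------
# A minimal local smoke test, assuming the exported learner "textfile3-2.pk1"
# and "config.json" live in the current directory; "sample.jpg" is a
# hypothetical placeholder for any test image.
if __name__ == "__main__":
    pipeline = PreTrainedPipeline(path=".")
    image = Image.open("sample.jpg")  # hypothetical test image path
    for item in pipeline(image):
        print(f'{item["label"]}: {item["score"]:.4f}')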