# Source: Hugging Face Hub file page metadata (kept as a comment so the file parses):
# pushpikaLiyanagama — "Update app.py" — commit 7a2af10 (verified) — raw / history / blame — 2.12 kB
import numpy as np
import joblib
import json
from typing import List, Dict
# Load the fitted scaler and the per-target SVM models from disk once at
# import time, so every prediction reuses the same artifacts.
scaler = joblib.load('scaler.joblib')

# One independent SVM per target dimension; file names follow the
# 'svm_model_<target>.joblib' convention.
_TARGETS = ("processing", "perception", "input", "understanding")
models = {name: joblib.load(f'svm_model_{name}.joblib') for name in _TARGETS}
# Define the prediction function
def predict(features: List[float]) -> Dict[str, float]:
    """
    Predict outcomes for all target variables based on input features.

    Args:
        features (List[float]): A list of 12 numeric features in the correct order.

    Returns:
        Dict[str, float]: A dictionary mapping each target variable name to
        its predicted value as a native Python float.
    """
    # scikit-learn estimators expect a 2-D array of shape (n_samples,
    # n_features), so reshape the flat feature list into one row.
    input_array = np.array(features, dtype=float).reshape(1, -1)
    # Apply the same scaling that was fitted at training time.
    scaled_input = scaler.transform(input_array)
    # Run each per-target model on the scaled sample.
    predictions = {}
    for target, model in models.items():
        # model.predict() returns a NumPy array; take its single element and
        # cast to a native float. Without the cast the value is a NumPy
        # scalar, which contradicts the declared Dict[str, float] return type
        # and makes json.dumps() on the result raise TypeError.
        predictions[target] = float(model.predict(scaled_input)[0])
    return predictions
# Define a callable class for Hugging Face
class Model:
    """Callable wrapper exposing batch prediction for Hugging Face serving."""

    def __init__(self):
        # Hold references to the module-level artifacts so the instance is
        # self-contained.
        self.scaler = scaler
        self.models = models

    def __call__(self, inputs: List[List[float]]) -> List[Dict[str, float]]:
        """
        Hugging Face expects the model to handle a batch of inputs.

        Args:
            inputs (List[List[float]]): A batch of feature vectors.

        Returns:
            List[Dict[str, float]]: A list of predictions for each input.
        """
        # Delegate each feature vector to the module-level predict().
        return [predict(feature_vector) for feature_vector in inputs]
# Instantiate the model
model = Model()

# Hugging Face Inference API expects `model` to be callable
if __name__ == "__main__":
    # Local smoke test: push one example batch through the full pipeline
    # and pretty-print the predictions as JSON.
    sample_batch = [
        [0.5, 1.0, 0.0, 1.0, 0.5, 0.0, 1.0, 0.5, 1.0, 0.0, 0.0, 0.5]  # Example input
    ]
    print(json.dumps(model(sample_batch), indent=4))