# Source: Hugging Face file viewer (user samyak152002, commit bc9b616,
# message "Update script", 762 bytes). The lines above were page chrome
# captured by the scrape; kept here as a comment so the file stays valid Python.
import torch

from transformers import (
    DistilBertForSequenceClassification,
    DistilBertModel,
    DistilBertTokenizer,
)
# Load the tokenizer and model.
# NOTE: from_pretrained() expects a model ID or a *directory* containing the
# config/weights/tokenizer files — not an individual file name such as
# "tokenizer_config.json" or "pytorch_model.bin" (the original calls fail).
MODEL_DIR = "."  # directory holding config.json, pytorch_model.bin, tokenizer files
tokenizer = DistilBertTokenizer.from_pretrained(MODEL_DIR)
# Use the sequence-classification variant so model(**inputs) exposes `.logits`,
# which predict() below reads; the bare DistilBertModel has no logits output.
model = DistilBertForSequenceClassification.from_pretrained(MODEL_DIR)
# Define the inference function
def predict(text):
    """Return class probabilities for *text* as a flat list of floats.

    The input is tokenized (padded/truncated to the model's max length),
    run through the model with gradient tracking disabled, and the logits
    are softmaxed over the class dimension.

    NOTE(review): this reads `outputs.logits`, which exists only on models
    with a classification head — confirm the loaded checkpoint/class
    provides one.
    """
    encoded = tokenizer(
        text,
        padding="max_length",
        truncation=True,
        return_tensors="pt",
    )
    # Inference only: no autograd bookkeeping needed.
    with torch.no_grad():
        raw_logits = model(**encoded).logits
    # Softmax over classes, drop the batch dim, convert to plain Python floats.
    return torch.softmax(raw_logits, dim=1).squeeze().tolist()
# Example usage — guarded so importing this module does not trigger inference.
if __name__ == "__main__":
    text = "This is a sample input."
    probabilities = predict(text)
    print(probabilities)