---
library_name: transformers
tags: []
---

# INFERENCE

```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import time
import torch

# Pick GPU when available, otherwise fall back to CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

model = AutoModelForSequenceClassification.from_pretrained(
    "Mr-Vicky-01/helping_agent_classification"
).to(device)
tokenizer = AutoTokenizer.from_pretrained("Mr-Vicky-01/helping_agent_classification")

start = time.time()
question = "show my critical vulns"
print("Question: ", question)

# Move the inputs to the same device as the model.
# (Was hard-coded `.to("cuda")`, which crashes on CPU-only hosts and
# contradicts the `device` fallback computed above.)
inputs = tokenizer(question, return_tensors="pt").to(device)

with torch.no_grad():
    logits = model(**inputs).logits

predicted_class_id = logits.argmax().item()
print("predicted_class: ", model.config.id2label[predicted_class_id])
print("Time_taken: ", time.time() - start)
```