# Load model directly
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

# assets_path = cached_assets_path(library_name="datasets", namespace="SQuAD", subfolder="download")
# something_path = assets_path / "config.json"  # Do anything you like in your assets folder!

tokenizer = AutoTokenizer.from_pretrained("pretrained_models/Bio_ClinicalBERT-finetuned-medicalcondition")
model = AutoModelForSequenceClassification.from_pretrained("pretrained_models/Bio_ClinicalBERT-finetuned-medicalcondition")

# Tokenize the input text and return PyTorch tensors
inputs = tokenizer("I love using transformers for natural language processing.", return_tensors="pt")

# Run the model in inference mode (no gradients needed)
with torch.no_grad():
    logits = model(**inputs).logits

# Parse the prediction: the predicted class is the index of the largest logit
predicted_class_id = logits.argmax().item()
print(f"Predicted class id: {predicted_class_id}")
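
# Optional follow-up (a minimal sketch): convert the logits to probabilities and map
# the predicted class id back to a human-readable label. This assumes the fine-tuned
# model's config carries an id2label mapping; if it was not set during fine-tuning,
# the labels fall back to generic names like "LABEL_0".
probabilities = torch.softmax(logits, dim=-1)
predicted_label = model.config.id2label[predicted_class_id]
print(f"Predicted label: {predicted_label}")
print(f"Class probabilities: {probabilities.squeeze().tolist()}")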