import torch
import gradio as gr
from transformers import AutoTokenizer, BertForSequenceClassification

# Load the pretrained ICD-10 code prediction model and its tokenizer.
tokenizer = AutoTokenizer.from_pretrained("AkshatSurolia/ICD-10-Code-Prediction")
model = BertForSequenceClassification.from_pretrained("AkshatSurolia/ICD-10-Code-Prediction")
config = model.config


def prompt(text):
    """Return the top-5 predicted ICD-10 code labels for the input text."""
    encoded_input = tokenizer(text, return_tensors="pt")
    with torch.no_grad():  # inference only; no gradients needed
        output = model(**encoded_input)
    # Sort the class logits in descending order and keep the five highest-scoring ids.
    results = output.logits.cpu().numpy()[0].argsort()[::-1][:5]
    return [config.id2label[idx] for idx in results]


demo = gr.Interface(fn=prompt, inputs="text", outputs="text")
demo.launch(debug=True)
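
# Optional sanity check (the example text below is hypothetical, not part of the
# original app). Call prompt() directly, e.g. before launch(), since launch() blocks:
#   print(prompt("Patient admitted with acute appendicitis and fever."))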