import gradio as gr
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the English-to-SQL T5 model and its tokenizer.
# (AutoModelWithLMHead is deprecated/removed in recent transformers releases;
# AutoModelForSeq2SeqLM is the current auto class for T5-style models.)
tokenizer = AutoTokenizer.from_pretrained("dbernsohn/t5_wikisql_en2SQL")
model = AutoModelForSeq2SeqLM.from_pretrained("dbernsohn/t5_wikisql_en2SQL")


def greet(query):
    # Prepend the task prefix the model was trained with, then tokenize.
    input_text = f"translate English to Sql: {query} "
    features = tokenizer([input_text], return_tensors="pt")
    # Generate the SQL translation and decode it back to text.
    output = model.generate(
        input_ids=features["input_ids"],
        attention_mask=features["attention_mask"],
    )
    return tokenizer.decode(output[0])


iface = gr.Interface(
    fn=greet,
    inputs="text",
    outputs="text",
    examples=["what are the names of all the people in the USA"],
)
iface.launch()
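
# --- Optional quick check (a sketch, not part of the original app) ----------
# launch() blocks, so to try the model without the web UI you could comment it
# out (or run this in a separate session) and call greet() directly. The query
# below is an illustrative assumption, not taken from the original example set.
#
#     print(greet("how many people live in each city"))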