---
license: apache-2.0
datasets:
- wikisql
language:
- en
pipeline_tag: text2text-generation
tags:
- nl2sql
---
## How to Use
```python
import torch
from transformers import AutoTokenizer, BartForConditionalGeneration

device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")

tokenizer = AutoTokenizer.from_pretrained("LarkAI/bart_large_nl2sql")
model = BartForConditionalGeneration.from_pretrained("LarkAI/bart_large_nl2sql").to(device)

text = "question: which club was in toronto 2003-06 table: Player,No.,Nationality,Position,Years in Toronto,School/Club Team"
inputs = tokenizer([text], max_length=1024, return_tensors="pt")
output_ids = model.generate(inputs["input_ids"].to(device), num_beams=4, max_length=128, min_length=8)
response_text = tokenizer.batch_decode(output_ids, skip_special_tokens=True, clean_up_tokenization_spaces=False)[0]
# Expected output: select School/Club Team from product where Years in Toronto = 2003-06
```
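The input follows the `question: ... table: ...` pattern shown above, with the table's column names joined by commas. The snippet below is a minimal sketch of a helper that assembles such a prompt; the `build_prompt` function is not part of this repository and is shown only for illustration.

```python
from typing import List

def build_prompt(question: str, columns: List[str]) -> str:
    # Reproduce the "question: <question> table: <col1>,<col2>,..." format
    # used in the usage example above (hypothetical helper, not an official API).
    return f"question: {question} table: {','.join(columns)}"

prompt = build_prompt(
    "which club was in toronto 2003-06",
    ["Player", "No.", "Nationality", "Position", "Years in Toronto", "School/Club Team"],
)
```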
Reference: https://huggingface.co/juierror/flan-t5-text2sql-with-schema