#!/usr/bin/env python
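# Re-export the Sentdex/GPyT GPT-2 model: pull the config, tokenizer, and both
# the TensorFlow and PyTorch weights from the Hugging Face Hub, then save them
# all back into the current directory.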
from transformers import AutoTokenizer, TFAutoModelForCausalLM, AutoModelForCausalLM, GPT2Config
import tensorflow as tf
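# Default generation settings baked into the config for the text-generation
# pipeline: greedy decoding (no sampling), capped at 50 tokens.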
task_specific_params = {
    "text-generation": {
        "do_sample": False,
        "max_length": 50
    }
}
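# Load the GPT-2 config from the Sentdex/GPyT repo. Extra keyword arguments are
# stored as attributes on the config, so the generation defaults above travel
# with it; _name_or_path appears to simply relabel the config's recorded model name.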
config = GPT2Config.from_pretrained("Sentdex/GPyT", _name_or_path='prophetikai/code-gpt', use_cache=True, task_specific_params=task_specific_params)
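# Load the tokenizer plus the TensorFlow and PyTorch flavors of the model, both
# built from the customized config.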
tokenizer = AutoTokenizer.from_pretrained("Sentdex/GPyT")
tf_model = TFAutoModelForCausalLM.from_pretrained("Sentdex/GPyT", config=config)
pytorch_model = AutoModelForCausalLM.from_pretrained("Sentdex/GPyT", config=config)
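# Write everything to the current directory. With saved_model=True the TF model
# should also export a TensorFlow SavedModel under ./saved_model/<version>
# (the version string here is "sentdex").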
config.save_pretrained('./')
tokenizer.save_pretrained(save_directory='./')
tf_model.save_pretrained(save_directory='./', saved_model=True, version='sentdex')
pytorch_model.save_pretrained(save_directory='./')