"""
A script that is run when the server starts.
"""
import os

from transformers import AutoModelForCausalLM, AutoTokenizer

DOWNLOADED_MODELS_DIR = os.path.join(os.path.dirname(__file__), 'models')


def download_model(model_name: str):
    """
    Downloads a model and its tokenizer from the Hugging Face Hub to the
    on-disk cache without keeping them in RAM.
    :param model_name: The name of the model to download.
    """
    print(f"Downloading model: {model_name}")
    # from_pretrained fetches the weights into the default Hugging Face cache;
    # the in-memory objects are deleted right away to free RAM.
    model = AutoModelForCausalLM.from_pretrained(model_name)
    del model
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    del tokenizer
    print(f"Downloaded model: {model_name}")


def download_useful_models():
    """
    Downloads the models that are useful for this project, so that the user
    doesn't have to wait for them to download when they first use the app.
    """
    print("Downloading useful models...")
    # Note: as written, from_pretrained stores the files in the default
    # Hugging Face cache; this directory is only used if cache_dir is passed.
    os.makedirs(DOWNLOADED_MODELS_DIR, exist_ok=True)
    useful_models = (
        "gpt2",
        "EleutherAI/gpt-j-6B",
        "sberbank-ai/mGPT",
        "facebook/opt-125m",
    )
    for model_name in useful_models:
        download_model(model_name)


def main():
    download_useful_models()


if __name__ == "__main__":
    main()