import os

from transformers import AutoModelForCausalLM, AutoTokenizer

# Hub repo to download; Llama 2 is gated, so you must have accepted the
# license and be authenticated (e.g. via `huggingface-cli login`)
model_name = "meta-llama/Llama-2-7b-hf"

# Local directory for the download (cache_dir stores files in the
# Hugging Face cache layout, not as a flat copy of the repo)
local_model_dir = "./llama-2-7b-hf"

# Create the directory if it doesn't exist
os.makedirs(local_model_dir, exist_ok=True)

# Download the model weights and tokenizer into the local cache directory
model = AutoModelForCausalLM.from_pretrained(model_name, cache_dir=local_model_dir)
tokenizer = AutoTokenizer.from_pretrained(model_name, cache_dir=local_model_dir)
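
# Sanity check (a sketch, not part of the original snippet): reload both
# artifacts strictly from the local cache (local_files_only=True forbids any
# network access) and run a short generation as a smoke test. Note that a
# 7B model in fp32 needs roughly 28 GB of RAM.
offline_model = AutoModelForCausalLM.from_pretrained(
    model_name, cache_dir=local_model_dir, local_files_only=True
)
offline_tokenizer = AutoTokenizer.from_pretrained(
    model_name, cache_dir=local_model_dir, local_files_only=True
)

inputs = offline_tokenizer("The capital of France is", return_tensors="pt")
outputs = offline_model.generate(**inputs, max_new_tokens=10)
print(offline_tokenizer.decode(outputs[0], skip_special_tokens=True))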