import os

from huggingface_hub import login
from transformers import AutoModelForCausalLM, AutoTokenizer

# Retrieve the token from the environment variable
hf_api_token = os.getenv("HF_API_TOKEN")
if hf_api_token is None:
    raise ValueError("HF_API_TOKEN environment variable is not set")

# Authenticate with Hugging Face (required for gated models such as Llama 2)
login(token=hf_api_token, add_to_git_credential=True)

# Define the local directory where the model files will be cached
local_model_dir = "./llama-2-7b-hf"

# Create the directory if it doesn't exist
os.makedirs(local_model_dir, exist_ok=True)

# Download the model and tokenizer into the local cache directory
model = AutoModelForCausalLM.from_pretrained(
    "meta-llama/Llama-2-7b-hf", cache_dir=local_model_dir
)
tokenizer = AutoTokenizer.from_pretrained(
    "meta-llama/Llama-2-7b-hf", cache_dir=local_model_dir
)
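
# Optional follow-up (a minimal sketch): cache_dir above stores files in the
# Hub's internal cache layout rather than as a flat directory. If a plain,
# self-contained copy of the weights is preferred (e.g. for offline reloading
# by passing the path straight to from_pretrained()), save_pretrained() can
# export both model and tokenizer. The export directory name below is
# illustrative, not part of the original script.
export_dir = "./llama-2-7b-hf-export"
model.save_pretrained(export_dir)
tokenizer.save_pretrained(export_dir)

# Reload later without contacting the Hub:
#   model = AutoModelForCausalLM.from_pretrained(export_dir)
#   tokenizer = AutoTokenizer.from_pretrained(export_dir)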