File size: 809 Bytes
dac5c38
 
7a827c3
 
b137f9b
255c853
 
 
 
 
 
 
 
 
 
dac5c38
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
"""Download the Llama-2-7b-hf model and tokenizer from Hugging Face.

Requires the ``HF_API_TOKEN`` environment variable to hold a token with
access to the gated ``meta-llama/Llama-2-7b-hf`` repository.
"""
import os

from huggingface_hub import login
from transformers import AutoModelForCausalLM, AutoTokenizer

# Single source of truth for the repo id (was repeated on both download calls).
MODEL_ID = "meta-llama/Llama-2-7b-hf"

# Retrieve the token from the environment variable (never hard-code secrets).
hf_api_token = os.getenv("HF_API_TOKEN")

if hf_api_token is None:
    raise ValueError("HF_API_TOKEN environment variable is not set")

# Authenticate with Hugging Face so the gated Llama-2 repo can be fetched.
login(token=hf_api_token, add_to_git_credential=True)

# Directory used as the download cache for the model files.
# NOTE(review): `cache_dir` stores files in Hugging Face's cache layout, not a
# flat model folder — use model.save_pretrained(...) if a plain copy is needed.
local_model_dir = "./llama-2-7b-hf"

# Create the directory if it doesn't exist
os.makedirs(local_model_dir, exist_ok=True)

# Download (or reuse the cached) model weights and tokenizer.
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, cache_dir=local_model_dir)
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, cache_dir=local_model_dir)