|
import os |
|
from transformers import AutoModelForCausalLM, AutoTokenizer |
|
from huggingface_hub import login |
|
import os |
|
|
|
|
|
|
|
# Authenticate this process with the Hugging Face Hub.
# The access token is read from the environment so it never lives in source;
# fail fast with a clear error when it is missing.
token = os.getenv("HF_API_TOKEN")
if token is None:
    raise ValueError("HF_API_TOKEN environment variable is not set")

# add_to_git_credential=True also stores the token in the git credential
# helper so later `git` operations against the Hub are authenticated.
login(token=token, add_to_git_credential=True)
|
|
|
|
|
# Checkpoint to download — defined once so the model and tokenizer calls
# below cannot drift out of sync (the id was previously duplicated inline).
MODEL_NAME = "meta-llama/Llama-2-7b-hf"

# Directory handed to transformers as the download cache.
# NOTE(review): `cache_dir` stores files in the Hub cache layout
# (snapshots/refs subdirectories), not as a flat model folder — if a plain
# copy is needed, follow up with model.save_pretrained(); confirm intent.
local_model_dir = "./llama-2-7b-hf"

# from_pretrained would create the cache directory itself; creating it
# explicitly (and tolerating pre-existence) makes the intent obvious.
os.makedirs(local_model_dir, exist_ok=True)

# Download (or reuse the cached copy of) the model weights and tokenizer.
# Requires prior Hub authentication, since this repo is gated.
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, cache_dir=local_model_dir)
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, cache_dir=local_model_dir)
|
|