from transformers import AutoTokenizer, AutoModelForCausalLM


def load_model():
    """Loads the tokenizer and model from local files."""
    model_dir = "./"  # Ensure all model files are in the root directory
    # from_pretrained reads config.json from model_dir automatically,
    # so no explicit config argument is needed.
    tokenizer = AutoTokenizer.from_pretrained(model_dir)
    model = AutoModelForCausalLM.from_pretrained(model_dir)
    return tokenizer, model


if __name__ == "__main__":
    tokenizer, model = load_model()
    print("Model and tokenizer loaded successfully.")
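
    # Optional smoke test: a minimal generation sketch, assuming the files in
    # model_dir form a causal LM. The prompt and max_new_tokens values below
    # are illustrative; adjust them for your model.
    inputs = tokenizer("Hello, world!", return_tensors="pt")
    output_ids = model.generate(**inputs, max_new_tokens=20)
    print(tokenizer.decode(output_ids[0], skip_special_tokens=True))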