from transformers import AutoModelForCausalLM, AutoTokenizer

model_name = "ibm-granite/granite-7b-instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, device_map="auto")

# Code to run inference with the model
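# A minimal sketch of the inference step the comment above refers to, using the
# standard transformers generate/decode API. The prompt text and generation
# parameters (max_new_tokens, do_sample) are illustrative assumptions, not part
# of the original snippet.
prompt = "Explain the difference between a list and a tuple in Python."
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=128, do_sample=False)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))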