from transformers import AutoTokenizer, AutoModelForCausalLM

model_name = "meta-llama/Meta-Llama-3-8B"


def load():
    # Load the model and tokenizer once and keep them as module-level globals
    # so repeated generate() calls reuse the same weights.
    global model
    global tokenizer

    model = AutoModelForCausalLM.from_pretrained(model_name)
    tokenizer = AutoTokenizer.from_pretrained(model_name)


def generate(input_text):
    # Tokenize the prompt into a PyTorch tensor of input IDs.
    input_ids = tokenizer.encode(input_text, return_tensors="pt")
    # Beam search with 5 beams; no_repeat_ngram_size=2 blocks repeated bigrams.
    output_ids = model.generate(input_ids, num_beams=5, no_repeat_ngram_size=2)

    # Decode the best beam back to text, dropping special tokens.
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)
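

# Minimal usage sketch (assumption: load() must run before generate(),
# whether called by a serving framework or directly as below; the prompt
# string is illustrative only).
if __name__ == "__main__":
    load()
    print(generate("The capital of France is"))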