alexkueck committed on
Commit
9078d55
·
1 Parent(s): 013cf76

Update utils.py

Browse files
Files changed (1) hide show
  1. utils.py +3 -4
utils.py CHANGED
@@ -68,7 +68,7 @@ def generate_prompt_with_history(text, history, tokenizer, max_length=2048):
68
 
69
 
70
 
71
- def load_tokenizer_and_model(base_model, use_auth_token, load_8bit=False):
72
  if torch.cuda.is_available():
73
  device = "cuda"
74
  else:
@@ -80,12 +80,11 @@ def load_tokenizer_and_model(base_model, use_auth_token, load_8bit=False):
80
  base_model,
81
  load_in_8bit=load_8bit,
82
  torch_dtype=torch.float16,
83
- device_map="auto",
84
- use_auth_token=use_auth_token
85
  )
86
  else:
87
  model = AutoModelForCausalLM.from_pretrained(
88
- base_model, device_map={"": device}, low_cpu_mem_usage=True, use_auth_token=use_auth_token
89
  )
90
 
91
  #if not load_8bit:
 
68
 
69
 
70
 
71
+ def load_tokenizer_and_model(base_model, load_8bit=False):
72
  if torch.cuda.is_available():
73
  device = "cuda"
74
  else:
 
80
  base_model,
81
  load_in_8bit=load_8bit,
82
  torch_dtype=torch.float16,
83
+ device_map="auto"
 
84
  )
85
  else:
86
  model = AutoModelForCausalLM.from_pretrained(
87
+ base_model, device_map={"": device}, low_cpu_mem_usage=True
88
  )
89
 
90
  #if not load_8bit: