Artples committed
Commit 5229350
Parent: 103f453

Update app.py

Files changed (1):
  app.py +2 -2
app.py CHANGED
@@ -36,13 +36,13 @@ if not torch.cuda.is_available():
 
 
 if torch.cuda.is_available():
-    model_id = "meta-llama/Llama-2-13b-chat-hf"
+    model_id = "ibm/labradorite-13b"
     model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
     tokenizer = AutoTokenizer.from_pretrained(model_id)
     tokenizer.use_default_system_prompt = False
 
 
-@spaces.GPU
+@spaces.GPU(enable_queue=True)
 def generate(
     message: str,
     chat_history: list[tuple[str, str]],
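
For reference, a minimal self-contained sketch of how the updated block reads after this commit. The imports are the ones the shown lines require (torch, spaces, transformers); the body of generate and its trimmed signature are hypothetical stand-ins, since the diff only shows its first two parameters.

import torch
import spaces
from transformers import AutoModelForCausalLM, AutoTokenizer

if torch.cuda.is_available():
    # New base model introduced by this commit, loaded in 4-bit as before.
    model_id = "ibm/labradorite-13b"
    model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", load_in_4bit=True)
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    tokenizer.use_default_system_prompt = False


@spaces.GPU(enable_queue=True)  # decorator call form added by this commit, passing enable_queue=True as in the diff
def generate(message: str, chat_history: list[tuple[str, str]]) -> str:
    # Hypothetical minimal body for illustration; the real app presumably builds a chat
    # prompt from chat_history and takes additional generation parameters not shown here.
    inputs = tokenizer(message, return_tensors="pt").to(model.device)
    output_ids = model.generate(**inputs, max_new_tokens=256)
    return tokenizer.decode(output_ids[0], skip_special_tokens=True)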