Leyo committed on
Commit
75d3aa1
1 Parent(s): 8abd3af

switch default

Files changed (1):
  app_dialogue.py +4 -4
app_dialogue.py CHANGED
@@ -24,15 +24,15 @@ from transformers import Idefics2ForConditionalGeneration
 
 DEVICE = torch.device("cuda")
 MODELS = {
-    "idefics2-8b (sft)": Idefics2ForConditionalGeneration.from_pretrained(
-        "HuggingFaceM4/idefics2-8b",
+    "idefics2-8b (chat)": Idefics2ForConditionalGeneration.from_pretrained(
+        "HuggingFaceM4/idefics2-chat-tfrm-compatible",
         torch_dtype=torch.bfloat16,
         _attn_implementation="flash_attention_2",
         trust_remote_code=True,
         token=os.environ["HF_AUTH_TOKEN"],
     ).to(DEVICE),
-    "idefics2-8b (chat)": Idefics2ForConditionalGeneration.from_pretrained(
-        "HuggingFaceM4/idefics2-chat-tfrm-compatible",
+    "idefics2-8b (sft)": Idefics2ForConditionalGeneration.from_pretrained(
+        "HuggingFaceM4/idefics2-8b",
         torch_dtype=torch.bfloat16,
         _attn_implementation="flash_attention_2",
         trust_remote_code=True,
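
Why swapping the two entries changes the default: Python dicts preserve insertion order, so whichever key comes first in MODELS is what any order-based lookup (for example, a UI dropdown that defaults to the first model name) will pick. The sketch below is an assumption about how app_dialogue.py consumes MODELS, not code from this commit; the dropdown, variable names, and placeholder string values are hypothetical.

import gradio as gr

# Hypothetical stand-in: the real MODELS maps names to Idefics2 models loaded
# onto the GPU; plain strings are used here so the snippet runs without weights.
MODELS = {
    "idefics2-8b (chat)": "HuggingFaceM4/idefics2-chat-tfrm-compatible",
    "idefics2-8b (sft)": "HuggingFaceM4/idefics2-8b",
}

model_names = list(MODELS.keys())  # insertion order is preserved (Python 3.7+)
model_selector = gr.Dropdown(
    choices=model_names,
    value=model_names[0],  # first key, i.e. "idefics2-8b (chat)" after this commit
    label="Model",
)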