J-LAB committed on
Commit c6e96fd · verified · 1 Parent(s): 91016aa

Update app.py

Files changed (1)
  1. app.py +1 -1
app.py CHANGED
@@ -7,7 +7,7 @@ from PIL import Image
 import subprocess
 subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, shell=True)
 
-model_id = 'J-LAB/Florence_2_F_FluxiAI_Product_Caption'
+model_id = 'J-LAB/Florence-vl2'
 model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True).to("cuda").eval()
 processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)
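
Note (not part of the commit): the sketch below shows how the updated model_id is typically driven once app.py has loaded it, assuming the stock Florence-2-style API that these checkpoints usually expose (a task-token prompt such as "<CAPTION>" and a post_process_generation helper on the custom processor). The image URL, the task token, and that helper are assumptions, not confirmed by this repo.

import requests
from PIL import Image
from transformers import AutoModelForCausalLM, AutoProcessor

model_id = 'J-LAB/Florence-vl2'
model = AutoModelForCausalLM.from_pretrained(model_id, trust_remote_code=True).to("cuda").eval()
processor = AutoProcessor.from_pretrained(model_id, trust_remote_code=True)

# Placeholder input image; any RGB PIL image works here.
image = Image.open(requests.get("https://example.com/product.jpg", stream=True).raw).convert("RGB")
prompt = "<CAPTION>"  # assumed Florence-2 task token; the Space may define its own

# Tokenize the prompt and preprocess the image together, then generate a caption.
inputs = processor(text=prompt, images=image, return_tensors="pt").to("cuda")
generated_ids = model.generate(
    input_ids=inputs["input_ids"],
    pixel_values=inputs["pixel_values"],
    max_new_tokens=256,
)
raw_text = processor.batch_decode(generated_ids, skip_special_tokens=False)[0]

# post_process_generation is provided by the Florence-2 remote-code processor (assumed here).
caption = processor.post_process_generation(raw_text, task=prompt, image_size=(image.width, image.height))
print(caption)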