talay committed on
Commit
942c56c
·
verified ·
1 Parent(s): 904f24a

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -0
app.py CHANGED
@@ -1,3 +1,25 @@
"""Minimal Gradio app: serve the hosted BLIP large image-captioning model."""
import gradio as gr

# Build the demo straight from the Hugging Face hosted model, then serve it.
demo = gr.load("models/Salesforce/blip-image-captioning-large")
demo.launch()
"""Gradio app for BLIP image captioning (Salesforce/blip-image-captioning-large).

Loads the model once with transformers and serves it through a Gradio
Interface. The original version loaded the model locally and then *also*
re-loaded it from the Hub via ``gr.load`` (the local copy was never used),
and it downloaded a demo image and printed captions at import time; both
issues are fixed here while keeping the smoke-test output when run as a
script.
"""
import gradio as gr
import requests
from PIL import Image
from transformers import BlipProcessor, BlipForConditionalGeneration

# Load the processor/model once at startup; the Gradio handler reuses them
# instead of re-downloading the model via gr.load().
processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-large")
model = BlipForConditionalGeneration.from_pretrained("Salesforce/blip-image-captioning-large")


def caption(image, prompt: str = "") -> str:
    """Return a caption for *image*.

    Args:
        image: PIL image (any mode; converted to RGB internally).
        prompt: Optional text that conditions the caption (e.g. "a photography of").
                Empty string means unconditional captioning.

    Returns:
        The decoded caption string, or "" if *image* is None.
    """
    if image is None:
        return ""
    image = image.convert("RGB")
    # BlipProcessor accepts an optional text prompt as the second argument.
    if prompt:
        inputs = processor(image, prompt, return_tensors="pt")
    else:
        inputs = processor(image, return_tensors="pt")
    out = model.generate(**inputs)
    return processor.decode(out[0], skip_special_tokens=True)


def _smoke_test() -> None:
    """Reproduce the original demo: caption a sample image both ways."""
    img_url = 'https://storage.googleapis.com/sfr-vision-language-research/BLIP/demo.jpg'
    raw_image = Image.open(requests.get(img_url, stream=True).raw).convert('RGB')
    # conditional image captioning
    print(caption(raw_image, "a photography of"))
    # unconditional image captioning
    print(caption(raw_image))


if __name__ == "__main__":
    _smoke_test()
    # Serve the locally loaded model rather than loading a second copy
    # from the Hub with gr.load().
    gr.Interface(
        fn=caption,
        inputs=[gr.Image(type="pil"), gr.Textbox(label="Optional prompt")],
        outputs=gr.Textbox(label="Caption"),
        title="BLIP image captioning (large)",
    ).launch()