hysts HF staff committed on
Commit
863b4ea
1 Parent(s): 315fd24
Files changed (2) hide show
  1. app.py +28 -51
  2. style.css +0 -1
app.py CHANGED
@@ -1,11 +1,8 @@
1
  from __future__ import annotations
2
 
3
- import os
4
-
5
  import gradio as gr
6
  import torch
7
  from gradio_client import Client
8
- from gradio_client.client import Job
9
 
10
  DESCRIPTION = "# Comparing image captioning models"
11
  ORIGINAL_SPACE_INFO = """\
@@ -26,31 +23,28 @@ torch.hub.download_url_to_file(
26
  )
27
 
28
 
29
- def generate_caption_git(image_path: str, return_job: bool = False) -> str | Job:
30
  try:
31
  client = Client("hysts/image-captioning-with-git")
32
- fn = client.submit if return_job else client.predict
33
- return fn(image_path, api_name="/caption")
34
  except Exception:
35
  gr.Warning("The GIT-large Space is currently unavailable. Please try again later.")
36
  return ""
37
 
38
 
39
- def generate_caption_blip(image_path: str, return_job: bool = False) -> str | Job:
40
  try:
41
  client = Client("hysts/image-captioning-with-blip")
42
- fn = client.submit if return_job else client.predict
43
- return fn(image_path, "A picture of", api_name="/caption")
44
  except Exception:
45
  gr.Warning("The BLIP-large Space is currently unavailable. Please try again later.")
46
  return ""
47
 
48
 
49
- def generate_caption_blip2_opt(image_path: str, return_job: bool = False) -> str | Job:
50
  try:
51
  client = Client("merve/BLIP2-with-transformers")
52
- fn = client.submit if return_job else client.predict
53
- return fn(
54
  image_path,
55
  "Beam search",
56
  1, # temperature
@@ -63,11 +57,10 @@ def generate_caption_blip2_opt(image_path: str, return_job: bool = False) -> str
63
  return ""
64
 
65
 
66
- def generate_caption_blip2_t5xxl(image_path: str, return_job: bool = False) -> str | Job:
67
  try:
68
  client = Client("hysts/BLIP2")
69
- fn = client.submit if return_job else client.predict
70
- return fn(
71
  image_path,
72
  "Beam search",
73
  1, # temperature
@@ -84,11 +77,10 @@ def generate_caption_blip2_t5xxl(image_path: str, return_job: bool = False) -> s
84
  return ""
85
 
86
 
87
- def generate_caption_instructblip(image_path: str, return_job: bool = False) -> str | Job:
88
  try:
89
  client = Client("hysts/InstructBLIP")
90
- fn = client.submit if return_job else client.predict
91
- return fn(
92
  image_path,
93
  "Describe the image.",
94
  "Beam search",
@@ -106,28 +98,15 @@ def generate_caption_instructblip(image_path: str, return_job: bool = False) ->
106
  return ""
107
 
108
 
109
- def generate_caption_fuyu(image_path: str, return_job: bool = False) -> str | Job:
110
  try:
111
  client = Client("adept/fuyu-8b-demo")
112
- fn = client.submit if return_job else client.predict
113
- return fn(image_path, "Generate a coco style caption.\n", fn_index=3)
114
  except Exception:
115
  gr.Warning("The Fuyu-8B Space is currently unavailable. Please try again later.")
116
  return ""
117
 
118
 
119
- def generate_captions(image_path: str) -> tuple[str, str, str, str, str, str]:
120
- jobs = [
121
- generate_caption_git(image_path, return_job=True),
122
- generate_caption_blip(image_path, return_job=True),
123
- generate_caption_blip2_opt(image_path, return_job=True),
124
- generate_caption_blip2_t5xxl(image_path, return_job=True),
125
- generate_caption_instructblip(image_path, return_job=True),
126
- generate_caption_fuyu(image_path, return_job=True),
127
- ]
128
- return tuple(job.result() if job else "" for job in jobs)
129
-
130
-
131
  with gr.Blocks(css="style.css") as demo:
132
  gr.Markdown(DESCRIPTION)
133
  with gr.Row():
@@ -142,14 +121,6 @@ with gr.Blocks(css="style.css") as demo:
142
  out_instructblip = gr.Textbox(label="InstructBLIP")
143
  out_fuyu = gr.Textbox(label="Fuyu-8B")
144
 
145
- outputs = [
146
- out_git,
147
- out_blip,
148
- out_blip2_opt,
149
- out_blip2_t5xxl,
150
- out_instructblip,
151
- out_fuyu,
152
- ]
153
  gr.Examples(
154
  examples=[
155
  "cats.jpg",
@@ -157,20 +128,26 @@ with gr.Blocks(css="style.css") as demo:
157
  "astronaut.jpg",
158
  ],
159
  inputs=input_image,
160
- outputs=outputs,
161
- fn=generate_captions,
162
- cache_examples=os.getenv("CACHE_EXAMPLES") == "1",
163
  )
164
 
165
  with gr.Accordion(label="The original Spaces can be found here:", open=False):
166
  gr.Markdown(ORIGINAL_SPACE_INFO)
167
 
168
- run_button.click(
169
- fn=generate_captions,
170
- inputs=input_image,
171
- outputs=outputs,
172
- api_name="caption",
173
- )
 
 
 
 
 
 
 
 
 
174
 
175
  if __name__ == "__main__":
176
- demo.queue(max_size=20).launch()
 
1
  from __future__ import annotations
2
 
 
 
3
  import gradio as gr
4
  import torch
5
  from gradio_client import Client
 
6
 
7
  DESCRIPTION = "# Comparing image captioning models"
8
  ORIGINAL_SPACE_INFO = """\
 
23
  )
24
 
25
 
26
def generate_caption_git(image_path: str) -> str:
    """Caption an image with the GIT-large model hosted on a remote Space.

    Calls the ``hysts/image-captioning-with-git`` Space's ``/caption``
    endpoint. On any failure (Space down, network error) a ``gr.Warning``
    is shown and an empty string is returned instead of raising.
    """
    try:
        caption = Client("hysts/image-captioning-with-git").predict(
            image_path, api_name="/caption"
        )
    except Exception:
        # Best-effort: surface a UI warning rather than crash the demo.
        gr.Warning("The GIT-large Space is currently unavailable. Please try again later.")
        return ""
    return caption
33
 
34
 
35
def generate_caption_blip(image_path: str) -> str:
    """Caption an image with the BLIP-large model hosted on a remote Space.

    Calls the ``hysts/image-captioning-with-blip`` Space's ``/caption``
    endpoint with the fixed prompt prefix ``"A picture of"``. On any
    failure a ``gr.Warning`` is shown and an empty string is returned.
    """
    try:
        caption = Client("hysts/image-captioning-with-blip").predict(
            image_path, "A picture of", api_name="/caption"
        )
    except Exception:
        # Best-effort: surface a UI warning rather than crash the demo.
        gr.Warning("The BLIP-large Space is currently unavailable. Please try again later.")
        return ""
    return caption
42
 
43
 
44
+ def generate_caption_blip2_opt(image_path: str) -> str:
45
  try:
46
  client = Client("merve/BLIP2-with-transformers")
47
+ return client.predict(
 
48
  image_path,
49
  "Beam search",
50
  1, # temperature
 
57
  return ""
58
 
59
 
60
+ def generate_caption_blip2_t5xxl(image_path: str) -> str:
61
  try:
62
  client = Client("hysts/BLIP2")
63
+ return client.predict(
 
64
  image_path,
65
  "Beam search",
66
  1, # temperature
 
77
  return ""
78
 
79
 
80
+ def generate_caption_instructblip(image_path: str) -> str:
81
  try:
82
  client = Client("hysts/InstructBLIP")
83
+ return client.predict(
 
84
  image_path,
85
  "Describe the image.",
86
  "Beam search",
 
98
  return ""
99
 
100
 
101
def generate_caption_fuyu(image_path: str) -> str:
    """Caption an image with the Fuyu-8B model hosted on a remote Space.

    Calls the ``adept/fuyu-8b-demo`` Space by function index (``fn_index=3``)
    with a fixed COCO-style captioning prompt. On any failure a
    ``gr.Warning`` is shown and an empty string is returned.
    """
    try:
        caption = Client("adept/fuyu-8b-demo").predict(
            image_path, "Generate a coco style caption.\n", fn_index=3
        )
    except Exception:
        # Best-effort: surface a UI warning rather than crash the demo.
        gr.Warning("The Fuyu-8B Space is currently unavailable. Please try again later.")
        return ""
    return caption
108
 
109
 
 
 
 
 
 
 
 
 
 
 
 
 
110
  with gr.Blocks(css="style.css") as demo:
111
  gr.Markdown(DESCRIPTION)
112
  with gr.Row():
 
121
  out_instructblip = gr.Textbox(label="InstructBLIP")
122
  out_fuyu = gr.Textbox(label="Fuyu-8B")
123
 
 
 
 
 
 
 
 
 
124
  gr.Examples(
125
  examples=[
126
  "cats.jpg",
 
128
  "astronaut.jpg",
129
  ],
130
  inputs=input_image,
 
 
 
131
  )
132
 
133
  with gr.Accordion(label="The original Spaces can be found here:", open=False):
134
  gr.Markdown(ORIGINAL_SPACE_INFO)
135
 
136
+ fn_out_pairs = [
137
+ (generate_caption_git, out_git),
138
+ (generate_caption_blip, out_blip),
139
+ (generate_caption_blip2_opt, out_blip2_opt),
140
+ (generate_caption_blip2_t5xxl, out_blip2_t5xxl),
141
+ (generate_caption_instructblip, out_instructblip),
142
+ (generate_caption_fuyu, out_fuyu),
143
+ ]
144
+ for fn, out in fn_out_pairs:
145
+ run_button.click(
146
+ fn=fn,
147
+ inputs=input_image,
148
+ outputs=out,
149
+ api_name=False,
150
+ )
151
 
152
  if __name__ == "__main__":
153
+ demo.queue(max_size=20, api_open=False).launch(show_api=False)
style.css CHANGED
@@ -2,4 +2,3 @@ h1 {
2
  text-align: center;
3
  display: block;
4
  }
5
-
 
2
  text-align: center;
3
  display: block;
4
  }