Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -8,7 +8,7 @@ from gradio_client import Client
 
 DESCRIPTION = "# Comparing image captioning models"
 ORIGINAL_SPACE_INFO = """\
-- [GIT-large fine-tuned on COCO](https://huggingface.co/spaces/library-samples/image-captioning-with-git)
+- [(Omit because it wasn't good)GIT-large fine-tuned on COCO](https://huggingface.co/spaces/library-samples/image-captioning-with-git)
 - [BLIP-large](https://huggingface.co/spaces/library-samples/image-captioning-with-blip)
 - [BLIP-2 OPT 6.7B](https://huggingface.co/spaces/merve/BLIP2-with-transformers)
 - [BLIP-2 T5-XXL](https://huggingface.co/spaces/hysts/BLIP2-with-transformers)
@@ -25,13 +25,13 @@ torch.hub.download_url_to_file(
 )
 
 
-def generate_caption_git(image_path: str) -> str:
-    try:
-        client = Client("library-samples/image-captioning-with-git")
-        return client.predict(image_path, api_name="/caption")
-    except Exception:
-        gr.Warning("The GIT-large Space is currently unavailable. Please try again later.")
-        return ""
+# def generate_caption_git(image_path: str) -> str:
+#     try:
+#         client = Client("library-samples/image-captioning-with-git")
+#         return client.predict(image_path, api_name="/caption")
+#     except Exception:
+#         gr.Warning("The GIT-large Space is currently unavailable. Please try again later.")
+#         return ""
 
 
 def generate_caption_blip(image_path: str) -> str:
@@ -111,7 +111,7 @@ def generate_caption_fuyu(image_path: str) -> str:
 
 def generate_captions(image_path: str) -> tuple[str, str, str, str, str, str]:
     return (
-        generate_caption_git(image_path),
+        # generate_caption_git(image_path),
         generate_caption_blip(image_path),
         generate_caption_blip2_opt(image_path),
         generate_caption_blip2_t5xxl(image_path),
@@ -127,7 +127,7 @@ with gr.Blocks(css="style.css") as demo:
             input_image = gr.Image(type="filepath")
             run_button = gr.Button("Caption")
         with gr.Column():
-            out_git = gr.Textbox(label="GIT-large fine-tuned on COCO")
+            # out_git = gr.Textbox(label="GIT-large fine-tuned on COCO")
             out_blip = gr.Textbox(label="BLIP-large")
             out_blip2_opt = gr.Textbox(label="BLIP-2 OPT 6.7B")
             out_blip2_t5xxl = gr.Textbox(label="BLIP-2 T5-XXL")
@@ -135,7 +135,7 @@ with gr.Blocks(css="style.css") as demo:
             out_fuyu = gr.Textbox(label="Fuyu-8B")
 
     outputs = [
-        out_git,
+        # out_git,
         out_blip,
         out_blip2_opt,
         out_blip2_t5xxl,
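The commented-out function above shows the pattern the remaining caption functions in this app presumably follow: each one proxies the request to another Hugging Face Space through gradio_client and falls back to an empty string with a gr.Warning when that Space is unavailable. A minimal sketch of what generate_caption_blip likely looks like, assuming the BLIP Space (taken from ORIGINAL_SPACE_INFO) exposes the same "/caption" endpoint as the GIT Space; the api_name is an assumption:

```python
import gradio as gr
from gradio_client import Client


def generate_caption_blip(image_path: str) -> str:
    try:
        # Call the remote BLIP-large Space; the Space ID comes from
        # ORIGINAL_SPACE_INFO, api_name="/caption" mirrors the GIT function
        # and is assumed, not confirmed, for this Space.
        client = Client("library-samples/image-captioning-with-blip")
        return client.predict(image_path, api_name="/caption")
    except Exception:
        # Degrade gracefully when the upstream Space is down, matching the
        # behavior of the now-commented-out GIT function.
        gr.Warning("The BLIP-large Space is currently unavailable. Please try again later.")
        return ""
```

The `outputs` list edited in the last hunk presumably feeds a `run_button.click(fn=generate_captions, inputs=input_image, outputs=outputs)` call further down app.py (not shown in this diff), which is why removing `out_git` there has to go hand in hand with dropping `generate_caption_git` from the returned tuple.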