Jyothirmai
committed on
Update app.py
app.py CHANGED
@@ -5,26 +5,9 @@ import vitGPT
 import skimage.io as io
 import PIL.Image
 import difflib
+import tester
 
 
-def compare_and_highlight(text1, text2):
-    print("Triggered function")
-
-    matcher = difflib.SequenceMatcher(None, text1, text2)
-    output = ''
-    for op, a1, a2, b1, b2 in matcher.get_opcodes():
-        if op == 'equal':
-            output += f"**{text1[a1:a2]}**" # Highlight matches in bold
-        elif op == 'insert':
-            output += f"<ins>{text2[b1:b2]}</ins>"
-        elif op == 'delete':
-            output += f"<del>{text1[a1:a2]}</del>"
-        elif op == 'replace':
-            # Handle replacements (more complex)
-            output += f"<del>{text1[a1:a2]}</del> <ins>{text2[b1:b2]}</ins>"
-    print(output)
-    return output
-
 
 # Caption generation functions
 def generate_caption_clipgpt(image):
@@ -35,6 +18,9 @@ def generate_caption_vitgpt(image):
     caption = vitGPT.generate_caption(image)
     return caption
 
+def generate_caption_vitCoAtt(image):
+    caption = tester.main(image)
+    return caption
 
 
 with gr.Blocks() as demo:
@@ -50,19 +36,12 @@ with gr.Blocks() as demo:
         "CXR193_IM-0601-1001.png",
         "CXR194_IM-0609-1001.png",
         "CXR195_IM-0618-1001.png"
-
+    ]
 
 
     image = gr.Image(label="Upload Chest X-ray")
-    gr.Gallery(
-        value = sample_images,
-        label="Sample Images",
-    )
 
-
-    # value = sample_images,
-    # label="Sample Images",
-    # )
+    sample_images_gallery = gr.Gallery(value = sample_images,label="Sample Images")
 
     with gr.Row():
         model_choice = gr.Radio(["CLIP-GPT2", "ViT-GPT2", "ViT-CoAttention"], label="Select Model")
@@ -75,23 +54,15 @@ with gr.Blocks() as demo:
             return generate_caption_clipgpt(img)
         elif model_name == "ViT-GPT2":
             return generate_caption_vitgpt(img)
+        elif model_name == "ViT-CoAttention":
+            return generate_caption_vitCoAtt(img)
         else:
             return "Caption generation for this model is not yet implemented."
 
-    with gr.Row():
-        text1 = gr.Textbox(label="Text 1")
-        text2 = gr.Textbox(label="Text 2")
-        compare_button = gr.Button("Compare Texts")
-    with gr.Row():
-        comparison_result = gr.Textbox(label="Comparison Result")
 
     # Event handlers
-
     generate_button.click(predict, [image, model_choice], caption) # Trigger prediction on button click
-
-
-
-    # sample_images_gallery.change(predict, [sample_images_gallery, model_choice], caption) # Handle sample images
+    sample_images_gallery.change(predict, [sample_images_gallery, model_choice], caption) # Handle sample images
 
 
 demo.launch()
|