taesiri committed on
Commit 67ffc3f
1 Parent(s): 40b8da9
Files changed (3)
  1. README.md +1 -1
  2. app.py +45 -59
  3. requirements.txt +6 -7
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 🐢
 colorFrom: yellow
 colorTo: green
 sdk: gradio
-sdk_version: 4.22.0
+sdk_version: 4.36.0
 app_file: app.py
 pinned: false
 ---
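The only change here is the Space's Gradio SDK pin. A quick sanity check, not part of the commit, that a local environment matches the new pin before running the app:

import gradio as gr

# Not part of this commit: verify the local Gradio install matches the Space's
# new sdk_version before launching app.py.
assert gr.__version__ == "4.36.0", f"expected Gradio 4.36.0, found {gr.__version__}"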
app.py CHANGED
@@ -15,10 +15,6 @@ csv.field_size_limit(sys.maxsize)
 
 
 def compute_spatial_similarity(conv1, conv2):
-    """
-    Takes in the last convolutional layer from two images, computes the pooled output
-    feature, and then generates the spatial similarity map for both images.
-    """
     conv1 = conv1.reshape(-1, 7 * 7).T
     conv2 = conv2.reshape(-1, 7 * 7).T
 
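This hunk removes the docstring and shows only the head of compute_spatial_similarity; the pooled-feature math between the reshapes and the final return sits outside the diff context. For orientation, a vectorized sketch of how the two spatial similarity maps are typically computed in the linked Similarity-Visualization code, assuming 7x7x2048 ResNet-50 layer4 features; it is an illustration, not necessarily the exact body of this file:

import numpy as np

def compute_spatial_similarity_sketch(conv1, conv2):
    # conv1, conv2: (49, 2048) arrays, i.e. 7x7 spatial positions by channels,
    # matching the reshape(-1, 7 * 7).T in the hunk above.
    pool1 = conv1.mean(axis=0)            # average-pooled descriptor of image 1
    pool2 = conv2.mean(axis=0)            # average-pooled descriptor of image 2
    side = int(np.sqrt(conv1.shape[0]))   # 7 for a 7x7 feature map
    conv1_normed = conv1 / np.linalg.norm(pool1) / conv1.shape[0]
    conv2_normed = conv2 / np.linalg.norm(pool2) / conv2.shape[0]
    # dot product between every spatial position of image 1 and every position of image 2
    im_similarity = conv1_normed @ conv2_normed.T                # (49, 49)
    similarity1 = im_similarity.sum(axis=1).reshape(side, side)  # map over image 1
    similarity2 = im_similarity.sum(axis=0).reshape(side, side)  # map over image 2
    return similarity1, similarity2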
@@ -37,8 +33,6 @@ def compute_spatial_similarity(conv1, conv2):
     return similarity1, similarity2
 
 
-# Get Layer 4
-
 display_transform = transforms.Compose(
     [transforms.Resize(256), transforms.CenterCrop((224, 224))]
 )
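display_transform above only resizes and center-crops for display. The imagenet_transform used by get_layer4 in the next hunk is not part of the diff; presumably it is the standard torchvision ImageNet preprocessing for a pretrained ResNet-50 (an assumption, sketched here for context):

from torchvision import transforms

# Presumed definition (not shown in this diff): standard ImageNet preprocessing
# for the pretrained torchvision ResNet-50 used by get_layer4 below.
imagenet_transform = transforms.Compose(
    [
        transforms.Resize(256),
        transforms.CenterCrop(224),
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    ]
)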
@@ -68,19 +62,14 @@ class Wrapper(torch.nn.Module):
         _ = self.model(input)
         return self.layer4_ouputs
 
-    def __repr__(self):
-        return "Wrapper"
-
 
 def get_layer4(input_image):
     l4_model = models.resnet50(pretrained=True)
-    # l4_model = l4_model.cuda()
     l4_model.eval()
     wrapped_model = Wrapper(l4_model)
 
     with torch.no_grad():
         data = imagenet_transform(input_image).unsqueeze(0)
-        # data = data.cuda()
         reference_layer4 = wrapped_model(data)
 
     return reference_layer4.data.to("cpu").numpy()
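Only the tail of the Wrapper class is visible above; given how get_layer4 uses it, its job is to capture the layer4 activations of a forward pass. A sketch of such a wrapper built on a forward hook (the hook mechanics are an assumption; only self.layer4_ouputs and forward() appear in the diff, and the misspelled attribute name is kept as-is):

import torch

class Layer4WrapperSketch(torch.nn.Module):
    # Illustration only; the real Wrapper class in app.py may differ.
    def __init__(self, model):
        super().__init__()
        self.model = model
        self.layer4_ouputs = None  # (sic) spelling matches the diff

        def capture(module, inputs, output):
            # layer4 of ResNet-50 emits an (N, 2048, 7, 7) feature map
            self.layer4_ouputs = output

        self.model.layer4.register_forward_hook(capture)

    def forward(self, input):
        _ = self.model(input)      # full forward pass; the hook stores layer4's output
        return self.layer4_ouputs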
@@ -91,10 +80,10 @@ def NormalizeData(data):
 
 
 # Visualization
-def visualize_similarities(q, n):
-    image1 = Image.fromarray(q)
-    image2 = Image.fromarray(n)
-
+def visualize_similarities(image1, image2):
+    print(f"image1: {image1}")
+    print(f"image2: {image2}")
+    print(type(image1))
     a = get_layer4(image1).squeeze()
     b = get_layer4(image2).squeeze()
     sim1, sim2 = compute_spatial_similarity(a, b)
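The hunk header references NormalizeData, whose body is outside the changed lines; it is presumably the usual min-max rescaling applied to the similarity maps before plotting (an assumption, sketched below):

import numpy as np

# Presumed helper (body not shown in this diff): rescale an array to [0, 1].
def NormalizeData(data):
    return (data - np.min(data)) / (np.max(data) - np.min(data))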
@@ -128,60 +117,57 @@ def visualize_similarities(q, n):
     fig.colorbar(im2, ax=axes[1])
     plt.tight_layout()
 
-    q_image = display_transform(image1)
-    nearest_image = display_transform(image2)
+    # q_image = display_transform(image1)
+    # nearest_image = display_transform(image2)
 
-    # make a binarized veruin of the Q
-    fig2, ax = plt.subplots(1, figsize=(5, 5))
-    ax.imshow(display_transform(image1))
+    # # make a binarized veruin of the Q
+    # fig2, ax2 = plt.subplots(1, figsize=(5, 5))
+    # ax2.imshow(display_transform(image1))
 
-    # create a binarized version of sim1 , for value below 0.5 set to 0 and above 0.5 set to 1
-    sim1_bin = np.where(sim1 > 0.5, 1, 0)
-    print(sim1_bin)
-    # create a binarized version of sim2 , for value below 0.5 set to 0 and above 0.5 set to 1
-    sim2_bin = np.where(sim2 > 0.5, 1, 0)
+    # # create a binarized version of sim1 , for value below 0.5 set to 0 and above 0.5 set to 1
+    # sim1_bin = np.where(sim1 > 0.5, 1, 0)
+    # # create a binarized version of sim2 , for value below 0.5 set to 0 and above 0.5 set to 1
+    # sim2_bin = np.where(sim2 > 0.5, 1, 0)
 
-    ax.imshow(
-        skimage.transform.resize(sim1_bin, (224, 224)),
-        alpha=1,
-        cmap="binary",
-        vmin=0,
-        vmax=1,
-    )
+    # ax2.imshow(
+    #     skimage.transform.resize(sim1_bin, (224, 224)),
+    #     alpha=1,
+    #     cmap="binary",
+    #     vmin=0,
+    #     vmax=1,
+    # )
 
-    return fig, q_image, nearest_image, fig2
-
-
-# GRADIO APP
-main = gr.Interface(
-    fn=visualize_similarities,
-    inputs=["image", "image"],
-    allow_flagging="never",
-    outputs=["plot", "image", "image", "plot"],
-    cache_examples=True,
-    enable_queue=False,
-    examples=[
-        [
-            "./examples/Red_Winged_Blackbird_0012_6015.jpg",
-            "./examples/Red_Winged_Blackbird_0025_5342.jpg",
-        ],
-    ],
-)
+    return fig
 
-# iface.launch()
 
 blocks = gr.Blocks()
-with blocks:
 
+with blocks as demo:
+    gr.Markdown("# Visualizing Deep Similarity Networks")
+    gr.Markdown("A quick demo to visualize the similarity between two images.")
     gr.Markdown(
-        """
-        # Visualizing Deep Similarity Networks
-        A quick demo to visualize the similarity between two images.
-        [Original Paper](https://arxiv.org/pdf/1901.00536.pdf) - [Github Page](https://github.com/GWUvision/Similarity-Visualization)
-        """
+        "[Original Paper](https://arxiv.org/pdf/1901.00536.pdf) - [Github Page](https://github.com/GWUvision/Similarity-Visualization)"
     )
 
-    gr.TabbedInterface([main], ["Main"])
+    with gr.Row():
+        with gr.Column():
+            image1 = gr.Image(label="Image 1", type="pil")
+            image2 = gr.Image(label="Image 2", type="pil")
+        with gr.Column():
+            sim1_output = gr.Plot()
+
+    examples = gr.Examples(
+        examples=[
+            [
+                "./examples/Red_Winged_Blackbird_0012_6015.jpg",
+                "./examples/Red_Winged_Blackbird_0025_5342.jpg",
+            ],
+        ],
+        inputs=[image1, image2],
+    )
 
+    btn = gr.Button("Compute Similarity")
+    btn.click(visualize_similarities, inputs=[image1, image2], outputs=[sim1_output])
 
-blocks.launch(debug=True)
+demo.launch(debug=True)
+# blocks.launch(debug=True)
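With the gr.Interface removed, the new flow is: two gr.Image components with type="pil" feed visualize_similarities via btn.click, and the returned Matplotlib figure is rendered by gr.Plot. A hypothetical local smoke test of the new PIL-based signature (paths taken from the examples list; any two RGB images would do):

from PIL import Image

# Hypothetical check, not part of the commit: call the function directly with
# PIL images, matching the new type="pil" inputs, and save the resulting figure.
img1 = Image.open("./examples/Red_Winged_Blackbird_0012_6015.jpg").convert("RGB")
img2 = Image.open("./examples/Red_Winged_Blackbird_0025_5342.jpg").convert("RGB")
fig = visualize_similarities(img1, img2)  # now returns only the side-by-side figure
fig.savefig("similarity_maps.png")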
requirements.txt CHANGED
@@ -1,7 +1,6 @@
-gradio==3.0.5
-matplotlib==3.5.2
-numpy==1.21.5
-Pillow==9.0.1
-scikit-image==0.19.2
-torch==1.11.0
-torchvision==0.12.0
+matplotlib
+numpy
+Pillow
+scikit-image
+torch
+torchvision