ariG23498 HF staff committed on
Commit
b79f89f
•
1 Parent(s): 537c4b5
Files changed (1) hide show
  1. app.py +45 -14
app.py CHANGED
@@ -39,12 +39,33 @@ model = model.eval()
39
 
40
  import gradio as gr
41
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
42
  def run_clip_dinoiser(input_image, text_prompts):
43
  image = input_image.convert("RGB")
44
  text_prompts = text_prompts.split(",")
45
- palette = [
46
- (random.randint(0, 256), random.randint(0, 256), random.randint(0, 256)) for _ in range(len(text_prompts))
47
- ]
48
 
49
  model.clip_backbone.decode_head.update_vocab(text_prompts)
50
  model.to(device)
@@ -78,7 +99,14 @@ def run_clip_dinoiser(input_image, text_prompts):
78
  palette_array = np.array(itemgetter(*classes)(palette)).reshape(1, -1, 3)
79
  alpha=0.5
80
  blend = (alpha)*np.array(image)/255. + (1-alpha) * mask/255.
81
- return palette_array, blend, mask
 
 
 
 
 
 
 
82
 
83
 
84
  if __name__ == "__main__":
@@ -94,9 +122,6 @@ if __name__ == "__main__":
94
  run_button = gr.Button(value="Run")
95
 
96
  with gr.Column():
97
- palette_array = gr.Image(
98
- type="numpy",
99
- )
100
  with gr.Row():
101
  overlay_mask = gr.Image(
102
  type="numpy",
@@ -104,18 +129,24 @@ if __name__ == "__main__":
104
  only_mask = gr.Image(
105
  type="numpy",
106
  )
 
 
 
 
 
 
107
 
108
  run_button.click(
109
  fn=run_clip_dinoiser,
110
  inputs=[input_image, text_prompts,],
111
- outputs=[palette_array, overlay_mask, only_mask]
112
  )
113
  gr.Examples(
114
- [["vintage_bike.jpeg", "background, vintage bike, leather bag"]],
115
- inputs = [input_image, text_prompts,],
116
- outputs = [palette_array, overlay_mask, only_mask],
117
- fn=run_clip_dinoiser,
118
- cache_examples=True,
119
- label='Try this example input!'
120
  )
121
  block.launch(share=False, show_api=False, show_error=True)
 
39
 
40
  import gradio as gr
41
 
42
+ colors = [
43
+ (0, 255, 0),
44
+ (0, 0, 255),
45
+ (255, 255, 0),
46
+ (255, 0, 255),
47
+ (0, 255, 255),
48
+ (114, 128, 250),
49
+ (0, 165, 255),
50
+ (0, 128, 0),
51
+ (144, 238, 144),
52
+ (238, 238, 175),
53
+ (255, 191, 0),
54
+ (0, 128, 0),
55
+ (226, 43, 138),
56
+ (255, 0, 255),
57
+ (0, 215, 255),
58
+ (255, 0, 0),
59
+ ]
60
+
61
+ color_map = {
62
+ f"{color_id}": f"#{hex(color[2])[2:].zfill(2)}{hex(color[1])[2:].zfill(2)}{hex(color[0])[2:].zfill(2)}" for color_id, color in enumerate(colors)
63
+ }
64
+
65
  def run_clip_dinoiser(input_image, text_prompts):
66
  image = input_image.convert("RGB")
67
  text_prompts = text_prompts.split(",")
68
+ palette = colors[:len(text_prompts)]
 
 
69
 
70
  model.clip_backbone.decode_head.update_vocab(text_prompts)
71
  model.to(device)
 
99
  palette_array = np.array(itemgetter(*classes)(palette)).reshape(1, -1, 3)
100
  alpha=0.5
101
  blend = (alpha)*np.array(image)/255. + (1-alpha) * mask/255.
102
+
103
+ h_text = list()
104
+ for idx, text in enumerate(text_prompts):
105
+ for alphabet in text:
106
+ h_text.append((alphabet, color_map[str(idx)]))
107
+ return blend, mask, h_text
108
+
109
+
110
 
111
 
112
  if __name__ == "__main__":
 
122
  run_button = gr.Button(value="Run")
123
 
124
  with gr.Column():
 
 
 
125
  with gr.Row():
126
  overlay_mask = gr.Image(
127
  type="numpy",
 
129
  only_mask = gr.Image(
130
  type="numpy",
131
  )
132
+ h_text = gr.HighlightedText(
133
+ label="text",
134
+ combine_adjacent=False,
135
+ show_legend=False,
136
+ color_map=color_map
137
+ )
138
 
139
  run_button.click(
140
  fn=run_clip_dinoiser,
141
  inputs=[input_image, text_prompts,],
142
+ outputs=[overlay_mask, only_mask, h_text]
143
  )
144
  gr.Examples(
145
+ [["vintage_bike.jpeg", "background, vintage bike, leather bag"]],
146
+ inputs = [input_image, text_prompts,],
147
+ outputs = [overlay_mask, only_mask, h_text],
148
+ fn=run_clip_dinoiser,
149
+ cache_examples=True,
150
+ label='Try this example input!'
151
  )
152
  block.launch(share=False, show_api=False, show_error=True)