ishworrsubedii committed on
Commit
7f72add
·
1 Parent(s): 62a1fe7

update: combine inpaint and necklacetryon

Browse files
Files changed (2) hide show
  1. .gitignore +1 -0
  2. app.py +44 -60
.gitignore ADDED
@@ -0,0 +1 @@
 
 
1
+ .idea
app.py CHANGED
@@ -14,10 +14,7 @@ import cvzone
14
  from cvzone.PoseModule import PoseDetector
15
  import spaces
16
 
17
- # images
18
  choker_images = [Image.open(os.path.join("choker", x)) for x in os.listdir("choker")]
19
- short_necklaces = [Image.open(os.path.join("short_necklace", x)) for x in os.listdir("short_necklace")]
20
- long_necklaces = [Image.open(os.path.join("long_haram", x)) for x in os.listdir("long_haram")]
21
  person_images = [Image.open(os.path.join("without_necklace", x)) for x in os.listdir("without_necklace")]
22
 
23
  # initialising the stable diffusion model
@@ -27,14 +24,16 @@ pipeline = StableDiffusionInpaintPipeline.from_pretrained(
27
  )
28
  pipeline = pipeline.to("cuda")
29
 
 
30
  # functions
31
  def clearFunc():
32
  torch.cuda.empty_cache()
33
  gc.collect()
34
 
35
- def necklaceTryOnPipeline(image, jewellery):
36
- global binaryMask
37
-
 
38
  image = np.array(image)
39
  copy_image = image.copy()
40
  jewellery = np.array(jewellery)
@@ -93,27 +92,22 @@ def necklaceTryOnPipeline(image, jewellery):
93
  available_space = copy_image.shape[0] - y_coordinate
94
  extra = jewellery.shape[0] - available_space
95
  if extra > 0:
96
- jewellery = jewellery[extra + 10 :, :]
97
  return necklaceTryOnPipeline(
98
  Image.fromarray(copy_image), Image.fromarray(jewellery)
99
  )
100
  else:
101
  result = cvzone.overlayPNG(copy_image, jewellery, (avg_x1, y_coordinate))
102
  # masking
103
- blackedNecklace = np.zeros(shape = copy_image.shape)
104
  # overlay
105
  cvzone.overlayPNG(blackedNecklace, jewellery, (avg_x1, y_coordinate))
106
  blackedNecklace = cv2.cvtColor(blackedNecklace.astype(np.uint8), cv2.COLOR_BGR2GRAY)
107
  binaryMask = blackedNecklace * ((blackedNecklace > 5) * 255)
108
  binaryMask[binaryMask >= 255] = 255
109
  binaryMask[binaryMask < 255] = 0
110
- return Image.fromarray(result.astype(np.uint8)), Image.fromarray(binaryMask.astype(np.uint8))
111
-
112
- # SD Model
113
- @spaces.GPU
114
- def sd_inpaint(image, mask):
115
- # image = Image.fromarray(image)
116
- # mask = Image.fromarray(mask)
117
 
118
  jewellery_mask = Image.fromarray(
119
  np.bitwise_and(np.array(mask), np.array(image))
@@ -137,57 +131,47 @@ def sd_inpaint(image, mask):
137
  mask = mask.resize((512, 512))
138
 
139
  results = []
140
- for colour in ["Red", "Blue", "Green"]:
141
- prompt = f"{colour}, South Indian Saree, properly worn, natural setting, elegant, natural look, neckline without jewellery, simple"
142
- negative_prompt = "necklaces, jewellery, jewelry, necklace, neckpiece, garland, chain, neck wear, jewelled neck, jeweled neck, necklace on neck, jewellery on neck, accessories, watermark, text, changed background, wider body, narrower body, bad proportions, extra limbs, mutated hands, changed sizes, altered proportions, unnatural body proportions, blury, ugly"
143
-
144
- output = pipeline(
145
- prompt=prompt,
146
- negative_prompt=negative_prompt,
147
- image=image,
148
- mask_image=mask,
149
- strength=0.95,
150
- guidance_score=9,
151
- # generator = torch.Generator("cuda").manual_seed(42)
152
- ).images[0]
153
-
154
- output = output.resize(orig_size)
155
- temp_generated = np.bitwise_and(
156
- np.array(output),
157
- np.bitwise_not(np.array(Image.fromarray(arr_orig).convert("RGB"))),
158
- )
159
- results.append(temp_generated)
160
 
161
  results = [
162
  Image.fromarray(np.bitwise_or(x, np.array(jewellery_mask))) for x in results
163
  ]
164
  clearFunc()
165
- return results[0], results[1], results[2]
 
166
 
167
  # interface
168
 
169
  with gr.Blocks() as interface:
170
- with gr.Row():
171
- inputImage = gr.Image(label = "Input Image", type = "pil", image_mode = "RGB", interactive = True)
172
- selectedNecklace = gr.Image(label = "Selected Necklace", type = "pil", image_mode = "RGBA", visible = False)
173
- hiddenMask = gr.Image(visible = False, type = "pil")
174
- with gr.Row():
175
- gr.Examples(examples = person_images, inputs=[inputImage], label="Models")
176
- with gr.Row():
177
- gr.Examples(examples = choker_images, inputs = [selectedNecklace], label = "Chokers")
178
- with gr.Row():
179
- gr.Examples(examples = short_necklaces, inputs = [selectedNecklace], label = "Short Necklaces")
180
- with gr.Row():
181
- gr.Examples(examples = long_necklaces, inputs = [selectedNecklace], label = "Long Necklaces")
182
- with gr.Row():
183
- outputOne = gr.Image(label = "Output 1", interactive = False)
184
- outputTwo = gr.Image(label = "Output 2", interactive = False)
185
- outputThree = gr.Image(label = "Output 3", interactive = False)
186
- with gr.Row():
187
- submit = gr.Button("Enter")
188
-
189
- selectedNecklace.change(fn = necklaceTryOnPipeline, inputs = [inputImage, selectedNecklace], outputs = [inputImage, hiddenMask])
190
- submit.click(fn = sd_inpaint, inputs = [inputImage, hiddenMask], outputs = [outputOne, outputTwo, outputThree])
191
-
192
-
193
- interface.launch(debug = True)
 
14
  from cvzone.PoseModule import PoseDetector
15
  import spaces
16
 
 
17
  choker_images = [Image.open(os.path.join("choker", x)) for x in os.listdir("choker")]
 
 
18
  person_images = [Image.open(os.path.join("without_necklace", x)) for x in os.listdir("without_necklace")]
19
 
20
  # initialising the stable diffusion model
 
24
  )
25
  pipeline = pipeline.to("cuda")
26
 
27
+
28
  # functions
29
  def clearFunc():
30
  torch.cuda.empty_cache()
31
  gc.collect()
32
 
33
+
34
+ # SD Model
35
+ @spaces.GPU
36
+ def sd_inpaint(image, jewellery):
37
  image = np.array(image)
38
  copy_image = image.copy()
39
  jewellery = np.array(jewellery)
 
92
  available_space = copy_image.shape[0] - y_coordinate
93
  extra = jewellery.shape[0] - available_space
94
  if extra > 0:
95
+ jewellery = jewellery[extra + 10:, :]
96
  return necklaceTryOnPipeline(
97
  Image.fromarray(copy_image), Image.fromarray(jewellery)
98
  )
99
  else:
100
  result = cvzone.overlayPNG(copy_image, jewellery, (avg_x1, y_coordinate))
101
  # masking
102
+ blackedNecklace = np.zeros(shape=copy_image.shape)
103
  # overlay
104
  cvzone.overlayPNG(blackedNecklace, jewellery, (avg_x1, y_coordinate))
105
  blackedNecklace = cv2.cvtColor(blackedNecklace.astype(np.uint8), cv2.COLOR_BGR2GRAY)
106
  binaryMask = blackedNecklace * ((blackedNecklace > 5) * 255)
107
  binaryMask[binaryMask >= 255] = 255
108
  binaryMask[binaryMask < 255] = 0
109
+ image = Image.fromarray(result.astype(np.uint8))
110
+ mask = Image.fromarray(binaryMask.astype(np.uint8))
 
 
 
 
 
111
 
112
  jewellery_mask = Image.fromarray(
113
  np.bitwise_and(np.array(mask), np.array(image))
 
131
  mask = mask.resize((512, 512))
132
 
133
  results = []
134
+ prompt = f" South Indian Saree, properly worn, natural setting, elegant, natural look, neckline without jewellery, simple"
135
+ negative_prompt = "necklaces, jewellery, jewelry, necklace, neckpiece, garland, chain, neck wear, jewelled neck, jeweled neck, necklace on neck, jewellery on neck, accessories, watermark, text, changed background, wider body, narrower body, bad proportions, extra limbs, mutated hands, changed sizes, altered proportions, unnatural body proportions, blury, ugly"
136
+
137
+ output = pipeline(
138
+ prompt=prompt,
139
+ negative_prompt=negative_prompt,
140
+ image=image,
141
+ mask_image=mask,
142
+ strength=0.95,
143
+ guidance_score=9,
144
+ # generator = torch.Generator("cuda").manual_seed(42)
145
+ ).images[0]
146
+
147
+ output = output.resize(orig_size)
148
+ temp_generated = np.bitwise_and(
149
+ np.array(output),
150
+ np.bitwise_not(np.array(Image.fromarray(arr_orig).convert("RGB"))),
151
+ )
152
+ results.append(temp_generated)
 
153
 
154
  results = [
155
  Image.fromarray(np.bitwise_or(x, np.array(jewellery_mask))) for x in results
156
  ]
157
  clearFunc()
158
+ return results[0]
159
+
160
 
161
  # interface
162
 
163
  with gr.Blocks() as interface:
164
+ with gr.Row():
165
+ inputImage = gr.Image(label="Input Image", type="pil", image_mode="RGB", interactive=True)
166
+ selectedNecklace = gr.Image(label="Selected Necklace", type="pil", image_mode="RGBA", visible=False)
167
+ with gr.Row():
168
+ gr.Examples(examples=choker_images, inputs=[selectedNecklace], label="Necklaces")
169
+ gr.Examples(examples=person_images, inputs=[inputImage], label="Models")
170
+ with gr.Row():
171
+ outputOne = gr.Image(label="Output 1", interactive=False)
172
+ with gr.Row():
173
+ submit = gr.Button("Enter")
174
+
175
+ submit.click(fn=sd_inpaint, inputs=[inputImage, selectedNecklace], outputs=[outputOne, outputTwo, outputThree])
176
+
177
+ interface.launch(debug=True)