Abijith committed on
Commit
afbc21e
1 Parent(s): b7a783f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -3
app.py CHANGED
@@ -50,7 +50,6 @@ def segment_to_bbox(segment_indexs):
50
  return [0.0,0.0,0.0,0.0]
51
 
52
  def clipseg_prediction(image):
53
-
54
  print('Clip-Segmentation-started------->')
55
  img_w, img_h,_ = image.shape
56
  inputs = clip_processor(text=prompts, images=[image] * len(prompts), padding="max_length", return_tensors="pt")
@@ -94,8 +93,8 @@ def foward_pass(image_input: np.ndarray, points: List[List[int]]) -> np.ndarray:
94
  cache_data = [pixels, embedding]
95
  del inputs["pixel_values"]
96
 
97
- # outputs = model.forward(image_embeddings=cache_data[1], **inputs)
98
- outputs = model(**inputs)
99
  masks = processor.image_processor.post_process_masks(
100
  outputs.pred_masks.cpu(), inputs["original_sizes"].to(device), inputs["reshaped_input_sizes"].to(device)
101
  )
 
50
  return [0.0,0.0,0.0,0.0]
51
 
52
  def clipseg_prediction(image):
 
53
  print('Clip-Segmentation-started------->')
54
  img_w, img_h,_ = image.shape
55
  inputs = clip_processor(text=prompts, images=[image] * len(prompts), padding="max_length", return_tensors="pt")
 
93
  cache_data = [pixels, embedding]
94
  del inputs["pixel_values"]
95
 
96
+ outputs = model.forward(image_embeddings=cache_data[1], **inputs)
97
+ # outputs = model(**inputs)
98
  masks = processor.image_processor.post_process_masks(
99
  outputs.pred_masks.cpu(), inputs["original_sizes"].to(device), inputs["reshaped_input_sizes"].to(device)
100
  )