Spaces: Running on Zero
Martin Tomov committed: Update app.py
app.py CHANGED
@@ -86,7 +86,7 @@ def get_boxes(detection_results: List[DetectionResult]) -> List[List[List[float]]]:
 
 def mask_to_polygon(mask: np.ndarray) -> np.ndarray:
     contours, _ = cv2.findContours(mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
-    if len(contours)
+    if len(contours) == 0:
         return np.array([])
     largest_contour = max(contours, key=cv2.contourArea)
     return largest_contour
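Note: the guard added at line 89 matters because cv2.findContours returns an empty sequence for a blank mask, and the following max(contours, key=cv2.contourArea) would raise ValueError on an empty sequence. A minimal sketch of the failure mode this avoids (illustrative, not part of the commit):

    import cv2
    import numpy as np

    empty_mask = np.zeros((64, 64), dtype=np.uint8)  # mask with no foreground
    contours, _ = cv2.findContours(empty_mask, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    assert len(contours) == 0   # nothing to trace in a blank mask
    # max(contours, key=cv2.contourArea) would raise ValueError here,
    # which is exactly what the new early return prevents.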
@@ -190,11 +190,11 @@ def detections_to_json(detections):
         detections_list.append(detection_dict)
     return detections_list
 
-def
+def crop_bounding_boxes_with_yellow_background(image: np.ndarray, yellow_background: np.ndarray, detections: List[DetectionResult]) -> List[np.ndarray]:
     crops = []
     for detection in detections:
         xmin, ymin, xmax, ymax = detection.box.xyxy
-        crop =
+        crop = yellow_background[ymin:ymax, xmin:xmax]
         crops.append(crop)
     return crops
 
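Note: the new helper crops with plain NumPy slicing, which indexes rows (y) before columns (x). A quick illustration with a hypothetical canvas size and made-up box coordinates (not from the commit):

    import numpy as np

    yellow_background = np.full((100, 200, 3), (255, 255, 0), dtype=np.uint8)  # hypothetical 100x200 canvas
    xmin, ymin, xmax, ymax = 30, 10, 80, 50          # made-up integer box
    crop = yellow_background[ymin:ymax, xmin:xmax]   # y-slice first, then x
    print(crop.shape)   # (40, 50, 3): 40 rows tall, 50 columns wide

This assumes detection.box.xyxy yields integers; float coordinates would need rounding before slicing.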
@@ -206,7 +206,7 @@ def process_image(image, include_json, include_bboxes):
 
     results = [annotated_image]
     if include_bboxes:
-        crops =
+        crops = crop_bounding_boxes_with_yellow_background(np.array(original_image), yellow_background_with_insects, detections)
         results.extend(crops)
 
     if include_json:
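Note: from this hunk, results starts as [annotated_image] and is extended in place, so the length of the returned tuple varies with the flags and with the number of detections; that variable arity is what the reworked update_outputs below has to unpack. A stand-in sketch (names are placeholders, not the app's objects):

    annotated_image = "annotated"   # stand-in for the rendered image
    crops = ["crop0", "crop1"]      # stand-ins for per-detection crops
    include_bboxes = True

    results = [annotated_image]
    if include_bboxes:
        results.extend(crops)       # one extra element per detection
    print(tuple(results))           # ('annotated', 'crop0', 'crop1')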
@@ -215,6 +215,8 @@ def process_image(image, include_json, include_bboxes):
         with open(json_output_path, 'w') as json_file:
             json.dump(detections_json, json_file, indent=4)
         results.append(json.dumps(detections_json, separators=(',', ':')))
+    elif not include_bboxes:
+        results.append(None)
 
     return tuple(results)
 
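Note: this hunk serializes the same detections twice on purpose: pretty-printed to disk and compact for the UI string. For example (the payload shape is made up):

    import json

    detections_json = [{"label": "insect", "score": 0.97}]
    json.dumps(detections_json, indent=4)                # multi-line file output
    json.dumps(detections_json, separators=(',', ':'))   # '[{"label":"insect","score":0.97}]'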
@@ -235,12 +237,18 @@ with gr.Blocks() as demo:
 
     def update_outputs(image, include_json, include_bboxes):
         results = process_image(image, include_json, include_bboxes)
-        if include_bboxes:
+        if include_bboxes and include_json:
             annotated_img, *crops, json_txt = results
             return (annotated_img, json_txt, crops)
-
+        elif include_bboxes:
+            annotated_img, *crops = results
+            return (annotated_img, None, crops)
+        elif include_json:
             annotated_img, json_txt = results
             return (annotated_img, json_txt, [])
+        else:
+            annotated_img, = results
+            return (annotated_img, None, [])
 
     submit_button.click(update_outputs, [image_input, include_json, include_bboxes], [annotated_output, json_output, crops_output])
 
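Note: the four-way branch mirrors the flag-dependent tuple shapes from process_image; the starred target absorbs the variable-length run of crops. An illustration with placeholder values:

    results = ("annotated", "crop0", "crop1", '{"detections": []}')
    annotated_img, *crops, json_txt = results
    print(crops)      # ['crop0', 'crop1']
    print(json_txt)   # {"detections": []}

One caveat worth flagging as an observation, not part of the commit: with both flags off, process_image appends None (new line 219), so results holds two elements while the final else unpacks a single name; if that path is reachable, a two-name unpack such as annotated_img, json_txt = results would seem to match.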