danie94-lml committed
Commit
239f6f6
1 Parent(s): c47c52d

upload data

app.py ADDED
@@ -0,0 +1,104 @@
+ import streamlit as st
+ import torch, torchvision
+ import detectron2
+ import pickle
+ from detectron2.utils.logger import setup_logger
+ logger = setup_logger()
+ from detectron2.engine import DefaultPredictor
+ from detectron2.projects import point_rend
+ from viz_app import plot_single_image
+ from PIL import Image
+ import numpy as np
+ from detectron2.data import MetadataCatalog, DatasetCatalog
+ from detectron2.utils.visualizer import Visualizer
+
+ # Output directories for the two available models
+ output_directories = {
+     "2 Classes": "output/pointrend/",
+     "4 Classes": "output/pointrend_4_cls_7k/",
+ }
+
+ # Load the pickled detectron2 configuration for the selected model
+ def load_configuration(output_dir):
+     with open(output_dir + "cfg.pickle", "rb") as f:
+         return pickle.load(f)
+
+ # Run instance segmentation and return the Nest and inorganic-material pixel areas
+ def run_instance_segmentation(im, predictor, num_classes):
+
+     outputs = plot_single_image(im, predictor, num_classes)
+
+     masks = outputs['instances'].pred_masks.cpu().numpy()
+     classes = outputs['instances'].pred_classes.cpu().numpy()
+
+     if num_classes == 2:
+         # Separate the masks for Nest and Inorganic Material
+         nest_masks = masks[classes == 0]
+         inorganic_material_masks = masks[classes == 1]
+
+         # Total pixel area of the Nest masks
+         total_nest_area = np.sum(nest_masks)
+
+         # Total pixel area of the Inorganic Material masks
+         total_inorganic_area = np.sum(inorganic_material_masks)
+
+     elif num_classes == 4:
+
+         # Separate the masks per class
+         plastic_masks = masks[classes == 0]
+         fishing_net_masks = masks[classes == 1]
+         rope_cloth_masks = masks[classes == 2]
+         nest_masks = masks[classes == 3]
+
+
+         # Total pixel area of the Nest masks
+         total_nest_area = np.sum(nest_masks)
+
+         # Total pixel area of the three inorganic-material classes
+         plastic_area = np.sum(plastic_masks)
+         fishing_net_area = np.sum(fishing_net_masks)
+         rope_cloth_area = np.sum(rope_cloth_masks)
+         total_inorganic_area = np.sum([plastic_area, fishing_net_area, rope_cloth_area])
+
+     return total_nest_area, total_inorganic_area
+
+ def click_instance_segmentation(image, model_selection, predictor):
+     im = np.array(image)[:, :, ::-1]
+
+     if st.button("Run Instance Segmentation"):
+         num_classes = 2 if model_selection == "2 Classes" else 4
+         total_nest_area, total_inorganic_area = run_instance_segmentation(im, predictor, num_classes)
+
+         # Percentage of inorganic material within the Nest
+         percentage_inorganic_in_nest = (total_inorganic_area / total_nest_area) * 100
+
+         st.write(f"Percentage of Inorganic Material in Nest: {percentage_inorganic_in_nest:.2f}%")
+
+
+ def app():
+     # Sidebar to select the model
+     model_selection = st.sidebar.radio("Select Model", list(output_directories.keys()))
+     output_dir = output_directories[model_selection]
+     cfg = load_configuration(output_dir)
+
+     cfg.MODEL.WEIGHTS = output_dir + "model_best.pth"
+     cfg.MODEL.DEVICE = "cpu"
+     cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.70
+     predictor = DefaultPredictor(cfg)
+
+     default_image_path = 'image/DJI_0142_frame_0041.jpg'
+     st.header('Please upload an image')
+     file = st.file_uploader('', type=['png', 'jpg', 'jpeg'])
+
+     if file:
+         image = Image.open(file)
+         st.image(image, caption="Uploaded Image", use_column_width=True)
+
+         click_instance_segmentation(image, model_selection, predictor)
+
+     else:
+         st.write("No image uploaded. Using default example image.")
+         image = Image.open(default_image_path)
+         st.image(image, caption="Default Example Image", use_column_width=True)
+
+         click_instance_segmentation(image, model_selection, predictor)
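Note that, as committed, app() is defined but does not appear to be called anywhere in this file. For a quick CPU smoke test of the same pipeline outside Streamlit, a minimal sketch follows; it assumes the repo layout shown in this commit (output/pointrend/cfg.pickle, output/pointrend/model_best.pth, image/DJI_0142_frame_0041.jpg) and that cfg.pickle holds a detectron2 config object, and it is illustrative rather than part of the commit:

# Illustrative smoke test (not part of this commit); paths assume the repo layout above.
import pickle
import cv2
import numpy as np
from detectron2.engine import DefaultPredictor
from detectron2.projects import point_rend  # noqa: F401 -- registers the PointRend heads

output_dir = "output/pointrend/"  # 2-class model
with open(output_dir + "cfg.pickle", "rb") as f:
    cfg = pickle.load(f)
cfg.MODEL.WEIGHTS = output_dir + "model_best.pth"
cfg.MODEL.DEVICE = "cpu"
cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.70

predictor = DefaultPredictor(cfg)
im = cv2.imread("image/DJI_0142_frame_0041.jpg")  # BGR, which DefaultPredictor expects

outputs = predictor(im)
masks = outputs["instances"].pred_masks.cpu().numpy()
classes = outputs["instances"].pred_classes.cpu().numpy()

total_nest_area = np.sum(masks[classes == 0])       # class 0 = Nest
total_inorganic_area = np.sum(masks[classes == 1])  # class 1 = Inorganic_material
if total_nest_area > 0:
    print(f"Percentage of Inorganic Material in Nest: {100 * total_inorganic_area / total_nest_area:.2f}%")
else:
    print("No Nest instances above the score threshold; percentage undefined.")

Unlike app.py, this sketch guards against a zero Nest area, which would otherwise make the percentage division meaningless.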
image/DJI_0142_frame_0041.jpg ADDED
output/obj_train/cfg.pickle ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f51e1c968bc79c9529217e626bdf89f84ac0cb73810377f9c20af8506e32e566
+ size 5314
output/obj_train/coco_instances_results.json ADDED
@@ -0,0 +1 @@
+ [{"image_id": 0, "category_id": 1, "bbox": [1380.3045654296875, 0.0, 1438.6485595703125, 1592.285400390625], "score": 0.9805574417114258}, {"image_id": 1, "category_id": 1, "bbox": [1874.0032958984375, 978.786376953125, 978.5140380859375, 1117.531005859375], "score": 0.9970454573631287}, {"image_id": 2, "category_id": 1, "bbox": [3113.146484375, 759.0662841796875, 691.964599609375, 623.3697509765625], "score": 0.9902016520500183}, {"image_id": 2, "category_id": 1, "bbox": [57.53554916381836, 633.5829467773438, 884.0471801757812, 1299.072021484375], "score": 0.14743082225322723}, {"image_id": 2, "category_id": 1, "bbox": [2850.850830078125, 511.6058044433594, 949.111572265625, 992.2442626953125], "score": 0.10244540870189667}, {"image_id": 4, "category_id": 1, "bbox": [0.0, 778.7610473632812, 626.103515625, 656.4393920898438], "score": 0.9994876384735107}, {"image_id": 5, "category_id": 1, "bbox": [1771.83935546875, 247.21034240722656, 1810.787841796875, 1463.2742919921875], "score": 0.9947130084037781}, {"image_id": 8, "category_id": 1, "bbox": [1909.879150390625, 1464.859375, 679.975341796875, 595.724365234375], "score": 0.9981029033660889}, {"image_id": 14, "category_id": 1, "bbox": [541.721923828125, 797.4324951171875, 697.9720458984375, 637.4742431640625], "score": 0.9980442523956299}, {"image_id": 15, "category_id": 1, "bbox": [1857.3994140625, 403.26446533203125, 1328.87353515625, 1625.130859375], "score": 0.9933504462242126}, {"image_id": 15, "category_id": 1, "bbox": [134.29571533203125, 1116.14013671875, 1241.58935546875, 1036.642578125], "score": 0.1047867015004158}, {"image_id": 16, "category_id": 1, "bbox": [2842.546630859375, 689.4129638671875, 704.650390625, 623.471435546875], "score": 0.9981980919837952}, {"image_id": 18, "category_id": 1, "bbox": [1617.198974609375, 1239.3822021484375, 680.04638671875, 751.341796875], "score": 0.9980385899543762}, {"image_id": 20, "category_id": 1, "bbox": [1600.5550537109375, 757.17236328125, 540.1990966796875, 640.1346435546875], "score": 0.9994283318519592}, {"image_id": 23, "category_id": 1, "bbox": [1385.652587890625, 0.0, 1457.248779296875, 1388.0927734375], "score": 0.9772377610206604}, {"image_id": 23, "category_id": 1, "bbox": [171.53965759277344, 777.9434814453125, 1182.2618408203125, 1382.0565185546875], "score": 0.12595009803771973}]
output/obj_train/events.out.tfevents.1697765063.nl869hipei.66.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85b024fbd24535d7aaa02f0b8afec0096e9ca0b6563527bb6b8cd9f0183dfa93
+ size 230768
output/obj_train/last_checkpoint ADDED
@@ -0,0 +1 @@
+ model_final.pth
output/obj_train/metrics.json ADDED
The diff for this file is too large to render. See raw diff
 
output/obj_train/model_best.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c46f433319ae3bd5872fb31c8a05671b3df7d7be0a5b150e88d5a8aa339c1054
+ size 330024803
output/pointrend/.gitattributes ADDED
@@ -0,0 +1,2 @@
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ detectron2_repo filter=lfs diff=lfs merge=lfs -text
output/pointrend/cfg.pickle ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4f2b3ef2d86630cade3f212f8fc5b35044416e5bd310236bf0b5227e8b1c1781
+ size 5815
output/pointrend/coco_instances_results.json ADDED
The diff for this file is too large to render. See raw diff
 
output/pointrend/events.out.tfevents.1696544539.n5kgwzlg1r.444.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:6edf5ccc8576db52b9c29d91b116bcfce0bb36d06950317fa8bb382f7a1daaab
+ size 150
output/pointrend/events.out.tfevents.1696544615.n5kgwzlg1r.532.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8ecf31f5ee105fa94446830204f394748310a13a19e28601289caea758d3b8c8
+ size 150
output/pointrend/events.out.tfevents.1696544818.n5kgwzlg1r.636.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2c33e01706eebbaeac2c530798e52f6f1ed48d9c32c43bad453a4270575daad
+ size 150
output/pointrend/events.out.tfevents.1696545047.n5kgwzlg1r.726.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e9c638b35931841b76b9eb49ba1eddd1c20a21cfca19d18f0e46fe3c6d48c31d
+ size 150
output/pointrend/events.out.tfevents.1696545254.n5kgwzlg1r.792.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:276913e82deffbff4d6d75bc7d3484e7659d7e5d86560090606aac7ffab776e5
+ size 320382
output/pointrend/last_checkpoint ADDED
@@ -0,0 +1 @@
+ model_final.pth
output/pointrend/metrics.json ADDED
The diff for this file is too large to render. See raw diff
 
output/pointrend/model_best.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b33e1d8fb2347dab4b172dff25821bea64aa88b501a6b457f5eb7b85ea571e13
+ size 445740323
output/pointrend_4_cls_7k/cfg.pickle ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:52736bfe061572f53f16bba1bd129936a5dad47346e28667db20cfd63e9dcc61
+ size 5824
output/pointrend_4_cls_7k/coco_instances_results.json ADDED
The diff for this file is too large to render. See raw diff
 
output/pointrend_4_cls_7k/events.out.tfevents.1697558440.n3vd74xx22.59.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:deec6381f3dc2aab1f6cf1426a34c074a866b4a5542dfe93494f8fb5d3842596
+ size 322462
output/pointrend_4_cls_7k/events.out.tfevents.1697600075.n8pym182e6.59.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0c038a073127625381eac610e38c0aa81d7d24f5ec1b54357d7c9d9733db6fce
+ size 451486
output/pointrend_4_cls_7k/last_checkpoint ADDED
@@ -0,0 +1 @@
+ model_final.pth
output/pointrend_4_cls_7k/metrics.json ADDED
The diff for this file is too large to render. See raw diff
 
output/pointrend_4_cls_7k/model_best.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b458e0ee5e84438d79ab781e9474412ee26a864daaf94778538210dfcef24b0e
+ size 446642467
requirements.txt ADDED
@@ -0,0 +1,12 @@
+ numpy
+ pandas
+ Pillow
+ opencv-python
+ opencv-python-headless
+ torch
+ torchvision
+ streamlit
+ streamlit-option-menu
+ streamlit-aggrid
+
+
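requirements.txt pins no versions and does not list detectron2 itself, which app.py and viz_app.py import (the PointRend project must also be importable via detectron2.projects); it is presumably installed separately. A small, illustrative sanity check of the runtime environment, not part of the commit, might look like this:

# Illustrative dependency check (not part of this commit).
import importlib

for module in ("torch", "torchvision", "streamlit", "cv2", "PIL",
               "detectron2", "detectron2.projects.point_rend"):
    try:
        importlib.import_module(module)
        print(f"{module}: OK")
    except ImportError as err:
        print(f"{module}: missing ({err})")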
viz_app.py ADDED
@@ -0,0 +1,26 @@
+ import streamlit as st
+ from detectron2.data import Metadata, MetadataCatalog, DatasetCatalog
+ from detectron2.utils.visualizer import Visualizer
+
+
+ # Run the predictor on a single image and draw the predictions in Streamlit
+ def plot_single_image(im, predictor, num_classes):
+
+     outputs = predictor(im)
+
+     if num_classes == 2:
+         modified_metadata = Metadata()
+         modified_metadata.thing_classes = ["Nest", "Inorganic_material"]
+     elif num_classes == 4:
+         modified_metadata = Metadata()
+         modified_metadata.thing_classes = ["Plastic", "Fishing_Net", "Rope/Cloth", "Nest"]
+
+     st.write('Using Visualizer to draw the predictions on Image')
+     v = Visualizer(im[:, :, ::-1], modified_metadata, scale=1.2)
+     out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
+     st.image(out.get_image()[:, :, ::-1])
+
+     return outputs
+
+
+
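plot_single_image couples prediction and Streamlit rendering. If an annotated image is needed without a Streamlit session (for example, batch export to disk), roughly the same drawing step can be reproduced offline. The sketch below reuses the 2-class model paths from this commit and the class labels from viz_app.py; the output filename prediction.jpg is only an example:

# Illustrative offline rendering (not part of this commit).
import pickle
import cv2
from detectron2.data import Metadata
from detectron2.engine import DefaultPredictor
from detectron2.projects import point_rend  # noqa: F401 -- registers the PointRend heads
from detectron2.utils.visualizer import Visualizer

with open("output/pointrend/cfg.pickle", "rb") as f:
    cfg = pickle.load(f)
cfg.MODEL.WEIGHTS = "output/pointrend/model_best.pth"
cfg.MODEL.DEVICE = "cpu"
predictor = DefaultPredictor(cfg)

im = cv2.imread("image/DJI_0142_frame_0041.jpg")         # BGR input
metadata = Metadata()
metadata.thing_classes = ["Nest", "Inorganic_material"]  # labels from viz_app.py

outputs = predictor(im)
v = Visualizer(im[:, :, ::-1], metadata, scale=1.2)      # Visualizer expects RGB
out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
cv2.imwrite("prediction.jpg", out.get_image()[:, :, ::-1])  # back to BGR for OpenCV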