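"""Gradio app for satellite/aerial image semantic segmentation.

The script loads a Keras segmentation model ("model_2_A_0.h5"), tiles the
uploaded image into non-overlapping 256x256 patches, predicts a class map for
each patch, stitches the patches back together, and displays both the
per-class pixel fractions and a color-coded segmentation mask.
"""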
import os

# segmentation_models reads this variable when it is imported, so set it first.
os.environ["SM_FRAMEWORK"] = "tf.keras"

import gradio as gr
import numpy as np
import tensorflow as tf
from PIL import Image
from patchify import patchify, unpatchify
from tensorflow.keras import backend as K

# Kept from the original script; the import may be needed if the checkpoint
# references segmentation_models custom objects.
import segmentation_models as sm  # noqa: F401


def jacard(y_true, y_pred):
    """Smoothed Jaccard (IoU) coefficient.

    The spelling is kept as-is so it matches the name used by the saved
    checkpoints loaded below."""
    y_true_c = K.flatten(y_true)
    y_pred_c = K.flatten(y_pred)
    intersection = K.sum(y_true_c * y_pred_c)
    return (intersection + 1.0) / (K.sum(y_true_c) + K.sum(y_pred_c) - intersection + 1.0)


def bce_dice(y_true, y_pred):
    """Training loss: binary cross-entropy minus the log of the Jaccard
    coefficient (the "dice" in the name is kept to match the checkpoint)."""
    bce = tf.keras.losses.BinaryCrossentropy()
    return bce(y_true, y_pred) - K.log(jacard(y_true, y_pred))
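
# For reference only (a sketch, not part of this script): a model trained with
# these objects would typically have been compiled along the lines of
#
#     model.compile(optimizer="adam", loss=bce_dice, metrics=[jacard])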



patch_size = 256


def predict_2(image):
    """Segment an uploaded image patch-wise.

    Returns a dict of per-class pixel fractions (for the gr.Label output) and a
    color-coded segmentation mask (for the gr.Image output)."""
    # Resize the image so that both sides equal the largest multiple of
    # patch_size that fits the shorter side (at least one patch, at most ten),
    # so it tiles exactly into non-overlapping 256x256 patches.
    min_side = min(image.shape[:2])
    size = max(patch_size, min(min_side - min_side % patch_size, 10 * patch_size))
    image = np.array(Image.fromarray(image).convert("RGB").resize((size, size)))

    # step == patch_size means the patches do not overlap.
    patches_img = patchify(image, (patch_size, patch_size, 3), step=patch_size)
    patches_img = patches_img[:, :, 0, :, :, :]

    patched_prediction = []
    for i in range(patches_img.shape[0]):
        for j in range(patches_img.shape[1]):
            single_patch_img = patches_img[i, j, :, :, :] / 255.0
            single_patch_img = np.expand_dims(single_patch_img, axis=0)

            pred = model.predict(single_patch_img)
            # Collapse the per-class probabilities into one class index per pixel.
            pred = np.argmax(pred, axis=3)[0, :, :]
            patched_prediction.append(pred)

    patched_prediction = np.reshape(
        patched_prediction,
        [patches_img.shape[0], patches_img.shape[1], patch_size, patch_size],
    )

    # Stitch the per-patch class maps back into one full-resolution class map.
    unpatched_prediction = unpatchify(patched_prediction, (image.shape[0], image.shape[1]))

    # Fraction of pixels assigned to each class, keyed by label name.
    counts = np.bincount(unpatched_prediction.reshape(-1), minlength=len(LABEL_NAMES))
    out = dict(zip(LABEL_NAMES, counts / counts.sum()))

    # Map each class index to its display color.
    color_mask = targets_classes_colors[unpatched_prediction].astype(np.uint8)

    return out, color_mask
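
# Standalone usage sketch (not part of the Gradio app; "drone_photo.png" is a
# hypothetical file name, and the model defined further below must already be
# loaded before calling predict_2):
#
#     img = np.array(Image.open("drone_photo.png").convert("RGB"))
#     class_fractions, color_mask = predict_2(img)
#     Image.fromarray(color_mask).save("mask.png")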

LABEL_NAMES = [
    "unlabeled",
    "paved-area",
    "dirt",
    "grass",
    "gravel",
    "water",
    "rocks",
    "pool",
    "vegetation",
    "roof",
    "wall",
    "window",
    "door",
    "fence",
    "fence-pole",
    "person",
    "dog",
    "car",
    "bicycle",
    "tree",
    "bald-tree",
    "ar-marker",
    "obstacle",
    "conflicting",
]
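# One RGB color per class id, in the same order as LABEL_NAMES.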
targets_classes_colors = np.array([[  0,   0,   0],
                          [128,  64, 128],
                          [130,  76,   0],
                          [  0, 102,   0],
                          [112, 103,  87],
                          [ 28,  42, 168],
                          [ 48,  41,  30],
                          [  0,  50,  89],
                          [107, 142,  35],
                          [ 70,  70,  70],
                          [102, 102, 156],
                          [254, 228,  12],
                          [254, 148,  12],
                          [190, 153, 153],
                          [153, 153, 153],
                          [255,  22,  96],
                          [102,  51,   0],
                          [  9, 143, 150],
                          [119,  11,  32],
                          [ 51,  51,   0],
                          [190, 250, 190],
                          [112, 150, 146],
                          [  2, 135, 115],
                          [255,   0,   0]])

# Per-class loss weights (class ids 0-22; "conflicting", id 23, is not weighted).
# Kept here because they feed the weighted loss defined below.
class_weights = {
    0: 0.1,
    1: 0.1,
    2: 2.171655596616696,
    3: 0.1,
    4: 0.1,
    5: 2.2101197049812593,
    6: 11.601519937899578,
    7: 7.99072122367673,
    8: 0.1,
    9: 0.1,
    10: 2.5426918173402457,
    11: 11.187574445057574,
    12: 241.57620214903147,
    13: 9.234779790464515,
    14: 1077.2745952165694,
    15: 7.396021659003857,
    16: 855.6730643687165,
    17: 6.410869993189135,
    18: 42.0186736125025,
    19: 2.5648760196752947,
    20: 4.089194047656931,
    21: 27.984593442818955,
    22: 2.0509251319694712,
}

weight_list = list(class_weights.values())


def weighted_categorical_crossentropy(weights=weight_list):
    """Class-weighted bce_dice loss factory, kept so that earlier checkpoints
    (see the commented-out loads below) can still be reloaded."""
    def wcce(y_true, y_pred):
        Kweights = K.constant(weights)
        if not tf.is_tensor(y_pred):
            y_pred = K.constant(y_pred)
        y_true = K.cast(y_true, y_pred.dtype)
        # Weight each pixel's loss by the weight of its ground-truth class.
        return bce_dice(y_true, y_pred) * K.sum(y_true * Kweights, axis=-1)

    return wcce
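
# For reference (a sketch, not exercised below): reloading a checkpoint trained
# with the weighted loss would look roughly like
#
#     tf.keras.models.load_model(
#         "model.h5",
#         custom_objects={"jacard": jacard,
#                         "wcce": weighted_categorical_crossentropy(weight_list)},
#     )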

# Load the model. Earlier checkpoints are kept here, commented out, for reference.
# model = tf.keras.models.load_model("model.h5", custom_objects={"jacard": jacard, "wcce": weighted_categorical_crossentropy})
# model = tf.keras.models.load_model("model_2.h5", custom_objects={"jacard": jacard, "bce_dice": bce_dice})
# model = tf.keras.models.load_model("model_2_A.h5", custom_objects={"jacard": jacard, "bce_dice": bce_dice})
model = tf.keras.models.load_model("model_2_A_0.h5", custom_objects={"jacard": jacard, "bce_dice": bce_dice})


# Build the Gradio user interface around the model.
my_app = gr.Blocks()

with my_app:
    gr.Markdown("Satellite Image Segmentation Application UI with Gradio")
    with gr.Tabs():
        with gr.TabItem("Select your image"):
            with gr.Row():
                with gr.Column():
                    img_source = gr.Image(label="Please select source Image")
                    source_image_loader = gr.Button("Load above Image")
                    img1 = gr.Image("__results___83_0 (2).png")  # static image displayed alongside the input
                with gr.Column():
                    output_label = gr.Label(label="Predicted class fractions")
                    img_output = gr.Image(label="Image Output")
        source_image_loader.click(
            predict_2,
            [img_source],
            [output_label, img_output],
        )

my_app.launch(debug=True, share=True)

# launch(debug=True) blocks until the server is stopped; close() then shuts the app down.
my_app.close()