from PIL import Image
import numpy as np
import json

DATASET = "satellogic/EarthView"

# Available subsets and the number of data shards in each.
# "neon" is published under the "default" config name and the "data" path.
sets = {
    "satellogic": {
        "shards": 3676,
    },
    "sentinel_1": {
        "shards": 1763,
    },
    "neon": {
        "config": "default",
        "shards": 607,
        "path": "data",
    },
}

def get_sets():
    """Return the names of the available subsets."""
    return sets.keys()
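
# Illustrative helper, not part of the original module: a minimal sketch of how
# the subset names above can be passed to `datasets.load_dataset`. The streaming
# flag and the use of the "config" entry are assumptions; the "path" entry used
# by "neon" is not handled here.
def load_subset(name, streaming=True):
    from datasets import load_dataset  # `datasets` is assumed to be installed
    cfg = sets[name]
    # "neon" maps to the "default" config; the other subsets use their own name.
    return load_dataset(DATASET, cfg.get("config", name), streaming=streaming)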
def item_to_images(config, item):
    """Convert a raw dataset item into lists of PIL Images, per subset."""
    metadata = item["metadata"]
    if isinstance(metadata, str):
        metadata = json.loads(metadata)

    # Cast every non-metadata field to uint8 arrays.
    item = {
        k: np.asarray(v).astype("uint8")
        for k, v in item.items()
        if k != "metadata"
    }
    item["metadata"] = metadata

    if config == "satellogic":
        # RGB frames are stored channel-first; transpose to HWC for PIL.
        item["rgb"] = [
            Image.fromarray(image.transpose(1, 2, 0))
            for image in item["rgb"]
        ]
        # The 1 m band is single-channel; drop the channel axis.
        item["1m"] = [
            Image.fromarray(image[0, :, :])
            for image in item["1m"]
        ]
    elif config == "sentinel_1":
        # Mapping of the VV and VH polarisations to RGB. May not be correct:
        # https://gis.stackexchange.com/questions/400726/creating-composite-rgb-images-from-sentinel-1-channels
        # A third channel is synthesised from the VV/VH ratio so the two
        # polarisations can be shown as an RGB composite.
        i10m = item["10m"]
        i10m = np.concatenate(
            (
                i10m,
                np.expand_dims(
                    i10m[:, 0, :, :] / (i10m[:, 1, :, :] + 0.01) * 256,
                    1,
                ).astype("uint8"),
            ),
            1,
        )
        item["10m"] = [
            Image.fromarray(image.transpose(1, 2, 0))
            for image in i10m
        ]
    elif config == "default":
        item["rgb"] = [
            Image.fromarray(image.transpose(1, 2, 0))
            for image in item["rgb"]
        ]
        # Canopy height model: single-channel images.
        item["chm"] = [
            Image.fromarray(image[0])
            for image in item["chm"]
        ]
        # A very arbitrary conversion from the 369 hyperspectral bands to RGB:
        # it averages each third of the bands and assigns it to one channel.
        item["1m"] = [
            Image.fromarray(
                np.concatenate(
                    (
                        np.expand_dims(np.average(image[:124], 0), 2),
                        np.expand_dims(np.average(image[124:247], 0), 2),
                        np.expand_dims(np.average(image[247:], 0), 2),
                    ),
                    2,
                ).astype("uint8")
            )
            for image in item["1m"]
        ]

    return item
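
# Minimal usage sketch, not part of the original module. It assumes the
# illustrative `load_subset` helper above, network access to the Hub, and a
# "train" split; adjust the split name if the repository uses a different one.
if __name__ == "__main__":
    ds = load_subset("satellogic")         # IterableDatasetDict when streaming
    sample = next(iter(ds["train"]))       # first raw item from the stream
    images = item_to_images("satellogic", sample)
    print(images["metadata"])
    images["rgb"][0].save("satellogic_rgb_0.png")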