aletrn committed
Commit 6d1f220 · 1 Parent(s): 9271aef

[feat] wip workflow from request to response
dockerfiles/dockerfile-lambda-fastsam-api CHANGED
@@ -21,8 +21,8 @@ RUN ls -l ${LAMBDA_TASK_ROOT}/models
 RUN python -c "import sys; print(sys.path)"
 RUN python -c "import osgeo"
 RUN python -c "import cv2"
-RUN python -c "import geopandas"
-RUN python -c "import onnxruntime"
+# RUN python -c "import geopandas"
+# RUN python -c "import onnxruntime"
 # RUN python -c "import rasterio"
 RUN python -c "import awslambdaric"
 RUN python -m pip list
events/payload_point2.json ADDED
@@ -0,0 +1,11 @@
+{
+  "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
+  "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068},
+  "prompt": [{
+    "type": "point",
+    "data": {"lat": 37.0, "lng": 15.0},
+    "label": 0
+  }],
+  "zoom": 10,
+  "source_type": "Satellite"
+}
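Note that this sample payload keeps "ne"/"sw" at the top level and a single lat/lng point per prompt, while the reworked get_parsed_bbox_points in src/app.py below reads request_input["bbox"]["ne"]/["sw"] and prompt["data"]["ne"]/["sw"]. A payload shaped for the new parser would look roughly like the following (a hypothetical sketch inferred from the parser, not a file in this commit):

{
  "bbox": {
    "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
    "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
  },
  "prompt": [{
    "type": "point",
    "data": {
      "ne": {"lat": 37.2, "lng": 15.1},
      "sw": {"lat": 37.0, "lng": 15.0}
    },
    "label": 0
  }],
  "zoom": 10,
  "source_type": "Satellite"
}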
requirements_dev.txt CHANGED
@@ -1,11 +1,15 @@
+aiofiles
+aiohttp
 aws-lambda-powertools
 awslambdaric
 bson
 geopandas
-httpx
 jmespath
 numpy
 onnxruntime
 opencv-python
 pillow
+pyproj
 rasterio
+requests
+shapely
src/app.py CHANGED
@@ -7,7 +7,7 @@ from aws_lambda_powertools.event_handler import content_types
 from aws_lambda_powertools.utilities.typing import LambdaContext
 
 from src import app_logger
-from src.io.coordinates_pixel_conversion import get_point_latlng_to_pixel_coordinates, get_latlng_to_pixel_coordinates
+from src.io.coordinates_pixel_conversion import get_latlng_to_pixel_coordinates
 from src.prediction_api.predictors import samexporter_predict
 from src.utilities.constants import CUSTOM_RESPONSE_MESSAGES
 from src.utilities.utilities import base64_decode
@@ -27,7 +27,7 @@ def get_response(status: int, start_time: float, request_id: str, response_body:
         str: json response
 
     """
-    app_logger.debug(f"response_body:{response_body}.")
+    app_logger.info(f"response_body:{response_body}.")
     response_body["duration_run"] = time.time() - start_time
     response_body["message"] = CUSTOM_RESPONSE_MESSAGES[status]
     response_body["request_id"] = request_id
@@ -44,8 +44,12 @@ def get_response(status: int, start_time: float, request_id: str, response_body:
 
 def get_parsed_bbox_points(request_input: Dict) -> Dict:
     app_logger.info(f"try to parsing input request {request_input}...")
-    ne = request_input["ne"]
-    sw = request_input["sw"]
+    bbox = request_input["bbox"]
+    app_logger.info(f"request bbox: {type(bbox)}, value:{bbox}.")
+    ne = bbox["ne"]
+    sw = bbox["sw"]
+    app_logger.info(f"request ne: {type(ne)}, value:{ne}.")
+    app_logger.info(f"request sw: {type(sw)}, value:{sw}.")
     ne_latlng = [float(ne["lat"]), float(ne["lng"])]
     sw_latlng = [float(sw["lat"]), float(sw["lng"])]
     bbox = [ne_latlng, sw_latlng]
@@ -53,14 +57,23 @@ def get_parsed_bbox_points(request_input: Dict) -> Dict:
     for prompt in request_input["prompt"]:
         app_logger.info(f"current prompt: {type(prompt)}, value:{prompt}.")
         data = prompt["data"]
-        app_logger.info(f"current data point: {type(data)}, value:{data}.")
-
-        diff_pixel_coordinates_ne = get_latlng_to_pixel_coordinates(ne, data, zoom)
-        app_logger.info(f'current data by current prompt["data"]: {type(data)}, {data} => {diff_pixel_coordinates_ne}.')
-        prompt["data"] = [diff_pixel_coordinates_ne["x"], diff_pixel_coordinates_ne["y"]]
-
-    app_logger.debug(f"bbox {bbox}.")
-    app_logger.debug(f'request_input["prompt"]:{request_input["prompt"]}.')
+        app_logger.info(f"current data points: {type(data)}, value:{data}.")
+        data_ne = data["ne"]
+        app_logger.info(f"current data_ne point: {type(data_ne)}, value:{data_ne}.")
+        data_sw = data["sw"]
+        app_logger.info(f"current data_sw point: {type(data_sw)}, value:{data_sw}.")
+
+        diff_pixel_coords_origin_data_ne = get_latlng_to_pixel_coordinates(ne, data_ne, zoom, "ne")
+        app_logger.info(f'current diff prompt ne: {type(data)}, {data} => {diff_pixel_coords_origin_data_ne}.')
+        diff_pixel_coords_origin_data_sw = get_latlng_to_pixel_coordinates(ne, data_sw, zoom, "sw")
+        app_logger.info(f'current diff prompt sw: {type(data)}, {data} => {diff_pixel_coords_origin_data_sw}.')
+        prompt["data"] = [
+            diff_pixel_coords_origin_data_ne["x"], diff_pixel_coords_origin_data_ne["y"],
+            diff_pixel_coords_origin_data_sw["x"], diff_pixel_coords_origin_data_sw["y"]
+        ]
+
+    app_logger.info(f"bbox => {bbox}.")
+    app_logger.info(f'## request_input["prompt"] updated => {request_input["prompt"]}.')
 
     app_logger.info(f"unpacking elaborated {request_input}...")
     return {
@@ -78,8 +91,8 @@ def lambda_handler(event: dict, context: LambdaContext):
     app_logger.info(f"event version: {event['version']}.")
 
     try:
-        app_logger.debug(f"event:{json.dumps(event)}...")
-        app_logger.debug(f"context:{context}...")
+        app_logger.info(f"event:{json.dumps(event)}...")
+        app_logger.info(f"context:{context}...")
 
         try:
             body = event["body"]
@@ -87,17 +100,18 @@ def lambda_handler(event: dict, context: LambdaContext):
             app_logger.error(f"e_constants1:{e_constants1}.")
             body = event
 
-        app_logger.debug(f"body, #1: {type(body)}, {body}...")
+        app_logger.info(f"body, #1: {type(body)}, {body}...")
 
         if isinstance(body, str):
             body_decoded_str = base64_decode(body)
-            app_logger.debug(f"body_decoded_str: {type(body_decoded_str)}, {body_decoded_str}...")
+            app_logger.info(f"body_decoded_str: {type(body_decoded_str)}, {body_decoded_str}...")
            body = json.loads(body_decoded_str)
 
         app_logger.info(f"body, #2: {type(body)}, {body}...")
 
         try:
             body_request = get_parsed_bbox_points(body)
+            app_logger.info(f"body_request=> {type(body_request)}, {body_request}.")
             body_response = samexporter_predict(body_request["bbox"], body_request["prompt"], body_request["zoom"])
             app_logger.info(f"output body_response:{body_response}.")
             response = get_response(HTTPStatus.OK.value, start_time, context.aws_request_id, body_response)
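Since lambda_handler only needs event["version"], an optionally base64-encoded event["body"] and context.aws_request_id, the request-to-response flow can be exercised locally. A minimal smoke-test sketch, assuming the project package is importable and a payload carrying the "bbox" wrapper expected by the new parser (the _Ctx stub below is hypothetical, standing in for LambdaContext):

import base64
import json

from src.app import lambda_handler

with open("events/payload_point2.json") as src_json:
    payload = json.load(src_json)

event = {
    "version": "2.0",
    # the handler base64-decodes string bodies before json.loads
    "body": base64.b64encode(json.dumps(payload).encode("utf-8")).decode("utf-8"),
}

class _Ctx:
    # minimal stand-in for the LambdaContext attribute the handler reads
    aws_request_id = "local-test"

print(lambda_handler(event, _Ctx()))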
src/io/coordinates_pixel_conversion.py CHANGED
@@ -33,9 +33,9 @@ def get_latlng2pixel_projection(latlng) -> PixelCoordinate:
 def get_point_latlng_to_pixel_coordinates(latlng, zoom: int) -> PixelCoordinate:
     try:
         world_coordinate: PixelCoordinate = get_latlng2pixel_projection(latlng)
-        app_logger.debug(f"world_coordinate:{world_coordinate}.")
+        app_logger.info(f"world_coordinate:{world_coordinate}.")
         scale: int = pow(2, zoom)
-        app_logger.debug(f"scale:{scale}.")
+        app_logger.info(f"scale:{scale}.")
         return PixelCoordinate(
             x=math.floor(world_coordinate["x"] * scale),
             y=math.floor(world_coordinate["y"] * scale)
@@ -45,9 +45,20 @@ def get_point_latlng_to_pixel_coordinates(latlng, zoom: int) -> PixelCoordinate:
         raise e_format_latlng_to_pixel_coordinates
 
 
-def get_latlng_to_pixel_coordinates(latlng_origin, latlng_current_point, zoom):
+def get_latlng_to_pixel_coordinates(latlng_origin, latlng_current_point, zoom, k: str):
+    # latlng_origin_list = get_latlng_coords_list(latlng_origin, k)
+    # latlng_current_point_list = get_latlng_coords_list(latlng_current_point, k)
+    app_logger.info(f"latlng_origin - {k}: {type(latlng_origin)}, value:{latlng_origin}.")
+    app_logger.info(f"latlng_current_point - {k}: {type(latlng_current_point)}, value:{latlng_current_point}.")
     latlng_map_origin = get_point_latlng_to_pixel_coordinates(latlng_origin, zoom)
     latlng_map_current_point = get_point_latlng_to_pixel_coordinates(latlng_current_point, zoom)
     diff_coord_x = abs(latlng_map_origin["x"] - latlng_map_current_point["x"])
     diff_coord_y = abs(latlng_map_origin["y"] - latlng_map_current_point["y"])
-    return PixelCoordinate(x=diff_coord_x, y=diff_coord_y)
+    point = PixelCoordinate(x=diff_coord_x, y=diff_coord_y)
+    app_logger.info(f"point - {k}: {point}.")
+    return point
+
+
+def get_latlng_coords_list(latlng_point, k: str):
+    latlng_current_point = latlng_point[k]
+    return [latlng_current_point["lat"], latlng_current_point["lng"]]
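get_latlng_to_pixel_coordinates returns the absolute pixel offset between two lat/lng points at a given zoom, which get_parsed_bbox_points uses to express prompt coordinates relative to the bbox corner. A self-contained sketch of the same math, assuming get_latlng2pixel_projection (defined above this hunk, outside the diff) implements the standard 256-pixel Web Mercator world-coordinate formula:

import math

TILE_SIZE = 256  # assumed Web Mercator tile size

def latlng_to_world(lat: float, lng: float) -> dict:
    # clamp sin(lat) to avoid infinities near the poles
    siny = min(max(math.sin(math.radians(lat)), -0.9999), 0.9999)
    return {
        "x": TILE_SIZE * (0.5 + lng / 360),
        "y": TILE_SIZE * (0.5 - math.log((1 + siny) / (1 - siny)) / (4 * math.pi)),
    }

def latlng_to_pixel(lat: float, lng: float, zoom: int) -> dict:
    world = latlng_to_world(lat, lng)
    scale = 2 ** zoom  # pixel extent doubles at every zoom level
    return {"x": math.floor(world["x"] * scale), "y": math.floor(world["y"] * scale)}

# pixel offset of a prompt point from the bbox NE corner at zoom 10,
# mirroring the abs() differences computed above
ne = latlng_to_pixel(38.039, 15.368, 10)
pt = latlng_to_pixel(37.0, 15.0, 10)
print(abs(ne["x"] - pt["x"]), abs(ne["y"] - pt["y"]))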
src/io/helpers.py ADDED
@@ -0,0 +1,618 @@
+"""Helpers dedicated to georeferencing duties"""
+import base64
+import glob
+import json
+import os
+import zlib
+from math import log, tan, radians, cos, pi, floor, degrees, atan, sinh
+
+import rasterio
+
+from src import app_logger
+from src.utilities.constants import GEOJSON_SQUARE_TEMPLATE, OUTPUT_CRS_STRING, INPUT_CRS_STRING, SKIP_CONDITIONS_LIST
+from src.utilities.type_hints import ts_llist_float2, ts_geojson, ts_dict_str2b, ts_tuple_flat2, ts_tuple_flat4, \
+    ts_list_float4, ts_llist2, ts_tuple_int4, ts_ddict2
+
+ZIPJSON_KEY = 'base64(zip(o))'
+
+
+def get_geojson_square_angles(bounding_box:ts_llist_float2, name:str="buffer", debug:bool=False) -> ts_geojson:
+    """
+    Create a geojson-like dict rectangle from the input latitude/longitude bounding box
+
+    Args:
+        bounding_box: float latitude/longitude bounding box
+        name: geojson-like rectangle name
+        debug: bool, default=False
+            logging debug argument
+
+    Returns:
+        dict: geojson-like object rectangle
+
+    """
+    import copy
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+
+    #app_logger = setup_logging(debug)
+    app_logger.info(f"bounding_box:{bounding_box}.")
+    top = bounding_box[0][0]
+    right = bounding_box[0][1]
+    bottom = bounding_box[1][0]
+    left = bounding_box[1][1]
+    bottom_left = [left, bottom]
+    top_left = [left, top]
+    top_right = [right, top]
+    bottom_right = [right, bottom]
+    coords = [bottom_left, top_left, top_right, bottom_right]
+    app_logger.info(f"coords:{coords}.")
+    geojson = copy.copy(GEOJSON_SQUARE_TEMPLATE)
+    geojson["name"] = name
+    geojson["features"][0]["geometry"]["coordinates"] = [[coords]]
+    app_logger.info(f"geojson:{geojson}.")
+    return geojson
+
+
+def crop_raster(merged_raster_path:str, area_crop_geojson:dict, debug:bool=False) -> ts_dict_str2b:
+    """
+    Crop a raster using a geojson-like object rectangle
+
+    Args:
+        merged_raster_path: filename path pointing string to the raster to crop
+        area_crop_geojson: geojson-like object rectangle
+        debug: bool, default=False
+            logging debug argument
+
+    Returns:
+        dict: the cropped raster numpy array and the transform object with the georeferencing reference
+
+    """
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+
+    #app_logger = setup_logging(debug)
+    try:
+        import rasterio
+        from rasterio.mask import mask
+
+        app_logger.info(f"area_crop_geojson::{area_crop_geojson}.")
+        geojson_reprojected = get_geojson_reprojected(area_crop_geojson, debug=debug)
+        shapes = [feature["geometry"] for feature in geojson_reprojected["features"]]
+        app_logger.info(f"geojson_reprojected:{geojson_reprojected}.")
+
+        app_logger.info(f"reading merged_raster_path while masking it from path:{merged_raster_path}.")
+        with rasterio.open(merged_raster_path, "r") as src:
+            masked_raster, masked_transform = mask(src, shapes, crop=True)
+            masked_meta = src.meta
+            app_logger.info(f"merged_raster_path, src:{src}.")
+            masked_meta.update({
+                "driver": "GTiff", "height": masked_raster.shape[1],
+                "width": masked_raster.shape[2], "transform": masked_transform}
+            )
+        return {"masked_raster": masked_raster, "masked_meta": masked_meta, "masked_transform": masked_transform}
+    except Exception as e:
+        app_logger.error(e)
+        raise e
+
+
+def get_geojson_reprojected(geojson:dict, output_crs:str=OUTPUT_CRS_STRING, debug:bool=False) -> dict:
+    """
+    change projection for input geojson-like object polygon
+
+    Args:
+        geojson: input geojson-like object polygon
+        output_crs: output crs string - Coordinate Reference Systems
+        debug: logging debug argument
+
+    Returns:
+        dict: reprojected geojson-like object
+
+    """
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+
+    #app_logger = setup_logging(debug)
+    if not isinstance(geojson, dict):
+        raise ValueError(f"geojson here should be a dict, not of type {type(geojson)}.")
+    app_logger.info(f"start reprojecting geojson:{geojson}.")
+    try:
+        features = geojson['features']
+
+        output_crs_json = {"type": "name", "properties": {"name": f"urn:ogc:def:crs:{output_crs}"}}
+        geojson_output = {'features': [], 'type': 'FeatureCollection', "name": "converted", "crs": output_crs_json}
+
+        # Iterate through each feature of the feature collection
+        for feature in features:
+            feature_out = feature.copy()
+            new_coords = []
+            feat = feature['geometry']
+            app_logger.info(f"feat:{feat}.")
+            coords = feat['coordinates']
+            app_logger.info(f"coordinates:{coords}.")
+            # iterate over "coordinates" lists with 3 nested loops, practically with only one element but last loop
+            for coord_a in coords:
+                new_coords_a = []
+                for cord_b in coord_a:
+                    new_coords_b = []
+                    # Project/transform coordinate pairs of each ring
+                    # (iteration required in case geometry type is MultiPolygon, or there are holes)
+                    for xconv, yconf in cord_b:
+                        app_logger.info(f"xconv, yconf:{xconv},{yconf}.")
+                        x2, y2 = latlon_to_mercator(xconv, yconf)
+                        app_logger.info(f"x2, y2:{x2},{y2}.")
+                        new_coords_b.append([x2, y2])
+                    new_coords_a.append(new_coords_b)
+                new_coords.append(new_coords_a)
+            feature_out['geometry']['coordinates'] = new_coords
+            geojson_output['features'].append(feature_out)
+        app_logger.info(f"geojson_output:{geojson_output}.")
+        return geojson_output
+    except KeyError as ke_get_geojson_reprojected:
+        msg = f"ke_get_geojson_reprojected:{ke_get_geojson_reprojected}."
+        app_logger.error(msg)
+        raise KeyError(msg)
+
+
+def latlon_to_mercator(
+        lat:float, lon:float, input_crs:str=INPUT_CRS_STRING, output_crs:str=OUTPUT_CRS_STRING, always_xy:bool=True, debug:bool=False
+) -> ts_tuple_flat2:
+    """
+    Return a tuple of latitude, longitude float coordinates values transformed to mercator
+
+    Args:
+        lat: input latitude float value
+        lon: input longitude float value
+        input_crs: string, input Coordinate Reference Systems
+        output_crs: string, output Coordinate Reference Systems
+        always_xy: bool, default=True.
+            If true, the transform method will accept as input and return as output
+            coordinates using the traditional GIS order, that is longitude, latitude
+            for geographic CRS and easting, northing for most projected CRS.
+        debug: bool, default=False.
+            logging debug argument
+
+    Returns:
+        tuple latitude/longitude float values
+
+    """
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+    #app_logger = setup_logging(debug)
+    try:
+        from pyproj import Transformer
+        app_logger.info(f"lat:{lat},lon:{lon}.")
+        transformer = Transformer.from_crs(input_crs, output_crs, always_xy=always_xy)
+        out_lat, out_lon = transformer.transform(lat, lon)
+        app_logger.info(f"out_lat:{out_lat},out_lon:{out_lon}.")
+        return out_lat, out_lon
+    except Exception as e_latlon_to_mercator:
+        app_logger.error(f"e_latlon_to_mercator:{e_latlon_to_mercator}.")
+        raise e_latlon_to_mercator
+
+
+def sec(x:float) -> float:
+    """
+    Return secant (the reciprocal of the cosine) for given value
+
+    Args:
+        x: input float value
+
+    Returns:
+        float: secant of given float value
+
+    """
+    return 1 / cos(x)
+
+
+def latlon_to_xyz(lat:float, lon:float, z:int) -> ts_tuple_flat2:
+    """
+    Return x/y coordinates points for tiles from latitude/longitude values point.
+
+    Args:
+        lon: float longitude value
+        lat: float latitude value
+        z: float zoom value
+
+    Returns:
+        tuple: x, y values tiles coordinates
+
+    """
+    tile_count = pow(2, z)
+    x = (lon + 180) / 360
+    y = (1 - log(tan(radians(lat)) + sec(radians(lat))) / pi) / 2
+    return tile_count * x, tile_count * y
+
+
+def bbox_to_xyz(lon_min:float, lon_max:float, lat_min:float, lat_max:float, z:int) -> ts_tuple_flat4:
+    """
+    Return xyz reference coordinates for tiles from latitude/longitude min and max values.
+
+    Args:
+        lon_min: float min longitude value
+        lon_max: float max longitude value
+        lat_min: float min latitude value
+        lat_max: float max latitude value
+        z: float zoom value
+
+    Returns:
+        tuple: float x min, x max, y min, y max values tiles coordinates
+
+    """
+    x_min, y_max = latlon_to_xyz(lat_min, lon_min, z)
+    x_max, y_min = latlon_to_xyz(lat_max, lon_max, z)
+    return (floor(x_min), floor(x_max),
+            floor(y_min), floor(y_max))
+
+
+def mercator_to_lat(mercator_y:float) -> float:
+    """
+    Return latitude value coordinate from mercator coordinate value
+
+    Args:
+        mercator_y: float mercator value coordinate
+
+    Returns:
+        float: latitude value coordinate
+
+    """
+    return degrees(atan(sinh(mercator_y)))
+
+
+def y_to_lat_edges(y:float, z:int) -> ts_tuple_flat2:
+    """
+    Return edge float latitude values coordinates from x,z tiles coordinates
+
+    Args:
+        y: float x tile value coordinate
+        z: float zoom tile value coordinate
+
+    Returns:
+        tuple: two float latitude values coordinates
+
+    """
+    tile_count = pow(2, z)
+    unit = 1 / tile_count
+    relative_y1 = y * unit
+    relative_y2 = relative_y1 + unit
+    lat1 = mercator_to_lat(pi * (1 - 2 * relative_y1))
+    lat2 = mercator_to_lat(pi * (1 - 2 * relative_y2))
+    return lat1, lat2
+
+
+def x_to_lon_edges(x:float, z:int) -> ts_tuple_flat2:
+    """
+    Return edge float longitude values coordinates from x,z tiles coordinates
+
+    Args:
+        x: float x tile value coordinate
+        z: float zoom tile value coordinate
+
+    Returns:
+        tuple: two float longitude values coordinates
+
+    """
+    tile_count = pow(2, z)
+    unit = 360 / tile_count
+    lon1 = -180 + x * unit
+    lon2 = lon1 + unit
+    return lon1, lon2
+
+
+def tile_edges(x:float, y:float, z:int) -> ts_list_float4:
+    """
+    Return edge float latitude/longitude value coordinates from xyz tiles coordinates
+
+    Args:
+        x: float x tile value coordinate
+        y: float y tile value coordinate
+        z: float zoom tile value coordinate
+
+    Returns:
+        tuple: float latitude/longitude values coordinates
+
+    """
+    lat1, lat2 = y_to_lat_edges(y, z)
+    lon1, lon2 = x_to_lon_edges(x, z)
+    return [lon1, lat1, lon2, lat2]
+
+
+def merge_tiles(input_pattern:str, output_path:str, temp_dir:str, debug:bool=False) -> None:
+    """
+    Merge given raster glob input pattern into one unique georeferenced raster.
+
+    Args:
+        input_pattern: input glob pattern needed for search the raster filenames
+        output_path: output path where to write the merged raster
+        temp_dir: temporary folder needed for create
+        debug: bool, default=False.
+            logging debug argument
+
+    """
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+
+    #app_logger = setup_logging(debug)
+    try:
+        from osgeo import gdal
+    except ModuleNotFoundError as module_error_merge_tiles:
+        msg = f"module_error_merge_tiles:{module_error_merge_tiles}."
+        app_logger.error(msg)
+        raise module_error_merge_tiles
+
+    try:
+        vrt_path = os.path.join(temp_dir, "tiles.vrt")
+        os_list_dir1 = os.listdir(temp_dir)
+        app_logger.info(f"os_list_dir1:{os_list_dir1}.")
+
+        gdal.BuildVRT(vrt_path, glob.glob(input_pattern))
+        gdal.Translate(output_path, vrt_path)
+
+        os_list_dir2 = os.listdir(temp_dir)
+        app_logger.info(f"os_list_dir2:{os_list_dir2}.")
+    except IOError as ioe_merge_tiles:
+        msg = f"ioe_merge_tiles:{ioe_merge_tiles}."
+        app_logger.error(msg)
+        raise ioe_merge_tiles
+
+
+def get_lat_lon_coords(bounding_box: ts_llist2) -> ts_tuple_int4:
+    """
+    Return couples of float latitude/longitude values from bounding box input list.
+
+    Args:
+        bounding_box: bounding box input list of latitude/longitude coordinates
+
+    Returns:
+        tuple: float longitude min, latitude min, longitude max, longitude max values coordinates
+
+    """
+    top_right, bottom_left = bounding_box
+    lat_max, lon_max = top_right
+    lat_min, lon_min = bottom_left
+    if lon_min == lon_max or lat_min == lat_max:
+        raise ValueError(f"latitude and/or longitude coordinates should not be equal each others... {bounding_box}.")
+    return lon_min, lat_min, lon_max, lat_max
+
+
+def get_prediction_georeferenced(prediction_obj:dict, transform:rasterio.transform, skip_conditions_list:list=None, debug:bool=False) -> dict:
+    """
+    Return a georeferenced geojson-like object starting from a dict containing "predictions" -> "points" list.
+    Apply the affine transform matrix of georeferenced raster submitted to the machine learning model.
+
+    Args:
+        prediction_obj: input dict
+        transform: 'rasterio.transform' or dict list, affine tranform matrix
+        skip_conditions_list: dict list, skip condition list
+        debug: bool, default=False.
+            logging debug argument
+
+    Returns:
+        dict
+
+    """
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+
+    if skip_conditions_list is None:
+        skip_conditions_list = SKIP_CONDITIONS_LIST
+
+    #app_logger = setup_logging(debug)
+    app_logger.info(f"prediction_obj::{prediction_obj}, transform::{transform}.")
+    crs = {"type": "name", "properties": {"name": "urn:ogc:def:crs:EPSG::3857"}}
+    geojson_obj = {'features': [], 'type': 'FeatureCollection', "name": "geojson_name", "crs": crs}
+    for n, prediction in enumerate(prediction_obj["predictions"]):
+        points_dict_ = prediction["points"]
+        points_list = [[p["x"], p["y"]] for p in points_dict_]
+        app_logger.info(f"points_list::{points_list}.")
+        # if check_skip_conditions(prediction, skip_conditions_list, debug=debug):
+        #     continue
+        feature = populate_features_geojson(n, points_list, confidence=prediction["confidence"], geomorphic_class=prediction["class"])
+        app_logger.info(f"geojson::feature:{feature}.")
+        feature["geometry"] = apply_transform(feature["geometry"], transform, debug=debug)
+        geojson_obj["features"].append(feature)
+    app_logger.info(f"geojson::post_update:{geojson_obj}.")
+    return geojson_obj
+
+
+def populate_features_geojson(idx: int, coordinates_list: list, **kwargs) -> ts_ddict2:
+    """
+    Return a list of coordinate points in a geojson-like feature-like object.
+
+    Args:
+        idx: int, feature index
+        coordinates_list: dict list, coordinate points
+        **kwargs: optional arguments to merge within the geojson properties feature
+
+    Returns:
+        dict
+
+    """
+    return {
+        "type": "Feature",
+        "properties": {"id": idx, **kwargs},
+        "geometry": {
+            "type": "MultiPolygon",
+            "coordinates": [[coordinates_list]],
+        }
+    }
+
+
+def check_skip_conditions(prediction:dict, skip_conditions_list:list, debug:bool=False) -> bool:
+    """
+    Loop over elements within skip_condition_list and return a boolean if no condition to skip (or exceptions).
+
+    Args:
+        prediction: input dict to check
+        skip_conditions_list: dict list with conditions to evaluate
+        debug: bool, default=False
+            logging debug argument
+
+    Returns:
+        bool
+
+    """
+    for obj in skip_conditions_list:
+        return skip_feature(prediction, obj["skip_key"], obj["skip_value"], obj["skip_condition"], debug=debug)
+    return False
+
+
+def skip_feature(prediction:dict, skip_key:float, skip_value:str, skip_condition:str, debug:bool=False) -> bool:
+    """
+    Return False if values from input dict shouldn't be skipped,
+    True in case of exceptions, empty skip_condition or when chosen condition meets skip_value and skip_condition.
+
+    E.g. confidence should be major than 0.8: if confidence is equal to 0.65 then return True (0.65 < 0.8) and skip!
+
+    Args:
+        prediction: input dict to check
+        skip_key: skip condition key string
+        skip_value: skip condition value string
+        skip_condition: string (major | minor | equal)
+        debug: bool, default=False
+            logging debug argument
+
+    Returns:
+        bool
+
+    """
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+    #app_logger = setup_logging(debug)
+    try:
+        v = prediction[skip_key]
+        match skip_condition:
+            case "major":
+                return v > skip_value
+            case "minor":
+                return v < skip_value
+            case "equal":
+                return v == skip_value
+            case "":
+                return False
+    except KeyError as ke_filter_feature:
+        app_logger.error(f"ke_filter_feature:{ke_filter_feature}.")
+        return False
+    except Exception as e_filter_feature:
+        app_logger.error(f"e_filter_feature:{e_filter_feature}.")
+        return False
+
+
+def apply_transform(geometry:object, transform:list[object], debug:bool=False) -> dict:
+    """
+    Returns a GeoJSON-like mapping from a transformed geometry using an affine transformation matrix.
+
+    The coefficient matrix is provided as a list or tuple with 6 items
+    for 2D transformations. The 6 parameter matrix is::
+
+        [a, b, d, e, xoff, yoff]
+
+    which represents the augmented matrix::
+
+        [x']   / a  b  xoff \  [x]
+        [y'] = | d  e  yoff |  [y]
+        [1 ]   \ 0  0     1 /  [1]
+
+    or the equations for the transformed coordinates::
+
+        x' = a * x + b * y + xoff
+        y' = d * x + e * y + yoff
+
+    Args:
+        geometry: geometry value from a geojson dict
+        transform: list of float values (affine transformation matrix)
+        debug: bool, default=False
+            logging debug argument
+
+    Returns:
+        dict
+
+    """
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+
+    #app_logger = setup_logging(debug)
+
+    try:
+        from shapely.affinity import affine_transform
+        from shapely.geometry import mapping, shape
+        try:
+            geometry_transformed = affine_transform(shape(geometry), [transform.a, transform.b, transform.d, transform.e, transform.xoff, transform.yoff])
+        except AttributeError as ae:
+            app_logger.warning(f"ae:{ae}.")
+            geometry_transformed = affine_transform(shape(geometry), [transform[0], transform[1], transform[2], transform[3], transform[4], transform[5]])
+        geometry_serialized = mapping(geometry_transformed)
+        app_logger.info(f"geometry_serialized:{geometry_serialized}.")
+        return geometry_serialized
+    except ImportError as ie_apply_transform:
+        app_logger.error(f"ie_apply_transform:{ie_apply_transform}.")
+        raise ie_apply_transform
+    except Exception as e_apply_transform:
+        app_logger.error(f"e_apply_transform:{e_apply_transform}.")
+        raise e_apply_transform
+
+
+def get_perc(nan_count:int, total_count:int) -> str:
+    """
+    Return a formatted string with a percentage value representing the ratio between NaN and total number elements within a numpy array
+
+    Args:
+        nan_count: NaN value elements
+        total_count: total count of elements
+
+    Returns:
+        str
+
+    """
+    return f"{100*nan_count/total_count:.2f}"
+
+
+def json_unzip(j:dict, debug:bool=False) -> str:
+    """
+    Return uncompressed content from input dict using 'zlib' library
+
+    Args:
+        j: input dict to uncompress. key must be 'base64(zip(o))'
+        debug: logging debug argument
+
+    Returns:
+        dict: uncompressed dict
+
+    """
+    from json import JSONDecodeError
+    from zlib import error as zlib_error
+
+    #from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+
+    #app_logger = setup_logging(debug)
+
+    try:
+        j = zlib.decompress(base64.b64decode(j[ZIPJSON_KEY]))
+    except KeyError as ke:
+        ke_error_msg = f"Could not decode/unzip the content because of wrong/missing dict key:{ke}."
+        raise KeyError(ke_error_msg)
+    except zlib_error as zlib_error2:
+        zlib_error2_msg = f"Could not decode/unzip the content because of:{zlib_error2}."
+        app_logger.error(zlib_error2_msg)
+        raise RuntimeError(zlib_error2_msg)
+
+    try:
+        j = json.loads(j)
+    except JSONDecodeError as json_e1:
+        msg = f"Could interpret the unzipped content because of JSONDecodeError with msg:{json_e1.msg}, pos:{json_e1.pos}, broken json:'{json_e1.doc}'"
+        app_logger.error(msg)
+        raise RuntimeError(msg)
+
+    return j
+
+
+def json_zip(j:dict) -> dict[str]:
+    """
+    Return compressed content from input dict using 'zlib' library
+
+    Args:
+        j: input dict to compress
+
+    Returns:
+        dict: compressed dict
+
+    """
+    return {
+        ZIPJSON_KEY: base64.b64encode(
+            zlib.compress(
+                json.dumps(j).encode('utf-8')
+            )
+        ).decode('ascii')
+    }
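Among the helpers above, json_zip/json_unzip give a compact wire format for large geojson payloads. A minimal round-trip sketch, assuming src.io.helpers is importable from the project root:

from src.io.helpers import ZIPJSON_KEY, json_zip, json_unzip

payload = {"bbox": [[38.04, 15.37], [37.46, 14.63]], "zoom": 10}
packed = json_zip(payload)                 # {'base64(zip(o))': '...'}
assert set(packed) == {ZIPJSON_KEY}
assert json_unzip(packed) == payload       # decompresses and json.loads back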
src/io/tiles_to_tiff.py ADDED
@@ -0,0 +1,195 @@
+"""Async download raster tiles"""
+import os
+from pathlib import Path
+
+import numpy as np
+
+from src import app_logger, PROJECT_ROOT_FOLDER
+from src.io.helpers import get_lat_lon_coords, merge_tiles, get_geojson_square_angles, crop_raster
+from src.io.tms2geotiff import download_extent, save_geotiff_gdal
+from src.utilities.constants import COMPLETE_URL_TILES, DEFAULT_TMS
+from src.utilities.type_hints import ts_llist2
+
+COOKIE_SESSION = {
+    "Accept": "*/*",
+    "Accept-Encoding": "gzip, deflate",
+    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; rv:91.0) Gecko/20100101 Firefox/91.0",
+}
+
+
+def load_affine_transformation_from_matrix(matrix_source_coeffs):
+    from affine import Affine
+
+    if len(matrix_source_coeffs) != 6:
+        raise ValueError(f"Expected 6 coefficients, found {len(matrix_source_coeffs)};"
+                         f"argument type: {type(matrix_source_coeffs)}.")
+
+    try:
+        a, d, b, e, c, f = (float(x) for x in matrix_source_coeffs)
+        center = tuple.__new__(Affine, [a, b, c, d, e, f, 0.0, 0.0, 1.0])
+        return center * Affine.translation(-0.5, -0.5)
+    except Exception as e:
+        app_logger.error(f"exception:{e}, check https://github.com/rasterio/affine project for updates")
+
+
+# @timing_decorator
+def convert(bounding_box: ts_llist2, zoom: int) -> tuple:
+    """
+    Starting from a bounding box of two couples of latitude and longitude coordinate values, recognize a stratovolcano from an RGB image. The algorithm
+    create the image composing three channels as slope, DEM (Digital Elevation Model) and curvature. In more detail:
+
+    - download a series of terrain DEM (Digital Elevation Model) raster tiles enclosed within that bounding box
+    - merge all the downloaded rasters
+    - crop the merged raster
+    - process the cropped raster to extract slope and curvature (1st and 2nd degree derivative)
+    - produce three raster channels (DEM, slope and curvature rasters) to produce an RGB raster image
+    - submit the RGB image to a remote machine learning service to try to recognize a polygon representing a stratovolcano
+    - the output of the machine learning service is a json, so we need to georeferencing it
+    - finally we return a dict as response containing
+        - uploaded_file_name
+        - bucket_name
+        - prediction georeferenced geojson-like dict
+
+    Args:
+        bounding_box: float latitude/longitude bounding box
+        zoom: integer zoom value
+
+    Returns:
+        dict: uploaded_file_name (str), bucket_name (str), prediction_georef (dict), n_total_obj_prediction (str)
+
+    """
+    import tempfile
+
+    # from src.surferdtm_prediction_api.utilities.constants import NODATA_VALUES
+    # from src.surferdtm_prediction_api.utilities.utilities import setup_logging
+    # from src.surferdtm_prediction_api.raster.elaborate_images import elaborate_images.get_rgb_prediction_image
+    # from src.surferdtm_prediction_api.raster.prediction import model_prediction
+    # from src.surferdtm_prediction_api.geo.helpers import get_lat_lon_coords, merge_tiles, crop_raster, get_prediction_georeferenced, \
+    #     get_geojson_square_angles, get_perc
+
+    # app_logger = setup_logging(debug)
+    ext = "tif"
+    debug = False
+    tile_source = COMPLETE_URL_TILES
+    app_logger.info(f"start_args: tile_source:{tile_source},bounding_box:{bounding_box},zoom:{zoom}.")
+
+    try:
+        import rasterio
+
+        lon_min, lat_min, lon_max, lat_max = get_lat_lon_coords(bounding_box)
+
+        with tempfile.TemporaryDirectory() as input_tmp_dir:
+            # with tempfile.TemporaryDirectory() as output_tmp_dir:
+            output_tmp_dir = input_tmp_dir
+            app_logger.info(f'tile_source: {tile_source}!')
+            app_logger.info(f'created temporary input/output directory: {input_tmp_dir} => {output_tmp_dir}!')
+            pt0, pt1 = bounding_box
+            app_logger.info("downloading...")
+            img, matrix = download_extent(DEFAULT_TMS, pt0[0], pt0[1], pt1[0], pt1[1], zoom)
+
+            app_logger.info(f'img: type {type(img)}, len_matrix:{len(matrix)}, matrix {matrix}.')
+            app_logger.info(f'img: size (shape if PIL) {img.size}.')
+            try:
+                np_img = np.array(img)
+                app_logger.info(f'img: shape (numpy) {np_img.shape}.')
+            except Exception as e_shape:
+                app_logger.info(f'e_shape {e_shape}.')
+                raise e_shape
+            img.save(f"/tmp/downloaded_{pt0[0]}_{pt0[1]}_{pt1[0]}_{pt1[1]}.png")
+            app_logger.info("saved PIL image")
+
+            return img, matrix
+            # app_logger.info("prepare writing...")
+            # app_logger.info(f'img: type {type(img)}, len_matrix:{len(matrix)}, matrix {matrix}.')
+            #
+            # rio_output = str(Path(output_tmp_dir) / "downloaded_rio.tif")
+            # app_logger.info(f'writing to disk img, output file {rio_output}.')
+            # save_geotiff_gdal(img, rio_output, matrix)
+            # app_logger.info(f'img written to output file {rio_output}.')
+            #
+            # source_tiles = os.path.join(input_tmp_dir, f"*.{ext}")
+            # suffix_raster_filename = f"{lon_min},{lat_min},{lon_max},{lat_max}_{zoom}"
+            # merged_raster_filename = f"merged_{suffix_raster_filename}.{ext}"
+            # masked_raster_filename = f"masked_{suffix_raster_filename}.{ext}"
+            # output_merged_path = os.path.join(output_tmp_dir, merged_raster_filename)
+            #
+            # app_logger.info(f"try merging tiles to:{output_merged_path}.")
+            # merge_tiles(source_tiles, output_merged_path, input_tmp_dir)
+            # app_logger.info(f"Merge complete, try crop...")
+            # geojson = get_geojson_square_angles(bounding_box, name=suffix_raster_filename, debug=debug)
+            # app_logger.info(f"geojson to convert:{geojson}.")
+            #
+            # crop_raster_output = crop_raster(output_merged_path, geojson, debug=False)
+            # masked_raster = crop_raster_output["masked_raster"]
+            # masked_meta = crop_raster_output["masked_meta"]
+            # masked_transform = crop_raster_output["masked_transform"]
+            #
+            # return masked_raster, masked_transform
+
+            # app_logger.info(f"resampling -32768 values as NaN for file:{masked_raster_filename}.")
+            # masked_raster = masked_raster[0].astype(float)
+            # masked_raster[masked_raster == NODATA_VALUES] = 0
+            # # info
+            # nan_count = np.count_nonzero(~np.isnan(masked_raster))
+            # total_count = masked_raster.shape[-1] * masked_raster.shape[-2]
+            # perc = get_perc(nan_count, total_count)
+            # msg = f"img:{masked_raster_filename}, shape:{masked_raster.shape}: found {nan_count} not-NaN values / {total_count} total, %:{perc}."
+            # app_logger.info(msg)
+            #
+            # app_logger.info(f"crop complete, shape:{masked_raster.shape}, dtype:{masked_raster.dtype}. Create RGB image...")
+            # # rgb_filename, rgb_path = elaborate_images.get_rgb_prediction_image(masked_raster, slope_cellsize, suffix_raster_filename, output_tmp_dir, debug=debug)
+            # # prediction = model_prediction(rgb_path, project_name=model_project_name, version=model_version, api_key=model_api_key, debug=False)
+            #
+            # mask_vectorizing = np.ones(masked_raster.shape).astype(rasterio.uint8)
+            # app_logger.info(f"prediction success, try to geo-referencing it with transform:{masked_transform}.")
+            #
+            # app_logger.info(
+            #     f"image/geojson origin matrix:, masked_transform:{masked_transform}: create shapes_generator...")
+            # app_logger.info(f"raster mask to vectorize, type:{type(mask_vectorizing)}.")
+            # app_logger.info(f"raster mask to vectorize: shape:{mask_vectorizing.shape}, {mask_vectorizing.dtype}.")
+            #
+            # shapes_generator = ({
+            #     'properties': {'raster_val': v}, 'geometry': s}
+            #     for i, (s, v)
+            #     in enumerate(shapes(mask_vectorizing, mask=mask_vectorizing, transform=masked_transform))
+            # )
+            # shapes_list = list(shapes_generator)
+            # app_logger.info(f"created {len(shapes_list)} polygons.")
+            # gpd_polygonized_raster = GeoDataFrame.from_features(shapes_list, crs="EPSG:3857")
+            # app_logger.info(f"created a GeoDataFrame: type {type(gpd_polygonized_raster)}.")
+            # geojson = gpd_polygonized_raster.to_json(to_wgs84=True)
+            # app_logger.info(f"created geojson: type {type(geojson)}, len:{len(geojson)}.")
+            # serialized_geojson = serialize.serialize(geojson)
+            # app_logger.info(f"created serialized_geojson: type {type(serialized_geojson)}, len:{len(serialized_geojson)}.")
+            # loaded_geojson = json.loads(geojson)
+            # app_logger.info(f"loaded_geojson: type {type(loaded_geojson)}, loaded_geojson:{loaded_geojson}.")
+            # n_feats = len(loaded_geojson["features"])
+            # app_logger.info(f"created geojson: n_feats {n_feats}.")
+            #
+            # output_geojson = str(Path(ROOT) / "geojson_output.json")
+            # with open(output_geojson, "w") as jj_out:
+            #     app_logger.info(f"writing geojson file to {output_geojson}.")
+            #     json.dump(loaded_geojson, jj_out)
+            # app_logger.info(f"geojson file written to {output_geojson}.")
+            #
+            # # prediction_georef = helpers.get_prediction_georeferenced(prediction, masked_transform, skip_conditions_list, debug=debug)
+            # app_logger.info(f"success on geo-referencing prediction.")
+            # # app_logger.info(f"success on creating file {rgb_filename}, now try upload it to bucket_name {bucket_name}...")
+            # return {
+            #     # "uploaded_file_name": rgb_filename,
+            #     "geojson": loaded_geojson,
+            #     # "prediction_georef": prediction_georef,
+            #     "n_total_obj_prediction": n_feats
+            # }
+    except ImportError as e_import_convert:
+        app_logger.error(f"e0:{e_import_convert}.")
+        raise e_import_convert
+
+
+if __name__ == '__main__':
+    from PIL import Image
+
+    npy_file = "prediction_masks_46.27697017893455_9.616470336914064_46.11441972281433_9.264907836914064.npy"
+    prediction_masks = np.load(Path(PROJECT_ROOT_FOLDER) / "tmp" / "try_by_steps" / "t0" / npy_file)
+
+    print("#")
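load_affine_transformation_from_matrix (duplicated in src/prediction_api/predictors.py below) rebuilds an affine georeferencing transform from the six coefficients returned alongside the downloaded image. A small standalone check of that construction, assuming a plain north-up geotransform and using the (a, d, b, e, c, f) unpacking order taken from the function body:

from affine import Affine

# a/e: pixel width and (negative) pixel height; c/f: origin coordinates
a, d, b, e, c, f = 10.0, 0.0, 0.0, -10.0, 1000.0, 2000.0
center = Affine(a, b, c, d, e, f)
# the -0.5 shift moves from a pixel-center to a pixel-corner convention,
# mirroring `center * Affine.translation(-0.5, -0.5)` above
corner = center * Affine.translation(-0.5, -0.5)
print(corner * (0, 0))  # world coordinates of the raster origin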
src/prediction_api/predictors.py CHANGED
@@ -1,13 +1,18 @@
 # Press the green button in the gutter to run the script.
-import numpy as np
+import json
+from pathlib import Path
 from typing import List
 
+import numpy as np
+import rasterio
+from PIL import Image
+
 from src import app_logger, MODEL_FOLDER
-from src.io.tms2geotiff import download_extent
+from src.io.tiles_to_tiff import convert
+from src.io.tms2geotiff import save_geotiff_gdal
 from src.prediction_api.sam_onnx import SegmentAnythingONNX
-from src.utilities.constants import MODEL_ENCODER_NAME, ZOOM, DEFAULT_TMS, MODEL_DECODER_NAME
+from src.utilities.constants import MODEL_ENCODER_NAME, ZOOM, MODEL_DECODER_NAME, ROOT
 from src.utilities.serialize import serialize
-from src.utilities.type_hints import input_float_tuples
 
 
 models_dict = {"fastsam": {"instance": None}}
@@ -46,7 +51,7 @@ def load_affine_transformation_from_matrix(matrix_source_coeffs: List):
     app_logger.error(f"exception:{e}, check https://github.com/rasterio/affine project for updates")
 
 
-def samexporter_predict(bbox: input_float_tuples, prompt: list[dict], zoom: float = ZOOM, model_name: str = "fastsam") -> dict:
+def samexporter_predict(bbox, prompt: list[dict], zoom: float = ZOOM, model_name: str = "fastsam") -> dict:
     try:
         from rasterio.features import shapes
         from geopandas import GeoDataFrame
@@ -61,54 +66,114 @@ def samexporter_predict(bbox: input_float_tuples, prompt: list[dict], zoom: floa
         app_logger.info(f"using a {model_name} instance model...")
         models_instance = models_dict[model_name]["instance"]
 
-        for coord in bbox:
-            app_logger.debug(f"bbox coord:{coord}, type:{type(coord)}.")
-        app_logger.info(f"start download_extent using bbox:{bbox}, type:{type(bbox)}, download image...")
+        img, matrix = convert(
+            bounding_box=bbox,
+            zoom=int(zoom)
+        )
 
-        pt0 = bbox[0]
-        pt1 = bbox[1]
-        img, matrix = download_extent(DEFAULT_TMS, pt0[0], pt0[1], pt1[0], pt1[1], zoom)
+        pt0, pt1 = bbox
+        rio_output = f"/tmp/downloaded_rio_{pt0[0]}_{pt0[1]}_{pt1[0]}_{pt1[1]}.tif"
+        save_geotiff_gdal(img, rio_output, matrix)
+        app_logger.info(f"saved downloaded geotiff image to {rio_output}...")
 
-        app_logger.info(f"img type {type(img)}, matrix type {type(matrix)}.")
-        app_logger.debug(f"matrix values: {serialize(matrix)}.")
         np_img = np.array(img)
-        app_logger.debug(f"np_img type {type(np_img)}.")
-        app_logger.debug(f"np_img dtype {np_img.dtype}, shape {np_img.shape}.")
-        app_logger.info(f"geotiff created with size/shape {img.size} and transform matrix {str(matrix)}, start to initialize SamGeo instance:")
-        app_logger.info(f"use {model_name} model, ENCODER model {MODEL_ENCODER_NAME} and {MODEL_DECODER_NAME} from {MODEL_FOLDER}): model instantiated, creating embedding...")
+        app_logger.info(f"## img type {type(np_img)}, prompt:{prompt}.")
+
+        app_logger.info(f"onnxruntime input shape/size (shape if PIL) {np_img.size},"
+                        f"start to initialize SamGeo instance:")
+        try:
+            app_logger.info(f"onnxruntime input shape (NUMPY) {np_img.shape}.")
+        except Exception as e_shape:
+            app_logger.error(f"e_shape:{e_shape}.")
+        app_logger.info(f"use {model_name} model, ENCODER model {MODEL_ENCODER_NAME} and"
+                        f" {MODEL_DECODER_NAME} from {MODEL_FOLDER}): model instantiated, creating embedding...")
         embedding = models_instance.encode(np_img)
         app_logger.info(f"embedding created, running predict_masks...")
         prediction_masks = models_instance.predict_masks(embedding, prompt)
-        app_logger.debug(f"predict_masks terminated...")
+        app_logger.info(f"predict_masks terminated...")
         app_logger.info(f"predict_masks terminated, prediction masks shape:{prediction_masks.shape}, {prediction_masks.dtype}.")
-
-        mask = np.zeros((prediction_masks.shape[2], prediction_masks.shape[3]), dtype=np.uint8)
-        for m in prediction_masks[0, :, :, :]:
-            mask[m > 0.0] = 255
+        pt0, pt1 = bbox
+        prediction_masks_output = f"/tmp/prediction_masks_{pt0[0]}_{pt0[1]}_{pt1[0]}_{pt1[1]}.npy"
+        np.save(
+            prediction_masks_output,
+            prediction_masks, allow_pickle=True, fix_imports=True
+        )
+        app_logger.info(f"saved prediction_masks:{prediction_masks_output}.")
+
+        # mask = np.zeros((prediction_masks.shape[2], prediction_masks.shape[3]), dtype=np.uint8)
+        # app_logger.info(f"output mask shape:{mask.shape}, {mask.dtype}.")
+        # ## todo: convert to geojson directly within the loop to avoid merging two objects
+        # for n, m in enumerate(prediction_masks[0, :, :, :]):
+        #     app_logger.info(f"## {n} mask => m shape:{mask.shape}, {mask.dtype}.")
+        #     mask[m > 0.0] = 255
+        prediction_masks0 = prediction_masks[0]
+        app_logger.info(f"prediction_masks0 shape:{prediction_masks0.shape}.")
+
+        try:
+            pmf = np.sum(prediction_masks0, axis=0).astype(np.uint8)
+        except Exception as e_sum_pmf:
+            app_logger.error(f"e_sum_pmf:{e_sum_pmf}.")
+            pmf = prediction_masks0[0]
+        app_logger.info(f"creating pil image from prediction mask with shape {pmf.shape}.")
+        pil_pmf = Image.fromarray(pmf)
+        pil_pmf_output = f"/tmp/pil_pmf_{pmf.shape[0]}_{pmf.shape[1]}.png"
+        pil_pmf.save(pil_pmf_output)
+        app_logger.info(f"saved pil_pmf:{pil_pmf_output}.")
+
+        mask = np.zeros(pmf.shape, dtype=np.uint8)
+        mask[pmf > 0] = 255
+
+        # cv2.imwrite(f"/tmp/cv2_mask_predicted_{mask.shape[0]}_{mask.shape[1]}_{mask.shape[2]}.png", mask)
+        pil_mask = Image.fromarray(mask)
+        pil_mask_predicted_output = f"/tmp/pil_mask_predicted_{mask.shape[0]}_{mask.shape[1]}.png"
+        pil_mask.save(pil_mask_predicted_output)
+        app_logger.info(f"saved pil_mask_predicted:{pil_mask_predicted_output}.")
 
         mask_unique_values, mask_unique_values_count = serialize(np.unique(mask, return_counts=True))
-        app_logger.debug(f"mask_unique_values:{mask_unique_values}.")
-        app_logger.debug(f"mask_unique_values_count:{mask_unique_values_count}.")
-
-        transform = load_affine_transformation_from_matrix(matrix)
-        app_logger.info(f"image/geojson origin matrix:{matrix}, transform:{transform}: create shapes_generator...")
-        shapes_generator = ({
-            'properties': {'raster_val': v}, 'geometry': s}
-            for i, (s, v)
-            in enumerate(shapes(mask, mask=mask, transform=transform))
-        )
-        shapes_list = list(shapes_generator)
-        app_logger.info(f"created {len(shapes_list)} polygons.")
-        gpd_polygonized_raster = GeoDataFrame.from_features(shapes_list, crs="EPSG:3857")
-        app_logger.info(f"created a GeoDataFrame...")
-        geojson = gpd_polygonized_raster.to_json(to_wgs84=True)
-        app_logger.info(f"created geojson...")
-
-        return {
-            "geojson": geojson,
-            "n_shapes_geojson": len(shapes_list),
-            "n_predictions": len(prediction_masks),
-            # "n_pixels_predictions": zip_arrays(mask_unique_values, mask_unique_values_count),
-        }
+        app_logger.info(f"mask_unique_values:{mask_unique_values}.")
+        app_logger.info(f"mask_unique_values_count:{mask_unique_values_count}.")
+
+        app_logger.info(f"read geotiff:{rio_output}: create shapes_generator...")
+        # app_logger.info(f"image/geojson transform:{transform}: create shapes_generator...")
+        with rasterio.open(rio_output, "r", driver="GTiff") as rio_src:
+            band = rio_src.read()
+            try:
+                transform = load_affine_transformation_from_matrix(matrix)
+                app_logger.info(f"geotiff band:{band.shape}, type: {type(band)}, dtype: {band.dtype}.")
+                app_logger.info(f"geotiff band:{mask.shape}.")
+                app_logger.info(f"transform from matrix:{transform}.")
+                app_logger.info(f"rio_src crs:{rio_src.crs}.")
+                app_logger.info(f"rio_src transform:{rio_src.transform}.")
+            except Exception as e_shape_band:
+                app_logger.error(f"e_shape_band:{e_shape_band}.")
+                raise e_shape_band
+            # mask_band = band != 0
+            shapes_generator = ({
+                'properties': {'raster_val': v}, 'geometry': s}
+                for i, (s, v)
+                # in enumerate(shapes(mask, mask=(band != 0), transform=rio_src.transform))
+                # use mask=None to avoid using source
+                in enumerate(shapes(mask, mask=None, transform=rio_src.transform))
+            )
+            app_logger.info(f"created shapes_generator.")
+            shapes_list = list(shapes_generator)
+            app_logger.info(f"created {len(shapes_list)} polygons.")
+            gpd_polygonized_raster = GeoDataFrame.from_features(shapes_list, crs="EPSG:3857")
+            app_logger.info(f"created a GeoDataFrame...")
+            geojson = gpd_polygonized_raster.to_json(to_wgs84=True)
+            app_logger.info(f"created geojson...")
+
+            output_geojson = str(Path(ROOT) / "geojson_output.json")
+            with open(output_geojson, "w") as jj_out:
+                app_logger.info(f"writing geojson file to {output_geojson}.")
+                json.dump(json.loads(geojson), jj_out)
+                app_logger.info(f"geojson file written to {output_geojson}.")
+
+            return {
+                "geojson": geojson,
+                "n_shapes_geojson": len(shapes_list),
+                "n_predictions": len(prediction_masks),
+                # "n_pixels_predictions": zip_arrays(mask_unique_values, mask_unique_values_count),
+            }
     except ImportError as e:
         app_logger.error(f"Error trying import module:{e}.")
src/prediction_api/sam_onnx.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from copy import deepcopy
2
 
3
  import cv2
@@ -5,6 +6,7 @@ import numpy as np
5
  import onnxruntime
6
 
7
  from src import app_logger
 
8
 
9
 
10
  class SegmentAnythingONNX:
@@ -145,12 +147,35 @@ class SegmentAnythingONNX:
145
  batch_masks = []
146
  for mask_id in range(masks.shape[1]):
147
  mask = masks[batch, mask_id]
148
- mask = cv2.warpAffine(
149
- mask,
150
- transform_matrix[:2],
151
- (original_size[1], original_size[0]),
152
- flags=cv2.INTER_LINEAR,
153
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
154
  batch_masks.append(mask)
155
  output_masks.append(batch_masks)
156
  return np.array(output_masks)
@@ -172,12 +197,36 @@ class SegmentAnythingONNX:
172
  [0, 0, 1],
173
  ]
174
  )
175
- cv_image = cv2.warpAffine(
176
- cv_image,
177
- transform_matrix[:2],
178
- (self.input_size[1], self.input_size[0]),
179
- flags=cv2.INTER_LINEAR,
180
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
181
 
182
  encoder_inputs = {
183
  self.encoder_input_name: cv_image.astype(np.float32),
 
1
+ import json
2
  from copy import deepcopy
3
 
4
  import cv2
 
6
  import onnxruntime
7
 
8
  from src import app_logger
9
+ from src.utilities.serialize import serialize
10
 
11
 
12
  class SegmentAnythingONNX:
 
147
  batch_masks = []
148
  for mask_id in range(masks.shape[1]):
149
  mask = masks[batch, mask_id]
150
+ try:
151
+ try:
152
+ app_logger.info(f"mask_shape transform_masks:{mask.shape}, dtype:{mask.dtype}.")
153
+ except Exception as e_mask_shape_transform_masks:
154
+ app_logger.error(f"e_mask_shape_transform_masks:{e_mask_shape_transform_masks}.")
155
+ # raise e_mask_shape_transform_masks
156
+ output_filename = f"/tmp/2_cv2img_{'_'.join([str(s) for s in mask.shape])}.npy"
157
+ np.save(output_filename, np.array(mask), allow_pickle=True, fix_imports=True)
158
+ app_logger.info(f"written: {output_filename} ...")
159
+ with open("/tmp/2_args.json", "w") as jj_out_dst:
160
+ json.dump({
161
+ "transform_matrix": serialize(transform_matrix),
162
+ "M": serialize(transform_matrix[:2]),
163
+ "original_size": serialize(original_size),
164
+ "dsize": serialize((original_size[1], original_size[0])),
165
+ "flags": cv2.INTER_LINEAR
166
+ }, jj_out_dst)
167
+ app_logger.info("written: /tmp/2_args.json")
168
+ mask = cv2.warpAffine(
169
+ mask,
170
+ transform_matrix[:2],
171
+ (original_size[1], original_size[0]),
172
+ flags=cv2.INTER_LINEAR,
173
+ )
174
+ except Exception as e_warp_affine1:
175
+ app_logger.error(f"e_warp_affine1 mask shape:{mask.shape}, dtype:{mask.dtype}.")
176
+ app_logger.error(f"e_warp_affine1 transform_matrix:{transform_matrix}, [:2] {transform_matrix[:2]}.")
177
+ app_logger.error(f"e_warp_affine1 original_size:{original_size}.")
178
+ raise e_warp_affine1
179
  batch_masks.append(mask)
180
  output_masks.append(batch_masks)
181
  return np.array(output_masks)
 
197
  [0, 0, 1],
198
  ]
199
  )
200
+ try:
201
+ np_cv_image = np.array(cv_image)
202
+ try:
203
+ app_logger.info(f"cv_image shape_encode:{np_cv_image.shape}, dtype:{np_cv_image.dtype}.")
204
+ except Exception as e_cv_image_shape_encode:
205
+ app_logger.error(f"e_cv_image_shape_encode:{e_cv_image_shape_encode}.")
206
+ # raise e_cv_image_shape_encode
207
+ output_filename = f"/tmp/1_cv2img_{'_'.join([str(s) for s in np_cv_image.shape])}.npy"
208
+ np.save(output_filename, np_cv_image, allow_pickle=True, fix_imports=True)
209
+ app_logger.info(f"written: {output_filename} ...")
210
+ with open("/tmp/1_args.json", "w") as jj_out_dst:
211
+ json.dump({
212
+ "transform_matrix": serialize(transform_matrix),
213
+ "M": serialize(transform_matrix[:2]),
214
+ "flags": cv2.INTER_LINEAR
215
+ }, jj_out_dst)
216
+ app_logger.info("written: /tmp/1_args.json")
217
+ cv_image = cv2.warpAffine(
218
+ cv_image,
219
+ transform_matrix[:2],
220
+ (self.input_size[1], self.input_size[0]),
221
+ flags=cv2.INTER_LINEAR,
222
+ )
223
+ except Exception as e_warp_affine2:
224
+ app_logger.error(f"e_warp_affine2:{e_warp_affine2}.")
225
+ np_cv_image = np.array(cv_image)
226
+ app_logger.error(f"e_warp_affine2 cv_image shape:{np_cv_image.shape}, dtype:{np_cv_image.dtype}.")
227
+ app_logger.error(f"e_warp_affine2 transform_matrix:{transform_matrix}, [:2] {transform_matrix[:2]}")
228
+ app_logger.error(f"e_warp_affine2 self.input_size:{self.input_size}.")
229
+ raise e_warp_affine2
230
 
231
  encoder_inputs = {
232
  self.encoder_input_name: cv_image.astype(np.float32),
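A minimal sketch of the cv2.warpAffine call instrumented above, assuming sizes are stored as (height, width) so that dsize must be flipped to (width, height); the 684x1024 encoder size and the pure-scale matrix are assumptions for illustration.

import cv2
import numpy as np

image = np.random.randint(0, 255, (256, 256, 3), dtype=np.uint8)
input_size = (684, 1024)  # assumed (height, width), like self.input_size above
scale_x = input_size[1] / image.shape[1]
scale_y = input_size[0] / image.shape[0]
transform_matrix = np.array(
    [[scale_x, 0, 0],
     [0, scale_y, 0],
     [0, 0, 1]]
)
warped = cv2.warpAffine(
    image,
    transform_matrix[:2],            # M: the 2x3 top of the 3x3 matrix
    (input_size[1], input_size[0]),  # dsize: (width, height)
    flags=cv2.INTER_LINEAR,
)
print(warped.shape)  # (684, 1024, 3)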
src/utilities/constants.py CHANGED
@@ -1,9 +1,14 @@
1
  """Project constants"""
2
  CHANNEL_EXAGGERATIONS_LIST = [2.5, 1.1, 2.0]
3
- INPUT_CRS_STRING = "EPSG:3857"
4
- OUTPUT_CRS_STRING = "EPSG:4326"
 
 
 
5
  ROOT = "/tmp"
6
  NODATA_VALUES = -32768
 
 
7
  SKIP_CONDITIONS_LIST = [{"skip_key": "confidence", "skip_value": 0.5, "skip_condition": "major"}]
8
  FEATURE_SQUARE_TEMPLATE = [
9
  {'type': 'Feature', 'properties': {'id': 1},
@@ -33,3 +38,4 @@ WKT_3857 = 'PROJCS["WGS 84 / Pseudo-Mercator",GEOGCS["WGS 84",DATUM["WGS_1984",S
33
  WKT_3857 += 'AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Mercator_1SP"],PARAMETER["central_meridian",0],'
34
  WKT_3857 += 'PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH],EXTENSION["PROJ4",'
35
  WKT_3857 += '"+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs"],AUTHORITY["EPSG","3857"]]'
 
 
1
  """Project constants"""
2
  CHANNEL_EXAGGERATIONS_LIST = [2.5, 1.1, 2.0]
3
+ INPUT_CRS_STRING = "EPSG:4326"
4
+ OUTPUT_CRS_STRING = "EPSG:3857"
5
+ # DOMAIN_URL_TILES = "elevation-tiles-prod-eu.s3.eu-central-1.amazonaws.com"
6
+ # RELATIVE_URL_TILES = "geotiff/{z}/{x}/{y}.tif"
7
+ # COMPLETE_URL_TILES = f"https://{DOMAIN_URL_TILES}/{RELATIVE_URL_TILES}"
8
  ROOT = "/tmp"
9
  NODATA_VALUES = -32768
10
+ MODEL_PROJECT_NAME = "surferdtm"
11
+ MODEL_VERSION = 4
12
  SKIP_CONDITIONS_LIST = [{"skip_key": "confidence", "skip_value": 0.5, "skip_condition": "major"}]
13
  FEATURE_SQUARE_TEMPLATE = [
14
  {'type': 'Feature', 'properties': {'id': 1},
 
38
  WKT_3857 += 'AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Mercator_1SP"],PARAMETER["central_meridian",0],'
39
  WKT_3857 += 'PARAMETER["scale_factor",1],PARAMETER["false_easting",0],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["X",EAST],AXIS["Y",NORTH],EXTENSION["PROJ4",'
40
  WKT_3857 += '"+proj=merc +a=6378137 +b=6378137 +lat_ts=0.0 +lon_0=0.0 +x_0=0.0 +y_0=0 +k=1.0 +units=m +nadgrids=@null +wktext +no_defs"],AUTHORITY["EPSG","3857"]]'
41
+ COMPLETE_URL_TILES = DEFAULT_TMS  # assumes DEFAULT_TMS is defined or imported elsewhere in this module
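A minimal sketch of expanding a z/x/y tile template like the commented constants above; since DEFAULT_TMS is not visible in this hunk, the template below simply re-joins the commented DOMAIN_URL_TILES and RELATIVE_URL_TILES values, and get_tile_url is hypothetical.

COMPLETE_URL_TILES = (
    "https://elevation-tiles-prod-eu.s3.eu-central-1.amazonaws.com"
    "/geotiff/{z}/{x}/{y}.tif"
)

def get_tile_url(z: int, x: int, y: int) -> str:
    # fill the {z}/{x}/{y} placeholders of the TMS template
    return COMPLETE_URL_TILES.format(z=z, x=x, y=y)

print(get_tile_url(10, 549, 394))  # illustrative tile indices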
src/utilities/type_hints.py CHANGED
@@ -1,8 +1,24 @@
1
  """custom type hints"""
2
  from typing import List, Tuple
 
3
 
4
- input_floatlist = List[float]
5
- input_floatlist2 = List[input_floatlist]
6
- input_float_tuples = List[Tuple[float, float]]
7
  ts_dict_str2 = dict[str, str]
8
  ts_dict_str3 = dict[str, str, any]
1
  """custom type hints"""
2
  from typing import Any, List, Tuple
3
+ import numpy as np
4
 
5
+ ts_list_str1 = list[str]
6
+ ts_http2 = tuple[ts_list_str1, ts_list_str1]
7
+ ts_list_float2 = list[float]  # a two-element [float, float] list
8
+ ts_llist_float2 = list[ts_list_float2]  # a pair of two-float lists
9
+ ts_geojson = dict[str, Any]  # geojson-like mapping; builtin dict takes exactly two type args
10
+ ts_float64_1 = tuple[np.float64, np.float64, np.float64, np.float64, np.float64, np.float64]
11
+ ts_float64_2 = tuple[np.float64, np.float64, np.float64, np.float64, np.float64, np.float64, np.float64]
12
  ts_dict_str2 = dict[str, str]
13
  ts_dict_str3 = dict[str, Any]  # use Any for heterogeneous values instead of a third type arg
14
+ ts_dict_str2b = dict[str, Any]
15
+ ts_ddict1 = dict[str, Any]  # nested, heterogeneous dict
16
+ ts_ddict2 = dict[str, dict[str, list]]
17
+ ts_tuple_str2 = tuple[str, str]
18
+ ts_tuple_arr2 = tuple[np.ndarray, np.ndarray]
19
+ ts_tuple_flat2 = tuple[float, float]
20
+ ts_tuple_flat4 = tuple[float, float, float, float]
21
+ ts_list_float4 = list[float]  # a four-element float list
22
+ ts_tuple_int4 = tuple[int, int, int, int]
23
+ ts_llist2 = list[list[int]]  # a pair of [int, int] pairs
24
+ ts_ddict3 = dict[str, Any]  # deeply nested, heterogeneous mapping
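A minimal sketch of annotating a function with the aliases above, assuming they are importable from src.utilities.type_hints; split_latlng and its values are hypothetical.

import numpy as np

from src.utilities.type_hints import ts_tuple_arr2, ts_tuple_flat2

def split_latlng(point: ts_tuple_flat2) -> ts_tuple_arr2:
    # wrap a (lat, lng) pair into two single-element arrays
    lat, lng = point
    return np.array([lat]), np.array([lng])

lat_arr, lng_arr = split_latlng((37.0, 15.0))
print(lat_arr, lng_arr)  # [37.] [15.]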
src/utilities/utilities.py CHANGED
@@ -1,5 +1,8 @@
1
  """Various utilities (logger, time benchmark, args dump, numerical and stats info)"""
 
 
2
  from src import app_logger
 
3
 
4
 
5
  def is_base64(sb):
@@ -57,11 +60,10 @@ def get_constants(event: dict, debug=False) -> dict:
57
  """
58
  import json
59
 
60
- local_logger = setup_logging(debug)
61
  try:
62
  body = event["body"]
63
  except Exception as e_constants1:
64
- local_logger.error(f"e_constants1:{e_constants1}.")
65
  body = event
66
 
67
  if isinstance(body, str):
@@ -69,11 +71,10 @@ def get_constants(event: dict, debug=False) -> dict:
69
 
70
  try:
71
  debug = body["debug"]
72
- local_logger.info(f"re-try get debug value:{debug}, log_level:{local_logger.level}.")
73
- local_logger = setup_logging(debug)
74
  except KeyError:
75
- local_logger.error("get_constants:: no debug key, pass...")
76
- local_logger.debug(f"constants debug:{debug}, log_level:{local_logger.level}, body:{body}.")
77
 
78
  try:
79
  return {
@@ -82,8 +83,102 @@ def get_constants(event: dict, debug=False) -> dict:
82
  "debug": debug
83
  }
84
  except KeyError as e_key_constants2:
85
- local_logger.error(f"e_key_constants2:{e_key_constants2}.")
86
  raise KeyError(f"e_key_constants2:{e_key_constants2}.")
87
  except Exception as e_constants2:
88
- local_logger.error(f"e_constants2:{e_constants2}.")
89
  raise e_constants2
1
  """Various utilities (logger, time benchmark, args dump, numerical and stats info)"""
2
+ import numpy as np
3
+
4
  from src import app_logger
5
+ from src.utilities.type_hints import ts_float64_1, ts_float64_2
6
 
7
 
8
  def is_base64(sb):
 
60
  """
61
  import json
62
 
 
63
  try:
64
  body = event["body"]
65
  except Exception as e_constants1:
66
+ app_logger.error(f"e_constants1:{e_constants1}.")
67
  body = event
68
 
69
  if isinstance(body, str):
 
71
 
72
  try:
73
  debug = body["debug"]
74
+ app_logger.info(f"re-try get debug value:{debug}, log_level:{app_logger.level}.")
 
75
  except KeyError:
76
+ app_logger.error("get_constants:: no debug key, pass...")
77
+ app_logger.info(f"constants debug:{debug}, log_level:{app_logger.level}, body:{body}.")
78
 
79
  try:
80
  return {
 
83
  "debug": debug
84
  }
85
  except KeyError as e_key_constants2:
86
+ app_logger.error(f"e_key_constants2:{e_key_constants2}.")
87
  raise KeyError(f"e_key_constants2:{e_key_constants2}.")
88
  except Exception as e_constants2:
89
+ app_logger.error(f"e_constants2:{e_constants2}.")
90
  raise e_constants2
91
+
92
+
93
+ def get_rasters_info(rasters_list:list, names_list:list, title:str="", debug:bool=False) -> str:
94
+ """
95
+ Analyze a list of numpy arrays and collect a string of useful information. For every raster it reports:
96
+
97
+ - type of raster
98
+ - raster.dtype if it is an instance of np.ndarray
99
+ - raster shape
100
+ - min of raster values, over all axes (flattening the array)
101
+ - max of raster values, over all axes (flattening the array)
102
+ - mean of raster values, over all axes (flattening the array)
103
+ - median of raster values, over all axes (flattening the array)
104
+ - standard deviation of raster values, over all axes (flattening the array)
105
+ - variance of raster values, over all axes (flattening the array)
106
+
107
+ Raises:
108
+ ValueError if rasters_list and names_list have a different number of elements
109
+
110
+ Args:
111
+ rasters_list: list of numpy array raster to analyze
112
+ names_list: string list of numpy array
113
+ title: title of current analytic session
114
+ debug: logging debug argument
115
+
116
+ Returns:
117
+ str: the collected information
118
+
119
+ """
120
+
121
+ msg = f"get_rasters_info::title:{title},\n"
122
+ if len(rasters_list) != len(names_list):
123
+ msg = "rasters_list and names_list should have the same number of elements:\n"
124
+ msg += f"len(rasters_list):{len(rasters_list)}, len(names_list):{len(names_list)}."
125
+ raise ValueError(msg)
126
+ try:
127
+ for raster, name in zip(rasters_list, names_list):
128
+ try:
129
+ if isinstance(raster, np.ndarray):
130
+ shape_or_len = raster.shape
131
+ elif isinstance(raster, list):
132
+ shape_or_len = len(raster)
133
+ else:
134
+ raise ValueError(f"wrong argument type:{type(raster)}, variable:{raster}.")
135
+ zmin, zmax, zmean, zmedian, zstd, zvar = get_stats_raster(raster, debug=debug)
136
+ msg += "name:{}:type:{},dtype:{},shape:{},min:{},max:{},mean:{},median:{},std:{},var:{}\n".format(
137
+ name, type(raster), raster.dtype if isinstance(raster, np.ndarray) else None, shape_or_len, zmin,
138
+ zmax, zmean, zmedian, zstd, zvar
139
+ )
140
+ except Exception as get_rasters_types_e:
141
+ msg = f"get_rasters_types_e::{get_rasters_types_e}, type_raster:{type(raster)}."
142
+ app_logger.error(msg)
143
+ raise ValueError(msg)
144
+ except IndexError as get_rasters_types_ie:
145
+ app_logger.error(f"get_rasters_types::len:rasters_list:{len(rasters_list)}, len_names_list:{len(names_list)}.")
146
+ raise get_rasters_types_ie
147
+ return msg + "\n=============================\n"
148
+
149
+
150
+ def get_stats_raster(raster: np.ndarray, get_rms:bool=False, debug:bool=False) -> ts_float64_1 | ts_float64_2:
151
+ """
152
+ Analyze a numpy array to extract a tuple of useful statistics:
153
+
154
+ - min of raster values, over all axes (flattening the array)
155
+ - max of raster values, over all axes (flattening the array)
156
+ - mean of raster values, over all axes (flattening the array)
157
+ - median of raster values, over all axes (flattening the array)
158
+ - standard deviation of raster values, over all axes (flattening the array)
159
+ - variance of raster values, over all axes (flattening the array)
160
+ - root mean square of raster values, only when get_rms is True
163
+
164
+ Args:
165
+ raster: numpy array to analyze
166
+ get_rms: if True, also compute the root mean square (RMS) of the raster
167
+ debug: logging debug argument
168
+
169
+ Returns:
170
+ tuple: float values (min, max, mean, median, standard deviation, variance of raster)
171
+
172
+ """
173
+ std = np.nanstd(raster)
174
+ if get_rms:
175
+ try:
176
+ rms = np.sqrt(np.nanmean(np.square(raster)))
177
+ except Exception as rms_e:
178
+ rms = None
179
+ app_logger.error(f"get_stats_raster::rms_Exception:{rms_e}.")
180
+ app_logger.info(f"nanmin:{type(np.nanmin(raster))}.")
181
+ return (np.nanmin(raster), np.nanmax(raster), np.nanmean(raster), np.nanmedian(raster), std,
182
+ np.nanvar(raster), rms)
183
+ return (np.nanmin(raster), np.nanmax(raster), np.nanmean(raster), np.nanmedian(raster), std,
184
+ np.nanvar(raster))
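A minimal usage sketch for the two helpers above, assuming they are importable from src.utilities.utilities; the toy raster values are illustrative.

import numpy as np

from src.utilities.utilities import get_rasters_info, get_stats_raster

raster = np.array([[1.0, 2.0], [3.0, np.nan]])
zmin, zmax, zmean, zmedian, zstd, zvar = get_stats_raster(raster)
print(zmin, zmax, zmean, zmedian)  # 1.0 3.0 2.0 2.0
print(get_rasters_info([raster], ["toy_raster"], title="demo"))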