alessandro trinca tornidor committed on
Commit 0677d1d · 1 Parent(s): 0e9a197

feat: remove loguru and change logging method (use the @session_logger.set_uuid_logging decorator on functions with logs)
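
Note: the change swaps loguru's contextualized app_logger for Python's stdlib logging plus a per-call UUID. A minimal usage sketch, mirroring the call sites in the diffs below (session_logger ships with lisa_on_cuda; only the calls that appear in this commit are assumed):

import logging
import os

from lisa_on_cuda.utils import session_logger

loglevel = os.getenv('LOGLEVEL', 'INFO').upper()
session_logger.change_logging(loglevel)      # replaces app_logger = setup_logging(debug=...)


@session_logger.set_uuid_logging             # each call gets its own uuid in the log records
def gpu_initialization() -> None:
    logging.info("GPU initialization...")    # stdlib logging instead of loguru's app_logger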

app.py CHANGED
@@ -1,11 +1,10 @@
 import json
+import logging
 import os
 import pathlib
-import uuid
 from typing import Callable, NoReturn
 
 import gradio as gr
-from samgis_lisa_on_zero.utilities.constants import GRADIO_EXAMPLE_BODY, GRADIO_EXAMPLES_TEXT_LIST, GRADIO_MARKDOWN
 import spaces
 import uvicorn
 from fastapi import FastAPI, HTTPException, Request, status
@@ -13,16 +12,16 @@ from fastapi.exceptions import RequestValidationError
 from fastapi.responses import FileResponse, HTMLResponse, JSONResponse
 from fastapi.staticfiles import StaticFiles
 from fastapi.templating import Jinja2Templates
-from lisa_on_cuda.utils import app_helpers, frontend_builder, create_folders_and_variables_if_not_exists
+from lisa_on_cuda.utils import app_helpers, frontend_builder, create_folders_and_variables_if_not_exists, session_logger
 from pydantic import ValidationError
-from samgis_core.utilities.fastapi_logger import setup_logging
+
 from samgis_lisa_on_zero import PROJECT_ROOT_FOLDER, WORKDIR
-from samgis_lisa_on_zero.utilities.type_hints import ApiRequestBody, StringPromptApiRequestBody
+from samgis_lisa_on_zero.utilities.constants import GRADIO_EXAMPLE_BODY, GRADIO_EXAMPLES_TEXT_LIST, GRADIO_MARKDOWN
+from samgis_lisa_on_zero.utilities.type_hints import StringPromptApiRequestBody
 
 
 loglevel = os.getenv('LOGLEVEL', 'INFO').upper()
-app_logger = setup_logging(debug=loglevel == "DEBUG")
-
+session_logger.change_logging(loglevel)
 VITE_INDEX_URL = os.getenv("VITE_INDEX_URL", "/")
 VITE_SAMGIS_URL = os.getenv("VITE_SAMGIS_URL", "/samgis")
 VITE_LISA_URL = os.getenv("VITE_LISA_URL", "/lisa")
@@ -32,8 +31,9 @@ app = FastAPI(title=FASTAPI_TITLE, version="1.0")
 
 
 @spaces.GPU
+@session_logger.set_uuid_logging
 def gpu_initialization() -> None:
-    app_logger.info("GPU initialization...")
+    logging.info("GPU initialization...")
 
 
 def get_example_complete(example_text):
@@ -67,45 +67,27 @@ def get_gradio_interface_geojson(fn_inference: Callable):
     return gradio_app
 
 
+@session_logger.set_uuid_logging
 def handle_exception_response(exception: Exception) -> NoReturn:
     import subprocess
     project_root_folder_content = subprocess.run(
         f"ls -l {PROJECT_ROOT_FOLDER}/", shell=True, universal_newlines=True, stdout=subprocess.PIPE
     )
-    app_logger.error(f"project_root folder 'ls -l' command output: {project_root_folder_content.stdout}.")
+    logging.error(f"project_root folder 'ls -l' command output: {project_root_folder_content.stdout}.")
     workdir_folder_content = subprocess.run(
         f"ls -l {WORKDIR}/", shell=True, universal_newlines=True, stdout=subprocess.PIPE
    )
-    app_logger.error(f"workdir folder 'ls -l' command stdout: {workdir_folder_content.stdout}.")
-    app_logger.error(f"workdir folder 'ls -l' command stderr: {workdir_folder_content.stderr}.")
-    app_logger.error(f"inference error:{exception}.")
+    logging.error(f"workdir folder 'ls -l' command stdout: {workdir_folder_content.stdout}.")
+    logging.error(f"workdir folder 'ls -l' command stderr: {workdir_folder_content.stderr}.")
+    logging.error(f"inference error:{exception}.")
     raise HTTPException(
         status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail="Internal server error on inference"
     )
 
 
-@app.middleware("http")
-async def request_middleware(request, call_next):
-    request_id = str(uuid.uuid4())
-    with app_logger.contextualize(request_id=request_id):
-        app_logger.info("Request started")
-
-        try:
-            response = await call_next(request)
-
-        except Exception as ex_middleware_http:
-            app_logger.error(f"Request failed, ex_middleware_http: {ex_middleware_http}")
-            response = JSONResponse(content={"success": False}, status_code=500)
-
-        finally:
-            response.headers["X-Request-ID"] = request_id
-            app_logger.info("Request ended")
-
-        return response
-
-
 @app.get("/health")
-async def health() -> JSONResponse:
+@session_logger.set_uuid_logging
+def health() -> JSONResponse:
     import importlib.metadata
     from importlib.metadata import PackageNotFoundError
 
@@ -115,73 +97,76 @@ async def health() -> JSONResponse:
         lisa_on_cuda_version = importlib.metadata.version('lisa-on-cuda')
         samgis_lisa_on_cuda_version = importlib.metadata.version('samgis-lisa-on-zero')
     except PackageNotFoundError as pe:
-        app_logger.error(f"pe:{pe}.")
+        logging.error(f"pe:{pe}.")
 
     msg = "still alive, "
     msg += f"""version:{samgis_lisa_on_cuda_version}, core version:{core_version},"""
     msg += f"""lisa-on-cuda version:{lisa_on_cuda_version},"""
 
-    app_logger.info(msg)
+    logging.info(msg)
     return JSONResponse(status_code=200, content={"msg": "still alive..."})
 
 
-def infer_lisa_gradio(request_input: StringPromptApiRequestBody) -> JSONResponse:
-    from samgis_lisa_on_zero.io_package.wrappers_helpers import get_parsed_bbox_points_with_string_prompt, get_source_name
+@session_logger.set_uuid_logging
+def infer_lisa_gradio(request_input: StringPromptApiRequestBody) -> str:
+    from samgis_lisa_on_zero.io_package.wrappers_helpers import get_parsed_bbox_points_with_string_prompt
     from samgis_lisa_on_zero.prediction_api import lisa
     from samgis_lisa_on_zero.utilities.constants import LISA_INFERENCE_FN
 
-    app_logger.info("starting lisa inference request...")
+    logging.info("starting lisa inference request...")
 
     try:
         import time
 
         time_start_run = time.time()
         body_request = get_parsed_bbox_points_with_string_prompt(request_input)
-        app_logger.info(f"lisa body_request:{body_request}.")
+        logging.info(f"lisa body_request:{body_request}.")
        try:
            source = body_request["source"]
            source_name = body_request["source_name"]
-            app_logger.debug(f"body_request:type(source):{type(source)}, source:{source}.")
-            app_logger.debug(f"body_request:type(source_name):{type(source_name)}, source_name:{source_name}.")
-            app_logger.debug(f"lisa module:{lisa}.")
+            logging.debug(f"body_request:type(source):{type(source)}, source:{source}.")
+            logging.debug(f"body_request:type(source_name):{type(source_name)}, source_name:{source_name}.")
+            logging.debug(f"lisa module:{lisa}.")
            gpu_initialization()
            output = lisa.lisa_predict(
                bbox=body_request["bbox"], prompt=body_request["prompt"], zoom=body_request["zoom"],
                source=source, source_name=source_name, inference_function_name_key=LISA_INFERENCE_FN
            )
            duration_run = time.time() - time_start_run
-            app_logger.info(f"duration_run:{duration_run}.")
+            logging.info(f"duration_run:{duration_run}.")
            body = {
                "duration_run": duration_run,
                "output": output
            }
            dumped = json.dumps(body)
-            app_logger.info(f"json.dumps(body) type:{type(dumped)}, len:{len(dumped)}.")
-            app_logger.debug(f"complete json.dumps(body):{dumped}.")
+            logging.info(f"json.dumps(body) type:{type(dumped)}, len:{len(dumped)}.")
+            logging.debug(f"complete json.dumps(body):{dumped}.")
            return dumped
        except Exception as inference_exception:
            handle_exception_response(inference_exception)
    except ValidationError as va1:
-        app_logger.error(f"validation error: {str(va1)}.")
+        logging.error(f"validation error: {str(va1)}.")
        raise ValidationError("Unprocessable Entity")
 
 
+@session_logger.set_uuid_logging
 @app.post("/infer_lisa")
 def infer_lisa(request_input: StringPromptApiRequestBody) -> JSONResponse:
     dumped = infer_lisa_gradio(request_input=request_input)
-    app_logger.info(f"json.dumps(body) type:{type(dumped)}, len:{len(dumped)}.")
-    app_logger.debug(f"complete json.dumps(body):{dumped}.")
+    logging.info(f"json.dumps(body) type:{type(dumped)}, len:{len(dumped)}.")
+    logging.debug(f"complete json.dumps(body):{dumped}.")
     return JSONResponse(status_code=200, content={"body": dumped})
 
 
 @app.exception_handler(RequestValidationError)
-async def request_validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
-    app_logger.error(f"exception errors: {exc.errors()}.")
-    app_logger.error(f"exception body: {exc.body}.")
+@session_logger.set_uuid_logging
+def request_validation_exception_handler(request: Request, exc: RequestValidationError) -> JSONResponse:
+    logging.error(f"exception errors: {exc.errors()}.")
+    logging.error(f"exception body: {exc.body}.")
     headers = request.headers.items()
-    app_logger.error(f'request header: {dict(headers)}.')
+    logging.error(f'request header: {dict(headers)}.')
     params = request.query_params.items()
-    app_logger.error(f'request query params: {dict(params)}.')
+    logging.error(f'request query params: {dict(params)}.')
     return JSONResponse(
         status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
         content={"msg": "Error - Unprocessable Entity"}
@@ -189,12 +174,13 @@ async def request_validation_exception_handler(request: Request, exc: RequestVal
 
 
 @app.exception_handler(HTTPException)
-async def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
-    app_logger.error(f"exception: {str(exc)}.")
+@session_logger.set_uuid_logging
+def http_exception_handler(request: Request, exc: HTTPException) -> JSONResponse:
+    logging.error(f"exception: {str(exc)}.")
     headers = request.headers.items()
-    app_logger.error(f'request header: {dict(headers)}.')
+    logging.error(f'request header: {dict(headers)}.')
     params = request.query_params.items()
-    app_logger.error(f'request query params: {dict(params)}.')
+    logging.error(f'request query params: {dict(params)}.')
     return JSONResponse(
         status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
         content={"msg": "Error - Internal Server Error"}
@@ -202,21 +188,21 @@ async def http_exception_handler(request: Request, exc: HTTPException) -> JSONRe
 
 
 write_tmp_on_disk = os.getenv("WRITE_TMP_ON_DISK", "")
-app_logger.info(f"write_tmp_on_disk:{write_tmp_on_disk}.")
+logging.info(f"write_tmp_on_disk:{write_tmp_on_disk}.")
 if bool(write_tmp_on_disk):
     try:
         path_write_tmp_on_disk = pathlib.Path(write_tmp_on_disk)
         try:
             pathlib.Path.unlink(path_write_tmp_on_disk, missing_ok=True)
         except (IsADirectoryError, PermissionError, OSError) as err:
-            app_logger.error(f"{err} while removing old write_tmp_on_disk:{write_tmp_on_disk}.")
-            app_logger.error(f"is file?{path_write_tmp_on_disk.is_file()}.")
-            app_logger.error(f"is symlink?{path_write_tmp_on_disk.is_symlink()}.")
-            app_logger.error(f"is folder?{path_write_tmp_on_disk.is_dir()}.")
+            logging.error(f"{err} while removing old write_tmp_on_disk:{write_tmp_on_disk}.")
+            logging.error(f"is file?{path_write_tmp_on_disk.is_file()}.")
+            logging.error(f"is symlink?{path_write_tmp_on_disk.is_symlink()}.")
+            logging.error(f"is folder?{path_write_tmp_on_disk.is_dir()}.")
         os.makedirs(write_tmp_on_disk, exist_ok=True)
         app.mount("/vis_output", StaticFiles(directory=write_tmp_on_disk), name="vis_output")
     except RuntimeError as runtime_error:
-        app_logger.error(f"{runtime_error} while loading the folder write_tmp_on_disk:{write_tmp_on_disk}...")
+        logging.error(f"{runtime_error} while loading the folder write_tmp_on_disk:{write_tmp_on_disk}...")
        raise runtime_error
 templates = Jinja2Templates(directory=WORKDIR / "static")
 
@@ -239,7 +225,7 @@ frontend_builder.build_frontend(
 )
 create_folders_and_variables_if_not_exists.folders_creation()
 
-app_logger.info("build_frontend ok!")
+logging.info("build_frontend ok!")
 
 templates = Jinja2Templates(directory="templates")
 
@@ -272,12 +258,12 @@ async def index() -> FileResponse:
     return FileResponse(path=static_dist_folder / "index.html", media_type="text/html")
 
 
-app_helpers.app_logger.info(f"creating gradio interface...")
+app_helpers.logging.info(f"creating gradio interface...")
 io = get_gradio_interface_geojson(infer_lisa_gradio)
-app_helpers.app_logger.info(
+app_helpers.logging.info(
     f"gradio interface created, mounting gradio app on url {VITE_GRADIO_URL} within FastAPI...")
 app = gr.mount_gradio_app(app, io, path=VITE_GRADIO_URL)
-app_helpers.app_logger.info("mounted gradio app within fastapi")
+app_helpers.logging.info("mounted gradio app within fastapi")
 
 
 if __name__ == '__main__':
samgis_lisa_on_zero/io_package/coordinates_pixel_conversion.py CHANGED
@@ -1,12 +1,13 @@
 """functions useful to convert to/from latitude-longitude coordinates to pixel image coordinates"""
+from lisa_on_cuda.utils import session_logger
 from samgis_core.utilities.type_hints import TupleFloat, TupleFloatAny
-
 from samgis_lisa_on_zero import app_logger
 from samgis_lisa_on_zero.utilities.constants import TILE_SIZE, EARTH_EQUATORIAL_RADIUS
 from samgis_lisa_on_zero.utilities.type_hints import ImagePixelCoordinates
 from samgis_lisa_on_zero.utilities.type_hints import LatLngDict
 
 
+@session_logger.set_uuid_logging
 def _get_latlng2pixel_projection(latlng: LatLngDict) -> ImagePixelCoordinates:
     from math import log, pi, sin
 
@@ -30,6 +31,7 @@ def _get_latlng2pixel_projection(latlng: LatLngDict) -> ImagePixelCoordinates:
         raise e_get_latlng2pixel_projection
 
 
+@session_logger.set_uuid_logging
 def _get_point_latlng_to_pixel_coordinates(latlng: LatLngDict, zoom: int | float) -> ImagePixelCoordinates:
     from math import floor
 
@@ -50,6 +52,7 @@ def _get_point_latlng_to_pixel_coordinates(latlng: LatLngDict, zoom: int | float
         raise e_format_latlng_to_pixel_coordinates
 
 
+@session_logger.set_uuid_logging
 def get_latlng_to_pixel_coordinates(
         latlng_origin_ne: LatLngDict,
         latlng_origin_sw: LatLngDict,
samgis_lisa_on_zero/io_package/geo_helpers.py CHANGED
@@ -2,10 +2,12 @@
 from affine import Affine
 from numpy import ndarray as np_ndarray
 
+from lisa_on_cuda.utils import session_logger
 from samgis_core.utilities.type_hints import ListFloat, TupleFloat, DictStrInt
 from samgis_lisa_on_zero import app_logger
 
 
+@session_logger.set_uuid_logging
 def load_affine_transformation_from_matrix(matrix_source_coefficients: ListFloat) -> Affine:
     """
     Wrapper for rasterio.Affine.from_gdal() method
@@ -44,6 +46,7 @@ def get_affine_transform_from_gdal(matrix_source_coefficients: ListFloat or Tupl
     return Affine.from_gdal(*matrix_source_coefficients)
 
 
+@session_logger.set_uuid_logging
 def get_vectorized_raster_as_geojson(mask: np_ndarray, transform: TupleFloat) -> DictStrInt:
     """
     Get shapes and values of connected regions in a dataset or array
samgis_lisa_on_zero/io_package/raster_helpers.py CHANGED
@@ -3,7 +3,7 @@ import numpy as np
 from numpy import ndarray, bitwise_not
 from rasterio import open as rasterio_open
 
-from samgis_lisa_on_zero import PROJECT_ROOT_FOLDER
+from lisa_on_cuda.utils import session_logger
 from samgis_lisa_on_zero import app_logger
 from samgis_lisa_on_zero.utilities.constants import OUTPUT_CRS_STRING
 from samgis_lisa_on_zero.utilities.type_hints import XYZTerrainProvidersNames
@@ -71,6 +71,7 @@ def get_raster_terrain_rgb_like(arr: ndarray, xyz_provider_name, nan_value_int:
     return output
 
 
+@session_logger.set_uuid_logging
 def get_rgb_prediction_image(raster_cropped: ndarray, slope_cellsize: int, invert_image: bool = True) -> ndarray:
     """
     Return an RGB image from input numpy array
@@ -100,6 +101,7 @@ def get_rgb_prediction_image(raster_cropped: ndarray, slope_cellsize: int, inver
         raise ve_get_rgb_prediction_image
 
 
+@session_logger.set_uuid_logging
 def get_rgb_image(arr_channel0: ndarray, arr_channel1: ndarray, arr_channel2: ndarray,
                   invert_image: bool = True) -> ndarray:
     """
@@ -139,6 +141,7 @@ def get_rgb_image(arr_channel0: ndarray, arr_channel1: ndarray, arr_channel2: nd
         raise ve_get_rgb_image
 
 
+@session_logger.set_uuid_logging
 def get_slope_curvature(dem: ndarray, slope_cellsize: int, title: str = "") -> tuple[ndarray, ndarray]:
     """
     Return a tuple of two numpy arrays representing slope and curvature (1st grade derivative and 2nd grade derivative)
@@ -171,6 +174,7 @@ def get_slope_curvature(dem: ndarray, slope_cellsize: int, title: str = "") -> t
         raise ve_get_slope_curvature
 
 
+@session_logger.set_uuid_logging
 def calculate_slope(dem_array: ndarray, cell_size: int, calctype: str = "degree") -> ndarray:
     """
     Return a numpy array representing slope (1st grade derivative)
@@ -198,6 +202,7 @@ def calculate_slope(dem_array: ndarray, cell_size: int, calctype: str = "degree"
         raise ve_calculate_slope
 
 
+@session_logger.set_uuid_logging
 def normalize_array(arr: ndarray, high: int = 255, norm_type: str = "float", invert: bool = False, title: str = "") -> ndarray:
     """
     Return normalized numpy array between 0 and 'high' value. Default normalization type is int
@@ -239,6 +244,7 @@ def normalize_array(arr: ndarray, high: int = 255, norm_type: str = "float", inv
         raise ValueError(msg)
 
 
+@session_logger.set_uuid_logging
 def normalize_array_list(arr_list: list[ndarray], exaggerations_list: list[float] = None, title: str = "") -> ndarray:
     """
     Return a normalized numpy array from a list of numpy array and an optional list of exaggeration values.
@@ -266,6 +272,7 @@ def normalize_array_list(arr_list: list[ndarray], exaggerations_list: list[float
     return arr_tmp / len(arr_list)
 
 
+@session_logger.set_uuid_logging
 def check_empty_array(arr: ndarray, val: float) -> bool:
     """
     Return True if the input numpy array is empy. Check if
@@ -297,6 +304,7 @@ def check_empty_array(arr: ndarray, val: float) -> bool:
     return check1 or check2 or check3 or check4 or check5
 
 
+@session_logger.set_uuid_logging
 def write_raster_png(arr, transform, prefix: str, suffix: str, folder_output_path="/tmp"):
     from pathlib import Path
     from rasterio.plot import reshape_as_raster
@@ -315,6 +323,7 @@ def write_raster_png(arr, transform, prefix: str, suffix: str, folder_output_pat
     app_logger.info(f"written:{output_filename} as PNG, use {OUTPUT_CRS_STRING} as CRS.")
 
 
+@session_logger.set_uuid_logging
 def write_raster_tiff(arr, transform, prefix: str, suffix: str, folder_output_path="/tmp"):
     from pathlib import Path
     output_filename = Path(folder_output_path) / f"{prefix}_{suffix}.tiff"
samgis_lisa_on_zero/io_package/tms2geotiff.py CHANGED
@@ -4,8 +4,10 @@ from numpy import ndarray
 from samgis_core.utilities.type_hints import TupleFloat
 from xyzservices import TileProvider
 
+from lisa_on_cuda.utils import session_logger
 from samgis_lisa_on_zero import app_logger
-from samgis_lisa_on_zero.utilities.constants import (OUTPUT_CRS_STRING, DRIVER_RASTERIO_GTIFF, N_MAX_RETRIES, N_CONNECTION, N_WAIT,
+from samgis_lisa_on_zero.utilities.constants import (OUTPUT_CRS_STRING, DRIVER_RASTERIO_GTIFF, N_MAX_RETRIES,
+                                                     N_CONNECTION, N_WAIT,
                                                      ZOOM_AUTO, BOOL_USE_CACHE)
 from samgis_lisa_on_zero.utilities.type_hints import tuple_ndarray_transform
 
@@ -17,6 +19,7 @@ n_wait = int(os.getenv("N_WAIT", N_WAIT))
 zoom_auto_string = os.getenv("ZOOM_AUTO", ZOOM_AUTO)
 
 
+@session_logger.set_uuid_logging
 def download_extent(w: float, s: float, e: float, n: float, zoom: int or str = zoom_auto_string,
                     source: TileProvider or str = None,
                     wait: int = n_wait, max_retries: int = n_max_retries, n_connections: int = n_connection,
@@ -70,6 +73,7 @@ def download_extent(w: float, s: float, e: float, n: float, zoom: int or str = z
         raise e_download_extent
 
 
+@session_logger.set_uuid_logging
 def crop_raster(w: float, s: float, e: float, n: float, raster: ndarray, raster_bbox: TupleFloat,
                 crs: str = OUTPUT_CRS_STRING, driver: str = DRIVER_RASTERIO_GTIFF) -> tuple_ndarray_transform:
     """
@@ -134,6 +138,7 @@ def crop_raster(w: float, s: float, e: float, n: float, raster: ndarray, raster_
         raise e_crop_raster
 
 
+@session_logger.set_uuid_logging
 def get_transform_raster(raster: ndarray, raster_bbox: TupleFloat) -> tuple_ndarray_transform:
     """
     Convert the input raster image to RGB and extract the Affine
@@ -170,6 +175,7 @@ def get_transform_raster(raster: ndarray, raster_bbox: TupleFloat) -> tuple_ndar
         raise e_get_transform_raster
 
 
+@session_logger.set_uuid_logging
 def reshape_as_image(arr):
     try:
         from numpy import swapaxes
samgis_lisa_on_zero/io_package/wrappers_helpers.py CHANGED
@@ -1,52 +1,18 @@
 """lambda helper functions"""
-import logging
-from sys import stdout
 from typing import Dict
 
-import loguru
 from xyzservices import providers, TileProvider
 
+from lisa_on_cuda.utils import session_logger
 from lisa_on_cuda.utils.app_helpers import get_cleaned_input
 from samgis_lisa_on_zero import app_logger
 from samgis_lisa_on_zero.io_package.coordinates_pixel_conversion import get_latlng_to_pixel_coordinates
-from samgis_lisa_on_zero.utilities.constants import COMPLETE_URL_TILES_MAPBOX, COMPLETE_URL_TILES_NEXTZEN, CUSTOM_RESPONSE_MESSAGES
-from samgis_lisa_on_zero.utilities.type_hints import ApiRequestBody, ContentTypes, XYZTerrainProvidersNames, \
-    XYZDefaultProvidersNames, StringPromptApiRequestBody
-from samgis_core.utilities.utilities import base64_decode
-
-
-def get_response(status: int, start_time: float, request_id: str, response_body: Dict = None) -> str:
-    """
-    Response composer
-
-    Args:
-        status: status response
-        start_time: request start time (float)
-        request_id: str
-        response_body: dict we embed into our response
-
-    Returns:
-        json response
-
-    """
-    from json import dumps
-    from time import time
-
-    app_logger.debug(f"response_body:{response_body}.")
-    response_body["duration_run"] = time() - start_time
-    response_body["message"] = CUSTOM_RESPONSE_MESSAGES[status]
-    response_body["request_id"] = request_id
-
-    response = {
-        "statusCode": status,
-        "header": {"Content-Type": ContentTypes.APPLICATION_JSON},
-        "body": dumps(response_body),
-        "isBase64Encoded": False
-    }
-    app_logger.debug(f"response type:{type(response)} => {response}.")
-    return dumps(response)
+from samgis_lisa_on_zero.utilities.constants import COMPLETE_URL_TILES_MAPBOX, COMPLETE_URL_TILES_NEXTZEN
+from samgis_lisa_on_zero.utilities.type_hints import (
+    ApiRequestBody, XYZTerrainProvidersNames, XYZDefaultProvidersNames, StringPromptApiRequestBody)
 
 
+@session_logger.set_uuid_logging
 def get_parsed_bbox_points_with_string_prompt(request_input: StringPromptApiRequestBody) -> Dict:
     """
     Parse the raw input request into bbox, prompt string and zoom
@@ -88,6 +54,7 @@ def get_parsed_bbox_points_with_string_prompt(request_input: StringPromptApiRequ
     }
 
 
+@session_logger.set_uuid_logging
 def get_parsed_bbox_points_with_dictlist_prompt(request_input: ApiRequestBody) -> Dict:
     """
     Parse the raw input request into bbox, prompt and zoom
@@ -125,6 +92,7 @@ def get_parsed_bbox_points_with_dictlist_prompt(request_input: ApiRequestBody) -
     }
 
 
+@session_logger.set_uuid_logging
 def _get_parsed_prompt_list(bbox_ne, bbox_sw, zoom, prompt_list):
     new_prompt_list = []
     for prompt in prompt_list:
@@ -144,12 +112,14 @@ def _get_parsed_prompt_list(bbox_ne, bbox_sw, zoom, prompt_list):
     return new_prompt_list
 
 
+@session_logger.set_uuid_logging
 def _get_new_prompt_data_point(bbox_ne, bbox_sw, prompt, zoom):
     current_point = get_latlng_to_pixel_coordinates(bbox_ne, bbox_sw, prompt.data, zoom, prompt.type)
     app_logger.debug(f"current prompt: {type(current_point)}, value:{current_point}, label: {prompt.label}.")
     return [current_point['x'], current_point['y']]
 
 
+@session_logger.set_uuid_logging
 def _get_new_prompt_data_rectangle(bbox_ne, bbox_sw, prompt, zoom):
     current_point_ne = get_latlng_to_pixel_coordinates(bbox_ne, bbox_sw, prompt.data.ne, zoom, prompt.type)
     app_logger.debug(
@@ -166,51 +136,6 @@ def _get_new_prompt_data_rectangle(bbox_ne, bbox_sw, prompt, zoom):
     ]
 
 
-def get_parsed_request_body(event: Dict or str) -> ApiRequestBody:
-    """
-    Validator for the raw input request lambda event
-
-    Args:
-        event: input dict
-
-    Returns:
-        parsed request input
-    """
-    from json import dumps, loads
-    from logging import getLevelName
-
-    def _get_current_log_level(logger: loguru.logger) -> [str, loguru._logger.Level]:
-        levels = logger._core.levels
-        current_log_level = logger._core.min_level
-        level_filt = [l for l in levels.items() if l[1].no == current_log_level]
-        return level_filt[0]
-
-    app_logger.info(f"event:{dumps(event)}...")
-    try:
-        raw_body = event["body"]
-    except Exception as e_constants1:
-        app_logger.error(f"e_constants1:{e_constants1}.")
-        raw_body = event
-    app_logger.debug(f"raw_body, #1: {type(raw_body)}, {raw_body}...")
-    if isinstance(raw_body, str):
-        body_decoded_str = base64_decode(raw_body)
-        app_logger.debug(f"body_decoded_str: {type(body_decoded_str)}, {body_decoded_str}...")
-        raw_body = loads(body_decoded_str)
-    app_logger.info(f"body, #2: {type(raw_body)}, {raw_body}...")
-
-    parsed_body = ApiRequestBody.model_validate(raw_body)
-    log_level = "DEBUG" if parsed_body.debug else "INFO"
-    app_logger.remove()
-    app_logger.add(stdout, level=log_level)
-    try:
-        current_log_level_name, _ = _get_current_log_level(app_logger)
-        app_logger.warning(f"set log level to {getLevelName(current_log_level_name)}.")
-    except Exception as ex:
-        print("failing setting parsing bbox, logger is ok? ex:", ex, "#")
-
-    return parsed_body
-
-
 mapbox_terrain_rgb = TileProvider(
     name=XYZTerrainProvidersNames.MAPBOX_TERRAIN_TILES_NAME,
     url=COMPLETE_URL_TILES_MAPBOX,
@@ -223,6 +148,7 @@ nextzen_terrain_rgb = TileProvider(
 )
 
 
+@session_logger.set_uuid_logging
 def get_url_tile(source_type: str):
     try:
         match source_type.lower():
@@ -246,6 +172,7 @@ def check_source_type_is_terrain(source: str | TileProvider):
     return isinstance(source, TileProvider) and source.name in list(XYZTerrainProvidersNames)
 
 
+@session_logger.set_uuid_logging
 def get_source_name(source: str | TileProvider) -> str | bool:
     try:
         match source.lower():
samgis_lisa_on_zero/prediction_api/lisa.py CHANGED
@@ -1,15 +1,19 @@
 from datetime import datetime
-from spaces import GPU as SPACES_GPU
 
+from lisa_on_cuda.utils import session_logger
 from samgis_core.utilities.type_hints import LlistFloat, DictStrInt
+from spaces import GPU as SPACES_GPU
+
 from samgis_lisa_on_zero.io_package.geo_helpers import get_vectorized_raster_as_geojson
 from samgis_lisa_on_zero.io_package.raster_helpers import write_raster_png, write_raster_tiff
 from samgis_lisa_on_zero.io_package.tms2geotiff import download_extent
 from samgis_lisa_on_zero.utilities.constants import DEFAULT_URL_TILES, LISA_INFERENCE_FN
 
+
 msg_write_tmp_on_disk = "found option to write images and geojson output..."
 
 
+@session_logger.set_uuid_logging
 def load_model_and_inference_fn(inference_function_name_key: str):
     from samgis_lisa_on_zero import app_logger
     from lisa_on_cuda.utils import app_helpers
@@ -28,6 +32,7 @@ def load_model_and_inference_fn(inference_function_name_key: str):
     models_dict[inference_function_name_key]["inference"] = inference_fn
 
 
+@session_logger.set_uuid_logging
 def lisa_predict(
         bbox: LlistFloat,
         prompt: str,
samgis_lisa_on_zero/prediction_api/predictors.py CHANGED
@@ -1,4 +1,9 @@
 """functions using machine learning instance model(s)"""
+from lisa_on_cuda.utils import session_logger
+from samgis_core.prediction_api import sam_onnx2, sam_onnx_inference
+from samgis_core.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME, DEFAULT_INPUT_SHAPE
+from samgis_core.utilities.type_hints import LlistFloat, DictStrInt, ListDict
+
 from samgis_lisa_on_zero import app_logger, MODEL_FOLDER
 from samgis_lisa_on_zero.io_package.geo_helpers import get_vectorized_raster_as_geojson
 from samgis_lisa_on_zero.io_package.raster_helpers import get_raster_terrain_rgb_like, get_rgb_prediction_image
@@ -6,11 +11,9 @@ from samgis_lisa_on_zero.io_package.tms2geotiff import download_extent
 from samgis_lisa_on_zero.io_package.wrappers_helpers import check_source_type_is_terrain
 from samgis_lisa_on_zero.prediction_api.global_models import models_dict, embedding_dict
 from samgis_lisa_on_zero.utilities.constants import DEFAULT_URL_TILES, SLOPE_CELLSIZE
-from samgis_core.prediction_api import sam_onnx2, sam_onnx_inference
-from samgis_core.utilities.constants import MODEL_ENCODER_NAME, MODEL_DECODER_NAME, DEFAULT_INPUT_SHAPE
-from samgis_core.utilities.type_hints import LlistFloat, DictStrInt, ListDict
 
 
+@session_logger.set_uuid_logging
 def samexporter_predict(
         bbox: LlistFloat,
         prompt: ListDict,
samgis_lisa_on_zero/utilities/session_logger.py CHANGED
@@ -3,21 +3,19 @@ import logging
 from functools import wraps
 from typing import Callable, Tuple
 
+
 logging_uuid = contextvars.ContextVar("uuid")
 default_formatter = '%(asctime)s | %(uuid)s [%(pathname)s:%(module)s %(lineno)d] %(levelname)s | %(message)s'
-
-
 loggingType = logging.CRITICAL | logging.ERROR | logging.WARNING | logging.INFO | logging.DEBUG
 
 
-def setup_logging(
-        debug: bool = False, formatter: str = default_formatter, name: str = "logger"
+def setup_logging(log_level: loggingType, formatter: str = default_formatter, name: str = "logger"
 ) -> Tuple[logging, contextvars.ContextVar]:
     """
     Create a logging instance with log string formatter.
 
     Args:
-        debug: logging debug argument
+        log_level: logging level
         formatter: log string formatter
         name: logger name
 
@@ -36,13 +34,13 @@ def setup_logging(
         return record
 
     logging.setLogRecordFactory(record_factory)
-    logging.basicConfig(level=logging.DEBUG, format=default_formatter, force=True)
+    logging.basicConfig(level="DEBUG", format=default_formatter, force=True)
 
     logger = logging.getLogger(name=name)
 
     # create a console handler
     ch = logging.StreamHandler()
-    ch.setLevel(logging.DEBUG)
+    ch.setLevel("DEBUG")
 
     # create formatter and add to the console
     formatter = logging.Formatter(formatter)
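
The set_uuid_logging decorator and change_logging helper used throughout this commit live in lisa_on_cuda.utils.session_logger and are not part of this diff. Below is a self-contained sketch of how they could be built on the same contextvar plus LogRecord-factory pattern shown in setup_logging above; it is illustrative only, and the packaged implementation may differ.

import contextvars
import logging
import uuid
from functools import wraps

# same contextvar + formatter idea as setup_logging() above
logging_uuid = contextvars.ContextVar("uuid", default="")
default_formatter = '%(asctime)s | %(uuid)s [%(pathname)s:%(module)s %(lineno)d] %(levelname)s | %(message)s'


def change_logging(log_level: str = "INFO") -> None:
    # install a record factory that copies the current UUID into every LogRecord,
    # then (re)configure the root logger at the requested level
    old_factory = logging.getLogRecordFactory()

    def record_factory(*args, **kwargs):
        record = old_factory(*args, **kwargs)
        record.uuid = logging_uuid.get()
        return record

    logging.setLogRecordFactory(record_factory)
    logging.basicConfig(level=log_level, format=default_formatter, force=True)


def set_uuid_logging(func):
    # give every call of the decorated function its own session UUID
    @wraps(func)
    def wrapper(*args, **kwargs):
        token = logging_uuid.set(str(uuid.uuid4()))
        try:
            return func(*args, **kwargs)
        finally:
            logging_uuid.reset(token)
    return wrapper


@set_uuid_logging
def demo() -> None:  # hypothetical function, only for illustration
    logging.info("doing some work...")


if __name__ == "__main__":
    change_logging("INFO")
    demo()  # the emitted record carries a fresh %(uuid)s value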
tests/io_package/test_wrappers_helpers.py CHANGED
@@ -1,55 +1,14 @@
 import json
-import time
 import unittest
-
-from http import HTTPStatus
 from unittest.mock import patch
 
 from samgis_lisa_on_zero.io_package import wrappers_helpers
-from samgis_lisa_on_zero.io_package.wrappers_helpers import get_parsed_bbox_points_with_dictlist_prompt, get_parsed_request_body, get_response
+from samgis_lisa_on_zero.io_package.wrappers_helpers import get_parsed_bbox_points_with_dictlist_prompt
 from samgis_lisa_on_zero.utilities.type_hints import ApiRequestBody
 from tests import TEST_EVENTS_FOLDER
 
 
 class WrappersHelpersTest(unittest.TestCase):
-    @patch.object(time, "time")
-    def test_get_response(self, time_mocked):
-        time_diff = 108
-        end_run = 1000
-        time_mocked.return_value = end_run
-        start_time = end_run - time_diff
-        aws_request_id = "test_invoke_id"
-
-        with open(TEST_EVENTS_FOLDER / "get_response.json") as tst_json:
-            inputs_outputs = json.load(tst_json)
-
-        response_type = "200"
-        body_response = inputs_outputs[response_type]["input"]
-        output = get_response(HTTPStatus.OK.value, start_time, aws_request_id, body_response)
-        assert json.loads(output) == inputs_outputs[response_type]["output"]
-
-        response_type = "400"
-        response_400 = get_response(HTTPStatus.BAD_REQUEST.value, start_time, aws_request_id, {})
-        assert response_400 == inputs_outputs[response_type]["output"]
-
-        response_type = "422"
-        response_422 = get_response(HTTPStatus.UNPROCESSABLE_ENTITY.value, start_time, aws_request_id, {})
-        assert response_422 == inputs_outputs[response_type]["output"]
-
-        response_type = "500"
-        response_500 = get_response(HTTPStatus.INTERNAL_SERVER_ERROR.value, start_time, aws_request_id, {})
-        assert response_500 == inputs_outputs[response_type]["output"]
-
-    @staticmethod
-    def test_get_parsed_bbox_points():
-        with open(TEST_EVENTS_FOLDER / "get_parsed_bbox_prompts_single_point.json") as tst_json:
-            inputs_outputs = json.load(tst_json)
-        for k, input_output in inputs_outputs.items():
-            print(f"k:{k}.")
-            raw_body = get_parsed_request_body(**input_output["input"])
-            output = get_parsed_bbox_points_with_dictlist_prompt(raw_body)
-            assert output == input_output["output"]
-
     @staticmethod
     def test_get_parsed_bbox_other_inputs():
         for json_filename in ["single_rectangle", "multi_prompt"]:
@@ -59,39 +18,6 @@ class WrappersHelpersTest(unittest.TestCase):
             output = get_parsed_bbox_points_with_dictlist_prompt(parsed_input)
             assert output == inputs_outputs["output"]
 
-    @staticmethod
-    def test_get_parsed_request_body():
-        from samgis_core.utilities.utilities import base64_encode
-
-        input_event = {
-            "event": {
-                "bbox": {
-                    "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
-                    "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
-                },
-                "prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
-                "zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
-            }
-        }
-        expected_output_dict = {
-            "bbox": {
-                "ne": {"lat": 38.03932961278458, "lng": 15.36808069832851},
-                "sw": {"lat": 37.455509218936974, "lng": 14.632807441554068}
-            },
-            "prompt": [{"type": "point", "data": {"lat": 37.0, "lng": 15.0}, "label": 0}],
-            "zoom": 10, "source_type": "OpenStreetMap.Mapnik", "debug": True
-        }
-        output = get_parsed_request_body(input_event["event"])
-        assert output == ApiRequestBody.model_validate(input_event["event"])
-
-        input_event_str = json.dumps(input_event["event"])
-        output = get_parsed_request_body(input_event_str)
-        assert output == ApiRequestBody.model_validate(expected_output_dict)
-
-        event = {"body": base64_encode(input_event_str).decode("utf-8")}
-        output = get_parsed_request_body(event)
-        assert output == ApiRequestBody.model_validate(expected_output_dict)
-
     def test_get_parsed_bbox_points_with_string_prompt(self):
         from samgis_lisa_on_zero.io_package.wrappers_helpers import get_parsed_bbox_points_with_string_prompt
         req = {