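"""Gradio app for computing per-year biodiversity index scores over a region of interest.

Index configurations are read from `indices.yaml`, the corresponding Google Earth Engine
datasets are reduced to zonal means over a buffered point location, and the resulting
scores are stored in (and read back from) a MotherDuck (DuckDB) `bioindicator` table.
"""
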
import datetime
import json
import logging
import os

import duckdb
import ee
import gradio as gr
import pandas as pd
import plotly.graph_objects as go
import yaml
from google.oauth2 import service_account


from utils.js import get_window_url_params

# Logging
logging.basicConfig(format="%(levelname)s:%(message)s", level=logging.INFO)

# Define constants
DATE = "2020-01-01"
YEAR = 2020
LOCATION = [-74.653370, 5.845328]
ROI_RADIUS = 20000
GEE_SERVICE_ACCOUNT = (
    "climatebase-july-2023@ee-geospatialml-aquarry.iam.gserviceaccount.com"
)
GEE_SERVICE_ACCOUNT_CREDENTIALS_FILE = "ee_service_account.json"
INDICES_FILE = "indices.yaml"
START_YEAR = 2015
END_YEAR = 2022


class IndexGenerator:
    """
    A class to generate Earth Engine indices and compute their zonal means over a region of interest.

    Args:
        centroid (tuple): The centroid coordinates (longitude, latitude) of the region of interest.
        roi_radius (int): The radius (in meters) of the buffer drawn around the centroid to form the region of interest.
        year (int): The year for which indices are generated.
        indices_file (str): Path to the YAML file defining the index configurations.
        project_name (str, optional): The name of the project. Defaults to "".
        map (geemap.Map, optional): Map object used to display generated layers. Defaults to None (i.e. no map created).
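
    Example (illustrative; assumes Earth Engine is initialized and the index key exists in the YAML file):
        >>> ig = IndexGenerator(
        ...     centroid=LOCATION,
        ...     roi_radius=ROI_RADIUS,
        ...     year=2020,
        ...     indices_file=INDICES_FILE,
        ...     project_name="Test Project",
        ... )
        >>> df = ig.generate_composite_index_df(["Air"])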
    """

    def __init__(
        self,
        centroid,
        roi_radius,
        year,
        indices_file,
        project_name="",
        map=None,
    ):
        self.indices = self._load_indices(indices_file)
        self.centroid = centroid
        self.roi = ee.Geometry.Point(*centroid).buffer(roi_radius)
        self.year = year
        self.start_date = str(datetime.date(self.year, 1, 1))
        self.end_date = str(datetime.date(self.year, 12, 31))
        self.daterange = [self.start_date, self.end_date]
        self.project_name = project_name
        self.map = map
        self.show = self.map is not None

    def _cloudfree(self, gee_path):
        """
        Internal method to generate a cloud-free composite.

        Args:
            gee_path (str): The path to the Google Earth Engine (GEE) image or image collection.

        Returns:
            ee.Image: The cloud-free composite clipped to the region of interest.
        """
        # Load a raw Landsat ImageCollection for a single year.
        collection = (
            ee.ImageCollection(gee_path)
            .filterDate(*self.daterange)
            .filterBounds(self.roi)
        )

        # Create a cloud-free composite with custom parameters for cloud score threshold and percentile.
        composite_cloudfree = ee.Algorithms.Landsat.simpleComposite(
            **{"collection": collection, "percentile": 75, "cloudScoreRange": 5}
        )
        return composite_cloudfree.clip(self.roi)

    def _load_indices(self, indices_file):
        # Read index configurations
        with open(indices_file, "r") as stream:
            try:
                return yaml.safe_load(stream)
            except yaml.YAMLError as e:
                logging.error(e)
                return None

    def show_map(self, map=None):
        if map is not None:
            self.map = map
            self.show = True

    def disable_map(self):
        self.show = False

    def generate_index(self, index_config):
        """
        Generates an index based on the provided index configuration.

        Args:
            index_config (dict): Configuration for generating the index.

        Returns:
            ee.Image: The generated index clipped to the region of interest.
        """
        match index_config["gee_type"]:
            case "image":
                dataset = ee.Image(index_config["gee_path"]).clip(self.roi)
                if index_config.get("select"):
                    dataset = dataset.select(index_config["select"])
            case "image_collection":
                dataset = (
                    ee.ImageCollection(index_config["gee_path"])
                    .filterBounds(self.roi)
                    .map(lambda image: image.clip(self.roi))
                    .mean()
                )
                if index_config.get("select"):
                    dataset = dataset.select(index_config["select"])
            case "feature_collection":
                dataset = (
                    ee.Image()
                    .float()
                    .paint(
                        ee.FeatureCollection(index_config["gee_path"]),
                        index_config["select"],
                    )
                    .clip(self.roi)
                )
            case "algebraic":
                image = self._cloudfree(index_config["gee_path"])
                dataset = image.normalizedDifference(["B4", "B3"])
            case _:
                dataset = None

        if dataset is None:
            raise Exception("Failed to generate dataset.")
        if self.show and index_config.get("show"):
            self.map.addLayer(dataset, index_config["viz"], index_config["name"])
        logging.info(f"Generated index: {index_config['name']}")
        return dataset

    def zonal_mean_index(self, index_key):
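        """
        Generates the index for `index_key` and reduces it to its mean value over the region of interest.
        """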
        index_config = self.indices[index_key]
        dataset = self.generate_index(index_config)
        # zm = self._zonal_mean(single, index_config.get('bandname') or 'constant')
        out = dataset.reduceRegion(
            **{
                "reducer": ee.Reducer.mean(),
                "geometry": self.roi,
                "scale": 200,  # map scale
            }
        ).getInfo()
        if index_config.get("bandname"):
            return out[index_config.get("bandname")]
        return out

    def generate_composite_index_df(self, indices=[]):
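        """
        Computes zonal means for the given index keys and returns them as a DataFrame with one row
        per metric, annotated with the instance's year, centroid, project name and ROI geometry.
        """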
        data = {
            "metric": indices,
            "year": self.year,
            "centroid": str(self.centroid),
            "project_name": self.project_name,
            "value": list(map(self.zonal_mean_index, indices)),
            "area": self.roi.area().getInfo(),  # m^2
            "geojson": str(self.roi.getInfo()),
            # to-do: coefficient
        }

        logging.info("data", data)
        df = pd.DataFrame(data)
        return df


def set_up_duckdb():
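    """
    Connects to the MotherDuck `climatebase` database and loads the spatial extension.
    Requires the `motherduck_token` environment variable to be set.
    """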
    logging.info("set up duckdb")
    # use `climatebase` db
    if not os.getenv("motherduck_token"):
        raise Exception(
            "No motherduck token found. Please set the `motherduck_token` environment variable."
        )
    else:
        con = duckdb.connect("md:climatebase")
        con.sql("USE climatebase;")

    # load extensions
    con.sql("""INSTALL spatial; LOAD spatial;""")

    return con


def authenticate_gee(gee_service_account, gee_service_account_credentials_file):
    """
    Huggingface Spaces does not support secret files, therefore authenticate with an environment variable containing the JSON.
    """
    logging.info("authenticate_gee")
    credentials = ee.ServiceAccountCredentials(
        gee_service_account, key_data=os.environ["ee_service_account"]
    )
    ee.Initialize(credentials)


def load_indices(indices_file):
    # Read index configurations
    with open(indices_file, "r") as stream:
        try:
            return yaml.safe_load(stream)
        except yaml.YAMLError as e:
            logging.error(e)
            return None


def create_dataframe(years, project_name):
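    """
    Builds a composite-index DataFrame for each requested year of the given project and concatenates them.
    """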
    dfs = []
    logging.info(years)
    indices = load_indices(INDICES_FILE)
    for year in years:
        logging.info(year)
        ig = IndexGenerator(
            centroid=LOCATION,
            roi_radius=ROI_RADIUS,
            year=year,
            indices_file=INDICES_FILE,
            project_name=project_name,
        )
        df = ig.generate_composite_index_df(list(indices.keys()))
        dfs.append(df)
    return pd.concat(dfs)


# def preview_table():
#     con.sql("FROM bioindicator;").show()

# if __name__ == '__main__':


# Map = geemap.Map()


# # Create a cloud-free composite with custom parameters for cloud score threshold and percentile.
# composite_cloudfree = ee.Algorithms.Landsat.simpleComposite(**{
#   'collection': collection,
#   'percentile': 75,
#   'cloudScoreRange': 5
# })

# Map.addLayer(composite_cloudfree, {'bands': ['B4', 'B3', 'B2'], 'max': 128}, 'Custom TOA composite')
# Map.centerObject(roi, 14)


# ig = IndexGenerator(centroid=LOCATION, year=2015, indices_file=INDICES_FILE, project_name='Test Project', map=Map)
# dataset = ig.generate_index(indices['Air'])

# minMax = dataset.clip(roi).reduceRegion(
#   geometry = roi,
#   reducer = ee.Reducer.minMax(),
#   scale= 3000,
#   maxPixels= 10e3,
# )


# minMax.getInfo()
def calculate_biodiversity_score(start_year, end_year, project_name):
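    """
    Computes biodiversity scores for any requested years that are not yet in `bioindicator`,
    stages them in `_temptable`, and returns the stored scores for the requested year range.
    """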
    years = []
    for year in range(start_year, end_year + 1):
        row_exists = con.sql(
            f"SELECT COUNT(1) FROM bioindicator WHERE (year = {year} AND project_name = '{project_name}')"
        ).fetchall()[0][0]
        if not row_exists:
            years.append(year)

    if len(years) > 0:
        df = create_dataframe(years, project_name)
        # con.sql('FROM df LIMIT 5').show()

        # Write score table to `_temptable`
        con.sql(
            """
            CREATE OR REPLACE TABLE _temptable AS
            SELECT *, (value * area) AS score
            FROM (
                SELECT year, project_name, AVG(value) AS value, area
                FROM df
                GROUP BY year, project_name, area
                ORDER BY project_name
            )
            """
        )

        # Create `bioindicator` table IF NOT EXISTS.
        con.sql(
            """
            USE climatebase;
            CREATE TABLE IF NOT EXISTS bioindicator (year BIGINT, project_name VARCHAR(255), value DOUBLE, area DOUBLE, score DOUBLE, CONSTRAINT unique_year_project_name UNIQUE (year, project_name));
        """
        )

    return con.sql(
        f"SELECT * FROM bioindicator WHERE (year >= {start_year} AND year <= {end_year} AND project_name = '{project_name}')"
    ).df()


def view_all():
    logging.info("view_all")
    return con.sql(f"SELECT * FROM bioindicator").df()


def push_to_md():
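    """
    Upserts the rows staged in `_temptable` into the `bioindicator` table in MotherDuck.
    """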
    # UPSERT project record
    con.sql(
        """
        INSERT INTO bioindicator FROM _temptable
        ON CONFLICT (year, project_name) DO UPDATE SET value = excluded.value;
    """
    )
    logging.info("upsert records into motherduck")


def motherduck_list_projects(author_id):
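    """
    Returns the distinct project names owned by `author_id` from the `project` table.
    """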
    return con.sql(f"""
        SELECT DISTINCT name FROM project WHERE authorId = '{author_id}'
    """).df()


with gr.Blocks() as demo:
    # Environment setup
    authenticate_gee(GEE_SERVICE_ACCOUNT, GEE_SERVICE_ACCOUNT_CREDENTIALS_FILE)
    con = set_up_duckdb()

    # Create circle buffer over point
    roi = ee.Geometry.Point(*LOCATION).buffer(ROI_RADIUS)

    # # Load a raw Landsat ImageCollection for a single year.
    # start_date = str(datetime.date(YEAR, 1, 1))
    # end_date = str(datetime.date(YEAR, 12, 31))
    # collection = (
    #     ee.ImageCollection('LANDSAT/LC08/C02/T1')
    #     .filterDate(start_date, end_date)
    #     .filterBounds(roi)
    # )

    # indices = load_indices(INDICES_FILE)
    # push_to_md(START_YEAR, END_YEAR, 'Test Project')
    with gr.Column():
        # map = gr.Plot().style()
        with gr.Row():
            start_year = gr.Number(value=2017, label="Start Year", precision=0)
            end_year = gr.Number(value=2022, label="End Year", precision=0)
            # project_name = gr.Textbox(label="Project Name")
            project_name = gr.Dropdown([], label="Project", value="Select project")
        # boroughs = gr.CheckboxGroup(choices=["Queens", "Brooklyn", "Manhattan", "Bronx", "Staten Island"], value=["Queens", "Brooklyn"], label="Select Methodology:")
        # btn = gr.Button(value="Update Filter")
        with gr.Row():
            calc_btn = gr.Button(value="Calculate!")
            view_btn = gr.Button(value="View all")
            save_btn = gr.Button(value="Save")
        results_df = gr.Dataframe(
            headers=["Year", "Project Name", "Score"],
            datatype=["number", "str", "number"],
            label="Biodiversity scores by year",
        )
    # demo.load(filter_map, [min_price, max_price, boroughs], map)
    # btn.click(filter_map, [min_price, max_price, boroughs], map)
    calc_btn.click(
        calculate_biodiversity_score,
        inputs=[start_year, end_year, project_name],
        outputs=results_df,
    )
    view_btn.click(view_all, outputs=results_df)
    save_btn.click(push_to_md)

    def update_project_dropdown_list(url_params):
        """Populates the project dropdown with the projects owned by the user named in the URL params."""
        username = url_params["username"]
        logging.info("Loading projects for user: %s", username)
        # to-do: filter projects based on user
        projects = motherduck_list_projects(author_id=username)
        logging.info("Found projects: %s", projects["name"].tolist())
        return gr.Dropdown.update(choices=projects["name"].tolist())

    # Get url params
    url_params = gr.JSON({"username": "default"}, visible=False, label="URL Params")

    # Work around a Gradio bug: for demo.load to update the dropdown, its value must be
    # consumed downstream, so wire it to a hidden no-op button.
    b1 = gr.Button("Hidden button that fixes bug.", visible=False)
    b1.click(lambda x: x, inputs=project_name, outputs=[])

    # Update project dropdown list on page load
    demo.load(
        fn=update_project_dropdown_list,
        inputs=[url_params],
        outputs=[project_name],
        _js=get_window_url_params,
        queue=False,
    )


demo.launch()