Dingyi6 committed on
Commit
f3c64af
·
1 Parent(s): 922d426

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +516 -121
app.py CHANGED
@@ -1,155 +1,550 @@
1
- from pathlib import Path
2
- from typing import List, Dict, Tuple
3
- import matplotlib.colors as mpl_colors
 
4
 
5
- import pandas as pd
6
- import seaborn as sns
7
- import shinyswatch
8
 
9
- import shiny.experimental as x
10
- from shiny import App, Inputs, Outputs, Session, reactive, render, req, ui
 
11
 
12
- sns.set_theme()
13
 
14
- www_dir = Path(__file__).parent.resolve() / "www"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
15
 
16
- df = pd.read_csv(Path(__file__).parent / "penguins.csv", na_values="NA")
17
- numeric_cols: List[str] = df.select_dtypes(include=["float64"]).columns.tolist()
18
- species: List[str] = df["Species"].unique().tolist()
19
- species.sort()
20
 
21
- app_ui = x.ui.page_fillable(
22
- shinyswatch.theme.minty(),
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
23
  ui.layout_sidebar(
24
  ui.panel_sidebar(
25
- # Artwork by @allison_horst
26
- ui.input_selectize(
27
- "xvar",
28
- "X variable",
29
- numeric_cols,
30
- selected="Bill Length (mm)",
31
- ),
32
- ui.input_selectize(
33
- "yvar",
34
- "Y variable",
35
- numeric_cols,
36
- selected="Bill Depth (mm)",
37
- ),
38
- ui.input_checkbox_group(
39
- "species", "Filter by species", species, selected=species
40
  ),
41
- ui.hr(),
42
- ui.input_switch("by_species", "Show species", value=True),
43
- ui.input_switch("show_margins", "Show marginal plots", value=True),
44
- width=2,
45
  ),
46
  ui.panel_main(
47
- ui.output_ui("value_boxes"),
48
- x.ui.output_plot("scatter", fill=True),
49
- ui.help_text(
50
- "Artwork by ",
51
- ui.a("@allison_horst", href="https://twitter.com/allison_horst"),
52
- class_="text-end",
 
53
  ),
54
  ),
55
  ),
 
 
 
 
 
 
 
56
  )
57
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
58
 
59
- def server(input: Inputs, output: Outputs, session: Session):
60
- @reactive.Calc
61
- def filtered_df() -> pd.DataFrame:
62
- """Returns a Pandas data frame that includes only the desired rows"""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
63
 
64
- # This calculation "req"uires that at least one species is selected
65
- req(len(input.species()) > 0)
 
 
 
 
 
66
 
67
- # Filter the rows so we only include the desired species
68
- return df[df["Species"].isin(input.species())]
 
 
69
 
70
- @output
71
- @render.plot
72
- def scatter():
73
- """Generates a plot for Shiny to display to the user"""
74
-
75
- # The plotting function to use depends on whether margins are desired
76
- plotfunc = sns.jointplot if input.show_margins() else sns.scatterplot
77
-
78
- plotfunc(
79
- data=filtered_df(),
80
- x=input.xvar(),
81
- y=input.yvar(),
82
- palette=palette,
83
- hue="Species" if input.by_species() else None,
84
- hue_order=species,
85
- legend=False,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
86
  )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
87
 
 
 
88
  @output
89
  @render.ui
90
- def value_boxes():
91
- df = filtered_df()
92
-
93
- def penguin_value_box(title: str, count: int, bgcol: str, showcase_img: str):
94
- return x.ui.value_box(
95
- title,
96
- count,
97
- {"class_": "pt-1 pb-0"},
98
- showcase=x.ui.as_fill_item(
99
- ui.tags.img(
100
- {"style": "object-fit:contain;"},
101
- src=showcase_img,
102
- )
103
- ),
104
- theme_color=None,
105
- style=f"background-color: {bgcol};",
106
- )
107
 
108
- if not input.by_species():
109
- return penguin_value_box(
110
- "Penguins",
111
- len(df.index),
112
- bg_palette["default"],
113
- # Artwork by @allison_horst
114
- showcase_img="penguins.png",
115
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
116
 
117
- value_boxes = [
118
- penguin_value_box(
119
- name,
120
- len(df[df["Species"] == name]),
121
- bg_palette[name],
122
- # Artwork by @allison_horst
123
- showcase_img=f"{name}.png",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
124
  )
125
- for name in species
126
- # Only include boxes for _selected_ species
127
- if name in input.species()
128
- ]
129
 
130
- return x.ui.layout_column_wrap(1 / len(value_boxes), *value_boxes)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
131
 
 
132
 
133
- # "darkorange", "purple", "cyan4"
134
- colors = [[255, 140, 0], [160, 32, 240], [0, 139, 139]]
135
- colors = [(r / 255.0, g / 255.0, b / 255.0) for r, g, b in colors]
 
 
 
 
136
 
137
- palette: Dict[str, Tuple[float, float, float]] = {
138
- "Adelie": colors[0],
139
- "Chinstrap": colors[1],
140
- "Gentoo": colors[2],
141
- "default": sns.color_palette()[0], # type: ignore
142
- }
143
 
144
- bg_palette = {}
145
- # Use `sns.set_style("whitegrid")` to help find approx alpha value
146
- for name, col in palette.items():
147
- # Adjusted n_colors until `axe` accessibility did not complain about color contrast
148
- bg_palette[name] = mpl_colors.to_hex(sns.light_palette(col, n_colors=7)[1]) # type: ignore
 
 
 
149
 
150
 
151
- app = App(
152
- app_ui,
153
- server,
154
- static_assets=str(www_dir),
155
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import time
4
+ import io
5
 
6
+ # Add Google Service account credential. Authenticates to the Earth Engine servers.
7
+ os.environ['GOOGLE_APPLICATION_CREDENTIALS'] = 'grounded-nebula-392621-f192b882c364.json'
 
8
 
9
+ # Add the parent directory to the Python path
10
+ parent_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
11
+ sys.path.append(parent_dir)
12
 
13
+ import ee
14
 
15
+ import math
16
+ from shiny import App, render, ui, reactive, Inputs, Outputs, Session
17
+ import ipyleaflet as L
18
+ from htmltools import css
19
+ import numpy as np
20
+ # from PIL import Image
21
+ from shinywidgets import output_widget, reactive_read, register_widget
22
+ from geopy.geocoders import Nominatim
23
+ import json
24
+ import requests
25
+ import traceback
26
+ from datetime import datetime, date
27
+ from typing import List
28
+ from utils import print_with_line_number
29
+ from timezonefinder import TimezoneFinder
30
 
31
+ # Library for ANN model loading
32
+ import tensorflow
33
+ import joblib
 
34
 
35
+ # Sentinel 2 Bands
36
+ # Sentinel-2 carries the Multispectral Imager (MSI). This sensor delivers 13 spectral bands ranging from 10 to 60-meter pixel size.
37
+
38
+ # Its blue (B2), green (B3), red (B4), and near-infrared (B8) channels have a 10-meter resolution.
39
+ # Next, its red edge (B5), near-infrared NIR (B6, B7, and B8A), and short-wave infrared SWIR (B11 and B12) have a ground sampling distance of 20 meters.
40
+ # Finally, its coastal aerosol (B1) and cirrus band (B10) have a 60-meter pixel size.
41
+ # Band Resolution Central Wavelength Description
42
+ # B1 60 m 443 nm Ultra Blue (Coastal and Aerosol)
43
+ # B2 10 m 490 nm Blue
44
+ # B3 10 m 560 nm Green
45
+ # B4 10 m 665 nm Red
46
+ # B5 20 m 705 nm Visible and Near Infrared (VNIR)
47
+ # B6 20 m 740 nm Visible and Near Infrared (VNIR)
48
+ # B7 20 m 783 nm Visible and Near Infrared (VNIR)
49
+ # B8 10 m 842 nm Visible and Near Infrared (VNIR)
50
+ # B8a 20 m 865 nm Visible and Near Infrared (VNIR)
51
+ # B9 60 m 940 nm Short Wave Infrared (SWIR)
52
+ # B10 60 m 1375 nm Short Wave Infrared (SWIR) - excluded
53
+ # B11 20 m 1610 nm Short Wave Infrared (SWIR)
54
+ # B12 20 m 2190 nm Short Wave Infrared (SWIR)
55
+
56
+ tf = TimezoneFinder()
57
+
58
+ # You can use different URLs to load remote sensing image data from various sources
59
+ # In this example, we use image data from Google Earth Engine
60
+ GEEurl = 'https://earthengine.googleapis.com/v1alpha/projects/earthengine-legacy/maps/{mapid}/tiles/{z}/{x}/{y}?token={token}'
61
+ GEEmap_id = '' # Replace with your Google Earth Engine Map ID
62
+ GEEtoken = '' # Replace with your Google Earth Engine Token
63
+
64
+ # Custom Loss Function with Covariance Penalty
65
+ def custom_loss(lam, cov_real_data):
66
+ def loss(y_true, y_pred):
67
+ mse_loss = tensorflow.reduce_mean(tensorflow.square(y_true - y_pred))
68
+ cov_pred = tensorflow.linalg.matmul(tensorflow.transpose(y_pred - tensorflow.reduce_mean(y_pred, axis=0)),
69
+ (y_pred - tensorflow.reduce_mean(y_pred, axis=0))) / tensorflow.cast(tensorflow.shape(y_pred)[0], tensorflow.float32)
70
+ cov_penalty = tensorflow.reduce_sum(tensorflow.square(cov_pred - cov_real_data))
71
+ return mse_loss + lam * cov_penalty
72
+ return loss
73
+
74
+ def load_model_and_preprocessors(model_path, cov_real_data_path, scaler_X_path, scaler_y_path):
75
+ # Load the covariance matrix
76
+ cov_real_data = np.load(cov_real_data_path)
77
+ # Load the trained model
78
+ model = tensorflow.keras.models.load_model(model_path, custom_objects={'loss': custom_loss(1e-6, cov_real_data)})
79
+ # Load the input data scaler
80
+ scaler_X = joblib.load(scaler_X_path)
81
+ # Load the output data scaler
82
+ scaler_Y = joblib.load(scaler_y_path)
83
+ return model, scaler_X, scaler_Y
84
+
85
+ # Load ANN model and preprocessors
86
+ model, loaded_scaler_X, loaded_scaler_Y = load_model_and_preprocessors(
87
+ r"ANN_assests\model",
88
+ r"ANN_assests\cov_real_data.npy",
89
+ r"ANN_assests\scaler_X.pkl",
90
+ r"ANN_assests\scaler_y.pkl"
91
+ )
92
+
93
+ X_labels = ['B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B8A', 'B9', 'B11', 'B12']
94
+
95
+ output_labels = ["N", "Cab", "Ccx", "Cw", "Cm"]
96
+
97
+ layer_names = ["structure parameter", "Chlorophylla+b content (µg/cm2)", "Carotenoids content (µg/cm2)", "Equivalent Water content (cm)", "Leaf Mass per Area (g/cm2)"]
98
+
99
+ data_to_map = {
100
+ "structure parameter": "N",
101
+ "Chlorophylla+b content (µg/cm2)": "Cab",
102
+ "Carotenoids content (µg/cm2)": "Ccx",
103
+ "Equivalent Water content (cm)": "Cw",
104
+ "Leaf Mass per Area (g/cm2)": "Cm"
105
+ }
106
+
107
+ # gradient_settings = {
108
+ # "structure parameter": {0: 'blue', 0.6: 'cyan', 1.0: 'lime'},
109
+ # "Chlorophylla+b content (µg/cm2)": {0: 'green', 0.6: 'lime', 1.0: 'yellow'},
110
+ # "Carotenoids content (µg/cm2)": {0: 'orange', 0.6: 'red', 1.0: 'maroon'},
111
+ # "Equivalent Water content (cm)": {0: 'navy', 0.6: 'blue', 1.0: 'aqua'},
112
+ # "Leaf Mass per Area (g/cm2)": {0: 'purple', 0.6: 'fuchsia', 1.0: 'pink'}
113
+ # }
114
+
115
+ gradient_settings = {
116
+ "structure parameter": {0.2: 'rgba(0, 0, 255, 1.0)', 0.6: 'rgba(0, 255, 255, 1.0)', 1.0: 'rgba(0, 255, 0, 1.0)'},
117
+ "Chlorophylla+b content (µg/cm2)": {0.2: 'rgba(0, 128, 0, 1.0)', 0.6: 'rgba(127, 255, 0, 1.0)', 1.0: 'rgba(255, 255, 0, 1.0)'},
118
+ "Carotenoids content (µg/cm2)": {0.2: 'rgba(255, 69, 0, 1.0)', 0.6: 'rgba(255, 0, 0, 1.0)', 1.0: 'rgba(139, 0, 0, 1.0)'},
119
+ "Equivalent Water content (cm)": {0.2: 'rgba(0, 0, 139, 1.0)', 0.6: 'rgba(65, 105, 225, 1.0)', 1.0: 'rgba(0, 191, 255, 1.0)'},
120
+ "Leaf Mass per Area (g/cm2)": {0.2: 'rgba(75, 0, 130, 1.0)', 0.6: 'rgba(148, 0, 211, 1.0)', 1.0: 'rgba(255, 20, 147, 1.0)'}
121
+ }
122
+
123
+ print_with_line_number("Finish loading the ANN model!")
124
+
125
+ def runModel(input_data, scaler_X, scaler_Y, ANNmodel):
126
+ # Preprocess the Input Data
127
+ # Scale the input features using the previously saved scaler for X
128
+ input_data_scaled = scaler_X.transform(input_data)
129
+
130
+ # Use the Model for Prediction
131
+ # Predict the output values (N, Cab, Ccx, Cw, Cm) for each pixel block
132
+ output_data_scaled = ANNmodel.predict(input_data_scaled)
133
+
134
+ # Post-process the Output Data
135
+ # Inverse scale the output data using the previously saved scaler for Y
136
+ output_data = scaler_Y.inverse_transform(output_data_scaled)
137
+
138
+ # Organize the Output Results and Coordinates
139
+ # Create datasets for each output label and one for coordinates
140
+ # Each dataset contains corresponding data for all pixel blocks
141
+ datasets = {}
142
+ for i, label in enumerate(output_labels):
143
+ datasets[label] = output_data[:, i]
144
+
145
+ # Print the results for verification
146
+ for label, data in datasets.items():
147
+ print(label, data)
148
+
149
+ return datasets
150
+
151
+ def getGPS():
152
+ GPSurl = 'https://www.googleapis.com/geolocation/v1/geolocate?key=AIzaSyAnHc2yRD53vlzHrj7qQ6OLFiX-iGsqFyM'
153
+ data = {'homeMobileCountryCode': 310, 'homeMobileNetworkCode': 410, 'considerIp': 'True'}
154
+ response = requests.post(GPSurl, data=json.dumps(data))
155
+ result = json.loads(response.content)
156
+ return result
157
+
158
+ def get_location(lat, lon):
159
+ geolocator = Nominatim(timeout=120, user_agent="when-to-fly")
160
+ location = geolocator.reverse(f"{lat},{lon}")
161
+ return location.address
162
+
163
+ app_ui = ui.page_fluid(
164
+ ui.div(
165
+ ui.strong("Tips:"),
166
+ ui.br(),
167
+ ui.span("1.Click the polygon icon on the map to draw a polygon, the circular icon to mark a location, the line icon to measure distance, and the icon in the top left corner of the map to select the layers you want to display."),
168
+ ui.br(),
169
+ ui.span("2.After selecting an area, click the 'Analyze' button to analyze the leaf-level feature data for that area. The results are presented as heat maps, with brighter areas indicating values closer to the maximum."),
170
+ ui.br(),
171
+ ui.span("3.Currently, the analysis does not support multiple polygons. The application will only recognize the last polygoned area."),
172
+ ui.br(),
173
+ ui.span("4.After analyzing the data of the drawn area, the webpage may experience slower loading speeds and delays. Please be patient and wait after performing an operation."),
174
+ ui.br(),
175
+ ui.span("5.If you are unable to zoom in or out of the map using the mouse scroll wheel, please use the slide bar provided above to zoom directly."),
176
+ ),
177
  ui.layout_sidebar(
178
  ui.panel_sidebar(
179
+ ui.div(
180
+ ui.input_slider("zoom", "Map zoom level", value=12, min=1, max=18),
181
+ ui.output_ui("map_bounds"),
182
+ style=css(display="flex", justify_content="center", align_items="center", gap="2rem"),
 
 
 
 
 
 
 
 
 
 
 
183
  ),
 
 
 
 
184
  ),
185
  ui.panel_main(
186
+ ui.div(
187
+ ui.output_text("N_range"),
188
+ ui.output_text("Cab_range"),
189
+ ui.output_text("Ccx_range"),
190
+ ui.output_text("Cw_range"),
191
+ ui.output_text("Cm_range"),
192
+ style=css(display="flex", justify_content="center", align_items="center", gap="2rem"),
193
  ),
194
  ),
195
  ),
196
+ output_widget("map"),
197
+ ui.strong("Must analyze (to renew the image information) before downloading polygoned area tif file."),
198
+ ui.div(
199
+ ui.input_action_button("analyze", "Analyze", class_="btn-success"),
200
+ ui.download_button("download_polygon", "Download polygoned area data as tif", class_="btn-success"),
201
+ style=css(display="flex", justify_content="center", align_items="center", gap="2rem"),
202
+ ),
203
  )
204
 
205
+ # re-run when a user using the application
206
+ def server(input, output, session):
207
+ global address_line, polygoned_image
208
+ address_line = None
209
+ polygoned_image = None
210
+ polygon_data = reactive.Value([])
211
+ N = reactive.Value("structure parameter")
212
+ Cab = reactive.Value("Chlorophylla+b content (µg/cm2)")
213
+ Ccx = reactive.Value("Carotenoids content (µg/cm2)")
214
+ Cw = reactive.Value("Equivalent Water content (cm)")
215
+ Cm = reactive.Value("Leaf Mass per Area (g/cm2)")
216
+ m = ui.modal(
217
+ "Please wait for progress...",
218
+ easy_close=False,
219
+ size="s",
220
+ footer=None,
221
+ fade=True
222
+ )
223
+
224
+ @output
225
+ @render.text
226
+ def N_range():
227
+ return N.get()
228
 
229
+ @output
230
+ @render.text
231
+ def Cab_range():
232
+ return Cab.get()
233
+
234
+ @output
235
+ @render.text
236
+ def Ccx_range():
237
+ return Ccx.get()
238
+
239
+ @output
240
+ @render.text
241
+ def Cw_range():
242
+ return Cw.get()
243
+
244
+ @output
245
+ @render.text
246
+ def Cm_range():
247
+ return Cm.get()
248
 
249
+ def handle_draw(self, action, geo_json):
250
+ print("运行handle_draw")
251
+ if geo_json['type'] == 'Feature':
252
+ # Check if the drawn shape is a polygon
253
+ if geo_json['geometry']['type'] == 'Polygon':
254
+ # Get the coordinates of the polygon's vertices
255
+ coordinates = geo_json['geometry']['coordinates'][0]
256
 
257
+ # Extract latitude and longitude values from each vertex
258
+ # For GeoJSON, coordinates are represented as [longitude, latitude]
259
+ # (note the reverse order compared to traditional [latitude, longitude])
260
+ polygon_data.set([(lon, lat) for lon, lat in coordinates])
261
 
262
+ # Process the polygon_data as per your requirement
263
+ # For example, print the coordinates
264
+ print("Polygon Vertex Coordinates:")
265
+ for lon, lat in polygon_data.get():
266
+ print(f"Latitude: {lat}, Longitude: {lon}")
267
+
268
+ ui.modal_show(m)
269
+
270
+ # Initialize Earth Engine
271
+ ee.Initialize()
272
+
273
+ # Check API status
274
+ asset_roots = ee.data.getAssetRoots()
275
+ if asset_roots:
276
+ print("Active Project ID:", asset_roots[0]['id'])
277
+ print("API is connected and working: ", asset_roots)
278
+ else:
279
+ print("API is not connected or not working.")
280
+
281
+ try:
282
+ # Get the user's current geoinformation
283
+ current_gps = getGPS()
284
+ print_with_line_number(current_gps)
285
+ current_location = get_location(current_gps['location']['lat'], current_gps['location']['lng'])
286
+ print_with_line_number(current_location)
287
+ ui.update_text(id="address",
288
+ label="Data for",
289
+ value=current_location)
290
+
291
+ # Initialize and display when the session starts (1)
292
+ map = L.Map(center=(current_gps['location']['lat'], current_gps['location']['lng']), zoom=12, scroll_wheel_zoom=True)
293
+ map.add_layer(L.TileLayer(url='https://mt1.google.com/vt/lyrs=s&x={x}&y={y}&z={z}'), name='Natural Map')
294
+
295
+ # Add a distance scale
296
+ map.add_control(L.leaflet.ScaleControl(position="bottomleft"))
297
+ layer_control = L.LayersControl(position='topright')
298
+ map.add_control(layer_control)
299
+
300
+ # Add the DrawControl widget to the map
301
+ draw_control = L.DrawControl(
302
+ polygon = {
303
+ "shapeOptions": {
304
+ "fillColor": "transparent",
305
+ "fillOpacity": 0.0
306
+ }
307
+ }
308
  )
309
+ map.add_control(draw_control)
310
+ # Attach the handle_draw function to the on_draw event
311
+ draw_control.on_draw(handle_draw)
312
+ register_widget("map", map)
313
+
314
+ ui.modal_remove()
315
+
316
+ except Exception as e:
317
+ ui.modal_remove()
318
+ error_modal = ui.modal(
319
+ str(e),
320
+ title="An Error occured. Please refresh",
321
+ easy_close=True,
322
+ size="xl",
323
+ footer=None,
324
+ fade=True
325
+ )
326
+ # print_with_line_number("Show error modal")
327
+ ui.modal_show(error_modal)
328
+ traceback.print_exc()
329
+
330
+ # When the slider changes, update the map's zoom attribute (2)
331
+ @reactive.Effect
332
+ def _():
333
+ map.zoom = input.zoom()
334
+
335
+ # When zooming directly on the map, update the slider's value (2 and 3)
336
+ @reactive.Effect
337
+ def _():
338
+ ui.update_slider("zoom", value=reactive_read(map, "zoom"))
339
 
340
+ # Everytime the map's bounds change, update the output message (3)
341
+ # rerun when a user do some reactive changes.
342
  @output
343
  @render.ui
344
+ async def map_bounds():
345
+ center = reactive_read(map, "center")
346
+ if len(center) == 0:
347
+ return
 
 
 
 
 
 
 
 
 
 
 
 
 
348
 
349
+ lat = round(center[0], 4)
350
+ lon = (center[1] + 180) % 360 - 180
351
+ lon = round(lon, 4)
352
+
353
+ return ui.p(f"Longitude: {lon}", ui.br(), f"Latitude: {lat}")
354
+
355
+ def update_or_create_heatmaps(output_datasets, scale):
356
+ """
357
+ Check if a heatmap layer exists for each dataset in output_datasets.
358
+ If it exists, update the heatmap, otherwise create a new heatmap.
359
+
360
+ Parameters:
361
+ output_datasets (list of dict): The datasets for creating/updating heatmaps
362
+ """
363
+ # Iterate over each dataset in output_datasets
364
+ existing_layers = {layer.name: layer for layer in map.layers}
365
+ print_with_line_number(existing_layers)
366
+
367
+ for layer_name in layer_names:
368
+ # Check if a heatmap layer with this name already exists
369
+ if layer_name in existing_layers:
370
+ print("deleting ", layer_name)
371
+ map.remove_layer(existing_layers[layer_name])
372
+
373
+ heatmap_data = []
374
+ data_values = output_datasets[data_to_map[layer_name]]
375
+ min_value = min(data_values)
376
+ max_value = max(data_values)
377
 
378
+ if (data_to_map[layer_name] == "N"):
379
+ N.set(layer_name + ": " + str(min_value) + " ~ " + str(max_value))
380
+ elif (data_to_map[layer_name] == "Cab"):
381
+ Cab.set(layer_name + ": " + str(min_value) + " ~ " + str(max_value))
382
+ elif (data_to_map[layer_name] == "Ccx"):
383
+ Ccx.set(layer_name + ": " + str(min_value) + " ~ " + str(max_value))
384
+ elif (data_to_map[layer_name] == "Cw"):
385
+ Cw.set(layer_name + ": " + str(min_value) + " ~ " + str(max_value))
386
+ else:
387
+ Cm.set(layer_name + ": " + str(min_value) + " ~ " + str(max_value))
388
+
389
+ for coord, n in zip(output_datasets["Coordinates"], data_values):
390
+ normalized_value = (n - min_value) / (max_value - min_value)
391
+ heatmap_data.append([coord[1], coord[0], normalized_value])
392
+
393
+ # Generate new heatmap for this dataset
394
+ heatmap = L.Heatmap(
395
+ locations=heatmap_data,
396
+ radius=scale * 1.4,
397
+ gradient=gradient_settings[layer_name],
398
+ max=1,
399
+ blur=scale / 2,
400
+ name=layer_name
401
  )
 
 
 
 
402
 
403
+ # Add the new heatmap layer to the map
404
+ map.add_layer(heatmap)
405
+
406
+ @reactive.Effect
407
+ @reactive.event(input.analyze, ignore_none=True, ignore_init=True)
408
+ def _():
409
+ if not polygon_data.get():
410
+ return
411
+ ui.modal_show(m)
412
+ global polygoned_image
413
+ polygon = ee.Geometry.Polygon(polygon_data.get())
414
+ print("Polygon Data: " , polygon_data.get())
415
+ print("Polygon: " , polygon)
416
+
417
+ # Define Sentinel-2 image collection ("2021-01-01", "2021-12-31")
418
+ today = ee.Date(datetime.today().strftime('%Y-%m-%d')) # 获取当天日期并转换为ee.Date格式
419
+ start_date = today.advance(-15, 'day')
420
 
421
+ print("Start Date: ", start_date.format('YYYY-MM-dd').getInfo(), "| End Date: ", today.format('YYYY-MM-dd').getInfo())
422
 
423
+ sentinel2 = ee.ImageCollection("COPERNICUS/S2_SR")\
424
+ .filterDate(start_date, today)\
425
+ .filterBounds(polygon)\
426
+ .sort('CLOUDY_PIXEL_PERCENTAGE', True)
427
+ # .first() # Retrieve the first image from the ImageCollection
428
+ # sentinel2 = sentinel2.sort('CLOUDY_PIXEL_PERCENTAGE', True)
429
+ polygoned_image = sentinel2.first()
430
 
431
+ # Make sure the polygoned_image is available
432
+ if not polygoned_image:
433
+ print("No image available for download.")
434
+ return
 
 
435
 
436
+ retry = 5
437
+ while(sentinel2.size().getInfo() == 0):
438
+ if(retry == 0):
439
+ print("fail to fecth image.")
440
+ return
441
+ print("wait for fetching.")
442
+ time.sleep(2)
443
+ retry -= 1
444
 
445
 
446
+ print_with_line_number("Type of sentinel2: " + str(type(sentinel2)))
447
+ print("Counts of Fetched image: ", sentinel2.size().getInfo())
448
+
449
+ # Clip the image to the extent of the polygon
450
+ clipped_image = polygoned_image.clip(polygon)
451
+
452
+ # Get meta data about the image object
453
+ # bands = clipped_image.bandNames().getInfo()
454
+ # print_with_line_number(bands)
455
+
456
+ # Calculate suitable pixel number. GEE service allow fecthing 5000 pixels at most for one call. So we use the "polygoned area / 4999" to decide a rational pixel scale.
457
+ scale=1
458
+ polygon_area = polygon.area().getInfo()
459
+ num = math.ceil(polygon_area / scale / scale)
460
+ if (num > 4999):
461
+ per_area = math.ceil(polygon_area / 4999)
462
+ scale = math.ceil(math.pow(per_area, 1.0/2))
463
+
464
+ print("polygon_area(m2): ", polygon_area, "scale: ", scale)
465
+
466
+ # Fetch reflectance of B1-B12
467
+ spectral_values = clipped_image.select('B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B8A', 'B9', 'B11', 'B12').sample(
468
+ region=polygon,
469
+ scale=scale,
470
+ numPixels=4999,
471
+ geometries=True
472
+ )
473
+
474
+ print_with_line_number("Pre-process the bands data.")
475
+ # print_with_line_number(type(spectral_values))
476
+ spectral_values = spectral_values.getInfo()
477
+ # print_with_line_number(type(spectral_values))
478
+ spectral_values_json = json.dumps(spectral_values)
479
+ # print(spectral_values_json)
480
+ spectral_values_dict = json.loads(spectral_values_json)
481
+ features = spectral_values_dict['features']
482
+
483
+ print_with_line_number("Extract the center coordinates and values of B1-B12 for each pixel block")
484
+ coords = []
485
+ input_data = []
486
+ for feature in features:
487
+ coords.append(feature['geometry']['coordinates'])
488
+ props = feature['properties']
489
+ input_data.append([props[b] for b in X_labels])
490
+
491
+ # Convert to NumPy arrays
492
+ coords = np.array(coords)
493
+ print("coords: ", coords)
494
+ input_data = np.array(input_data)
495
+ print("input_bands: ", input_data)
496
+
497
+ output_datasets = runModel(input_data, loaded_scaler_X, loaded_scaler_Y, model)
498
+
499
+ print_with_line_number("Add a dataset for the coordinates")
500
+ output_datasets['Coordinates'] = coords
501
+
502
+ update_or_create_heatmaps(output_datasets, scale)
503
+ register_widget("map", map)
504
+ ui.modal_remove()
505
+
506
+ @reactive.Effect
507
+ def _():
508
+ print("Current navbar page: ", input.navbar_id())
509
+
510
+ @session.download(
511
+ filename=lambda: f"image-{date.today().isoformat()}-{np.random.randint(100, 999)}.tif"
512
+ )
513
+ async def download_polygon():
514
+ # # Replace this with your ee.Image object
515
+ # image_id = "COPERNICUS/S2_SR/20230728T184921_20230728T190044_T10SFH"
516
+ # image = ee.Image(image_id)
517
+ # Make sure the polygoned_image is available
518
+ if not polygoned_image:
519
+ print("No image available for download.")
520
+ return
521
+
522
+ # Clip the image to the extent of the polygon
523
+ clipped_image = polygoned_image.clip(ee.Geometry.Polygon(polygon_data.get()))
524
+ print("clipped_image: ", clipped_image)
525
+
526
+ # Define export parameters
527
+ download_params = {
528
+ 'scale': 10,
529
+ 'region': polygon_data.get(), # ee.Geometry object defining the region to export
530
+ 'format': 'GeoTIFF',
531
+ }
532
+
533
+ # Generate download URL for the GeoTIFF image
534
+ download_url = clipped_image.getDownloadURL(download_params)
535
+
536
+ # Send a request to download the image
537
+ response = requests.get(download_url)
538
+
539
+ # Create a BytesIO buffer
540
+ with io.BytesIO() as buf:
541
+ # Write the image content to the buffer
542
+ buf.write(response.content)
543
+ buf.seek(0) # Move the buffer's position to the beginning
544
+
545
+ # Yield the buffer's content as a downloadable file
546
+ yield buf.getvalue()
547
+
548
+ print("Image downloaded successfully!")
549
+
550
+ app = App(app_ui, server)