import time
from datetime import datetime, timedelta

import ee
import matplotlib
import pandas as pd
# The original imported geemap twice; the folium backend is the binding that
# survived, so only it is kept.
import geemap.foliumap as geemap

matplotlib.use('agg')  # headless backend: figures are written to files, never shown

# Optional Earth Engine service-account authentication (left disabled):
# service_account = 'isronrsc@isro-407105.iam.gserviceaccount.com'
# credentials = ee.ServiceAccountCredentials(service_account, 'isro-407105-31fe627b6f09.json')
# ee.Initialize(credentials)


class MapVisualizer:
    """Sentinel-1 SAR water-spread analysis over a region of interest (ROI).

    Uses Google Earth Engine (COPERNICUS/S1_GRD, IW mode, VV polarisation) to
    classify surface water with a -15 dB backscatter threshold, report the
    minimum/maximum water spread inside a date window, and build timelapse
    video thumbnails.
    """

    def __init__(self):
        # All state is populated lazily by run_analysis() / timelapse().
        self.sar_collection = None  # cached ee.ImageCollection of S1 scenes
        self.selected_roi = None    # ee.Geometry of the chosen ROI
        self.start_date = None      # datetime or "YYYY-MM-DD" string
        self.end_date = None
        self.lang = None            # requested output language (stored, unused here)
        self.S1_chunks = None       # ee.List of filtered images for timelapse chunks

    def import_and_add_layers(self, asset_id, predefined_layers=None):
        """Load an EE asset and return its dissolved geometry.

        Bug fix: properties must be attached to the FeatureCollection's
        features *before* flattening to a geometry — an ee.Geometry has no
        .map(), so the original crashed whenever predefined_layers was given.
        """
        fc = ee.FeatureCollection(asset_id)
        if predefined_layers:
            fc = fc.map(lambda feature: feature.set(predefined_layers))
        return fc.geometry()

    def add_sar_layer_to_roi(self, shapefile, start_date, end_date, map_obj):
        """Add a mean VV SAR layer clipped to `shapefile` onto `map_obj`.

        Bug fix: the date arguments were ignored in favour of the instance
        attributes; they are now honoured. `shapefile` is assumed to be an
        ee.FeatureCollection/Feature (has .geometry()) — TODO confirm caller.
        """
        sar_collection = self.load_sar_collection(start_date, end_date)
        # Mean VV backscatter over the window, clipped to the ROI footprint.
        sar_vv = sar_collection \
            .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VV')) \
            .filter(ee.Filter.eq('instrumentMode', 'IW')) \
            .mean().clip(shapefile.geometry())
        map_obj.addLayer(sar_vv,
                         {'bands': ['VV'], 'min': -20, 'max': 0, 'gamma': 1.4},
                         'Clipped SAR (VV) Layer')
        return sar_vv

    def filterSpeckles(self, image):
        """Reduce SAR speckle with a 25 m circular median filter on the VV band."""
        vv = image.select('VV')
        vv_smoothed = vv.focal_median(25, 'circle', 'meters').rename('VV_Filtered')
        return image.addBands(vv_smoothed)

    def classifyWater(self, image):
        """Add a binary 'Water' band: filtered VV backscatter below -15 dB."""
        vv = image.select('VV_Filtered')
        water = vv.lt(-15).rename('Water')
        return image.addBands(water)

    def reduce_region_band(self, image):
        """Tag the image with its total water-pixel count inside the ROI."""
        water_pixel_count = image.select('Water').reduceRegion(
            reducer=ee.Reducer.sum(),
            geometry=self.selected_roi,
            maxPixels=1e9
        ).get('Water')
        return image.set('water_pixel_count', water_pixel_count)

    def load_sar_collection(self, start_date, end_date):
        """Return S1 GRD IW/VV scenes that fully contain the ROI for the window.

        Bug fix: the date arguments were previously ignored in favour of
        self.start_date/self.end_date. All visible callers pass exactly those
        attributes, so honouring the parameters is behaviour-compatible.
        """
        sar_collection = ee.ImageCollection('COPERNICUS/S1_GRD') \
            .filterBounds(self.selected_roi) \
            .filterDate(start_date, end_date) \
            .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VV')) \
            .filter(ee.Filter.eq('instrumentMode', 'IW')) \
            .filter(ee.Filter.contains('.geo', self.selected_roi))
        self.sar_collection = sar_collection
        return sar_collection

    def calculate_water_spread(self, image, threshold):
        """Return (water area, unit) for `image` inside the ROI.

        Sums pixel areas over the precomputed binary 'Water' band. `threshold`
        is unused (classification already happened upstream) but kept for
        interface compatibility. Returns (area_m2, "m") below 1 km², otherwise
        (area_km2, "km"); (None, None) when the reduction yields nothing.

        Improvement: one .getInfo() round trip instead of up to three.
        """
        water_mask = image.select('Water').eq(1)
        water_area_m2 = water_mask.multiply(ee.Image.pixelArea()).reduceRegion(
            reducer=ee.Reducer.sum(),
            geometry=self.selected_roi,
            maxPixels=1e9
        ).get('Water')
        area_m2 = ee.Number(water_area_m2).getInfo()
        if area_m2 is None:
            return None, None
        area_km2 = area_m2 / 1e6
        if area_km2 < 1:
            return area_m2, "m"
        return area_km2, "km"

    def process_each_chunk(self, num_chunks, i, chunk_size=50):
        """Build a video-thumbnail URL for the i-th chunk of self.S1_chunks.

        `chunk_size` (new, defaults to the original hard-coded 50) controls
        how many images form one chunk. Returns (num_chunks, video_thumb_url,
        i + 1); num_chunks is passed through unchanged for the caller.
        """
        start_index = i * chunk_size
        end_index = (i + 1) * chunk_size
        chunk = ee.ImageCollection(self.S1_chunks.slice(start_index, end_index))
        num_images_chunk = chunk.size().getInfo()
        print(f"Number of images in chunk {i+1}:", num_images_chunk)
        # Same -15 dB water threshold used everywhere else in this class.
        threshold = -15
        S1_classified = chunk.map(
            lambda img: img.addBands(
                img.select('VV_Filtered').lt(threshold).rename('Water')))
        # Chronological order so the animation plays forward in time.
        S1_sorted = S1_classified.sort('system:time_start')
        # Buffer radius heuristic: half the square root of the ROI area.
        buffer_radius = self.selected_roi.area().sqrt().divide(ee.Number(2))
        region = self.selected_roi.buffer(buffer_radius)
        video_params = {
            'dimensions': 600,
            'region': region,
            'framesPerSecond': 5,
            'bands': ['VV_Filtered'],
            'min': -25,
            'max': 0
        }
        video_thumb_url = S1_sorted.getVideoThumbURL(video_params)
        return num_chunks, video_thumb_url, i + 1

    def timelapse(self, asset_ids, selected_roi_name):
        """Prepare speckle-filtered S1 imagery for the named ROI and return the
        first chunk's timelapse (see process_each_chunk).

        Returns None implicitly when selected_roi_name is not listed in
        ISROP.csv (original behaviour, preserved).
        """
        df = pd.read_csv("ISROP.csv")
        valid_roi_names = df['ROI_Name'].tolist()
        if selected_roi_name in valid_roi_names:
            selected_roi_index = valid_roi_names.index(selected_roi_name)
            self.selected_roi = ee.FeatureCollection(
                asset_ids[selected_roi_index]).geometry()
            S1 = (ee.ImageCollection('COPERNICUS/S1_GRD')
                  .filterBounds(self.selected_roi)
                  .filterDate(self.start_date, self.end_date)
                  .filter(ee.Filter.listContains('transmitterReceiverPolarisation', 'VV'))
                  .filter(ee.Filter.eq('instrumentMode', 'IW'))
                  .filter(ee.Filter.contains('.geo', self.selected_roi)))
            # Speckle filtering: same 25 m median as filterSpeckles, VV only.
            S1_filtered = S1.map(lambda img: img.select('VV')
                                 .focal_median(25, 'circle', 'meters')
                                 .rename('VV_Filtered'))
            # Single server round trip (the original fetched the size twice).
            num_images = S1_filtered.size().getInfo()
            print("Number of images:", num_images)
            # NOTE(review): num_chunks is the ZERO-BASED index of the last
            # 50-image chunk, i.e. ceil(num_images / 50) - 1, not the total
            # count; preserved as-is because process_each_chunk passes it
            # through to callers unchanged.
            num_chunks = num_images // 50
            if num_images % 50 == 0:
                num_chunks -= 1
            self.S1_chunks = S1_filtered.toList(num_images)
            print("Total number of chunks:", num_chunks)
            return self.process_each_chunk(num_chunks, 0)

    def run_analysis(self, asset_ids, selected_roi_name, start_date, end_date,
                     csv_file_path, lang):
        """Run the water-spread analysis for a named ROI over [start_date, end_date].

        Dates are "YYYY-MM-DD" strings. Returns a 5-tuple
        (map_or_None, conclusion_or_date, chart_path, status, error):
          * >1 images: (folium Map, conclusion text, chart JSON path, "ok", "")
          * ==1 image: (folium Map, conclusion text, "", "ok", "")
          * 0 images: offers a window extended 30 days earlier, or an error
            message in the last slot when even that finds nothing.

        Bug fixes: csv_file_path is now used (was hardcoded to "ISROP.csv");
        the "peroid" typo in the final error message is corrected; the stale
        "15 days" comment now matches the 30-day extension.
        """
        conclusion = ""
        df = pd.read_csv(csv_file_path)
        valid_roi_names = df['ROI_Name'].tolist()
        self.lang = lang
        if selected_roi_name in valid_roi_names:
            selected_roi_index = valid_roi_names.index(selected_roi_name)
            self.selected_roi = self.import_and_add_layers(asset_ids[selected_roi_index])
            # Keep the parsed window on the instance; helper methods read it.
            self.start_date = datetime.strptime(start_date, "%Y-%m-%d")
            self.end_date = datetime.strptime(end_date, "%Y-%m-%d")
            static_map = self.load_sar_collection(self.start_date, self.end_date)
            static_map = static_map.map(self.filterSpeckles).map(self.classifyWater)
            static_map_reduced = static_map.map(self.reduce_region_band)
            # Descending by water pixel count: first() is the wettest scene.
            static_map_sorted = static_map_reduced.sort('water_pixel_count', False)
            max_water_image = static_map_sorted.first()
            # Buffer radius heuristic: half the square root of the ROI area.
            buffer_radius = self.selected_roi.area().sqrt().divide(ee.Number(2))
            num_images = static_map_sorted.size().getInfo()
            if num_images > 1:
                dates = static_map_sorted.aggregate_array('system:time_start').getInfo()
                water_pixel_counts = static_map_sorted.aggregate_array('water_pixel_count').getInfo()
                threshold_max = -15
                water_spread_area_max, dis = self.calculate_water_spread(
                    max_water_image, threshold_max)
                max_water_date = max_water_image.get('system:time_start').getInfo()
                max_water_date = datetime.utcfromtimestamp(max_water_date / 1000).date()
                res = "kilometers"
                if dis == "m":
                    res = "meters"
                dates = [datetime.utcfromtimestamp(date / 1000) for date in dates]
                if dis == "km":
                    # NOTE(review): /1e4 presumably rescales 10 m-pixel counts
                    # for km-scale charting — confirm against the chart consumer.
                    water_pixel_counts = [water / 1e4 for water in water_pixel_counts]
                timestamp = str(int(time.time()))
                chart_file_path = f"static/assets/plot/chart_{timestamp}.json"
                df = pd.DataFrame({'Date': dates, 'Water Pixel Count': water_pixel_counts})
                df = df.sort_values(by='Date')
                df.to_json(chart_file_path, orient="records")
                conclusion += f'The Maximum water spread of {selected_roi_name.capitalize()} during given period is {round(water_spread_area_max, 2)} square {res} ({max_water_date}). '
                # Ascending sort: first() is now the driest scene.
                min_water_image = static_map_sorted.sort('water_pixel_count').first()
                threshold_min = -15
                water_spread_area_min, dis = self.calculate_water_spread(
                    min_water_image, threshold_min)
                min_water_date = min_water_image.get('system:time_start').getInfo()
                min_water_date = datetime.utcfromtimestamp(min_water_date / 1000).date()
                conclusion += f'The Minimum water spread is {round(water_spread_area_min, 2)} square {res} ({min_water_date})'
                # Display the SAR layer with max water pixel count on the map.
                sar_band_clipped_max = max_water_image.select('VV_Filtered').clip(
                    self.selected_roi.buffer(buffer_radius))
                Map = geemap.Map()
                Map.centerObject(self.selected_roi, 10)
                Map.addLayer(sar_band_clipped_max, {'min': -25, 'max': 0},
                             'SAR Layer (max Water Pixels) - Clipped')
                Map.addLayerControl()
                return Map, conclusion, chart_file_path, "ok", ""
            elif num_images == 1:
                max_water_image = static_map_sorted.first()
                threshold_max = -15
                water_spread_area_single, dis = self.calculate_water_spread(
                    max_water_image, threshold_max)
                max_water_date = max_water_image.get('system:time_start').getInfo()
                res = "kilometers"
                if dis == "m":
                    res = "meters"
                max_water_date = datetime.utcfromtimestamp(max_water_date / 1000).date()
                conclusion += f'The Maximum water spread of {selected_roi_name.capitalize()} during given period is {round(water_spread_area_single, 2)} square {res} ({max_water_date})'
                # Display the map for the single available image.
                Map_single = geemap.Map()
                Map_single.centerObject(self.selected_roi, 10)
                Map_single.addLayer(
                    max_water_image.select('VV_Filtered').clip(
                        self.selected_roi.buffer(buffer_radius)),
                    {'min': -25, 'max': 0},
                    'SAR Layer (Water Pixels) - Clipped')
                Map_single.addLayerControl()
                return Map_single, conclusion, "", "ok", ""
            else:
                # No images found: extend the start date by 30 days and re-query.
                # (Unused max-image/buffer recomputations from the original
                # were dropped here — only the image count matters.)
                self.start_date -= timedelta(days=30)
                self.start_date = self.start_date.strftime("%Y-%m-%d")
                self.end_date = self.end_date.strftime("%Y-%m-%d")
                static_map = self.load_sar_collection(self.start_date, self.end_date)
                static_map = static_map.map(self.filterSpeckles).map(self.classifyWater)
                static_map_reduced = static_map.map(self.reduce_region_band)
                static_map_sorted = static_map_reduced.sort('water_pixel_count', False)
                num_images = static_map_sorted.size().getInfo()
                if num_images >= 1:
                    return None, self.start_date, "", f"We could not find the images for given period, would you like to extend the period from {self.start_date} till {self.end_date}.", ""
                else:
                    return None, None, "", "", "We could not find the images for given period, please try again with different period."
        # Unknown ROI name: empty result tuple.
        return None, None, "", "", ""

# Example usage:
# map = MapVisualizer()
# map.run_analysis(['projects/isro-407105/assets/bhakra', 'projects/isro-407105/assets/kangsabati'],
#                  "bhakra", "2023-01-05", "2023-02-19", "ISROP.csv", "english")