{ "cells": [ { "cell_type": "code", "execution_count": null, "id": "d4b6549c", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Generating dataset for location: Aletsch\n", "Exporting DEM for location: Aletsch\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230426, 20230426\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230225, 20230225\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230302, 20230302\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230305, 20230305\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230317, 20230317\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230521, 20230521\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230501, 20230501\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230322, 20230322\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230516, 20230516\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230504, 20230504\n", "Generating dataset for location: PleineMorte\n", "Exporting DEM for location: PleineMorte\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", 
"Generating dataset for location: Anzere\n", "Exporting DEM for location: Anzere\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Generating dataset for location: Diablerets\n", "Exporting DEM for location: Diablerets\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Generating dataset for location: Gorner\n", "Exporting DEM for location: Gorner\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230426, 20230426\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230225, 20230225\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230302, 20230302\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230305, 20230305\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230317, 20230317\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230521, 20230521\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230501, 20230501\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and 
Sentinel-2 for date: 20230322, 20230322\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230516, 20230516\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230504, 20230504\n", "Generating dataset for location: Rhone\n", "Exporting DEM for location: Rhone\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230426, 20230426\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230225, 20230225\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230305, 20230305\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230317, 20230317\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230521, 20230521\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230322, 20230322\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230516, 20230516\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230504, 20230504\n", "Generating dataset for location: Moiry\n", "Exporting DEM for location: Moiry\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230426, 20230426\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230225, 20230225\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230302, 20230302\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230521, 20230521\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230501, 
20230501\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230322, 20230322\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Generating dataset for location: Zmutt\n", "Exporting DEM for location: Zmutt\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230426, 20230426\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230225, 20230225\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230302, 20230302\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230521, 20230521\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230501, 20230501\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230322, 20230322\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Generating dataset for location: Saas-Tal\n", "Exporting DEM for location: Saas-Tal\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230426, 20230426\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230225, 20230225\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230302, 20230302\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230305, 20230305\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230317, 20230317\n", "Exporting 
Sentinel-1 and Sentinel-2 for date: 20230521, 20230521\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230501, 20230501\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230322, 20230322\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230516, 20230516\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230504, 20230504\n", "Generating dataset for location: Gorbassiere\n", "Exporting DEM for location: Gorbassiere\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230509, 20230509\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230414, 20230414\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230310, 20230310\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230213, 20230213\n", "Generating dataset for location: Allalin\n", "Exporting DEM for location: Allalin\n", "Processing year: 2023 from 2023-02-01 to 2023-05-30\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230426, 20230426\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230419, 20230419\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230218, 20230218\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230225, 20230225\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230302, 20230302\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230305, 20230305\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230317, 20230317\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230521, 20230521\n", "Exporting Sentinel-1 and Sentinel-2 for date: 20230501, 20230501\n", "Exporting Sentinel-1 and 
import calendar
import datetime
import sys

import ee

ee.Authenticate()
ee.Initialize()

# ---------------------------
# Configuration
# ---------------------------
YEARS = [2025]            # years to export
MONTHS_START = 2          # first month of the seasonal window (inclusive)
MONTHS_END = 8            # last month of the seasonal window (inclusive)
EXPORT_FOLDER = 'GEE_exports_02'   # Drive folder for the S1/S2 pair exports

# Regions of interest (lon/lat rectangles, WGS84).  Previously exported
# locations are kept commented out so they can be re-enabled for later runs.
LOCATION = {
    # 'Aletsch':     ee.Geometry.Polygon([[[7.9, 46.35], [7.9, 46.6],
    #                                      [8.2, 46.6], [8.2, 46.35]]]),
    # 'PleineMorte': ee.Geometry.Polygon([[[7.472076, 46.371332], [7.472076, 46.395963],
    #                                      [7.558594, 46.395963], [7.558594, 46.371332]]]),
    # 'Anzere':      ee.Geometry.Polygon([[[7.315350, 46.344202], [7.315350, 46.371451],
    #                                      [7.387362, 46.371451], [7.387362, 46.344202]]]),
    # NOTE(review): the original dict listed 'Diablerets' twice with slightly
    # different bounds ([7.174759..7.245140] vs [7.189934..7.243320]); only the
    # later entry took effect, so only that one is kept here.
    # 'Diablerets':  ee.Geometry.Polygon([[[7.189934, 46.301615], [7.189934, 46.330259],
    #                                      [7.243320, 46.330259], [7.243320, 46.301615]]]),
    # 'Gorner':      ee.Geometry.Polygon([[[7.737808, 45.889453], [7.737808, 45.977836],
    #                                      [7.891617, 45.977836], [7.891617, 45.889453]]]),
    # 'Rhone':       ee.Geometry.Polygon([[[8.362999, 46.580103], [8.362999, 46.648493],
    #                                      [8.421364, 46.648493], [8.421364, 46.580103]]]),
    # 'Moiry':       ee.Geometry.Polygon([[[7.562027, 46.049170], [7.562027, 46.097876],
    #                                      [7.637730, 46.097876], [7.637730, 46.049170]]]),
    # 'Zmutt':       ee.Geometry.Polygon([[[7.558765, 45.969049], [7.558765, 46.027959],
    #                                      [7.665539, 46.027959], [7.665539, 45.969049]]]),
    # 'Saas-Tal':    ee.Geometry.Polygon([[[7.821545, 46.011491], [7.821545, 46.105634],
    #                                      [7.942223, 46.105634], [7.942223, 46.011491]]]),
    # 'Gorbassiere': ee.Geometry.Polygon([[[7.246376, 45.934768], [7.246376, 46.003639],
    #                                      [7.326027, 46.003639], [7.326027, 45.934768]]]),
    # 'Allalin':     ee.Geometry.Polygon([[[7.868614, 46.016011], [7.868614, 46.052493],
    #                                      [7.946205, 46.052493], [7.946205, 46.016011]]]),
    'Praflleuri': ee.Geometry.Polygon([
        [[7.328739, 46.059534],
         [7.328739, 46.078470],
         [7.370968, 46.078470],
         [7.370968, 46.059534]]
    ]),
}


# ---------------------------
# Sentinel-2 preprocessing
# ---------------------------
def s2_preprocessing(img, roi):
    """Clip a Sentinel-2 SR image, add snow/water indices and a cloud mask.

    Args:
        img: ee.Image from COPERNICUS/S2_SR_HARMONIZED (must carry an SCL band).
        roi: ee.Geometry the outputs are clipped to.

    Returns:
        Tuple ``(processed, clouds)``:
          processed -- float image with the selected reflectance bands plus
                       'ndwi' and 'ndsi', clipped to roi, source properties copied.
          clouds    -- binary mask of the SCL cloud classes listed below.
    """
    scl = img.select('SCL')
    # SCL classes treated as cloud: 3 = cloud shadow, 8 = cloud medium
    # probability, 9 = cloud high probability.
    # (10 = thin cirrus is deliberately NOT masked here.)
    clouds = scl.eq(3).Or(scl.eq(8)).Or(scl.eq(9))

    bands = img.select(['B2', 'B3', 'B4', 'B5', 'B6', 'B7',
                        'B8', 'B8A', 'B11', 'B12'])

    # Normalized-difference snow and water indices.
    ndsi = bands.normalizedDifference(['B3', 'B11']).rename('ndsi')
    ndwi = bands.normalizedDifference(['B3', 'B8']).rename('ndwi')

    processed = ee.Image(bands
                         .addBands(ndwi)
                         .addBands(ndsi)
                         .clip(roi)
                         .toFloat()
                         .copyProperties(img, img.propertyNames()))
    return processed, clouds


def add_date(img):
    """Attach the acquisition date (YYYYMMdd string) as a 'date' property."""
    return img.set('date', img.date().format('YYYYMMdd'))


def _export_to_drive(image, description, file_prefix, roi):
    """Start a 10 m Drive export task for `image` and return the task."""
    task = ee.batch.Export.image.toDrive(
        image=image,
        description=description,
        folder=EXPORT_FOLDER,
        fileNamePrefix=file_prefix,
        region=roi,
        scale=10,
        maxPixels=1e13,
    )
    task.start()
    return task


def export_pair(s1_data, s2_data, s1_date, s2_date, roi, name):
    """Export the S1 mosaic, processed S2 mosaic and cloud mask for a date pair.

    Args:
        s1_data, s2_data: ee.ImageCollections whose images carry a 'date'
            property (see ``add_date``).
        s1_date, s2_date: 'YYYYMMdd' strings selecting the images to mosaic.
        roi: export region.
        name: location name used in the output file prefixes.
    """
    s1_imgs = s1_data.filter(ee.Filter.eq('date', s1_date))
    s2_imgs = s2_data.filter(ee.Filter.eq('date', s2_date))

    # BUG FIX: the original tested `if s1_image and s2_image`, but ee.Image
    # objects are always truthy, so the "no matching images" branch was
    # unreachable.  Check the filtered collection sizes instead.
    if s1_imgs.size().getInfo() == 0 or s2_imgs.size().getInfo() == 0:
        print(f'No matching images for date: {s1_date}, {s2_date}')
        return

    processed, cloud_mask = s2_preprocessing(s2_imgs.mosaic(), roi)

    _export_to_drive(s1_imgs.mosaic(), f'Sentinel1_{s1_date}',
                     f'{s1_date}_{name}_s1', roi)
    _export_to_drive(processed, f'Sentinel2_{s2_date}',
                     f'{s2_date}_{name}_s2', roi)
    _export_to_drive(cloud_mask, f'Sentinel2_CloudMask_{s2_date}',
                     f'{s2_date}_{name}_cloud_mask', roi)
    print(f'Exporting Sentinel-1 and Sentinel-2 for date: {s1_date}, {s2_date}')


def generate_dataset(name, roi, years):
    """Export co-dated Sentinel-1 / Sentinel-2 pairs for one location.

    For each year, collects S1 GRD (IW mode, VV/VH) and S2 SR scenes inside
    the MONTHS_START..MONTHS_END window over `roi` and exports every date
    that appears in both collections.
    """
    for year in years:
        date_start = f'{year}-{MONTHS_START:02d}-01'
        # BUG FIX: the original hard-coded day 30 for the end date, which is
        # invalid for February and drops the 31st of long months; use the
        # month's real last day instead.
        last_day = calendar.monthrange(year, MONTHS_END)[1]
        date_end = f'{year}-{MONTHS_END:02d}-{last_day:02d}'
        print(f'Processing year: {year} from {date_start} to {date_end}')

        s1_col = (ee.ImageCollection('COPERNICUS/S1_GRD')
                  .filterBounds(roi)
                  .filterDate(date_start, date_end)
                  .filter(ee.Filter.eq('instrumentMode', 'IW'))
                  .map(lambda img: img.select(['VV', 'VH']).clip(roi).toFloat())
                  .map(add_date))
        s2_col = (ee.ImageCollection('COPERNICUS/S2_SR_HARMONIZED')
                  .filterBounds(roi)
                  .filterDate(date_start, date_end)
                  .map(add_date))

        # PERF: read the precomputed 'date' property in one getInfo() round
        # trip per collection (the original issued one request per image to
        # format each timestamp).  Also removed an unused DEM mosaic that the
        # original rebuilt here on every iteration.
        s1_dates = s1_col.aggregate_array('date').getInfo()
        s2_dates = s2_col.aggregate_array('date').getInfo()

        # Sorted for a deterministic export order (sets are unordered).
        for date in sorted(set(s1_dates) & set(s2_dates)):
            export_pair(s1_col, s2_col, date, date, roi, name)
        # NOTE: an earlier (commented-out) variant also paired S1/S2 scenes
        # up to 2 days apart with a two-pointer sweep over sorted timestamps;
        # see version history if that fuzzy matching is needed again.


def export_location_dem(name, roi):
    """Export the Copernicus GLO-30 DEM clipped to `roi` at 10 m to Drive."""
    dem_image = (ee.ImageCollection('COPERNICUS/DEM/GLO30')
                 .mosaic()
                 .clip(roi)
                 .select('DEM')
                 .rename('elevation')
                 .toFloat())
    # NOTE(review): DEMs go to 'GEE_exports' while the image pairs go to
    # 'GEE_exports_02' — kept byte-identical to match existing downstream
    # paths; confirm whether this split is intentional.
    task_dem = ee.batch.Export.image.toDrive(
        image=dem_image,
        description=f'DEM_{name}',
        folder='GEE_exports',
        fileNamePrefix=f'{name}_dem',
        region=roi,
        scale=10,
        maxPixels=1e13,
    )
    task_dem.start()
    print(f'Exporting DEM for location: {name}')


# Drive the full export: one DEM plus all co-dated S1/S2 pairs per location.
for loc_name, loc_roi in LOCATION.items():
    print(f'Generating dataset for location: {loc_name}')
    export_location_dem(loc_name, loc_roi)
    generate_dataset(loc_name, loc_roi, YEARS)