Vertdure committed on
Commit 1d6dba1 · verified · 1 Parent(s): ee783b2

Upload 31 files
Home.py ADDED
@@ -0,0 +1,58 @@
+ import streamlit as st
+ import leafmap.foliumap as leafmap
+
+ st.set_page_config(layout="wide")
+
+ st.sidebar.title("About")
+ st.sidebar.info(
+     """
+     - Web App URL: <https://streamlit.gishub.org>
+     - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
+     """
+ )
+
+ st.sidebar.title("Contact")
+ st.sidebar.info(
+     """
+     Picenni Kenzo [wetlands.io]() | [GitHub]() | [Twitter]() | [YouTube]() | [LinkedIn]()
+     """
+ )
+
+ st.sidebar.title("Support")
+ st.sidebar.info(
+     """
+     If you want to reward my work, I'd love a cup of coffee from you. Thanks!
+     """
+ )
+
+
+ st.title("VertGIS")
+
+ st.markdown(
+     """
+     This multi-page web app demonstrates various interactive web apps created using [streamlit](https://streamlit.io) and open-source mapping libraries,
+     such as [leafmap](https://leafmap.org), [geemap](https://geemap.org), [pydeck](https://deckgl.readthedocs.io), and [kepler.gl](https://docs.kepler.gl/docs/keplergl-jupyter).
+     This is an open-source project and you are very welcome to contribute your comments, questions, resources, and apps as [issues](https://github.com/giswqs/streamlit-geospatial/issues) or
+     [pull requests](https://github.com/giswqs/streamlit-geospatial/pulls) to the [GitHub repository](https://github.com/giswqs/streamlit-geospatial).
+     """
+ )
+
+ st.info("Click on the left sidebar menu to navigate to the different apps.")
+
+ st.subheader("Timelapse of Satellite Imagery")
+ st.markdown(
+     """
+     The following timelapse animations were created using the Timelapse web app. Click `Timelapse` on the left sidebar menu to create your own timelapse for any location around the globe.
+     """
+ )
+
+ row1_col1, row1_col2 = st.columns(2)
+ with row1_col1:
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/spain.gif")
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/las_vegas.gif")
+
+ with row1_col2:
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/goes.gif")
+     st.image("https://github.com/giswqs/data/raw/main/timelapse/fire.gif")
LICENSE ADDED
@@ -0,0 +1,21 @@
+ MIT License
+
+ Copyright (c) 2021 Qiusheng Wu
+
+ Permission is hereby granted, free of charge, to any person obtaining a copy
+ of this software and associated documentation files (the "Software"), to deal
+ in the Software without restriction, including without limitation the rights
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+ copies of the Software, and to permit persons to whom the Software is
+ furnished to do so, subject to the following conditions:
+
+ The above copyright notice and this permission notice shall be included in all
+ copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+ SOFTWARE.
README.md CHANGED
@@ -1,11 +1,11 @@
  ---
- title: VertGis
- emoji: 📚
- colorFrom: blue
- colorTo: blue
+ title: Streamlit
+ emoji: 🔥
+ colorFrom: indigo
+ colorTo: green
  sdk: streamlit
- sdk_version: 1.39.0
- app_file: app.py
+ sdk_version: 1.34.0
+ app_file: Home.py
  pinned: false
  license: mit
  ---
data/cog_files.txt ADDED
@@ -0,0 +1,77 @@
+ https://www.maxar.com/open-data/california-colorado-fires
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-02-16/pine-gulch-fire20/1030010076004E00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-08-18/pine-gulch-fire20/1040010041D3B300.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-11-13/grizzly-creek-fire20/1040010045785200.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2018-11-13/grizzly-creek-fire20/10400100443AEC00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-02-06/czu-lightning-complex-fire/104001004941E100.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-02-18/cameron-peak-fire20/103001008DA5B500.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-02-22/czu-lightning-complex-fire/103001008DB2E200.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-04-01/grizzly-creek-fire20/104001004881EF00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-04-17/czu-lightning-complex-fire/103001008F905300.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-04-17/czu-lightning-complex-fire/1030010092B22200.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-06-27/czu-lightning-complex-fire/1030010094A52300.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-09-08/czu-lightning-complex-fire/103001009C9FBB00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-09-24/lnu-lightning-complex-fire/103001009A079B00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-10-05/czu-lightning-complex-fire/103001009C10F800.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-10-05/czu-lightning-complex-fire/103001009A266800.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-04/czu-lightning-complex-fire/1050010019917900.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-04/czu-lightning-complex-fire/1050010019917800.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-18/czu-lightning-complex-fire/1050010019C2F600.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-11-28/cameron-peak-fire20/103001009D72E000.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-12-10/czu-lightning-complex-fire/105001001A3A8700.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-12-28/lnu-lightning-complex-fire/10300100A1972700.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2019-12-28/lnu-lightning-complex-fire/103001009F5D6B00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-01-15/cameron-peak-fire20/1040010057992100.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-04-15/lnu-lightning-complex-fire/10300100A4B23600.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-04-23/czu-lightning-complex-fire/10300100A589D100.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-09/lnu-lightning-complex-fire/10300100A332EE00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-23/river-carmel-fires/10300100A77E9400.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-23/river-carmel-fires/10300100A500A500.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-05-24/river-carmel-fires/105001001D64E200.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-27/lnu-lightning-complex-fire/10300100A8663800.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-30/river-carmel-fires/10300100A9D60C00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-30/czu-lightning-complex-fire/10300100A8C66400.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-06-30/czu-lightning-complex-fire/10300100A8892900.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-11/czu-lightning-complex-fire/10300100AB381200.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-11/czu-lightning-complex-fire/10300100AA180600.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-13/pine-gulch-fire20/10300100AA57D700.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-20/lnu-lightning-complex-fire/104001005C529000.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/pre-event/2020-07-28/pine-gulch-fire20/104001005DB06E00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-14/pine-gulch-fire20/10300100AAC8DD00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-16/pine-gulch-fire20/104001005D4A6100.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-17/grizzly-creek-fire20/10300100ACCA3700.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-17/cameron-peak-fire20/10300100AB4ED400.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/swir-cog/104A0100606FFE00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/pine-gulch-fire20/10300100ACD06200.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/pine-gulch-fire20/10300100AAD4A000.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/pine-gulch-fire20/10300100AA293800.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-20/lnu-lightning-complex-fire/10400100606FFE00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/10300100ACBA2B00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/10300100AA49F600.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/lnu-lightning-complex-fire/104001005C1AC900.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/104001005F9F5300.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/104001005F453300.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/river-carmel-fires/10300100ADC14400.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-21/czu-lightning-complex-fire/104001005F43D400.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-23/grizzly-creek-fire20/104001005FA09C00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-23/grizzly-creek-fire20/104001005DC71000.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-26/river-carmel-fires/105001001F58F000.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-26/lnu-lightning-complex-fire/10300100AC163A00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-29/river-carmel-fires/10300100AAD27500.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-29/river-carmel-fires/10300100A9C75A00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-03/cameron-peak-fire20/1040010060188800.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-03/cameron-peak-fire20/104001005F7E6500.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-03/cameron-peak-fire20/10300100AE685A00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-09-04/cameron-peak-fire20/1040010060761C00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-05/cameron-peak-fire20/104001006113B700.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-05/cameron-peak-fire20/10400100610CD400.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/1040010062B14C00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/10400100626BFA00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/10400100622A6600.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/10400100606B6300.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-12/cameron-peak-fire20/104001005F908800.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-15/cameron-peak-fire20/10500100205EDA00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-15/cameron-peak-fire20/10500100205ED900.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-22/east-troublesome-fire20/10300100B0004A00.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-22/east-troublesome-fire20/10300100AD0D1200.tif
+ https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-10-22/east-troublesome-fire20/10300100AD0CA600.tif
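These Cloud Optimized GeoTIFFs can be streamed onto a map without downloading them first. A minimal sketch (not part of this commit, assuming leafmap's `add_cog_layer` helper) of previewing one of the URLs above:

import leafmap.foliumap as leafmap

# Preview one of the post-event Maxar COGs listed in data/cog_files.txt.
m = leafmap.Map()
url = "https://opendata.digitalglobe.com/events/california-fire-2020/post-event/2020-08-14/pine-gulch-fire20/10300100AAC8DD00.tif"
m.add_cog_layer(url, name="Pine Gulch fire (post-event)")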
data/html/sfo_buildings.html ADDED
@@ -0,0 +1,34 @@
+ <!DOCTYPE html>
+ <html lang="en">
+ <head>
+   <meta charset="utf-8">
+   <!-- Include the CesiumJS JavaScript and CSS files -->
+   <script src="https://cesium.com/downloads/cesiumjs/releases/1.88/Build/Cesium/Cesium.js"></script>
+   <link href="https://cesium.com/downloads/cesiumjs/releases/1.88/Build/Cesium/Widgets/widgets.css" rel="stylesheet">
+ </head>
+ <body>
+   <div id="cesiumContainer"></div>
+   <script>
+     // Your access token can be found at: https://cesium.com/ion/tokens.
+     // Replace `your_access_token` with your Cesium ion access token.
+
+     Cesium.Ion.defaultAccessToken = 'your_access_token';
+
+     // Initialize the Cesium Viewer in the HTML element with the `cesiumContainer` ID.
+     const viewer = new Cesium.Viewer('cesiumContainer', {
+       terrainProvider: Cesium.createWorldTerrain()
+     });
+     // Add Cesium OSM Buildings, a global 3D buildings layer.
+     const buildingTileset = viewer.scene.primitives.add(Cesium.createOsmBuildings());
+     // Fly the camera to San Francisco at the given longitude, latitude, and height.
+     viewer.camera.flyTo({
+       destination : Cesium.Cartesian3.fromDegrees(-122.4175, 37.655, 400),
+       orientation : {
+         heading : Cesium.Math.toRadians(0.0),
+         pitch : Cesium.Math.toRadians(-15.0),
+       }
+     });
+   </script>
+ </body>
+ </html>
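The commit does not show where this HTML is rendered; a minimal sketch of how a Streamlit page could embed it (assuming the file path stays `data/html/sfo_buildings.html`), using `st.components.v1.html`:

import streamlit.components.v1 as components

# Load the saved CesiumJS page and embed it in a Streamlit page.
with open("data/html/sfo_buildings.html") as f:
    html = f.read()

components.html(html, height=600)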
data/realtor_data_dict.csv ADDED
@@ -0,0 +1,37 @@
+ Name,Label,Description
+ median_listing_price,Median Listing Price,The median listing price within the specified geography during the specified month.
+ median_listing_price_mm,Median Listing Price M/M,The percentage change in the median listing price from the previous month.
+ median_listing_price_yy,Median Listing Price Y/Y,The percentage change in the median listing price from the same month in the previous year.
+ active_listing_count,Active Listing Count,"The count of active listings within the specified geography during the specified month. The active listing count tracks the number of for sale properties on the market, excluding pending listings where a pending status is available. This is a snapshot measure of how many active listings can be expected on any given day of the specified month."
+ active_listing_count_mm,Active Listing Count M/M,The percentage change in the active listing count from the previous month.
+ active_listing_count_yy,Active Listing Count Y/Y,The percentage change in the active listing count from the same month in the previous year.
+ median_days_on_market,Days on Market,The median number of days property listings spend on the market within the specified geography during the specified month. Time spent on the market is defined as the time between the initial listing of a property and either its closing date or the date it is taken off the market.
+ median_days_on_market_mm,Days on Market M/M,The percentage change in the median days on market from the previous month.
+ median_days_on_market_yy,Days on Market Y/Y,The percentage change in the median days on market from the same month in the previous year.
+ new_listing_count,New Listing Count,The count of new listings added to the market within the specified geography. The new listing count represents a typical week’s worth of new listings in a given month. The new listing count can be multiplied by the number of weeks in a month to produce a monthly new listing count.
+ new_listing_count_mm,New Listing Count M/M,The percentage change in the new listing count from the previous month.
+ new_listing_count_yy,New Listing Count Y/Y,The percentage change in the new listing count from the same month in the previous year.
+ price_increased_count,Price Increase Count,The count of listings which have had their price increased within the specified geography. The price increase count represents a typical week’s worth of listings which have had their price increased in a given month. The price increase count can be multiplied by the number of weeks in a month to produce a monthly price increase count.
+ price_increased_count_mm,Price Increase Count M/M,The percentage change in the price increase count from the previous month.
+ price_increased_count_yy,Price Increase Count Y/Y,The percentage change in the price increase count from the same month in the previous year.
+ price_reduced_count,Price Decrease Count,The count of listings which have had their price reduced within the specified geography. The price decrease count represents a typical week’s worth of listings which have had their price reduced in a given month. The price decrease count can be multiplied by the number of weeks in a month to produce a monthly price decrease count.
+ price_reduced_count_mm,Price Decrease Count M/M,The percentage change in the price decrease count from the previous month.
+ price_reduced_count_yy,Price Decrease Count Y/Y,The percentage change in the price decrease count from the same month in the previous year.
+ pending_listing_count,Pending Listing Count,"The count of pending listings within the specified geography during the specified month, if a pending definition is available for that geography. This is a snapshot measure of how many pending listings can be expected on any given day of the specified month."
+ pending_listing_count_mm,Pending Listing Count M/M,The percentage change in the pending listing count from the previous month.
+ pending_listing_count_yy,Pending Listing Count Y/Y,The percentage change in the pending listing count from the same month in the previous year.
+ median_listing_price_per_square_foot,Median List Price Per Sqft,The median listing price per square foot within the specified geography during the specified month.
+ median_listing_price_per_square_foot_mm,Median List Price Per Sqft M/M,The percentage change in the median listing price per square foot from the previous month.
+ median_listing_price_per_square_foot_yy,Median List Price Per Sqft Y/Y,The percentage change in the median listing price per square foot from the same month in the previous year.
+ median_square_feet,Median Listing Sqft,The median listing square feet within the specified geography during the specified month.
+ median_square_feet_mm,Median Listing Sqft M/M,The percentage change in the median listing square feet from the previous month.
+ median_square_feet_yy,Median Listing Sqft Y/Y,The percentage change in the median listing square feet from the same month in the previous year.
+ average_listing_price,Avg Listing Price,The average listing price within the specified geography during the specified month.
+ average_listing_price_mm,Avg Listing Price M/M,The percentage change in the average listing price from the previous month.
+ average_listing_price_yy,Avg Listing Price Y/Y,The percentage change in the average listing price from the same month in the previous year.
+ total_listing_count,Total Listing Count,The total of both active listings and pending listings within the specified geography during the specified month. This is a snapshot measure of how many total listings can be expected on any given day of the specified month.
+ total_listing_count_mm,Total Listing Count M/M,The percentage change in the total listing count from the previous month.
+ total_listing_count_yy,Total Listing Count Y/Y,The percentage change in the total listing count from the same month in the previous year.
+ pending_ratio,Pending Ratio,The ratio of the pending listing count to the active listing count within the specified geography during the specified month.
+ pending_ratio_mm,Pending Ratio M/M,The change in the pending ratio from the previous month.
+ pending_ratio_yy,Pending Ratio Y/Y,The change in the pending ratio from the same month in the previous year.
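A small sketch (hypothetical usage, not from this commit) of how an app page might use this data dictionary to turn column names into display labels with pandas:

import pandas as pd

# Map machine-readable column names to human-readable labels.
data_dict = pd.read_csv("data/realtor_data_dict.csv")
labels = dict(zip(data_dict["Name"], data_dict["Label"]))
print(labels["median_listing_price"])  # -> "Median Listing Price"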
data/scotland_xyz.tsv ADDED
@@ -0,0 +1,51 @@
+ Name	URL
+ Ordnance Survey - Air Photos, 1944-1950 - 1:10,560	https://geo.nls.uk/maps/air-photos/{z}/{x}/{y}.png
+ Ordnance Survey - Six Inch Scotland, 1843-1882 - 1:10,560	https://mapseries-tilesets.s3.amazonaws.com/os/6inchfirst/{z}/{x}/{y}.png
+ War Office, Great Britain 1:25,000. GSGS 3906, 1940-43	https://mapseries-tilesets.s3.amazonaws.com/gsgs3906/{z}/{x}/{y}.png
+ Roy - Roy Highlands, 1747-1752 - 1:36000	https://mapseries-tilesets.s3.amazonaws.com/roy/highlands/{z}/{x}/{y}.png
+ Roy - Roy Lowlands, 1752-1755 - 1:36000	https://mapseries-tilesets.s3.amazonaws.com/roy/lowlands/{z}/{x}/{y}.png
+ Great Britain - OS 1:10,560, 1949-1970	https://mapseries-tilesets.s3.amazonaws.com/os/britain10knatgrid/{z}/{x}/{y}.png
+ Great Britain - Bartholomew Half Inch, 1897-1907	https://mapseries-tilesets.s3.amazonaws.com/bartholomew_great_britain/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Scotland South	https://mapseries-tilesets.s3.amazonaws.com/25_inch/scotland_1/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Scotland North	https://mapseries-tilesets.s3.amazonaws.com/25_inch/scotland_2/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Bedfordshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/bedfordshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Berkshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/berkshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Buckinghamshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/buckingham/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Cambridgeshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/cambridge/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Cheshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/cheshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Cornwall	https://mapseries-tilesets.s3.amazonaws.com/25_inch/cornwall/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Cumberland	https://mapseries-tilesets.s3.amazonaws.com/25_inch/cumberland/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Devon	https://mapseries-tilesets.s3.amazonaws.com/25_inch/devon/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Dorset	https://mapseries-tilesets.s3.amazonaws.com/25_inch/dorset/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Durham	https://mapseries-tilesets.s3.amazonaws.com/25_inch/durham/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Essex	https://mapseries-tilesets.s3.amazonaws.com/25_inch/essex/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Gloucestershire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/gloucestershire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Hampshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/hampshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Herefordshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/herefordshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Hertfordshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/hertfordshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Huntingdon	https://mapseries-tilesets.s3.amazonaws.com/25_inch/huntingdon/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Kent	https://mapseries-tilesets.s3.amazonaws.com/25_inch/kent/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Lancashire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/lancashire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Leicestershire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/leicestershire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Lincolnshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/lincolnshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - London	https://mapseries-tilesets.s3.amazonaws.com/25_inch/london/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Middlesex	https://mapseries-tilesets.s3.amazonaws.com/25_inch/middlesex/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Norfolk	https://mapseries-tilesets.s3.amazonaws.com/25_inch/norfolk/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Northamptonshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/northampton/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Northumberland	https://mapseries-tilesets.s3.amazonaws.com/25_inch/northumberland/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Nottinghamshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/nottinghamshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Oxford	https://mapseries-tilesets.s3.amazonaws.com/25_inch/oxford/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Rutland	https://mapseries-tilesets.s3.amazonaws.com/25_inch/rutland/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Shropshire / Derbyshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/Shrop_Derby/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Somerset	https://mapseries-tilesets.s3.amazonaws.com/25_inch/somerset/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Stafford	https://mapseries-tilesets.s3.amazonaws.com/25_inch/stafford/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Suffolk	https://mapseries-tilesets.s3.amazonaws.com/25_inch/suffolk/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Surrey	https://mapseries-tilesets.s3.amazonaws.com/25_inch/surrey/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Sussex	https://mapseries-tilesets.s3.amazonaws.com/25_inch/sussex/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Wales	https://mapseries-tilesets.s3.amazonaws.com/25_inch/wales/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Warwick	https://mapseries-tilesets.s3.amazonaws.com/25_inch/warwick/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Westmorland	https://mapseries-tilesets.s3.amazonaws.com/25_inch/westmorland/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Wiltshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/wiltshire2nd/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Worcestershire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/Worcestershire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 - Yorkshire	https://mapseries-tilesets.s3.amazonaws.com/25_inch/yorkshire/{z}/{x}/{y}.png
+ OS 25 inch, 1892-1914 'Holes' (fills gaps in series)	https://geo.nls.uk/mapdata3/os/25_inch_holes_england/{z}/{x}/{y}.png
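Each row pairs a layer name with an XYZ tile URL template ({z}/{x}/{y}). A minimal sketch (assuming leafmap's `add_tile_layer`, not code from this commit) of loading one of these historical basemaps:

import leafmap.foliumap as leafmap

# Add one of the National Library of Scotland XYZ layers from the table above.
m = leafmap.Map(center=[56.5, -4], zoom=7)
m.add_tile_layer(
    url="https://geo.nls.uk/maps/air-photos/{z}/{x}/{y}.png",
    name="Ordnance Survey - Air Photos, 1944-1950",
    attribution="National Library of Scotland",
)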
data/us_counties.geojson ADDED
The diff for this file is too large to render. See raw diff
 
data/us_metro_areas.geojson ADDED
The diff for this file is too large to render. See raw diff
 
data/us_nation.geojson ADDED
The diff for this file is too large to render. See raw diff
 
data/us_states.geojson ADDED
The diff for this file is too large to render. See raw diff
 
packages.txt ADDED
@@ -0,0 +1,9 @@
+ ffmpeg
+ gifsicle
+ build-essential
+ python3-dev
+ gdal-bin
+ libgdal-dev
+ libproj-dev
+ libgeos-dev
+ proj-bin
pages/10_🌍_Earth_Engine_Datasets.py ADDED
@@ -0,0 +1,162 @@
+ import ee
+ import json
+ import streamlit as st
+ import geemap.foliumap as geemap
+
+ st.set_page_config(layout="wide")
+
+ st.sidebar.info(
+     """
+     - Web App URL: <https://streamlit.gishub.org>
+     - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
+     """
+ )
+
+ st.sidebar.title("Contact")
+ st.sidebar.info(
+     """
+     Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
+     """
+ )
+
+
+ def nlcd():
+
+     # st.header("National Land Cover Database (NLCD)")
+
+     row1_col1, row1_col2 = st.columns([3, 1])
+     width = 950
+     height = 600
+
+     Map = geemap.Map(center=[40, -100], zoom=4)
+
+     # Select the eight NLCD epochs after 2000.
+     years = ["2001", "2004", "2006", "2008", "2011", "2013", "2016", "2019"]
+
+     # Get an NLCD image by year.
+     def getNLCD(year):
+         # Import the NLCD collection.
+         dataset = ee.ImageCollection("USGS/NLCD_RELEASES/2019_REL/NLCD")
+
+         # Filter the collection by year.
+         nlcd = dataset.filter(ee.Filter.eq("system:index", year)).first()
+
+         # Select the land cover band.
+         landcover = nlcd.select("landcover")
+         return landcover
+
+     with row1_col2:
+         selected_year = st.multiselect("Select a year", years)
+         add_legend = st.checkbox("Show legend")
+
+         if selected_year:
+             for year in selected_year:
+                 Map.addLayer(getNLCD(year), {}, "NLCD " + year)
+
+             if add_legend:
+                 Map.add_legend(
+                     legend_title="NLCD Land Cover Classification", builtin_legend="NLCD"
+                 )
+             with row1_col1:
+                 Map.to_streamlit(width=width, height=height)
+
+         else:
+             with row1_col1:
+                 Map.to_streamlit(width=width, height=height)
+
+
+ def search_data():
+
+     # st.header("Search Earth Engine Data Catalog")
+
+     Map = geemap.Map()
+
+     if "ee_assets" not in st.session_state:
+         st.session_state["ee_assets"] = None
+     if "asset_titles" not in st.session_state:
+         st.session_state["asset_titles"] = None
+
+     col1, col2 = st.columns([2, 1])
+
+     dataset = None
+     with col2:
+         keyword = st.text_input("Enter a keyword to search (e.g., elevation)", "")
+         if keyword:
+             ee_assets = geemap.search_ee_data(keyword)
+             asset_titles = [x["title"] for x in ee_assets]
+             asset_types = [x["type"] for x in ee_assets]
+
+             translate = {
+                 "image_collection": "ee.ImageCollection('",
+                 "image": "ee.Image('",
+                 "table": "ee.FeatureCollection('",
+                 "table_collection": "ee.FeatureCollection('",
+             }
+
+             dataset = st.selectbox("Select a dataset", asset_titles)
+             if len(ee_assets) > 0:
+                 st.session_state["ee_assets"] = ee_assets
+                 st.session_state["asset_titles"] = asset_titles
+
+             if dataset is not None:
+                 with st.expander("Show dataset details", True):
+                     index = asset_titles.index(dataset)
+
+                     html = geemap.ee_data_html(st.session_state["ee_assets"][index])
+                     html = html.replace("\n", "")
+                     st.markdown(html, True)
+
+                 ee_id = ee_assets[index]["id"]
+                 uid = ee_assets[index]["uid"]
+                 st.markdown(f"""**Earth Engine Snippet:** `{ee_id}`""")
+                 ee_asset = f"{translate[asset_types[index]]}{ee_id}')"
+
+                 if ee_asset.startswith("ee.ImageCollection"):
+                     ee_asset = ee.ImageCollection(ee_id)
+                 elif ee_asset.startswith("ee.Image"):
+                     ee_asset = ee.Image(ee_id)
+                 elif ee_asset.startswith("ee.FeatureCollection"):
+                     ee_asset = ee.FeatureCollection(ee_id)
+
+                 vis_params = st.text_input(
+                     "Enter visualization parameters as a dictionary", {}
+                 )
+                 layer_name = st.text_input("Enter a layer name", uid)
+                 button = st.button("Add dataset to map")
+                 if button:
+                     vis = {}
+                     try:
+                         if vis_params.strip() == "":
+                             # st.error("Please enter visualization parameters")
+                             vis_params = "{}"
+                         vis = json.loads(vis_params.replace("'", '"'))
+                         if not isinstance(vis, dict):
+                             st.error("Visualization parameters must be a dictionary")
+                         try:
+                             Map.addLayer(ee_asset, vis, layer_name)
+                         except Exception as e:
+                             st.error(f"Error adding layer: {e}")
+                     except Exception as e:
+                         st.error(f"Invalid visualization parameters: {e}")
+
+             with col1:
+                 Map.to_streamlit()
+         else:
+             with col1:
+                 Map.to_streamlit()
+
+
+ def app():
+     st.title("Earth Engine Data Catalog")
+
+     apps = ["Search Earth Engine Data Catalog", "National Land Cover Database (NLCD)"]
+
+     selected_app = st.selectbox("Select an app", apps)
+
+     if selected_app == "National Land Cover Database (NLCD)":
+         nlcd()
+     elif selected_app == "Search Earth Engine Data Catalog":
+         search_data()
+
+
+ app()
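To clarify the `system:index` filter that `getNLCD()` relies on, here is a standalone sketch (assuming Earth Engine credentials are already configured; not part of this commit):

import ee

ee.Initialize()

# Fetch the 2019 NLCD epoch the same way getNLCD() does and confirm the band.
dataset = ee.ImageCollection("USGS/NLCD_RELEASES/2019_REL/NLCD")
nlcd_2019 = dataset.filter(ee.Filter.eq("system:index", "2019")).first()
print(nlcd_2019.select("landcover").bandNames().getInfo())  # ['landcover']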
pages/12_🌲_VertXtractor.py ADDED
@@ -0,0 +1,443 @@
+ import streamlit as st
+ import folium
+ from streamlit_folium import st_folium
+ from folium.plugins import Draw
+ import geopandas as gpd
+ import tempfile
+ import os
+ import urllib.request
+ import urllib.parse
+ import json
+ from pathlib import Path
+ import datetime
+ from osgeo import gdal
+ import io
+ import zipfile
+ import base64
+ import concurrent.futures
+ import requests
+ from functools import partial
+
+ # Constants
+ CATEGORIES = {
+     'Gebueschwald': 'Forêt buissonnante',
+     'Wald': 'Forêt',
+     'Wald offen': 'Forêt clairsemée',
+     'Gehoelzflaeche': 'Zone boisée',
+ }
+ MERGE_CATEGORIES = True
+
+ URL_STAC_SWISSTOPO_BASE = 'https://data.geo.admin.ch/api/stac/v0.9/collections/'
+
+ DIC_LAYERS = {
+     'ortho': 'ch.swisstopo.swissimage-dop10',
+     'mnt': 'ch.swisstopo.swissalti3d',
+     'mns': 'ch.swisstopo.swisssurface3d-raster',
+     'bati3D_v2': 'ch.swisstopo.swissbuildings3d_2',
+     'bati3D_v3': 'ch.swisstopo.swissbuildings3d_3_0',
+ }
+
+ # Helper functions
+ def wgs84_to_lv95(lat, lon):
+     url = f'http://geodesy.geo.admin.ch/reframe/wgs84tolv95?easting={lon}&northing={lat}&format=json'
+     with urllib.request.urlopen(url) as response:
+         data = json.load(response)
+     return data['easting'], data['northing']
+
+ def lv95_to_wgs84(x, y):
+     url = f'http://geodesy.geo.admin.ch/reframe/lv95towgs84?easting={x}&northing={y}&format=json'
+     with urllib.request.urlopen(url) as response:
+         data = json.load(response)
+     return data['northing'], data['easting']
+
+ def detect_and_convert_bbox(bbox):
+     xmin, ymin, xmax, ymax = bbox
+
+     wgs84_margin = 0.9
+     wgs84_bounds = {
+         'xmin': 5.96 - wgs84_margin,
+         'ymin': 45.82 - wgs84_margin,
+         'xmax': 10.49 + wgs84_margin,
+         'ymax': 47.81 + wgs84_margin
+     }
+
+     lv95_margin = 100000
+     lv95_bounds = {
+         'xmin': 2485000 - lv95_margin,
+         'ymin': 1075000 - lv95_margin,
+         'xmax': 2834000 + lv95_margin,
+         'ymax': 1296000 + lv95_margin
+     }
+
+     if (wgs84_bounds['xmin'] <= xmin <= wgs84_bounds['xmax'] and
+         wgs84_bounds['ymin'] <= ymin <= wgs84_bounds['ymax'] and
+         wgs84_bounds['xmin'] <= xmax <= wgs84_bounds['xmax'] and
+         wgs84_bounds['ymin'] <= ymax <= wgs84_bounds['ymax']):
+
+         lv95_min = wgs84_to_lv95(ymin, xmin)
+         lv95_max = wgs84_to_lv95(ymax, xmax)
+
+         bbox_lv95 = (lv95_min[0], lv95_min[1], lv95_max[0], lv95_max[1])
+         return (bbox, bbox_lv95)
+
+     if (lv95_bounds['xmin'] <= xmin <= lv95_bounds['xmax'] and
+         lv95_bounds['ymin'] <= ymin <= lv95_bounds['ymax'] and
+         lv95_bounds['xmin'] <= xmax <= lv95_bounds['xmax'] and
+         lv95_bounds['ymin'] <= ymax <= lv95_bounds['ymax']):
+
+         wgs84_min = lv95_to_wgs84(xmin, ymin)
+         wgs84_max = lv95_to_wgs84(xmax, ymax)
+
+         bbox_wgs84 = (wgs84_min[1], wgs84_min[0], wgs84_max[1], wgs84_max[0])
+         return (bbox_wgs84, bbox)
+
+     return None
+
+ def get_list_from_STAC_swisstopo(url, est, sud, ouest, nord, gdb=False):
+     if gdb:
+         lst_indesirables = []
+     else:
+         lst_indesirables = ['.xyz.zip', '.gdb.zip']
+
+     sufixe_url = f"/items?bbox={est},{sud},{ouest},{nord}"
+     url += sufixe_url
+     res = []
+
+     while url:
+         with urllib.request.urlopen(url) as response:
+             json_res = json.load(response)
+         url = None
+         links = json_res.get('links', None)
+         if links:
+             for link in links:
+                 if link['rel'] == 'next':
+                     url = link['href']
+
+         for item in json_res['features']:
+             for k, dic in item['assets'].items():
+                 href = dic['href']
+                 if gdb:
+                     if href[-8:] == '.gdb.zip':
+                         if len(dic['href'].split('/')[-1].split('_')) == 7:
+                             res.append(dic['href'])
+                 else:
+                     if href[-8:] not in lst_indesirables:
+                         res.append(dic['href'])
+     return res
+
+ def suppr_doublons_bati3D_v2(lst_url):
+     dico = {}
+     dxf_files = [url for url in lst_url if url[-8:] == '.dxf.zip']
+     for dxf in dxf_files:
+         *a, date, feuille = dxf.split('/')[-2].split('_')
+         dico.setdefault(feuille, []).append((date, dxf))
+     res = []
+     for k, liste in dico.items():
+         res.append(sorted(liste, reverse=True)[0][1])
+     return res
+
+ def suppr_doublons_bati3D_v3(lst_url):
+     dico = {}
+     gdb_files = [url for url in lst_url if url[-8:] == '.gdb.zip']
+     for gdb in gdb_files:
+         *a, date, feuille = gdb.split('/')[-2].split('_')
+         dico.setdefault(feuille, []).append((date, gdb))
+     res = []
+     for k, liste in dico.items():
+         res.append(sorted(liste, reverse=True)[0][1])
+     return res
+
+ def suppr_doublons_list_ortho(lst):
+     dic = {}
+     for url in lst:
+         nom, an, noflle, taille_px, epsg = url.split('/')[-1][:-4].split('_')
+         dic.setdefault((noflle, float(taille_px)), []).append((an, url))
+     res = []
+     for noflle, lst in dic.items():
+         an, url = sorted(lst, reverse=True)[0]
+         res.append(url)
+     return res
+
+ def suppr_doublons_list_mnt(lst):
+     dic = {}
+     for url in lst:
+         nom, an, noflle, taille_px, epsg, inconnu = url.split('/')[-1][:-4].split('_')
+         dic.setdefault((noflle, float(taille_px)), []).append((an, url))
+     res = []
+     for noflle, lst in dic.items():
+         an, url = sorted(lst, reverse=True)[0]
+         res.append(url)
+     return res
+
+ @st.cache_data
+ def get_urls(bbox_wgs84, data_types, resolutions):
+     urls = []
+     with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
+         future_to_data_type = {
+             executor.submit(
+                 get_urls_for_data_type,
+                 data_type,
+                 bbox_wgs84,
+                 resolutions.get(data_type)
+             ): data_type for data_type, enabled in data_types.items() if enabled
+         }
+         for future in concurrent.futures.as_completed(future_to_data_type):
+             data_type = future_to_data_type[future]
+             try:
+                 urls.extend(future.result())
+             except Exception as exc:
+                 st.error(f"Error fetching URLs for {data_type}: {exc}")
+     return urls
+
+ def get_urls_for_data_type(data_type, bbox_wgs84, resolution=None):
+     url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS[data_type]
+     if data_type in ['mnt', 'ortho']:
+         tri = f'_{resolution}_'
+         lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox_wgs84) if tri in v]
+         if data_type == 'mnt':
+             return suppr_doublons_list_mnt(lst)
+         else:
+             return suppr_doublons_list_ortho(lst)
+     elif data_type == 'mns':
+         lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox_wgs84) if 'raster' in v]
+         return suppr_doublons_list_mnt(lst)
+     elif data_type == 'bati3D_v2':
+         lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84)
+         return suppr_doublons_bati3D_v2(lst)
+     elif data_type == 'bati3D_v3':
+         lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84, gdb=True)
+         return suppr_doublons_bati3D_v3(lst)
+     return []
+
+ def fetch_url(url):
+     response = requests.get(url)
+     return response.content
+
+ def merge_ortho_images(urls, output_format='GTiff'):
+     try:
+         with tempfile.TemporaryDirectory() as temp_dir:
+             local_files = []
+             with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
+                 future_to_url = {executor.submit(fetch_url, url): url for url in urls}
+                 for i, future in enumerate(concurrent.futures.as_completed(future_to_url)):
+                     url = future_to_url[future]
+                     try:
+                         data = future.result()
+                         local_filename = os.path.join(temp_dir, f"ortho_{i}.tif")
+                         with open(local_filename, 'wb') as f:
+                             f.write(data)
+                         local_files.append(local_filename)
+                     except Exception as exc:
+                         st.error(f"Error downloading {url}: {exc}")
+
+             if not local_files:
+                 st.error("No ortho images were successfully downloaded.")
+                 return None
+
+             vrt_options = gdal.BuildVRTOptions(resampleAlg='nearest', addAlpha=False)
+             vrt_path = os.path.join(temp_dir, "merged.vrt")
+             vrt = gdal.BuildVRT(vrt_path, local_files, options=vrt_options)
+             vrt = None  # Close the dataset
+
+             output_path = os.path.join(temp_dir, f"merged.{output_format.lower()}")
+             if output_format == 'GTiff':
+                 translate_options = gdal.TranslateOptions(format="GTiff", creationOptions=["COMPRESS=LZW", "TILED=YES"])
+             elif output_format == 'JPEG':
+                 translate_options = gdal.TranslateOptions(format="JPEG", creationOptions=["QUALITY=85"])
+             elif output_format == 'PNG':
+                 translate_options = gdal.TranslateOptions(format="PNG", creationOptions=["COMPRESS=DEFLATE"])
+             else:
+                 st.error(f"Unsupported output format: {output_format}")
+                 return None
+
+             gdal.Translate(output_path, vrt_path, options=translate_options)
+
+             if not os.path.exists(output_path):
+                 st.error(f"Failed to create merged image: {output_path}")
+                 return None
+
+             with open(output_path, 'rb') as f:
+                 return f.read()
+     except Exception as e:
+         st.error(f"Error in merge_ortho_images: {e}")
+         return None
+
+ def create_geojson_with_links(urls, bbox):
+     features = []
+     for url in urls:
+         feature = {
+             "type": "Feature",
+             "geometry": {
+                 "type": "Polygon",
+                 "coordinates": [bbox]
+             },
+             "properties": {
+                 "url": url,
+                 "type": url.split('/')[-2].split('_')[0]
+             }
+         }
+         features.append(feature)
+
+     geojson = {
+         "type": "FeatureCollection",
+         "features": features
+     }
+     return json.dumps(geojson)
+
+ @st.cache_data
+ def prepare_download_package(urls, bbox, ortho_format):
+     geojson_data = create_geojson_with_links(urls, bbox)
+     ortho_urls = [url for url in urls if 'swissimage-dop10' in url]
+     ortho_data = merge_ortho_images(ortho_urls, ortho_format) if ortho_urls else None
+
+     zip_buffer = io.BytesIO()
+     with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
+         zip_file.writestr('download_links.geojson', geojson_data)
+         if ortho_data:
+             zip_file.writestr(f'merged_ortho.{ortho_format.lower()}', ortho_data)
+         else:
+             st.warning("Failed to merge ortho images. Only download links will be included in the package.")
+
+     return zip_buffer.getvalue()
+
+ def geojson_forest(bbox, fn_geojson):
+     xmin, ymin, xmax, ymax = bbox
+     url_base = 'https://hepiadata.hesge.ch/arcgis/rest/services/suisse/TLM_C4D_couverture_sol/FeatureServer/1/query?'
+
+     sql = ' OR '.join([f"OBJEKTART='{cat}'" for cat in CATEGORIES.keys()])
+
+     params = {
+         "geometry": f"{xmin},{ymin},{xmax},{ymax}",
+         "geometryType": "esriGeometryEnvelope",
+         "returnGeometry": "true",
+         "outFields": "OBJEKTART",
+         "orderByFields": "OBJEKTART",
+         "where": sql,
+         "returnZ": "true",
+         "outSR": '2056',
+         "spatialRel": "esriSpatialRelIntersects",
+         "f": "geojson"
+     }
+     query_string = urllib.parse.urlencode(params)
+     url = url_base + query_string
+
+     with urllib.request.urlopen(url) as response:
+         data = json.load(response)
+
+     with open(fn_geojson, 'w') as f:
+         json.dump(data, f)
+
+ # Streamlit app
+ st.set_page_config(page_title="Swiss Geospatial Data Downloader", layout="wide")
+
+ st.title("Swiss Geospatial Data Downloader")
+
+ # Sidebar for data selection
+ st.sidebar.header("Data Selection")
+ data_types = {
+     'mnt': st.sidebar.checkbox("Digital Terrain Model (MNT)", value=True),
+     'mns': st.sidebar.checkbox("Digital Surface Model (MNS)", value=True),
+     'bati3D_v2': st.sidebar.checkbox("3D Buildings v2", value=True),
+     'bati3D_v3': st.sidebar.checkbox("3D Buildings v3", value=True),
+     'ortho': st.sidebar.checkbox("Orthophotos", value=True),
+ }
+
+ resolutions = {
+     'mnt': st.sidebar.selectbox("MNT Resolution", [0.5, 2.0], index=0),
+     'ortho': st.sidebar.selectbox("Orthophoto Resolution", [0.1, 2.0], index=0),
+ }
+
+ ortho_format = st.sidebar.selectbox("Ortho Output Format", ['GTiff', 'JPEG', 'PNG'], index=0)
+
+ # Main content area
+ st.subheader("Select Bounding Box")
+
+ # Create a map centered on Switzerland
+ m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
+
+ # Add rectangle draw control
+ draw = Draw(
+     draw_options={
+         'rectangle': True,
+         'polygon': False,
+         'polyline': False,
+         'circle': False,
+         'marker': False,
+         'circlemarker': False
+     },
+     edit_options={'edit': False}
+ )
+ draw.add_to(m)
+
+ # Use st_folium to render the map and get the drawn bbox
+ output = st_folium(m, width=700, height=500)
+
+ # Initialize session state for bbox
+ if 'bbox' not in st.session_state:
+     st.session_state.bbox = [6.0, 46.0, 10.0, 47.0]  # Default values for Switzerland
+
+ # Update bbox if a new one is drawn
+ if output['last_active_drawing']:
+     coordinates = output['last_active_drawing']['geometry']['coordinates'][0]
+     st.session_state.bbox = [
+         min(coord[0] for coord in coordinates),
+         min(coord[1] for coord in coordinates),
+         max(coord[0] for coord in coordinates),
+         max(coord[1] for coord in coordinates)
+     ]
+
+ # Display and allow editing of bounding box coordinates
+ st.subheader("Enter Bounding Box Coordinates")
+ col1, col2, col3, col4 = st.columns(4)
+ with col1:
+     xmin = st.number_input("Min Longitude", value=st.session_state.bbox[0], format="%.4f", key="xmin")
+ with col2:
+     ymin = st.number_input("Min Latitude", value=st.session_state.bbox[1], format="%.4f", key="ymin")
+ with col3:
+     xmax = st.number_input("Max Longitude", value=st.session_state.bbox[2], format="%.4f", key="xmax")
+ with col4:
+     ymax = st.number_input("Max Latitude", value=st.session_state.bbox[3], format="%.4f", key="ymax")
+
+ # Update session state if coordinates are manually changed
+ st.session_state.bbox = [xmin, ymin, xmax, ymax]
+
+ if st.session_state.bbox:
+     st.write(f"Selected bounding box (WGS84): {st.session_state.bbox}")
+
+     bbox_results = detect_and_convert_bbox(st.session_state.bbox)
+
+     if bbox_results:
+         bbox_wgs84, bbox_lv95 = bbox_results
+         st.write(f"Converted bounding box (LV95): {bbox_lv95}")
+
+         if st.button("Get Download Package"):
+             with st.spinner("Preparing download package..."):
+                 urls = get_urls(bbox_wgs84, data_types, resolutions)
+                 if urls:
+                     zip_data = prepare_download_package(urls, bbox_wgs84, ortho_format)
+                     b64 = base64.b64encode(zip_data).decode()
+                     href = f'<a href="data:application/zip;base64,{b64}" download="swiss_geospatial_data.zip">Download All Data</a>'
+                     st.markdown(href, unsafe_allow_html=True)
+                     st.success("Download package prepared. Click the link above to download.")
+                 else:
+                     st.warning("No files found for the selected area and options.")
+
+         if st.button("Download Forest Data"):
+             with st.spinner("Downloading forest data..."):
+                 with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.geojson') as tmp:
+                     geojson_forest(bbox_lv95, tmp.name)
+                 gdf = gpd.read_file(tmp.name)
+                 st.write(gdf)
+
+                 # Provide download link for forest data
+                 with open(tmp.name, 'r') as f:
+                     forest_data = f.read()
+                 b64 = base64.b64encode(forest_data.encode()).decode()
+                 href = f'<a href="data:application/json;base64,{b64}" download="forest_data.geojson">Download Forest Data</a>'
+                 st.markdown(href, unsafe_allow_html=True)
+
+                 os.unlink(tmp.name)
+                 st.success("Forest data prepared. Click the link above to download.")
+     else:
+         st.error("Selected area is outside Switzerland. Please select an area within Switzerland.")
+
+ st.sidebar.info("This application allows you to download various types of geospatial data for Switzerland. Select the data types you want, draw a bounding box on the map, and click 'Get Download Package' to prepare all data for download.")
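The STAC pagination logic in `get_list_from_STAC_swisstopo()` can be exercised outside Streamlit. A minimal sketch against the same swisstopo endpoint (the bbox values here are illustrative, not from the commit):

import json
import urllib.request

# List asset URLs for swissALTI3D tiles intersecting a small bbox near Geneva.
url = (
    "https://data.geo.admin.ch/api/stac/v0.9/collections/"
    "ch.swisstopo.swissalti3d/items?bbox=6.10,46.18,6.18,46.23"
)
with urllib.request.urlopen(url) as response:
    items = json.load(response)

for feature in items["features"][:3]:
    for asset in feature["assets"].values():
        print(asset["href"])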
pages/1_🖼️_VertBox.py ADDED
@@ -0,0 +1,422 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import folium
3
+ from streamlit_folium import st_folium
4
+ from folium.plugins import Draw
5
+ import geopandas as gpd
6
+ import tempfile
7
+ import os
8
+ import urllib.request
9
+ import json
10
+ from pathlib import Path
11
+ import datetime
12
+ from osgeo import gdal
13
+ import io
14
+ import zipfile
15
+ import base64
16
+ import concurrent.futures
17
+ import requests
18
+ from functools import partial
19
+
20
+ # Constants
21
+ CATEGORIES = {
22
+ 'Gebueschwald': 'Forêt buissonnante',
23
+ 'Wald': 'Forêt',
24
+ 'Wald offen': 'Forêt claisemée',
25
+ 'Gehoelzflaeche': 'Zone boisée',
26
+ }
27
+ MERGE_CATEGORIES = True
28
+ URL_STAC_SWISSTOPO_BASE = 'https://data.geo.admin.ch/api/stac/v0.9/collections/'
29
+ DIC_LAYERS = {
30
+ 'ortho': 'ch.swisstopo.swissimage-dop10',
31
+ 'mnt': 'ch.swisstopo.swissalti3d',
32
+ 'mns': 'ch.swisstopo.swisssurface3d-raster',
33
+ 'bati3D_v2': 'ch.swisstopo.swissbuildings3d_2',
34
+ 'bati3D_v3': 'ch.swisstopo.swissbuildings3d_3_0',
35
+ }
36
+
37
+ # Helper functions
38
+ def wgs84_to_lv95(lat, lon):
39
+ url = f'http://geodesy.geo.admin.ch/reframe/wgs84tolv95?easting={lon}&northing={lat}&format=json'
40
+ with urllib.request.urlopen(url) as response:
41
+ data = json.load(response)
42
+ return data['easting'], data['northing']
43
+
44
+ def lv95_to_wgs84(x, y):
45
+ url = f'http://geodesy.geo.admin.ch/reframe/lv95towgs84?easting={x}&northing={y}&format=json'
46
+ with urllib.request.urlopen(url) as response:
47
+ data = json.load(response)
48
+ return data['northing'], data['easting']
49
+
50
+ def detect_and_convert_bbox(bbox):
51
+ xmin, ymin, xmax, ymax = bbox
52
+ wgs84_margin = 0.9
53
+ wgs84_bounds = {
54
+ 'xmin': 5.96 - wgs84_margin,
55
+ 'ymin': 45.82 - wgs84_margin,
56
+ 'xmax': 10.49 + wgs84_margin,
57
+ 'ymax': 47.81 + wgs84_margin
58
+ }
59
+ lv95_margin = 100000
60
+ lv95_bounds = {
61
+ 'xmin': 2485000 - lv95_margin,
62
+ 'ymin': 1075000 - lv95_margin,
63
+ 'xmax': 2834000 + lv95_margin,
64
+ 'ymax': 1296000 + lv95_margin
65
+ }
66
+
67
+ if (wgs84_bounds['xmin'] <= xmin <= wgs84_bounds['xmax'] and
68
+ wgs84_bounds['ymin'] <= ymin <= wgs84_bounds['ymax'] and
69
+ wgs84_bounds['xmin'] <= xmax <= wgs84_bounds['xmax'] and
70
+ wgs84_bounds['ymin'] <= ymax <= wgs84_bounds['ymax']):
71
+ lv95_min = wgs84_to_lv95(ymin, xmin)
72
+ lv95_max = wgs84_to_lv95(ymax, xmax)
73
+ bbox_lv95 = (lv95_min[0], lv95_min[1], lv95_max[0], lv95_max[1])
74
+ return (bbox, bbox_lv95)
75
+
76
+ if (lv95_bounds['xmin'] <= xmin <= lv95_bounds['xmax'] and
77
+ lv95_bounds['ymin'] <= ymin <= lv95_bounds['ymax'] and
78
+ lv95_bounds['xmin'] <= xmax <= lv95_bounds['xmax'] and
79
+ lv95_bounds['ymin'] <= ymax <= lv95_bounds['ymax']):
80
+ wgs84_min = lv95_to_wgs84(xmin, ymin)
81
+ wgs84_max = lv95_to_wgs84(xmax, ymax)
82
+ bbox_wgs84 = (wgs84_min[1], wgs84_min[0], wgs84_max[1], wgs84_max[0])
83
+ return (bbox_wgs84, bbox)
84
+
85
+ return None
86
+
87
+ def get_list_from_STAC_swisstopo(url, est, sud, ouest, nord, gdb=False):
88
+ lst_indesirables = [] if gdb else ['.xyz.zip', '.gdb.zip']
89
+ sufixe_url = f"/items?bbox={est},{sud},{ouest},{nord}"
90
+ url += sufixe_url
91
+ res = []
92
+
93
+ while url:
94
+ with urllib.request.urlopen(url) as response:
95
+ json_res = json.load(response)
96
+
97
+ url = None
98
+ links = json_res.get('links', None)
99
+ if links:
100
+ for link in links:
101
+ if link['rel'] == 'next':
102
+ url = link['href']
103
+
104
+ for item in json_res['features']:
105
+ for k, dic in item['assets'].items():
106
+ href = dic['href']
107
+ if gdb:
108
+ if href[-8:] == '.gdb.zip' and len(dic['href'].split('/')[-1].split('_')) == 7:
109
+ res.append(dic['href'])
110
+ else:
111
+ if href[-8:] not in lst_indesirables:
112
+ res.append(dic['href'])
113
+
114
+ return res
115
+
116
+ def suppr_doublons_bati3D_v2(lst_url):
117
+ dico = {}
118
+ dxf_files = [url for url in lst_url if url[-8:] == '.dxf.zip']
119
+ for dxf in dxf_files:
120
+ *a, date, feuille = dxf.split('/')[-2].split('_')
121
+ dico.setdefault(feuille, []).append((date, dxf))
122
+ res = []
123
+ for k, liste in dico.items():
124
+ res.append(sorted(liste, reverse=True)[0][1])
125
+ return res
126
+
127
+ def suppr_doublons_bati3D_v3(lst_url):
128
+ dico = {}
129
+ gdb_files = [url for url in lst_url if url[-8:] == '.gdb.zip']
130
+ for gdb in gdb_files:
131
+ *a, date, feuille = gdb.split('/')[-2].split('_')
132
+ dico.setdefault(feuille, []).append((date, gdb))
133
+ res = []
134
+ for k, liste in dico.items():
135
+ res.append(sorted(liste, reverse=True)[0][1])
136
+ return res
137
+
138
+ def suppr_doublons_list_ortho(lst):
139
+ dic = {}
140
+ for url in lst:
141
+ nom, an, noflle, taille_px, epsg = url.split('/')[-1][:-4].split('_')
142
+ dic.setdefault((noflle, float(taille_px)), []).append((an, url))
143
+ res = []
144
+ for noflle, lst in dic.items():
145
+ an, url = sorted(lst, reverse=True)[0]
146
+ res.append(url)
147
+ return res
148
+
149
+ def suppr_doublons_list_mnt(lst):
150
+ dic = {}
151
+ for url in lst:
152
+ nom, an, noflle, taille_px, epsg, inconnu = url.split('/')[-1][:-4].split('_')
153
+ dic.setdefault((noflle, float(taille_px)), []).append((an, url))
154
+ res = []
155
+ for noflle, lst in dic.items():
156
+ an, url = sorted(lst, reverse=True)[0]
157
+ res.append(url)
158
+ return res
159
+
160
+ @st.cache_data
161
+ def get_urls(bbox_wgs84, data_types, resolutions):
162
+ urls = []
163
+ with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
164
+ future_to_data_type = {
165
+ executor.submit(
166
+ get_urls_for_data_type,
167
+ data_type,
168
+ bbox_wgs84,
169
+ resolutions.get(data_type)
170
+ ): data_type
171
+ for data_type, enabled in data_types.items() if enabled
172
+ }
173
+ for future in concurrent.futures.as_completed(future_to_data_type):
174
+ data_type = future_to_data_type[future]
175
+ try:
176
+ urls.extend(future.result())
177
+ except Exception as exc:
178
+ st.error(f"Error fetching URLs for {data_type}: {exc}")
179
+ return urls
180
+
181
+ def get_urls_for_data_type(data_type, bbox_wgs84, resolution=None):
182
+ url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS[data_type]
183
+ if data_type in ['mnt', 'ortho']:
184
+ tri = f'_{int(resolution) if float(resolution).is_integer() else resolution}_'  # STAC filenames use "2", not "2.0", for the 2 m products
185
+ lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox_wgs84) if tri in v]
186
+ if data_type == 'mnt':
187
+ return suppr_doublons_list_mnt(lst)
188
+ else:
189
+ return suppr_doublons_list_ortho(lst)
190
+ elif data_type == 'mns':
191
+ lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox_wgs84) if 'raster' in v]
192
+ return suppr_doublons_list_mnt(lst)
193
+ elif data_type == 'bati3D_v2':
194
+ lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84)
195
+ return suppr_doublons_bati3D_v2(lst)
196
+ elif data_type == 'bati3D_v3':
197
+ lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84, gdb=True)
198
+ return suppr_doublons_bati3D_v3(lst)
199
+ return []
200
+
201
+ def fetch_url(url):
202
+ response = requests.get(url, timeout=120)  # avoid hanging forever on a dead link
203
+ return response.content
204
+
205
+ def merge_ortho_images(urls, output_format='GTiff'):
206
+ try:
207
+ with tempfile.TemporaryDirectory() as temp_dir:
208
+ local_files = []
209
+ with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
210
+ future_to_url = {executor.submit(fetch_url, url): url for url in urls}
211
+ for i, future in enumerate(concurrent.futures.as_completed(future_to_url)):
212
+ url = future_to_url[future]
213
+ try:
214
+ data = future.result()
215
+ local_filename = os.path.join(temp_dir, f"ortho_{i}.tif")
216
+ with open(local_filename, 'wb') as f:
217
+ f.write(data)
218
+ local_files.append(local_filename)
219
+ except Exception as exc:
220
+ st.error(f"Error downloading {url}: {exc}")
221
+
222
+ if not local_files:
223
+ st.error("No ortho images were successfully downloaded.")
224
+ return None
225
+
226
+ vrt_options = gdal.BuildVRTOptions(resampleAlg='nearest', addAlpha=False)
227
+ vrt_path = os.path.join(temp_dir, "merged.vrt")
228
+ vrt = gdal.BuildVRT(vrt_path, local_files, options=vrt_options)
229
+ vrt = None # Close the dataset
230
+
231
+ output_path = os.path.join(temp_dir, "merged.tif" if output_format == "GTiff" else f"merged.{output_format.lower()}")
232
+ if output_format == 'GTiff':
233
+ translate_options = gdal.TranslateOptions(format="GTiff", creationOptions=["COMPRESS=LZW", "TILED=YES"])
234
+ elif output_format == 'JPEG':
235
+ translate_options = gdal.TranslateOptions(format="JPEG", creationOptions=["QUALITY=85"])
236
+ elif output_format == 'PNG':
237
+ translate_options = gdal.TranslateOptions(format="PNG", creationOptions=["COMPRESS=DEFLATE"])
238
+ else:
239
+ st.error(f"Unsupported output format: {output_format}")
240
+ return None
241
+
242
+ gdal.Translate(output_path, vrt_path, options=translate_options)
243
+
244
+ if not os.path.exists(output_path):
245
+ st.error(f"Failed to create merged image: {output_path}")
246
+ return None
247
+
248
+ with open(output_path, 'rb') as f:
249
+ return f.read()
250
+ except Exception as e:
251
+ st.error(f"Error in merge_ortho_images: {e}")
252
+ return None
253
+
254
+ def create_geojson_with_links(urls, bbox):
255
+ features = []
256
+ for url in urls:
257
+ feature = {
258
+ "type": "Feature",
259
+ "geometry": {
260
+ "type": "Polygon",
261
+ "coordinates": [bbox]
262
+ },
263
+ "properties": {
264
+ "url": url,
265
+ "type": url.split('/')[-2].split('_')[0]
266
+ }
267
+ }
268
+ features.append(feature)
269
+
270
+ geojson = {
271
+ "type": "FeatureCollection",
272
+ "features": features
273
+ }
274
+ return json.dumps(geojson)
275
+
276
+ @st.cache_data
277
+ def prepare_download_package(urls, bbox, ortho_format):
278
+ geojson_data = create_geojson_with_links(urls, bbox)
279
+ ortho_urls = [url for url in urls if 'swissimage-dop10' in url]
280
+ ortho_data = merge_ortho_images(ortho_urls, ortho_format) if ortho_urls else None
281
+
282
+ zip_buffer = io.BytesIO()
283
+ with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
284
+ zip_file.writestr('download_links.geojson', geojson_data)
285
+ if ortho_data:
286
+ zip_file.writestr('merged_ortho.tif' if ortho_format == 'GTiff' else f'merged_ortho.{ortho_format.lower()}', ortho_data)
287
+ else:
288
+ st.warning("Failed to merge ortho images. Only download links will be included in the package.")
289
+
290
+ return zip_buffer.getvalue()
291
+
292
+ def geojson_forest(bbox, fn_geojson):
293
+ xmin, ymin, xmax, ymax = bbox
294
+ url_base = 'https://hepiadata.hesge.ch/arcgis/rest/services/suisse/TLM_C4D_couverture_sol/FeatureServer/1/query?'
295
+ sql = ' OR '.join([f"OBJEKTART='{cat}'" for cat in CATEGORIES.keys()])
296
+ params = {
297
+ "geometry": f"{xmin},{ymin},{xmax},{ymax}",
298
+ "geometryType": "esriGeometryEnvelope",
299
+ "returnGeometry": "true",
300
+ "outFields": "OBJEKTART",
301
+ "orderByFields": "OBJEKTART",
302
+ "where": sql,
303
+ "returnZ": "true",
304
+ "outSR": '2056',
305
+ "spatialRel": "esriSpatialRelIntersects",
306
+ "f": "geojson"
307
+ }
308
+ query_string = urllib.parse.urlencode(params)
309
+ url = url_base + query_string
310
+ with urllib.request.urlopen(url) as response:
311
+ data = json.load(response)
312
+ with open(fn_geojson, 'w') as f:
313
+ json.dump(data, f)
314
+
315
+ # Streamlit app
316
+ st.set_page_config(page_title="Swiss Geospatial Data Downloader", layout="wide")
317
+ st.title("Swiss Geospatial Data Downloader")
318
+
319
+ # Sidebar for data selection
320
+ st.sidebar.header("Data Selection")
321
+ data_types = {
322
+ 'mnt': st.sidebar.checkbox("Digital Terrain Model (MNT)", value=True),
323
+ 'mns': st.sidebar.checkbox("Digital Surface Model (MNS)", value=True),
324
+ 'bati3D_v2': st.sidebar.checkbox("3D Buildings v2", value=True),
325
+ 'bati3D_v3': st.sidebar.checkbox("3D Buildings v3", value=True),
326
+ 'ortho': st.sidebar.checkbox("Orthophotos", value=True),
327
+ }
328
+ resolutions = {
329
+ 'mnt': st.sidebar.selectbox("MNT Resolution", [0.5, 2.0], index=0),
330
+ 'ortho': st.sidebar.selectbox("Orthophoto Resolution", [0.1, 2.0], index=0),
331
+ }
332
+ ortho_format = st.sidebar.selectbox("Ortho Output Format", ['GTiff', 'JPEG', 'PNG'], index=0)
333
+
334
+ # Main content area
335
+ st.subheader("Select Bounding Box")
336
+
337
+ # Create a map centered on Switzerland
338
+ m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
339
+
340
+ # Add rectangle draw control
341
+ draw = Draw(
342
+ draw_options={
343
+ 'rectangle': True,
344
+ 'polygon': False,
345
+ 'polyline': False,
346
+ 'circle': False,
347
+ 'marker': False,
348
+ 'circlemarker': False
349
+ },
350
+ edit_options={'edit': False}
351
+ )
352
+ draw.add_to(m)
353
+
354
+ # Use st_folium to render the map and get the drawn bbox
355
+ output = st_folium(m, width=700, height=500)
356
+
357
+ # Initialize session state for bbox
358
+ if 'bbox' not in st.session_state:
359
+ st.session_state.bbox = [6.0, 46.0, 10.0, 47.0] # Default values for Switzerland
360
+
361
+ # Update bbox if a new one is drawn
362
+ if output.get('last_active_drawing'):  # st_folium may omit the key before anything is drawn
363
+ coordinates = output['last_active_drawing']['geometry']['coordinates'][0]
364
+ st.session_state.bbox = [
365
+ min(coord[0] for coord in coordinates),
366
+ min(coord[1] for coord in coordinates),
367
+ max(coord[0] for coord in coordinates),
368
+ max(coord[1] for coord in coordinates)
369
+ ]
370
+
371
+ # Display and allow editing of bounding box coordinates
372
+ st.subheader("Enter Bounding Box Coordinates")
373
+ col1, col2, col3, col4 = st.columns(4)
374
+ with col1:
375
+ xmin = st.number_input("Min Longitude", value=st.session_state.bbox[0], format="%.4f", key="xmin")
376
+ with col2:
377
+ ymin = st.number_input("Min Latitude", value=st.session_state.bbox[1], format="%.4f", key="ymin")
378
+ with col3:
379
+ xmax = st.number_input("Max Longitude", value=st.session_state.bbox[2], format="%.4f", key="xmax")
380
+ with col4:
381
+ ymax = st.number_input("Max Latitude", value=st.session_state.bbox[3], format="%.4f", key="ymax")
382
+
383
+ # Update session state if coordinates are manually changed
384
+ st.session_state.bbox = [xmin, ymin, xmax, ymax]
385
+
386
+ if st.session_state.bbox:
387
+ st.write(f"Selected bounding box (WGS84): {st.session_state.bbox}")
388
+ bbox_results = detect_and_convert_bbox(st.session_state.bbox)
389
+ if bbox_results:
390
+ bbox_wgs84, bbox_lv95 = bbox_results
391
+ st.write(f"Converted bounding box (LV95): {bbox_lv95}")
392
+
393
+ if st.button("Get Download Package"):
394
+ with st.spinner("Preparing download package..."):
395
+ urls = get_urls(bbox_wgs84, data_types, resolutions)
396
+ if urls:
397
+ zip_data = prepare_download_package(urls, bbox_wgs84, ortho_format)
398
+ b64 = base64.b64encode(zip_data).decode()
399
+ href = f'<a href="data:application/zip;base64,{b64}" download="swiss_geospatial_data.zip">Download All Data</a>'
400
+ st.markdown(href, unsafe_allow_html=True)
401
+ st.success("Download package prepared. Click the link above to download.")
402
+ else:
403
+ st.warning("No files found for the selected area and options.")
404
+
405
+ if st.button("Download Forest Data"):
406
+ with st.spinner("Downloading forest data..."):
407
+ with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.geojson') as tmp:
408
+ geojson_forest(bbox_lv95, tmp.name)
409
+ gdf = gpd.read_file(tmp.name)
410
+ st.write(gdf)
411
+ # Provide download link for forest data
412
+ with open(tmp.name, 'r') as f:
413
+ forest_data = f.read()
414
+ b64 = base64.b64encode(forest_data.encode()).decode()
415
+ href = f'<a href="data:application/json;base64,{b64}" download="forest_data.geojson">Download Forest Data</a>'
416
+ st.markdown(href, unsafe_allow_html=True)
417
+ os.unlink(tmp.name)
418
+ st.success("Forest data prepared. Click the link above to download.")
419
+ else:
420
+ st.error("Selected area is outside Switzerland. Please select an area within Switzerland.")
421
+
422
+ st.sidebar.info("This application allows you to download various types of geospatial data for Switzerland. Select the data types you want, draw a bounding box on the map, and click 'Get Download Package' to prepare all data for download.")
pages/3_🪟_Split_Map.py ADDED
@@ -0,0 +1,30 @@
1
+ import streamlit as st
2
+ import leafmap.foliumap as leafmap
3
+
4
+ st.set_page_config(layout="wide")
5
+
6
+ st.sidebar.info(
7
+ """
8
+ - Web App URL: <https://streamlit.gishub.org>
9
+ - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10
+ """
11
+ )
12
+
13
+ st.sidebar.title("Contact")
14
+ st.sidebar.info(
15
+ """
16
+ Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17
+ """
18
+ )
19
+
20
+ st.title("Split-panel Map")
21
+
22
+ with st.expander("See source code"):
23
+ with st.echo():
24
+ m = leafmap.Map()
25
+ m.split_map(
26
+ left_layer="ESA WorldCover 2020 S2 FCC", right_layer="ESA WorldCover 2020"
27
+ )
28
+ m.add_legend(title="ESA Land Cover", builtin_legend="ESA_WorldCover")
29
+
30
+ m.to_streamlit(height=700)
pages/4_🔥_Heatmap.py ADDED
@@ -0,0 +1,34 @@
1
+ import streamlit as st
2
+ import leafmap.foliumap as leafmap
3
+
4
+ st.set_page_config(layout="wide")
5
+
6
+ st.sidebar.info(
7
+ """
8
+ - Web App URL: <https://streamlit.gishub.org>
9
+ - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10
+ """
11
+ )
12
+
13
+ st.sidebar.title("Contact")
14
+ st.sidebar.info(
15
+ """
16
+ Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17
+ """
18
+ )
19
+
20
+ st.title("Heatmap")
21
+
22
+ with st.expander("See source code"):
23
+ with st.echo():
24
+ filepath = "https://raw.githubusercontent.com/giswqs/leafmap/master/examples/data/us_cities.csv"
25
+ m = leafmap.Map(center=[40, -100], zoom=4)
26
+ m.add_heatmap(
27
+ filepath,
28
+ latitude="latitude",
29
+ longitude="longitude",
30
+ value="pop_max",
31
+ name="Heat map",
32
+ radius=20,
33
+ )
34
+ m.to_streamlit(height=700)
pages/5_📍_VertXtractor.py ADDED
@@ -0,0 +1,320 @@
1
+ import streamlit as st
2
+ import urllib.request, urllib.parse  # plain "import urllib" does not expose these submodules
3
+ import json
4
+ from pathlib import Path
5
+ import datetime
6
+ import folium
7
+ from streamlit_folium import folium_static
8
+ import geopandas as gpd
9
+ import tempfile
10
+ import os
11
+
12
+ # Constants
13
+ CATEGORIES = {
14
+ 'Gebueschwald': 'Forêt buissonnante',
15
+ 'Wald': 'Forêt',
16
+ 'Wald offen': 'Forêt clairsemée',
17
+ 'Gehoelzflaeche': 'Zone boisée',
18
+ }
19
+
20
+ MERGE_CATEGORIES = True
21
+ URL_STAC_SWISSTOPO_BASE = 'https://data.geo.admin.ch/api/stac/v0.9/collections/'
22
+ DIC_LAYERS = {
23
+ 'ortho': 'ch.swisstopo.swissimage-dop10',
24
+ 'mnt': 'ch.swisstopo.swissalti3d',
25
+ 'mns': 'ch.swisstopo.swisssurface3d-raster',
26
+ 'bati3D_v2': 'ch.swisstopo.swissbuildings3d_2',
27
+ 'bati3D_v3': 'ch.swisstopo.swissbuildings3d_3_0',
28
+ }
29
+
30
+ # Functions
31
+ def wgs84_to_lv95(lat, lon):
32
+ url = f'http://geodesy.geo.admin.ch/reframe/wgs84tolv95?easting={lon}&northing={lat}&format=json'  # easting is the longitude
33
+ site = urllib.request.urlopen(url)
34
+ data = json.load(site)
35
+ return data['easting'], data['northing']
36
+
37
+ def lv95_to_wgs84(x, y):
38
+ url = f'http://geodesy.geo.admin.ch/reframe/lv95towgs84?easting={x}&northing={y}&format=json'
39
+ f = urllib.request.urlopen(url)
40
+ txt = f.read().decode('utf-8')
41
+ json_res = json.loads(txt)
42
+ return json_res
43
+
44
+ def detect_and_convert_bbox(bbox):
45
+ xmin, ymin, xmax, ymax = bbox
46
+ wgs84_margin = 0.9
47
+ wgs84_bounds = {
48
+ 'xmin': 5.96 - wgs84_margin,
49
+ 'ymin': 45.82 - wgs84_margin,
50
+ 'xmax': 10.49 + wgs84_margin,
51
+ 'ymax': 47.81 + wgs84_margin
52
+ }
53
+ lv95_margin = 100000
54
+ lv95_bounds = {
55
+ 'xmin': 2485000 - lv95_margin,
56
+ 'ymin': 1075000 - lv95_margin,
57
+ 'xmax': 2834000 + lv95_margin,
58
+ 'ymax': 1296000 + lv95_margin
59
+ }
60
+
61
+ if (wgs84_bounds['xmin'] <= xmin <= wgs84_bounds['xmax'] and
62
+ wgs84_bounds['ymin'] <= ymin <= wgs84_bounds['ymax'] and
63
+ wgs84_bounds['xmin'] <= xmax <= wgs84_bounds['xmax'] and
64
+ wgs84_bounds['ymin'] <= ymax <= wgs84_bounds['ymax']):
65
+ lv95_min = wgs84_to_lv95(ymin, xmin)  # (lat, lon) order, matching the function signature
66
+ lv95_max = wgs84_to_lv95(ymax, xmax)
67
+ bbox_lv95 = (lv95_min[0], lv95_min[1], lv95_max[0], lv95_max[1])
68
+ return (bbox, bbox_lv95)
69
+
70
+ if (lv95_bounds['xmin'] <= xmin <= lv95_bounds['xmax'] and
71
+ lv95_bounds['ymin'] <= ymin <= lv95_bounds['ymax'] and
72
+ lv95_bounds['xmin'] <= xmax <= lv95_bounds['xmax'] and
73
+ lv95_bounds['ymin'] <= ymax <= lv95_bounds['ymax']):
74
+ wgs84_min = lv95_to_wgs84(xmin, ymin)
75
+ wgs84_max = lv95_to_wgs84(xmax, ymax)
76
+ bbox_wgs84 = (wgs84_min['easting'], wgs84_min['northing'], wgs84_max['easting'], wgs84_max['northing'])
77
+ return (bbox_wgs84, bbox)
78
+
79
+ return None
80
+
81
+ def get_list_from_STAC_swisstopo(url, est, sud, ouest, nord, gdb=False):
82
+ if gdb:
83
+ lst_indesirables = []
84
+ else:
85
+ lst_indesirables = ['.xyz.zip', '.gdb.zip']
86
+
87
+ sufixe_url = f"/items?bbox={est},{sud},{ouest},{nord}"
88
+ url += sufixe_url
89
+ res = []
90
+
91
+ while url:
92
+ f = urllib.request.urlopen(url)
93
+ txt = f.read().decode('utf-8')
94
+ json_res = json.loads(txt)
95
+ url = None
96
+ links = json_res.get('links', None)
97
+ if links:
98
+ for link in links:
99
+ if link['rel'] == 'next':
100
+ url = link['href']
101
+ for item in json_res['features']:
102
+ for k, dic in item['assets'].items():
103
+ href = dic['href']
104
+ if gdb:
105
+ if href[-8:] == '.gdb.zip':
106
+ if len(dic['href'].split('/')[-1].split('_')) == 7:
107
+ res.append(dic['href'])
108
+ else:
109
+ if href[-8:] not in lst_indesirables:
110
+ res.append(dic['href'])
111
+ return res
112
+
113
+ def suppr_doublons_list_ortho(lst):
114
+ dic = {}
115
+ for url in lst:
116
+ nom, an, noflle, taille_px, epsg = url.split('/')[-1][:-4].split('_')
117
+ dic.setdefault((noflle, float(taille_px)), []).append((an, url))
118
+ res = []
119
+ for noflle, lst in dic.items():
120
+ an, url = sorted(lst, reverse=True)[0]
121
+ res.append(url)
122
+ return res
123
+
124
+ def get_urls(bbox_wgs84, mnt=True, mns=True, bati3D_v2=True, bati3D_v3=True, ortho=True, mnt_resol=0.5, ortho_resol=0.1):
125
+ est, sud, ouest, nord = bbox_wgs84
126
+ urls = []
127
+
128
+ if mnt:
129
+ mnt_resol = 0.5 if mnt_resol < 2 else 2
130
+ tri = f'_{mnt_resol}_'
131
+ url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['mnt']
132
+ lst = [v for v in get_list_from_STAC_swisstopo(url, est, sud, ouest, nord) if tri in v]
133
+ urls += lst
134
+
135
+ if mns:
136
+ url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['mns']
137
+ lst = [v for v in get_list_from_STAC_swisstopo(url, est, sud, ouest, nord) if 'raster' in v]
138
+ urls += lst
139
+
140
+ if bati3D_v2:
141
+ url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['bati3D_v2']
142
+ lst = get_list_from_STAC_swisstopo(url, est, sud, ouest, nord)
143
+ urls += lst
144
+
145
+ if bati3D_v3:
146
+ url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['bati3D_v3']
147
+ lst = get_list_from_STAC_swisstopo(url, est, sud, ouest, nord, gdb=True)
148
+ urls += lst
149
+
150
+ if ortho:
151
+ ortho_resol = 0.1 if ortho_resol < 2 else 2
152
+ tri = f'_{ortho_resol}_'
153
+ url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['ortho']
154
+ lst = [v for v in get_list_from_STAC_swisstopo(url, est, sud, ouest, nord) if tri in v and v.endswith('.png')]
155
+ lst = suppr_doublons_list_ortho(lst)
156
+ urls += lst
157
+
158
+ return urls
159
+
160
+ def classification_urls(urls):
161
+ dic = {}
162
+ for url in urls:
163
+ fn = url.split('/')[-1]
164
+ dirname = fn.split('_')[0]
165
+
166
+ if dirname == 'swissbuildings3d':
167
+ name, version, *a = fn.split('_')
168
+ if version == '2':
169
+ an = fn.split('_')[2].split('-')[0]
170
+ elif version == '3':
171
+ an = fn.split('_')[3]
172
+ dirname = f'{name}_v{version}_{an}'
173
+ elif dirname == 'swissalti3d':
174
+ name, an, no_flle, resol, *a = fn.split('_')
175
+ if resol == '0.5':
176
+ resol = '50cm'
177
+ elif resol == '2':
178
+ resol = '2m'
179
+ dirname = f'{name}_{an}_{resol}'
180
+ elif dirname == 'swisssurface3d-raster':
181
+ name, an, no_flle, resol, *a = fn.split('_')
182
+ if resol == '0.5':
183
+ resol = '50cm'
184
+ dirname = f'{name}_{an}_{resol}'
185
+ elif dirname == 'swissimage-dop10':
186
+ name, an, no_flle, resol, *a = fn.split('_')
187
+ if resol == '0.1':
188
+ resol = '10cm'
189
+ elif resol == '2':
190
+ resol = '2m'
191
+ dirname = f'{name}_{an}_{resol}_png'
192
+
193
+ dic.setdefault(dirname, []).append((url, fn))
194
+ return dic
195
+
196
+ def download_files(urls, path):
197
+ now = datetime.datetime.now()
198
+ path = Path(path) / f'swisstopo_extraction_{now.strftime("%Y%m%d_%H%M")}'
199
+ path.mkdir(exist_ok=True)
200
+ for k, v in classification_urls(urls).items():
201
+ p = path / k
202
+ p.mkdir(exist_ok=True)
203
+ for url, fn in v:
204
+ urllib.request.urlretrieve(url, p / fn)
205
+ return path
206
+
207
+ def geojson_forest(bbox, fn_geojson):
208
+ xmin, ymin, xmax, ymax = bbox
209
+ url_base = 'https://hepiadata.hesge.ch/arcgis/rest/services/suisse/TLM_C4D_couverture_sol/FeatureServer/1/query?'
210
+ sql = ' OR '.join([f"OBJEKTART='{cat}'" for cat in CATEGORIES.keys()])
211
+ params = {
212
+ "geometry": f"{xmin},{ymin},{xmax},{ymax}",
213
+ "geometryType": "esriGeometryEnvelope",
214
+ "returnGeometry": "true",
215
+ "outFields": "OBJEKTART",
216
+ "orderByFields": "OBJEKTART",
217
+ "where": sql,
218
+ "returnZ": "true",
219
+ "outSR": '2056',
220
+ "spatialRel": "esriSpatialRelIntersects",
221
+ "f": "geojson"
222
+ }
223
+ query_string = urllib.parse.urlencode(params)
224
+ url = url_base + query_string
225
+ with urllib.request.urlopen(url) as response:
226
+ response_data = response.read()
227
+ data = json.loads(response_data)
228
+ with open(fn_geojson, 'w') as f:
229
+ json.dump(data, f)
230
+
231
+ # Streamlit app
232
+ st.set_page_config(page_title="Swiss Geospatial Data Downloader", layout="wide")
233
+ st.title("Swiss Geospatial Data Downloader")
234
+
235
+ # Sidebar for data selection
236
+ st.sidebar.header("Data Selection")
237
+ mnt = st.sidebar.checkbox("Digital Terrain Model (MNT)", value=True)
238
+ mns = st.sidebar.checkbox("Digital Surface Model (MNS)", value=True)
239
+ bati3D_v2 = st.sidebar.checkbox("3D Buildings v2", value=True)
240
+ bati3D_v3 = st.sidebar.checkbox("3D Buildings v3", value=True)
241
+ ortho = st.sidebar.checkbox("Orthophotos", value=True)
242
+ mnt_resol = st.sidebar.selectbox("MNT Resolution", [0.5, 2.0], index=0)
243
+ ortho_resol = st.sidebar.selectbox("Orthophoto Resolution", [0.1, 2.0], index=0)
244
+
245
+ # Main content area
246
+ st.subheader("Enter Bounding Box Coordinates")
247
+ col1, col2, col3, col4 = st.columns(4)
248
+ with col1:
249
+ xmin = st.number_input("Min Longitude", value=6.0, step=0.1)
250
+ with col2:
251
+ ymin = st.number_input("Min Latitude", value=46.0, step=0.1)
252
+ with col3:
253
+ xmax = st.number_input("Max Longitude", value=10.0, step=0.1)
254
+ with col4:
255
+ ymax = st.number_input("Max Latitude", value=47.0, step=0.1)
256
+
257
+ if st.button("Set Bounding Box"):
258
+ st.session_state.bbox = [xmin, ymin, xmax, ymax]
259
+
260
+ if 'bbox' in st.session_state:
261
+ st.write(f"Selected bounding box (WGS84): {st.session_state.bbox}")
262
+ bbox_results = detect_and_convert_bbox(st.session_state.bbox)
263
+ if bbox_results:
264
+ bbox_wgs84, bbox_lv95 = bbox_results
265
+ st.write(f"Converted bounding box (LV95): {bbox_lv95}")
266
+
267
+ if st.button("Get Download Links"):
268
+ with st.spinner("Fetching download links..."):
269
+ urls = get_urls(bbox_wgs84, mnt, mns, bati3D_v2, bati3D_v3, ortho, mnt_resol, ortho_resol)
270
+ if urls:
271
+ st.success(f"Found {len(urls)} files to download:")
272
+ for url in urls:
273
+ st.write(url)
274
+
275
+ if st.button("Download Files"):
276
+ with st.spinner("Downloading files..."):
277
+ download_path = download_files(urls, "downloads")
278
+ st.success(f"Files downloaded to: {download_path}")
279
+ else:
280
+ st.warning("No files found for the selected area and options.")
281
+
282
+ # Option to download forest data
283
+ if st.button("Download Forest Data"):
284
+ with st.spinner("Downloading forest data..."):
285
+ with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.geojson') as tmp:
286
+ geojson_forest(bbox_lv95, tmp.name)
287
+ gdf = gpd.read_file(tmp.name)
288
+ st.write(gdf)
289
+
290
+ # Display the forest data on a map
291
+ m = folium.Map(location=[(ymin + ymax) / 2, (xmin + xmax) / 2], zoom_start=10)
292
+ folium.GeoJson(gdf).add_to(m)
293
+ folium_static(m)
294
+
295
+ # Option to download the GeoJSON file
296
+ st.download_button(
297
+ label="Download Forest GeoJSON",
298
+ data=gdf.to_json(),
299
+ file_name="forest_data.geojson",
300
+ mime="application/json"
301
+ )
302
+
303
+ os.unlink(tmp.name)
304
+ st.success("Forest data downloaded, displayed, and available for download.")
305
+
306
+ else:
307
+ st.error("Selected area is outside Switzerland. Please select an area within Switzerland.")
308
+
309
+ # Add information about the app in the sidebar
310
+ st.sidebar.info("""
311
+ This application allows you to download various types of geospatial data for Switzerland.
312
+ Select the data types you want, enter the bounding box coordinates, and click 'Get Download Links' to see available files.
313
+ You can also download forest data for the selected area.
314
+ """)
315
+
316
+ # Add a footer
317
+ st.markdown("""
318
+ ---
319
+ Created with ❤️ by Your Name
320
+ """)
pages/6_🗺️_Basemaps.py ADDED
@@ -0,0 +1,62 @@
1
+ import streamlit as st
2
+ import leafmap.foliumap as leafmap
3
+
4
+ st.set_page_config(layout="wide")
5
+
6
+ st.sidebar.info(
7
+ """
8
+ - Web App URL: <https://streamlit.gishub.org>
9
+ - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
10
+ """
11
+ )
12
+
13
+ st.sidebar.title("Contact")
14
+ st.sidebar.info(
15
+ """
16
+ Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
17
+ """
18
+ )
19
+
20
+
21
+ def app():
22
+ st.title("Search Basemaps")
23
+ st.markdown(
24
+ """
25
+ This app is a demonstration of searching and loading basemaps from [xyzservices](https://github.com/geopandas/xyzservices) and [Quick Map Services (QMS)](https://github.com/nextgis/quickmapservices). Select from 1,000+ basemaps with a few clicks.
26
+ """
27
+ )
28
+
29
+ with st.expander("See demo"):
30
+ st.image("https://i.imgur.com/0SkUhZh.gif")
31
+
32
+ row1_col1, row1_col2 = st.columns([3, 1])
33
+ width = 800
34
+ height = 600
35
+ tiles = None
36
+
37
+ with row1_col2:
38
+
39
+ checkbox = st.checkbox("Search Quick Map Services (QMS)")
40
+ keyword = st.text_input("Enter a keyword to search and press Enter:")
41
+ empty = st.empty()
42
+
43
+ if keyword:
44
+ options = leafmap.search_xyz_services(keyword=keyword)
45
+ if checkbox:
46
+ qms = leafmap.search_qms(keyword=keyword)
47
+ if qms is not None:
48
+ options = options + qms
49
+
50
+ tiles = empty.multiselect("Select XYZ tiles to add to the map:", options)
51
+
52
+ with row1_col1:
53
+ m = leafmap.Map()
54
+
55
+ if tiles is not None:
56
+ for tile in tiles:
57
+ m.add_xyz_service(tile)
58
+
59
+ m.to_streamlit(height=height)
60
+
61
+
62
+ app()
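
Outside Streamlit, the same two leafmap helpers drive the whole search; a short sketch (the keyword is illustrative):

    import leafmap.foliumap as leafmap

    options = leafmap.search_xyz_services(keyword="terrain")
    qms = leafmap.search_qms(keyword="terrain")
    if qms is not None:
        options = options + qms
    m = leafmap.Map()
    if options:
        m.add_xyz_service(options[0])  # any returned provider name works here
    m.save("basemap_demo.html")  # folium-backed maps can be written to HTML
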
pages/7_📦_Web_Map_Service.py ADDED
@@ -0,0 +1,105 @@
1
+ import ast
2
+ import json
3
+ import streamlit as st
4
+ import leafmap.foliumap as leafmap
5
+
6
+ st.set_page_config(layout="wide")
7
+
8
+ st.sidebar.info(
9
+ """
10
+ - Web App URL: <https://streamlit.gishub.org>
11
+ - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
12
+ """
13
+ )
14
+
15
+ st.sidebar.title("Contact")
16
+ st.sidebar.info(
17
+ """
18
+ Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
19
+ """
20
+ )
21
+
22
+ # Define a whitelist of trusted URLs
23
+ trusted_urls = [
24
+ "https://services.terrascope.be/wms/v2",
25
+ # Add more trusted URLs here
26
+ ]
27
+
28
+
29
+ @st.cache_data
30
+ def get_layers(url):
31
+ options = leafmap.get_wms_layers(url)
32
+ return options
33
+
34
+
35
+ def is_trusted_url(url):
36
+ return url in trusted_urls
37
+
38
+
39
+ def app():
40
+ st.title("Web Map Service (WMS)")
41
+ st.markdown(
42
+ """
43
+ This app is a demonstration of loading Web Map Service (WMS) layers. Simply enter the URL of the WMS service
44
+ in the text box below and press Enter to retrieve the layers. Only URLs on the trusted_urls whitelist above are
45
+ accepted. Go to https://apps.nationalmap.gov/services to find more WMS URLs if needed.
46
+ """
47
+ )
48
+
49
+ row1_col1, row1_col2 = st.columns([3, 1.3])
50
+ width = 800
51
+ height = 600
52
+ layers = None
53
+
54
+ with row1_col2:
55
+
56
+ esa_landcover = "https://services.terrascope.be/wms/v2"
57
+ url = st.text_input(
58
+ "Enter a WMS URL:", value="https://services.terrascope.be/wms/v2"
59
+ )
60
+ empty = st.empty()
61
+
62
+ if url:
63
+
64
+ if is_trusted_url(url):
65
+ options = get_layers(url)
66
+ # Process options as needed
67
+ else:
68
+ options = []  # keep options defined so the multiselect below does not raise NameError
69
+ st.error(
70
+ "The entered URL is not trusted. Please enter a valid WMS URL.")
71
+
72
+ default = None
73
+ if url == esa_landcover:
74
+ default = "WORLDCOVER_2020_MAP"
75
+ layers = empty.multiselect(
76
+ "Select WMS layers to add to the map:", options, default=default
77
+ )
78
+ add_legend = st.checkbox("Add a legend to the map", value=True)
79
+ if default == "WORLDCOVER_2020_MAP":
80
+ legend = str(leafmap.builtin_legends["ESA_WorldCover"])
81
+ else:
82
+ legend = ""
83
+ if add_legend:
84
+ legend_text = st.text_area(
85
+ "Enter a legend as a dictionary {label: color}",
86
+ value=legend,
87
+ height=200,
88
+ )
89
+
90
+ with row1_col1:
91
+ m = leafmap.Map(center=(36.3, 0), zoom=2)
92
+
93
+ if layers is not None:
94
+ for layer in layers:
95
+ m.add_wms_layer(
96
+ url, layers=layer, name=layer, attribution=" ", transparent=True
97
+ )
98
+ if add_legend and legend_text:
99
+ legend_dict = json.loads(legend_text.replace("'", '"'))
100
+ m.add_legend(legend_dict=legend_dict)
101
+
102
+ m.to_streamlit(height=height)
103
+
104
+
105
+ app()
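
The page reduces to two leafmap calls: `get_wms_layers` to list a service's layers and `add_wms_layer` to add one. A stand-alone sketch against the whitelisted Terrascope endpoint:

    import leafmap.foliumap as leafmap

    url = "https://services.terrascope.be/wms/v2"
    layers = leafmap.get_wms_layers(url)
    m = leafmap.Map(center=(36.3, 0), zoom=2)
    if "WORLDCOVER_2020_MAP" in layers:
        m.add_wms_layer(url, layers="WORLDCOVER_2020_MAP",
                        name="WORLDCOVER_2020_MAP", attribution=" ", transparent=True)
    m.save("wms_demo.html")
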
pages/8_🏜️_Raster_Data_Visualization.py ADDED
@@ -0,0 +1,117 @@
1
+ import json
2
+ import os
3
+ import leafmap.foliumap as leafmap
4
+ import leafmap.colormaps as cm
5
+ import streamlit as st
6
+
7
+ st.set_page_config(layout="wide")
8
+
9
+ st.sidebar.info(
10
+ """
11
+ - Web App URL: <https://streamlit.gishub.org>
12
+ - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
13
+ """
14
+ )
15
+
16
+ st.sidebar.title("Contact")
17
+ st.sidebar.info(
18
+ """
19
+ Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
20
+ """
21
+ )
22
+
23
+
24
+ @st.cache_data
25
+ def load_cog_list():
26
+ print(os.getcwd())
27
+ in_txt = os.path.join(os.getcwd(), "data/cog_files.txt")
28
+ with open(in_txt) as f:
29
+ return [line.strip() for line in f.readlines()[1:]]
30
+
31
+
32
+ @st.cache_data
33
+ def get_palettes():
34
+ return list(cm.palettes.keys())
35
+ # palettes = dir(palettable.matplotlib)[:-16]
36
+ # return ["matplotlib." + p for p in palettes]
37
+
38
+
39
+ st.title("Visualize Raster Datasets")
40
+ st.markdown(
41
+ """
42
+ An interactive web app for visualizing local raster datasets and Cloud Optimized GeoTIFF ([COG](https://www.cogeo.org)). The app was built using [streamlit](https://streamlit.io), [leafmap](https://leafmap.org), and [Titiler](https://developmentseed.org/titiler/).
43
+
44
+
45
+ """
46
+ )
47
+
48
+
49
+ def is_trusted_url(url):
50
+ if url.startswith("https://opendata.digitalglobe.com/events/california-fire-2020/"):
51
+ return True
52
+ else:
53
+ return False
54
+
55
+
56
+ row1_col1, row1_col2 = st.columns([2, 1])
57
+
58
+ with row1_col1:
59
+ cog_list = load_cog_list()
60
+ cog = st.selectbox("Select a sample Cloud Opitmized GeoTIFF (COG)", cog_list)
61
+
62
+ with row1_col2:
63
+ empty = st.empty()
64
+
65
+ url = empty.text_input(
66
+ "Enter a HTTP URL to a Cloud Optimized GeoTIFF (COG)",
67
+ cog,
68
+ )
69
+
70
+ if is_trusted_url(url):
71
+ options = []  # fallback so the band selector below still renders if cog_bands fails
+ try:
72
+ options = leafmap.cog_bands(url)
73
+ except Exception as e:
74
+ st.error(e)
75
+ if len(options) > 3:
76
+ default = options[:3]
77
+ else:
78
+ default = options[:1]  # keep the default a list; one band is a valid display choice
79
+ bands = st.multiselect("Select bands to display", options, default=options)
80
+
81
+ if len(bands) == 1 or len(bands) == 3:
82
+ pass
83
+ else:
84
+ st.error("Please select one or three bands")
85
+ else:
86
+ st.error("Please enter a trusted URL")
87
+
88
+ add_params = st.checkbox("Add visualization parameters")
89
+ if add_params:
90
+ vis_params = st.text_area("Enter visualization parameters", "{}")
91
+ else:
92
+ vis_params = {}
93
+
94
+ if len(vis_params) > 0:
95
+ try:
96
+ vis_params = json.loads(vis_params.replace("'", '"'))
97
+ except Exception as e:
98
+ st.error(
99
+ f"Invalid visualization parameters. It should be a dictionary. Error: {e}"
100
+ )
101
+ vis_params = {}
102
+
103
+ submit = st.button("Submit")
104
+
105
+ m = leafmap.Map(latlon_control=False)
106
+
107
+ if submit:
108
+ if url:
109
+ try:
110
+ m.add_cog_layer(url, bands=bands, **vis_params)
111
+ except Exception as e:
112
+ with row1_col2:
113
+ st.error(e)
114
+ st.error("Work in progress. Try it again later.")
115
+
116
+ with row1_col1:
117
+ m.to_streamlit()
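
The band logic above hinges on `leafmap.cog_bands`, which reads the band list straight from the COG header before the layer is added. A stand-alone sketch; the URL is a hypothetical file under the trusted prefix checked by `is_trusted_url()`, so substitute a real COG:

    import leafmap.foliumap as leafmap

    # hypothetical path under the whitelisted prefix; replace with a real COG
    url = "https://opendata.digitalglobe.com/events/california-fire-2020/example.tif"
    bands = leafmap.cog_bands(url)
    selection = bands[:3] if len(bands) >= 3 else bands[:1]  # one or three bands
    m = leafmap.Map(latlon_control=False)
    m.add_cog_layer(url, bands=selection)
    m.save("cog_demo.html")
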
pages/9_🔲_Vector_Data_Visualization.py ADDED
@@ -0,0 +1,128 @@
1
+ import os
2
+ import fiona
3
+ import geopandas as gpd
4
+ import streamlit as st
5
+
6
+ st.set_page_config(layout="wide")
7
+
8
+ st.sidebar.info(
9
+ """
10
+ - Web App URL: <https://streamlit.gishub.org>
11
+ - GitHub repository: <https://github.com/giswqs/streamlit-geospatial>
12
+ """
13
+ )
14
+
15
+ st.sidebar.title("Contact")
16
+ st.sidebar.info(
17
+ """
18
+ Qiusheng Wu at [wetlands.io](https://wetlands.io) | [GitHub](https://github.com/giswqs) | [Twitter](https://twitter.com/giswqs) | [YouTube](https://www.youtube.com/@giswqs) | [LinkedIn](https://www.linkedin.com/in/giswqs)
19
+ """
20
+ )
21
+
22
+
23
+ # Define a whitelist of trusted URLs
24
+ trusted_urls = [
25
+ "https://github.com/giswqs/streamlit-geospatial/raw/master/data/us_states.geojson",
26
+ # Add more trusted URLs here
27
+ ]
28
+
29
+
30
+ def is_trusted_url(url):
31
+ return url in trusted_urls
32
+
33
+
34
+ def save_uploaded_file(file_content, file_name):
35
+ """
36
+ Save the uploaded file to a temporary directory
37
+ """
38
+ import tempfile
39
+ import os
40
+ import uuid
41
+
42
+ _, file_extension = os.path.splitext(file_name)
43
+ file_id = str(uuid.uuid4())
44
+ file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
45
+
46
+ with open(file_path, "wb") as file:
47
+ file.write(file_content.getbuffer())
48
+
49
+ return file_path
50
+
51
+
52
+ def app():
53
+
54
+ st.title("Upload Vector Data")
55
+
56
+ row1_col1, row1_col2 = st.columns([2, 1])
57
+ width = 950
58
+ height = 600
59
+
60
+ with row1_col2:
61
+
62
+ backend = st.selectbox(
63
+ "Select a plotting backend", ["folium", "kepler.gl", "pydeck"], index=2
64
+ )
65
+
66
+ if backend == "folium":
67
+ import leafmap.foliumap as leafmap
68
+ elif backend == "kepler.gl":
69
+ import leafmap.kepler as leafmap
70
+ elif backend == "pydeck":
71
+ import leafmap.deck as leafmap
72
+
73
+ url = st.text_input(
74
+ "Enter a URL to a vector dataset",
75
+ "https://github.com/giswqs/streamlit-geospatial/raw/master/data/us_states.geojson",
76
+ )
77
+
78
+ data = st.file_uploader(
79
+ "Upload a vector dataset", type=["geojson", "kml", "zip", "tab"]
80
+ )
81
+
82
+ container = st.container()
83
+
84
+ if data or is_trusted_url(url):
85
+ if data:
86
+ file_path = save_uploaded_file(data, data.name)
87
+ layer_name = os.path.splitext(data.name)[0]
88
+ elif url:
89
+ file_path = url
90
+ layer_name = url.split("/")[-1].split(".")[0]
91
+
92
+ with row1_col1:
93
+ if file_path.lower().endswith(".kml"):
94
+ fiona.drvsupport.supported_drivers["KML"] = "rw"
95
+ gdf = gpd.read_file(file_path, driver="KML")
96
+ else:
97
+ gdf = gpd.read_file(file_path)
98
+ lon, lat = leafmap.gdf_centroid(gdf)
99
+ if backend == "pydeck":
100
+
101
+ column_names = gdf.columns.values.tolist()
102
+ random_column = None
103
+ with container:
104
+ random_color = st.checkbox("Apply random colors", True)
105
+ if random_color:
106
+ random_column = st.selectbox(
107
+ "Select a column to apply random colors", column_names
108
+ )
109
+
110
+ m = leafmap.Map(center=(lat, lon))
111
+ m.add_gdf(gdf, random_color_column=random_column)
112
+ st.pydeck_chart(m)
113
+
114
+ else:
115
+ m = leafmap.Map(center=(lat, lon), draw_export=True)
116
+ m.add_gdf(gdf, layer_name=layer_name)
117
+ # m.add_vector(file_path, layer_name=layer_name)
118
+ if backend == "folium":
119
+ m.zoom_to_gdf(gdf)
120
+ m.to_streamlit(width=width, height=height)
121
+
122
+ else:
123
+ with row1_col1:
124
+ m = leafmap.Map()
125
+ st.pydeck_chart(m)
126
+
127
+
128
+ app()
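
The only non-obvious step in the loader above is the KML toggle: GeoPandas/Fiona ships with the KML driver disabled, so it must be switched on before `read_file`. A minimal sketch of that path (`data.kml` is a hypothetical local file):

    import fiona
    import geopandas as gpd
    import leafmap.foliumap as leafmap

    file_path = "data.kml"  # hypothetical
    if file_path.lower().endswith(".kml"):
        fiona.drvsupport.supported_drivers["KML"] = "rw"  # enable the disabled driver
        gdf = gpd.read_file(file_path, driver="KML")
    else:
        gdf = gpd.read_file(file_path)
    lon, lat = leafmap.gdf_centroid(gdf)
    m = leafmap.Map(center=(lat, lon))
    m.add_gdf(gdf, layer_name="uploaded")
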
pages/HQTimelapse.py ADDED
@@ -0,0 +1,229 @@
1
+ import streamlit as st
2
+ import geopandas as gpd
3
+ import folium
4
+ from folium import plugins
5
+ import requests
6
+ from PIL import Image, ImageDraw, ImageFont
7
+ from io import BytesIO
8
+ import imageio
9
+ import tempfile
10
+ import os
11
+ import zipfile
12
+ from datetime import datetime
13
+ from streamlit_folium import folium_static
14
+ import base64
15
+ import asyncio
16
+ import aiohttp
17
+ from concurrent.futures import ThreadPoolExecutor
18
+ from functools import lru_cache
19
+
20
+ st.set_page_config(layout="wide")
21
+
22
+ # List of available dates
23
+ AVAILABLE_DATES = [
24
+ 18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231, 18971231, 18981231, 18991231,
25
+ 19001231, 19011231, 19021231, 19031231, 19041231, 19051231, 19061231, 19071231, 19081231, 19091231,
26
+ 19101231, 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231, 19181231, 19191231,
27
+ 19201231, 19211231, 19221231, 19231231, 19241231, 19251231, 19261231, 19271231, 19281231, 19291231,
28
+ 19301231, 19311231, 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231, 19391231,
29
+ 19401231, 19411231, 19421231, 19431231, 19441231, 19451231, 19461231, 19471231, 19481231, 19491231,
30
+ 19501231, 19511231, 19521231, 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
31
+ 19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231, 19671231, 19681231, 19691231,
32
+ 19701231, 19711231, 19721231, 19731231, 19741231, 19751231, 19761231, 19771231, 19781231, 19791231,
33
+ 19801231, 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231, 19881231, 19891231,
34
+ 19901231, 19911231, 19921231, 19931231, 19941231, 19951231, 19961231, 19971231, 19981231, 19991231,
35
+ 20001231, 20011231, 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231, 20091231,
36
+ 20101231, 20111231, 20121231, 20131231, 20141231, 20151231, 20161231, 20171231, 20181231, 20191231,
37
+ 20201231, 20211231
38
+ ]
39
+
40
+ @st.cache_data
41
+ def uploaded_file_to_gdf(data):
42
+ import tempfile
43
+ import os
44
+ import uuid
45
+
46
+ _, file_extension = os.path.splitext(data.name)
47
+ file_id = str(uuid.uuid4())
48
+ file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
49
+
50
+ with open(file_path, "wb") as file:
51
+ file.write(data.getbuffer())
52
+
53
+ if file_path.lower().endswith(".kml"):
54
+ gdf = gpd.read_file(file_path, driver="KML")
55
+ else:
56
+ gdf = gpd.read_file(file_path)
57
+
58
+ return gdf
59
+
60
+ @lru_cache(maxsize=128)
61
+ def get_wms_url(bbox, width, height, time):
62
+ url = "https://wms.geo.admin.ch/"
63
+ params = {
64
+ "SERVICE": "WMS",
65
+ "REQUEST": "GetMap",
66
+ "VERSION": "1.3.0",
67
+ "LAYERS": "ch.swisstopo.zeitreihen",
68
+ "STYLES": "",
69
+ "CRS": "EPSG:2056",
70
+ "BBOX": ",".join(map(str, bbox)),
71
+ "WIDTH": str(width),
72
+ "HEIGHT": str(height),
73
+ "FORMAT": "image/png",
74
+ "TIME": str(time),
75
+ "TILED": "true"
76
+ }
77
+ return url + "?" + "&".join(f"{k}={v}" for k, v in params.items())
78
+
79
+ def add_date_to_image(image, date):
80
+ draw = ImageDraw.Draw(image)
81
+ font = ImageFont.load_default()
82
+ text = str(date)
83
+
84
+ bbox = draw.textbbox((0, 0), text, font=font)
85
+ textwidth = bbox[2] - bbox[0]
86
+ textheight = bbox[3] - bbox[1]
87
+
88
+ margin = 10
89
+ x = image.width - textwidth - margin
90
+ y = image.height - textheight - margin
91
+ draw.rectangle((x-5, y-5, x+textwidth+5, y+textheight+5), fill="black")
92
+ draw.text((x, y), text, font=font, fill="white")
93
+ return image
94
+
95
+ async def fetch_image(session, url, date):
96
+ async with session.get(url) as response:
97
+ if response.status == 200:
98
+ data = await response.read()
99
+ img = Image.open(BytesIO(data))
100
+ return add_date_to_image(img, date)
101
+ return None
102
+
103
+ async def download_images(bbox, width, height, available_years):
104
+ async with aiohttp.ClientSession() as session:
105
+ tasks = []
106
+ for date in available_years:
107
+ url = get_wms_url(bbox, width, height, date)
108
+ tasks.append(fetch_image(session, url, date))
109
+ return await asyncio.gather(*tasks)
110
+
111
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
112
+ with open(bin_file, 'rb') as f:
113
+ data = f.read()
114
+ bin_str = base64.b64encode(data).decode()
115
+ href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">Download {file_label}</a>'
116
+ return href
117
+
118
+ def app():
119
+ st.title("Swiss Historical Timelapse Generator")
120
+
121
+ st.markdown(
122
+ """
123
+ An interactive web app for creating historical timelapses of Switzerland using WMS-Time.
124
+ """
125
+ )
126
+
127
+ row1_col1, row1_col2 = st.columns([2, 1])
128
+
129
+ with row1_col1:
130
+ m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
131
+ folium.TileLayer(
132
+ tiles="https://wmts.geo.admin.ch/1.0.0/ch.swisstopo.pixelkarte-farbe/default/current/3857/{z}/{x}/{y}.jpeg",
133
+ attr="© swisstopo",
134
+ name="swisstopo",
135
+ overlay=False,
136
+ control=True
137
+ ).add_to(m)
138
+
139
+ draw = plugins.Draw(export=True)
140
+ draw.add_to(m)
141
+
142
+ folium.LayerControl().add_to(m)
143
+
144
+ folium_static(m, height=400)
145
+
146
+ with row1_col2:
147
+ data = st.file_uploader(
148
+ "Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button 😇👇",
149
+ type=["geojson", "kml", "zip"],
150
+ )
151
+
152
+ with st.form("submit_form"):
153
+ start_year = st.selectbox("Select start year:", [date // 10000 for date in AVAILABLE_DATES])
154
+ end_year = st.selectbox("Select end year:", [date // 10000 for date in AVAILABLE_DATES], index=len(AVAILABLE_DATES)-1)
155
+
156
+ width = st.slider("Image width:", 100, 1000, 800)
157
+ height = st.slider("Image height:", 100, 1000, 600)
158
+
159
+ speed = st.slider("Frames per second:", 1, 30, 5)
160
+
161
+ submitted = st.form_submit_button("Generate Timelapse")
162
+
163
+ if submitted:
164
+ if data is None:
165
+ st.warning("Please upload a GeoJSON file.")
166
+ else:
167
+ gdf = uploaded_file_to_gdf(data)
168
+ gdf_2056 = gdf.to_crs(epsg=2056)
169
+ bbox = tuple(gdf_2056.total_bounds)
170
+
171
+ available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
172
+
173
+ # Create temporary folders to store the images and videos
174
+ temp_dir = tempfile.mkdtemp()
175
+ images_dir = os.path.join(temp_dir, "images")
176
+ os.makedirs(images_dir, exist_ok=True)
177
+
178
+ progress_bar = st.progress(0)
179
+
180
+ # Download the images asynchronously
181
+ images = asyncio.run(download_images(bbox, width, height, available_years))
182
+
183
+ all_image_files = []
184
+ for i, img in enumerate(images):
185
+ if img:
186
+ img_file = os.path.join(images_dir, f"image_{i}.png")
187
+ img.save(img_file, format='PNG')
188
+ all_image_files.append(img_file)
189
+ progress_bar.progress((i + 1) / len(available_years))
190
+
191
+ if all_image_files:
192
+ # Generate the GIF
193
+ gif_path = os.path.join(temp_dir, "timelapse.gif")
194
+ with ThreadPoolExecutor() as executor:
195
+ frames = list(executor.map(imageio.imread, all_image_files))
196
+ imageio.mimsave(gif_path, frames, fps=speed, loop=0)
197
+ st.success("GIF Timelapse created successfully!")
198
+ st.image(gif_path)
199
+ st.markdown(get_binary_file_downloader_html(gif_path, 'Timelapse GIF'), unsafe_allow_html=True)
200
+
201
+ # Generate the MP4
202
+ mp4_path = os.path.join(temp_dir, "timelapse.mp4")
203
+ imageio.mimsave(mp4_path, frames, fps=speed, format='FFMPEG', quality=9)
204
+ st.success("MP4 Timelapse created successfully!")
205
+ st.video(mp4_path)
206
+ st.markdown(get_binary_file_downloader_html(mp4_path, 'Timelapse MP4'), unsafe_allow_html=True)
207
+
208
+ # Create a ZIP of the individual images
209
+ zip_path = os.path.join(temp_dir, "images.zip")
210
+ with zipfile.ZipFile(zip_path, 'w') as zipf:
211
+ for file in all_image_files:
212
+ zipf.write(file, os.path.basename(file))
213
+
214
+ st.success("Individual images saved successfully!")
215
+ st.markdown(get_binary_file_downloader_html(zip_path, 'Individual Images (ZIP)'), unsafe_allow_html=True)
216
+
217
+ # Clean up temporary files
218
+ for file in all_image_files:
219
+ os.unlink(file)
220
+ os.rmdir(images_dir)
221
+ os.unlink(gif_path)
222
+ os.unlink(mp4_path)
223
+ os.unlink(zip_path)
224
+ os.rmdir(temp_dir)
225
+ else:
226
+ st.error("Failed to create timelapse. No images were generated.")
227
+
228
+ if __name__ == "__main__":
229
+ app()
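
Each timelapse frame above is one WMS-Time `GetMap` request against `ch.swisstopo.zeitreihen`, with `TIME` selecting the map edition. A single-frame sketch; the LV95 extent around Bern and the 1950 edition are illustrative:

    import requests

    params = {
        "SERVICE": "WMS", "REQUEST": "GetMap", "VERSION": "1.3.0",
        "LAYERS": "ch.swisstopo.zeitreihen", "STYLES": "", "CRS": "EPSG:2056",
        "BBOX": "2599000,1199000,2601000,1201000",  # illustrative LV95 extent
        "WIDTH": "800", "HEIGHT": "600", "FORMAT": "image/png",
        "TIME": "19501231", "TILED": "true",
    }
    r = requests.get("https://wms.geo.admin.ch/", params=params, timeout=60)
    with open("frame_1950.png", "wb") as f:
        f.write(r.content)
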
pages/_📷_Timelapse.py ADDED
@@ -0,0 +1,238 @@
1
+ import streamlit as st
2
+ import geopandas as gpd
3
+ import folium
4
+ from folium import plugins
5
+ import requests
6
+ from PIL import Image, ImageDraw, ImageFont
7
+ from io import BytesIO
8
+ import imageio
9
+ import tempfile
10
+ import os
11
+ import zipfile
12
+ from datetime import datetime
13
+ from streamlit_folium import folium_static
14
+ import base64
15
+ import asyncio
16
+ import aiohttp
17
+ from concurrent.futures import ThreadPoolExecutor
18
+ from functools import lru_cache
19
+
20
+ st.set_page_config(layout="wide")
21
+
22
+ # List of available dates
23
+ AVAILABLE_DATES = [
24
+ 18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231, 18971231, 18981231, 18991231,
25
+ 19001231, 19011231, 19021231, 19031231, 19041231, 19051231, 19061231, 19071231, 19081231, 19091231,
26
+ 19101231, 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231, 19181231, 19191231,
27
+ 19201231, 19211231, 19221231, 19231231, 19241231, 19251231, 19261231, 19271231, 19281231, 19291231,
28
+ 19301231, 19311231, 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231, 19391231,
29
+ 19401231, 19411231, 19421231, 19431231, 19441231, 19451231, 19461231, 19471231, 19481231, 19491231,
30
+ 19501231, 19511231, 19521231, 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
31
+ 19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231, 19671231, 19681231, 19691231,
32
+ 19701231, 19711231, 19721231, 19731231, 19741231, 19751231, 19761231, 19771231, 19781231, 19791231,
33
+ 19801231, 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231, 19881231, 19891231,
34
+ 19901231, 19911231, 19921231, 19931231, 19941231, 19951231, 19961231, 19971231, 19981231, 19991231,
35
+ 20001231, 20011231, 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231, 20091231,
36
+ 20101231, 20111231, 20121231, 20131231, 20141231, 20151231, 20161231, 20171231, 20181231, 20191231,
37
+ 20201231, 20211231
38
+ ]
39
+
40
+ @st.cache_data
41
+ def uploaded_file_to_gdf(data):
42
+ import tempfile
43
+ import os
44
+ import uuid
45
+
46
+ _, file_extension = os.path.splitext(data.name)
47
+ file_id = str(uuid.uuid4())
48
+ file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
49
+
50
+ with open(file_path, "wb") as file:
51
+ file.write(data.getbuffer())
52
+
53
+ if file_path.lower().endswith(".kml"):
54
+ gdf = gpd.read_file(file_path, driver="KML")
55
+ else:
56
+ gdf = gpd.read_file(file_path)
57
+
58
+ return gdf
59
+
60
+ @lru_cache(maxsize=128)
61
+ def get_wms_url(bbox, width, height, time):
62
+ url = "https://wms.geo.admin.ch/"
63
+ params = {
64
+ "SERVICE": "WMS",
65
+ "REQUEST": "GetMap",
66
+ "VERSION": "1.3.0",
67
+ "LAYERS": "ch.swisstopo.zeitreihen",
68
+ "STYLES": "",
69
+ "CRS": "EPSG:2056",
70
+ "BBOX": ",".join(map(str, bbox)),
71
+ "WIDTH": str(width),
72
+ "HEIGHT": str(height),
73
+ "FORMAT": "image/png",
74
+ "TIME": str(time),
75
+ "TILED": "true"
76
+ }
77
+ return url + "?" + "&".join(f"{k}={v}" for k, v in params.items())
78
+
79
+ def add_date_to_image(image, date):
80
+ draw = ImageDraw.Draw(image)
81
+ font = ImageFont.load_default()
82
+ text = str(date)
83
+
84
+ bbox = draw.textbbox((0, 0), text, font=font)
85
+ textwidth = bbox[2] - bbox[0]
86
+ textheight = bbox[3] - bbox[1]
87
+
88
+ margin = 10
89
+ x = image.width - textwidth - margin
90
+ y = image.height - textheight - margin
91
+ draw.rectangle((x-5, y-5, x+textwidth+5, y+textheight+5), fill="black")
92
+ draw.text((x, y), text, font=font, fill="white")
93
+ return image
94
+
95
+ async def fetch_image(session, url, date, semaphore):
96
+ async with semaphore:
97
+ async with session.get(url) as response:
98
+ if response.status == 200:
99
+ data = await response.read()
100
+ img = Image.open(BytesIO(data))
101
+ return add_date_to_image(img, date)
102
+ return None
103
+
104
+ async def download_images(bbox, width, height, available_years):
105
+ semaphore = asyncio.Semaphore(10)  # limit the number of concurrent requests
106
+ async with aiohttp.ClientSession() as session:
107
+ tasks = []
108
+ for date in available_years:
109
+ url = get_wms_url(bbox, width, height, date)
110
+ tasks.append(fetch_image(session, url, date, semaphore))
111
+ return await asyncio.gather(*tasks)
112
+
113
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
114
+ with open(bin_file, 'rb') as f:
115
+ data = f.read()
116
+ bin_str = base64.b64encode(data).decode()
117
+ href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">Download {file_label}</a>'
118
+ return href
119
+
120
+ def process_images(images, format_option, speed, temp_dir):
121
+ results = {}
122
+
123
+ if "GIF" in format_option:
124
+ gif_path = os.path.join(temp_dir, "timelapse.gif")
125
+ imageio.mimsave(gif_path, images, fps=speed, loop=0)
126
+ results["GIF"] = gif_path
127
+
128
+ if "MP4" in format_option:
129
+ mp4_path = os.path.join(temp_dir, "timelapse.mp4")
130
+ imageio.mimsave(mp4_path, images, fps=speed, format='FFMPEG', quality=9)
131
+ results["MP4"] = mp4_path
132
+
133
+ if "Individual Images (ZIP)" in format_option:
134
+ zip_path = os.path.join(temp_dir, "images.zip")
135
+ with zipfile.ZipFile(zip_path, 'w') as zipf:
136
+ for i, img in enumerate(images):
137
+ img_path = os.path.join(temp_dir, f"image_{i}.png")
138
+ img.save(img_path)
139
+ zipf.write(img_path, os.path.basename(img_path))
140
+ os.unlink(img_path)
141
+ results["ZIP"] = zip_path
142
+
143
+ return results
144
+
145
+ def app():
146
+ st.title("Swiss Historical Timelapse Generator")
147
+
148
+ st.markdown(
149
+ """
150
+ An interactive web app for creating historical timelapses of Switzerland using WMS-Time.
151
+ """
152
+ )
153
+
154
+ row1_col1, row1_col2 = st.columns([2, 1])
155
+
156
+ with row1_col1:
157
+ m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
158
+ folium.TileLayer(
159
+ tiles="https://wmts.geo.admin.ch/1.0.0/ch.swisstopo.pixelkarte-farbe/default/current/3857/{z}/{x}/{y}.jpeg",
160
+ attr="© swisstopo",
161
+ name="swisstopo",
162
+ overlay=False,
163
+ control=True
164
+ ).add_to(m)
165
+
166
+ draw = plugins.Draw(export=True)
167
+ draw.add_to(m)
168
+
169
+ folium.LayerControl().add_to(m)
170
+
171
+ folium_static(m, height=400)
172
+
173
+ with row1_col2:
174
+ data = st.file_uploader(
175
+ "Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button 😇👇",
176
+ type=["geojson", "kml", "zip"],
177
+ )
178
+
179
+ with st.form("submit_form"):
180
+ start_year = st.selectbox("Select start year:", [date // 10000 for date in AVAILABLE_DATES])
181
+ end_year = st.selectbox("Select end year:", [date // 10000 for date in AVAILABLE_DATES], index=len(AVAILABLE_DATES)-1)
182
+
183
+ width = st.slider("Image width:", 100, 4000, 800)
184
+ height = st.slider("Image height:", 100, 4000, 600)
185
+
186
+ if width * height > 4000 * 4000:
187
+ st.warning("Warning: The image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
188
+
189
+ speed = st.slider("Frames per second:", 1, 30, 5)
190
+
191
+ format_option = st.multiselect("Choose output format(s):", ["GIF", "MP4", "Individual Images (ZIP)"], default=["GIF", "MP4", "Individual Images (ZIP)"])
192
+
193
+ submitted = st.form_submit_button("Generate Timelapse")
194
+
195
+ if submitted:
196
+ if data is None:
197
+ st.warning("Please upload a GeoJSON file.")
198
+ elif width * height > 4000 * 4000:
199
+ st.error("Image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
200
+ else:
201
+ gdf = uploaded_file_to_gdf(data)
202
+ gdf_2056 = gdf.to_crs(epsg=2056)
203
+ bbox = tuple(gdf_2056.total_bounds)
204
+
205
+ available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
206
+
207
+ total_requests = len(available_years)
208
+
209
+ if total_requests > 500:
210
+ st.warning(f"You are requesting {total_requests} images. This exceeds the limit of 500 requests per second set by swisstopo. The process may take longer than expected.")
211
+
212
+ progress_bar = st.progress(0)
213
+
214
+ images = [img for img in asyncio.run(download_images(bbox, width, height, available_years)) if img is not None]  # drop frames whose request failed
215
+
216
+ progress_bar.progress(100)
217
+
218
+ if images:
219
+ with tempfile.TemporaryDirectory() as temp_dir:
220
+ results = process_images(images, format_option, speed, temp_dir)
221
+
222
+ for format, path in results.items():
223
+ if format == "GIF":
224
+ st.success("GIF Timelapse created successfully!")
225
+ st.image(path)
226
+ st.markdown(get_binary_file_downloader_html(path, 'Timelapse GIF'), unsafe_allow_html=True)
227
+ elif format == "MP4":
228
+ st.success("MP4 Timelapse created successfully!")
229
+ st.video(path)
230
+ st.markdown(get_binary_file_downloader_html(path, 'Timelapse MP4'), unsafe_allow_html=True)
231
+ elif format == "ZIP":
232
+ st.success("Individual images saved successfully!")
233
+ st.markdown(get_binary_file_downloader_html(path, 'Individual Images (ZIP)'), unsafe_allow_html=True)
234
+ else:
235
+ st.error("Failed to create timelapse. No images were generated.")
236
+
237
+ if __name__ == "__main__":
238
+ app()
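
For reference, all of the timelapse pages in this commit build the same kind of WMS-Time GetMap request against the swisstopo service. The following is a minimal standalone sketch of that request outside Streamlit; the bounding box is an illustrative placeholder around Bern in EPSG:2056 (LV95), not a value taken from the app.

# Minimal sketch: fetch one ch.swisstopo.zeitreihen frame for a single date.
import requests
from io import BytesIO
from PIL import Image

params = {
    "SERVICE": "WMS",
    "REQUEST": "GetMap",
    "VERSION": "1.3.0",
    "LAYERS": "ch.swisstopo.zeitreihen",
    "STYLES": "",
    "CRS": "EPSG:2056",
    "BBOX": "2595000,1195000,2605000,1205000",  # placeholder LV95 extent
    "WIDTH": "800",
    "HEIGHT": "800",
    "FORMAT": "image/png",
    "TIME": "19301231",  # one of the AVAILABLE_DATES values
}
response = requests.get("https://wms.geo.admin.ch/", params=params, timeout=30)
response.raise_for_status()
Image.open(BytesIO(response.content)).save("zeitreihen_1930.png")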
pages/histoire.py ADDED
@@ -0,0 +1,206 @@
+ import streamlit as st
+ import geopandas as gpd
+ import folium
+ from folium import plugins
+ import requests
+ from PIL import Image, ImageDraw, ImageFont
+ from io import BytesIO
+ import imageio
+ import tempfile
+ import os
+ import zipfile
+ from datetime import datetime
+ from streamlit_folium import folium_static
+ import base64
+
+ st.set_page_config(layout="wide")
+
+ # List of available dates
+ AVAILABLE_DATES = [
+     18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231, 18971231, 18981231, 18991231,
+     19001231, 19011231, 19021231, 19031231, 19041231, 19051231, 19061231, 19071231, 19081231, 19091231,
+     19101231, 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231, 19181231, 19191231,
+     19201231, 19211231, 19221231, 19231231, 19241231, 19251231, 19261231, 19271231, 19281231, 19291231,
+     19301231, 19311231, 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231, 19391231,
+     19401231, 19411231, 19421231, 19431231, 19441231, 19451231, 19461231, 19471231, 19481231, 19491231,
+     19501231, 19511231, 19521231, 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
+     19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231, 19671231, 19681231, 19691231,
+     19701231, 19711231, 19721231, 19731231, 19741231, 19751231, 19761231, 19771231, 19781231, 19791231,
+     19801231, 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231, 19881231, 19891231,
+     19901231, 19911231, 19921231, 19931231, 19941231, 19951231, 19961231, 19971231, 19981231, 19991231,
+     20001231, 20011231, 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231, 20091231,
+     20101231, 20111231, 20121231, 20131231, 20141231, 20151231, 20161231, 20171231, 20181231, 20191231,
+     20201231, 20211231
+ ]
+
+ @st.cache_data
+ def uploaded_file_to_gdf(data):
+     import tempfile
+     import os
+     import uuid
+
+     _, file_extension = os.path.splitext(data.name)
+     file_id = str(uuid.uuid4())
+     file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
+
+     with open(file_path, "wb") as file:
+         file.write(data.getbuffer())
+
+     if file_path.lower().endswith(".kml"):
+         gdf = gpd.read_file(file_path, driver="KML")
+     else:
+         gdf = gpd.read_file(file_path)
+
+     return gdf
+
+ def get_wms_image(bbox, width, height, time):
+     url = "https://wms.geo.admin.ch/"
+     params = {
+         "SERVICE": "WMS",
+         "REQUEST": "GetMap",
+         "VERSION": "1.3.0",
+         "LAYERS": "ch.swisstopo.zeitreihen",
+         "STYLES": "",
+         "CRS": "EPSG:2056",
+         "BBOX": ",".join(map(str, bbox)),
+         "WIDTH": str(width),
+         "HEIGHT": str(height),
+         "FORMAT": "image/png",
+         "TIME": str(time),
+         "TILED": "true"
+     }
+     response = requests.get(url, params=params)
+     if response.status_code == 200:
+         return Image.open(BytesIO(response.content))
+     else:
+         st.error(f"Failed to fetch image: {response.status_code}")
+         return None
+
+ def add_date_to_image(image, date):
+     draw = ImageDraw.Draw(image)
+     font = ImageFont.load_default()
+     text = str(date)
+
+     bbox = draw.textbbox((0, 0), text, font=font)
+     textwidth = bbox[2] - bbox[0]
+     textheight = bbox[3] - bbox[1]
+
+     margin = 10
+     x = image.width - textwidth - margin
+     y = image.height - textheight - margin
+     draw.rectangle((x-5, y-5, x+textwidth+5, y+textheight+5), fill="black")
+     draw.text((x, y), text, font=font, fill="white")
+     return image
+
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
+     with open(bin_file, 'rb') as f:
+         data = f.read()
+     bin_str = base64.b64encode(data).decode()
+     href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">Download {file_label}</a>'
+     return href
+
+ def app():
+     st.title("Swiss Historical Timelapse Generator")
+
+     st.markdown(
+         """
+         An interactive web app for creating historical timelapses of Switzerland using WMS-Time.
+         """
+     )
+
+     row1_col1, row1_col2 = st.columns([2, 1])
+
+     with row1_col1:
+         m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
+         folium.TileLayer(
+             tiles="https://wmts.geo.admin.ch/1.0.0/ch.swisstopo.pixelkarte-farbe/default/current/3857/{z}/{x}/{y}.jpeg",
+             attr="© swisstopo",
+             name="swisstopo",
+             overlay=False,
+             control=True
+         ).add_to(m)
+
+         draw = plugins.Draw(export=True)
+         draw.add_to(m)
+
+         folium.LayerControl().add_to(m)
+
+         folium_static(m, height=400)
+
+     with row1_col2:
+         data = st.file_uploader(
+             "Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button 😇👇",
+             type=["geojson", "kml", "zip"],
+         )
+
+         with st.form("submit_form"):
+             start_year = st.selectbox("Select start year:", [date // 10000 for date in AVAILABLE_DATES])
+             end_year = st.selectbox("Select end year:", [date // 10000 for date in AVAILABLE_DATES], index=len(AVAILABLE_DATES)-1)
+
+             width = st.slider("Image width:", 100, 1000, 800)
+             height = st.slider("Image height:", 100, 1000, 600)
+
+             speed = st.slider("Frames per second:", 1, 30, 5)
+
+             format_option = st.radio("Choose output format:", ("GIF", "MP4", "Both"))
+
+             submitted = st.form_submit_button("Generate Timelapse")
+
+         if submitted:
+             if data is None:
+                 st.warning("Please upload a GeoJSON file.")
+             else:
+                 gdf = uploaded_file_to_gdf(data)
+                 gdf_2056 = gdf.to_crs(epsg=2056)
+                 bbox = gdf_2056.total_bounds
+
+                 available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
+                 images = []
+                 image_files = []
+
+                 progress_bar = st.progress(0)
+                 for i, date in enumerate(available_years):
+                     img = get_wms_image(bbox, width, height, date)
+                     if img:
+                         img_with_date = add_date_to_image(img, date)
+                         images.append(img_with_date)
+
+                         # Save individual image
+                         img_file = tempfile.NamedTemporaryFile(delete=False, suffix=f"_{date}.png")
+                         img_with_date.save(img_file.name)
+                         image_files.append(img_file.name)
+
+                     progress_bar.progress((i + 1) / len(available_years))
+
+                 if images:
+                     if format_option in ["GIF", "Both"]:
+                         with tempfile.NamedTemporaryFile(suffix=".gif", delete=False) as tmp_file:
+                             imageio.mimsave(tmp_file.name, images, fps=speed, loop=0)
+                             st.success("GIF Timelapse created successfully!")
+                             st.image(tmp_file.name)
+                             st.markdown(get_binary_file_downloader_html(tmp_file.name, 'Timelapse GIF'), unsafe_allow_html=True)
+
+                     if format_option in ["MP4", "Both"]:
+                         with tempfile.NamedTemporaryFile(suffix=".mp4", delete=False) as tmp_file:
+                             imageio.mimsave(tmp_file.name, images, fps=speed, format='FFMPEG')
+                             st.success("MP4 Timelapse created successfully!")
+                             st.video(tmp_file.name)
+                             st.markdown(get_binary_file_downloader_html(tmp_file.name, 'Timelapse MP4'), unsafe_allow_html=True)
+
+                     # Create ZIP file with individual images
+                     with tempfile.NamedTemporaryFile(suffix=".zip", delete=False) as tmp_zip:
+                         with zipfile.ZipFile(tmp_zip.name, 'w') as zipf:
+                             for img_file in image_files:
+                                 zipf.write(img_file, os.path.basename(img_file))
+
+                     st.success("Individual images saved successfully!")
+                     st.markdown(get_binary_file_downloader_html(tmp_zip.name, 'Individual Images (ZIP)'), unsafe_allow_html=True)
+
+                     # Clean up temporary image files
+                     for img_file in image_files:
+                         os.unlink(img_file)
+                 else:
+                     st.error("Failed to create timelapse. No images were generated.")
+
+ if __name__ == "__main__":
+     app()
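
The ROI handling above (reproject the uploaded geometry to EPSG:2056, then take total_bounds) can be exercised on its own. A minimal sketch, assuming a small WGS84 polygon built in memory instead of an uploaded file:

# Sketch: derive the WMS BBOX parameter from a WGS84 ROI.
import geopandas as gpd
from shapely.geometry import box

roi = gpd.GeoDataFrame(geometry=[box(7.4, 46.9, 7.5, 47.0)], crs="EPSG:4326")  # placeholder ROI
roi_2056 = roi.to_crs(epsg=2056)
minx, miny, maxx, maxy = roi_2056.total_bounds  # (minE, minN, maxE, maxN) in LV95
bbox_param = ",".join(map(str, (minx, miny, maxx, maxy)))  # matches get_wms_image()'s BBOX
print(bbox_param)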
pages/lastime.py ADDED
@@ -0,0 +1,298 @@
+ import streamlit as st
+ import geopandas as gpd
+ import folium
+ from folium import plugins
+ import requests
+ from PIL import Image, ImageDraw, ImageFont
+ from io import BytesIO
+ import imageio
+ import tempfile
+ import os
+ import zipfile
+ from datetime import datetime
+ from streamlit_folium import folium_static
+ import base64
+ import asyncio
+ import aiohttp
+ import concurrent.futures
+ import logging
+ import numpy as np
+
+ # Logging configuration
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+ logger = logging.getLogger(__name__)
+
+ # Streamlit page configuration
+ st.set_page_config(layout="wide")
+
+ # List of available dates
+ AVAILABLE_DATES = [
+     18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231, 18971231, 18981231, 18991231,
+     19001231, 19011231, 19021231, 19031231, 19041231, 19051231, 19061231, 19071231, 19081231, 19091231,
+     19101231, 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231, 19181231, 19191231,
+     19201231, 19211231, 19221231, 19231231, 19241231, 19251231, 19261231, 19271231, 19281231, 19291231,
+     19301231, 19311231, 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231, 19391231,
+     19401231, 19411231, 19421231, 19431231, 19441231, 19451231, 19461231, 19471231, 19481231, 19491231,
+     19501231, 19511231, 19521231, 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
+     19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231, 19671231, 19681231, 19691231,
+     19701231, 19711231, 19721231, 19731231, 19741231, 19751231, 19761231, 19771231, 19781231, 19791231,
+     19801231, 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231, 19881231, 19891231,
+     19901231, 19911231, 19921231, 19931231, 19941231, 19951231, 19961231, 19971231, 19981231, 19991231,
+     20001231, 20011231, 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231, 20091231,
+     20101231, 20111231, 20121231, 20131231, 20141231, 20151231, 20161231, 20171231, 20181231, 20191231,
+     20201231, 20211231
+ ]
+
+ @st.cache_data
+ def uploaded_file_to_gdf(data):
+     import tempfile
+     import os
+     import uuid
+
+     _, file_extension = os.path.splitext(data.name)
+     file_id = str(uuid.uuid4())
+     file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
+
+     with open(file_path, "wb") as file:
+         file.write(data.getbuffer())
+
+     if file_path.lower().endswith(".kml"):
+         gdf = gpd.read_file(file_path, driver="KML")
+     else:
+         gdf = gpd.read_file(file_path)
+
+     return gdf
+
+ @st.cache_data
+ def get_wms_url(bbox, width, height, time):
+     url = "https://wms.geo.admin.ch/"
+     params = {
+         "SERVICE": "WMS",
+         "REQUEST": "GetMap",
+         "VERSION": "1.3.0",
+         "LAYERS": "ch.swisstopo.zeitreihen",
+         "STYLES": "",
+         "CRS": "EPSG:2056",
+         "BBOX": ",".join(map(str, bbox)),
+         "WIDTH": str(width),
+         "HEIGHT": str(height),
+         "FORMAT": "image/png",
+         "TIME": str(time),
+         "TILED": "true"
+     }
+     return url + "?" + "&".join(f"{k}={v}" for k, v in params.items())
+
+ def add_date_to_image(image, date):
+     draw = ImageDraw.Draw(image)
+     font = ImageFont.load_default()
+     text = str(date // 10000)  # Keep only the year
+
+     bbox = draw.textbbox((0, 0), text, font=font)
+     textwidth = bbox[2] - bbox[0]
+     textheight = bbox[3] - bbox[1]
+
+     margin = 10
+     x = image.width - textwidth - margin
+     y = image.height - textheight - margin
+     draw.rectangle((x-5, y-5, x+textwidth+5, y+textheight+5), fill="black")
+     draw.text((x, y), text, font=font, fill="white")
+     return image
+
+ async def fetch_image(session, url, date, semaphore):
+     async with semaphore:
+         try:
+             async with session.get(url) as response:
+                 if response.status == 200:
+                     data = await response.read()
+                     img = Image.open(BytesIO(data))
+                     return add_date_to_image(img, date)
+         except Exception as e:
+             logger.error(f"Error fetching image for date {date}: {str(e)}")
+         return None
+
+ async def download_images(bbox, width, height, available_years, max_concurrent_requests):
+     semaphore = asyncio.Semaphore(max_concurrent_requests)
+     async with aiohttp.ClientSession() as session:
+         tasks = [fetch_image(session, get_wms_url(bbox, width, height, date), date, semaphore) for date in available_years]
+         return await asyncio.gather(*tasks)
+
+ def process_single_image(args):
+     img, i, format_option, temp_dir = args
+     results = {}
+     if img:
+         if "GIF" in format_option or "MP4" in format_option:
+             img_array = np.array(img)
+             results['array'] = img_array
+         if "Individual Images (ZIP)" in format_option:
+             img_path = os.path.join(temp_dir, f"image_{i}.png")
+             img.save(img_path)
+             results['path'] = img_path
+     return results
+
+ def process_images(images, format_option, temp_dir, batch_size=100):
+     results = {"GIF": [], "MP4": [], "ZIP": []}
+
+     for i in range(0, len(images), batch_size):
+         batch = images[i:i+batch_size]
+         with concurrent.futures.ProcessPoolExecutor() as executor:
+             batch_results = list(executor.map(process_single_image, [(img, j, format_option, temp_dir) for j, img in enumerate(batch, start=i) if img is not None]))
+
+         for result in batch_results:
+             if 'array' in result:
+                 results["GIF"].append(result['array'])
+                 results["MP4"].append(result['array'])
+             if 'path' in result:
+                 results["ZIP"].append(result['path'])
+
+     return results
+
+ def create_timelapse(images, format_option, speed, temp_dir):
+     results = {}
+
+     if "GIF" in format_option:
+         gif_path = os.path.join(temp_dir, "timelapse.gif")
+         imageio.mimsave(gif_path, images["GIF"], fps=speed)
+         results["GIF"] = gif_path
+
+     if "MP4" in format_option:
+         mp4_path = os.path.join(temp_dir, "timelapse.mp4")
+         imageio.mimsave(mp4_path, images["MP4"], fps=speed, format='FFMPEG', quality=8)
+         results["MP4"] = mp4_path
+
+     if "Individual Images (ZIP)" in format_option:
+         zip_path = os.path.join(temp_dir, "images.zip")
+         with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
+             for img_path in images["ZIP"]:
+                 zipf.write(img_path, os.path.basename(img_path))
+                 os.unlink(img_path)
+         results["ZIP"] = zip_path
+
+     return results
+
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
+     with open(bin_file, 'rb') as f:
+         data = f.read()
+     bin_str = base64.b64encode(data).decode()
+     href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">Download {file_label}</a>'
+     return href
+
+ def app():
+     st.title("Swiss Historical Timelapse Generator")
+
+     st.markdown(
+         """
+         An interactive web app for creating historical timelapses of Switzerland using WMS-Time.
+         """
+     )
+
+     row1_col1, row1_col2 = st.columns([2, 1])
+
+     with row1_col1:
+         m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
+         folium.TileLayer(
+             tiles="https://wmts.geo.admin.ch/1.0.0/ch.swisstopo.pixelkarte-farbe/default/current/3857/{z}/{x}/{y}.jpeg",
+             attr="© swisstopo",
+             name="swisstopo",
+             overlay=False,
+             control=True
+         ).add_to(m)
+
+         draw = plugins.Draw(export=True)
+         draw.add_to(m)
+
+         folium.LayerControl().add_to(m)
+
+         folium_static(m, height=400)
+
+     with row1_col2:
+         data = st.file_uploader(
+             "Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button 😇👇",
+             type=["geojson", "kml", "zip"],
+         )
+
+         with st.form("submit_form"):
+             start_year = st.selectbox("Select start year:", [date // 10000 for date in AVAILABLE_DATES])
+             end_year = st.selectbox("Select end year:", [date // 10000 for date in AVAILABLE_DATES], index=len(AVAILABLE_DATES)-1)
+
+             size_options = {
+                 "HD (720p)": (1280, 720),
+                 "Full HD (1080p)": (1920, 1080),
+                 "2K": (2560, 1440),
+                 "4K": (3840, 2160),
+                 "Custom": None
+             }
+
+             size_choice = st.selectbox("Choose image size:", list(size_options.keys()))
+
+             if size_choice == "Custom":
+                 col1, col2 = st.columns(2)
+                 with col1:
+                     width = st.number_input("Width:", min_value=100, max_value=4000, value=800)
+                 with col2:
+                     height = st.number_input("Height:", min_value=100, max_value=4000, value=600)
+             else:
+                 width, height = size_options[size_choice]
+
+             if width * height > 4000 * 4000:
+                 st.warning("Warning: The image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
+
+             speed = st.slider("Frames per second:", 1, 30, 5)
+
+             format_option = st.multiselect("Choose output format(s):", ["GIF", "MP4", "Individual Images (ZIP)"], default=["GIF", "MP4", "Individual Images (ZIP)"])
+
+             with st.expander("Advanced Options"):
+                 batch_size = st.slider("Batch Size", 50, 500, 100)
+                 max_concurrent_requests = st.slider("Max Concurrent Requests", 10, 50, 20)
+
+             submitted = st.form_submit_button("Generate Timelapse")
+
+         if submitted:
+             if data is None:
+                 st.warning("Please upload a GeoJSON file.")
+             elif width * height > 4000 * 4000:
+                 st.error("Image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
+             else:
+                 gdf = uploaded_file_to_gdf(data)
+                 gdf_2056 = gdf.to_crs(epsg=2056)
+                 bbox = tuple(gdf_2056.total_bounds)
+
+                 available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
+
+                 total_requests = len(available_years)
+
+                 if total_requests > 500:
+                     st.warning(f"You are requesting {total_requests} images. This exceeds the limit of 500 requests per second set by swisstopo. The process may take longer than expected.")
+
+                 progress_bar = st.progress(0)
+
+                 with st.spinner('Downloading images...'):
+                     images = asyncio.run(download_images(bbox, width, height, available_years, max_concurrent_requests))
+                     progress_bar.progress(50)
+
+                 if images:
+                     logger.info(f"Retrieved {len(images)} images successfully")
+                     with tempfile.TemporaryDirectory() as temp_dir:
+                         with st.spinner('Processing images...'):
+                             processed_images = process_images(images, format_option, temp_dir, batch_size)
+                             results = create_timelapse(processed_images, format_option, speed, temp_dir)
+                             progress_bar.progress(100)
+
+                         for format, path in results.items():
+                             if os.path.exists(path):
+                                 if format == "GIF":
+                                     st.success("GIF Timelapse created successfully!")
+                                     st.image(path)
+                                 elif format == "MP4":
+                                     st.success("MP4 Timelapse created successfully!")
+                                     st.video(path)
+                                 elif format == "ZIP":
+                                     st.success("Individual images saved successfully!")
+                                 st.markdown(get_binary_file_downloader_html(path, f'Timelapse {format}'), unsafe_allow_html=True)
+                             else:
+                                 st.error(f"{format} file was not created successfully.")
+                 else:
+                     logger.error("No images were retrieved")
+                     st.error("Failed to create timelapse. No images were generated.")
+
+ if __name__ == "__main__":
+     app()
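
The download path in this page (and in the variants below) bounds concurrency with an asyncio.Semaphore so that at most N requests are in flight at once. The same pattern in isolation, with a placeholder URL list:

# Sketch of the bounded-concurrency pattern used by download_images().
import asyncio
import aiohttp

async def fetch(session, url, semaphore):
    async with semaphore:  # only max_concurrent requests run at once
        async with session.get(url) as response:
            response.raise_for_status()
            return await response.read()

async def fetch_all(urls, max_concurrent=20):
    semaphore = asyncio.Semaphore(max_concurrent)
    async with aiohttp.ClientSession() as session:
        tasks = [fetch(session, url, semaphore) for url in urls]
        return await asyncio.gather(*tasks, return_exceptions=True)

# payloads = asyncio.run(fetch_all(["https://example.com/a.png"] * 50))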
pages/lastime_v1.py ADDED
@@ -0,0 +1,280 @@
+ import streamlit as st
+ import geopandas as gpd
+ import folium
+ from folium import plugins
+ import requests
+ from PIL import Image, ImageDraw, ImageFont
+ from io import BytesIO
+ import imageio
+ import tempfile
+ import os
+ import zipfile
+ from datetime import datetime
+ from streamlit_folium import folium_static
+ import base64
+ import asyncio
+ import aiohttp
+ from concurrent.futures import ThreadPoolExecutor, ProcessPoolExecutor
+ from functools import lru_cache
+ import logging
+ import numpy as np
+
+ # Logging configuration
+ logging.basicConfig(level=logging.INFO)
+ logger = logging.getLogger(__name__)
+
+ st.set_page_config(layout="wide")
+
+ # List of available dates
+ AVAILABLE_DATES = [
+     18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231, 18971231, 18981231, 18991231,
+     19001231, 19011231, 19021231, 19031231, 19041231, 19051231, 19061231, 19071231, 19081231, 19091231,
+     19101231, 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231, 19181231, 19191231,
+     19201231, 19211231, 19221231, 19231231, 19241231, 19251231, 19261231, 19271231, 19281231, 19291231,
+     19301231, 19311231, 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231, 19391231,
+     19401231, 19411231, 19421231, 19431231, 19441231, 19451231, 19461231, 19471231, 19481231, 19491231,
+     19501231, 19511231, 19521231, 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
+     19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231, 19671231, 19681231, 19691231,
+     19701231, 19711231, 19721231, 19731231, 19741231, 19751231, 19761231, 19771231, 19781231, 19791231,
+     19801231, 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231, 19881231, 19891231,
+     19901231, 19911231, 19921231, 19931231, 19941231, 19951231, 19961231, 19971231, 19981231, 19991231,
+     20001231, 20011231, 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231, 20091231,
+     20101231, 20111231, 20121231, 20131231, 20141231, 20151231, 20161231, 20171231, 20181231, 20191231,
+     20201231, 20211231
+ ]
+
+ @st.cache_data
+ def uploaded_file_to_gdf(data):
+     import tempfile
+     import os
+     import uuid
+
+     _, file_extension = os.path.splitext(data.name)
+     file_id = str(uuid.uuid4())
+     file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
+
+     with open(file_path, "wb") as file:
+         file.write(data.getbuffer())
+
+     if file_path.lower().endswith(".kml"):
+         gdf = gpd.read_file(file_path, driver="KML")
+     else:
+         gdf = gpd.read_file(file_path)
+
+     return gdf
+
+ @lru_cache(maxsize=128)
+ def get_wms_url(bbox, width, height, time):
+     url = "https://wms.geo.admin.ch/"
+     params = {
+         "SERVICE": "WMS",
+         "REQUEST": "GetMap",
+         "VERSION": "1.3.0",
+         "LAYERS": "ch.swisstopo.zeitreihen",
+         "STYLES": "",
+         "CRS": "EPSG:2056",
+         "BBOX": ",".join(map(str, bbox)),
+         "WIDTH": str(width),
+         "HEIGHT": str(height),
+         "FORMAT": "image/png",
+         "TIME": str(time),
+         "TILED": "true"
+     }
+     return url + "?" + "&".join(f"{k}={v}" for k, v in params.items())
+
+ def add_date_to_image(image, date):
+     draw = ImageDraw.Draw(image)
+     font = ImageFont.load_default()
+     text = str(date // 10000)  # Keep only the year
+
+     bbox = draw.textbbox((0, 0), text, font=font)
+     textwidth = bbox[2] - bbox[0]
+     textheight = bbox[3] - bbox[1]
+
+     margin = 10
+     x = image.width - textwidth - margin
+     y = image.height - textheight - margin
+     draw.rectangle((x-5, y-5, x+textwidth+5, y+textheight+5), fill="black")
+     draw.text((x, y), text, font=font, fill="white")
+     return image
+
+ async def fetch_image(session, url, date, semaphore):
+     async with semaphore:
+         try:
+             async with session.get(url) as response:
+                 if response.status == 200:
+                     data = await response.read()
+                     img = Image.open(BytesIO(data))
+                     return add_date_to_image(img, date)
+         except Exception as e:
+             logger.error(f"Error fetching image for date {date}: {str(e)}")
+         return None
+
+ async def download_images(bbox, width, height, available_years):
+     semaphore = asyncio.Semaphore(20)  # Limited to 20 concurrent requests
+     async with aiohttp.ClientSession() as session:
+         tasks = [fetch_image(session, get_wms_url(bbox, width, height, date), date, semaphore) for date in available_years]
+         return await asyncio.gather(*tasks)
+
+ def process_image(args):
+     img, i, temp_dir, format_option = args
+     results = {}
+     if img:
+         if "GIF" in format_option or "MP4" in format_option:
+             img_array = np.array(img)
+             results['array'] = img_array
+         if "Individual Images (ZIP)" in format_option:
+             img_path = os.path.join(temp_dir, f"image_{i}.png")
+             img.save(img_path)
+             results['path'] = img_path
+     return results
+
+ def process_images(images, format_option, speed, temp_dir):
+     results = {}
+
+     with ProcessPoolExecutor() as executor:
+         processed_images = list(executor.map(process_image, [(img, i, temp_dir, format_option) for i, img in enumerate(images) if img is not None]))
+
+     if "GIF" in format_option:
+         gif_path = os.path.join(temp_dir, "timelapse.gif")
+         imageio.mimsave(gif_path, [img['array'] for img in processed_images if 'array' in img], fps=speed, loop=0)
+         results["GIF"] = gif_path
+
+     if "MP4" in format_option:
+         mp4_path = os.path.join(temp_dir, "timelapse.mp4")
+         imageio.mimsave(mp4_path, [img['array'] for img in processed_images if 'array' in img], fps=speed, format='FFMPEG', quality=9)
+         results["MP4"] = mp4_path
+
+     if "Individual Images (ZIP)" in format_option:
+         zip_path = os.path.join(temp_dir, "images.zip")
+         with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
+             for img in processed_images:
+                 if 'path' in img:
+                     zipf.write(img['path'], os.path.basename(img['path']))
+                     os.unlink(img['path'])
+         results["ZIP"] = zip_path
+
+     return results
+
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
+     with open(bin_file, 'rb') as f:
+         data = f.read()
+     bin_str = base64.b64encode(data).decode()
+     href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">Download {file_label}</a>'
+     return href
+
+ def app():
+     st.title("Swiss Historical Timelapse Generator")
+
+     st.markdown(
+         """
+         An interactive web app for creating historical timelapses of Switzerland using WMS-Time.
+         """
+     )
+
+     row1_col1, row1_col2 = st.columns([2, 1])
+
+     with row1_col1:
+         m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
+         folium.TileLayer(
+             tiles="https://wmts.geo.admin.ch/1.0.0/ch.swisstopo.pixelkarte-farbe/default/current/3857/{z}/{x}/{y}.jpeg",
+             attr="© swisstopo",
+             name="swisstopo",
+             overlay=False,
+             control=True
+         ).add_to(m)
+
+         draw = plugins.Draw(export=True)
+         draw.add_to(m)
+
+         folium.LayerControl().add_to(m)
+
+         folium_static(m, height=400)
+
+     with row1_col2:
+         data = st.file_uploader(
+             "Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button 😇👇",
+             type=["geojson", "kml", "zip"],
+         )
+
+         with st.form("submit_form"):
+             start_year = st.selectbox("Select start year:", [date // 10000 for date in AVAILABLE_DATES])
+             end_year = st.selectbox("Select end year:", [date // 10000 for date in AVAILABLE_DATES], index=len(AVAILABLE_DATES)-1)
+
+             # Preset image size options
+             size_options = {
+                 "HD (720p)": (1280, 720),
+                 "Full HD (1080p)": (1920, 1080),
+                 "2K": (2560, 1440),
+                 "4K": (3840, 2160),
+                 "Custom": None
+             }
+
+             size_choice = st.selectbox("Choose image size:", list(size_options.keys()))
+
+             if size_choice == "Custom":
+                 col1, col2 = st.columns(2)
+                 with col1:
+                     width = st.number_input("Width:", min_value=100, max_value=4000, value=800)
+                 with col2:
+                     height = st.number_input("Height:", min_value=100, max_value=4000, value=600)
+             else:
+                 width, height = size_options[size_choice]
+
+             if width * height > 4000 * 4000:
+                 st.warning("Warning: The image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
+
+             speed = st.slider("Frames per second:", 1, 30, 5)
+
+             format_option = st.multiselect("Choose output format(s):", ["GIF", "MP4", "Individual Images (ZIP)"], default=["GIF", "MP4", "Individual Images (ZIP)"])
+
+             submitted = st.form_submit_button("Generate Timelapse")
+
+         if submitted:
+             if data is None:
+                 st.warning("Please upload a GeoJSON file.")
+             elif width * height > 4000 * 4000:
+                 st.error("Image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
+             else:
+                 gdf = uploaded_file_to_gdf(data)
+                 gdf_2056 = gdf.to_crs(epsg=2056)
+                 bbox = tuple(gdf_2056.total_bounds)
+
+                 available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
+
+                 total_requests = len(available_years)
+
+                 if total_requests > 500:
+                     st.warning(f"You are requesting {total_requests} images. This exceeds the limit of 500 requests per second set by swisstopo. The process may take longer than expected.")
+
+                 progress_bar = st.progress(0)
+
+                 images = asyncio.run(download_images(bbox, width, height, available_years))
+
+                 progress_bar.progress(100)
+
+                 if images:
+                     logger.info(f"Retrieved {len(images)} images successfully")
+                     with tempfile.TemporaryDirectory() as temp_dir:
+                         with st.spinner('Processing images... This may take a while for large images.'):
+                             results = process_images(images, format_option, speed, temp_dir)
+
+                         for format, path in results.items():
+                             if os.path.exists(path):
+                                 if format == "GIF":
+                                     st.success("GIF Timelapse created successfully!")
+                                     st.image(path)
+                                 elif format == "MP4":
+                                     st.success("MP4 Timelapse created successfully!")
+                                     st.video(path)
+                                 elif format == "ZIP":
+                                     st.success("Individual images saved successfully!")
+                                 st.markdown(get_binary_file_downloader_html(path, f'Timelapse {format}'), unsafe_allow_html=True)
+                             else:
+                                 st.error(f"{format} file was not created successfully.")
+                 else:
+                     logger.error("No images were retrieved")
+                     st.error("Failed to create timelapse. No images were generated.")
+
+ if __name__ == "__main__":
+     app()
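
get_binary_file_downloader_html(), used by every page here, inlines the whole file as a base64 data URI, which inflates it by about a third in the rendered page. On recent Streamlit versions, st.download_button gives the same result without hand-built HTML; a sketch, assuming the generated file already exists on disk:

# Sketch: serving a generated file with st.download_button instead of a data-URI link.
import os
import streamlit as st

def offer_download(path, label):
    with open(path, "rb") as f:
        st.download_button(
            label=f"Download {label}",
            data=f.read(),
            file_name=os.path.basename(path),
        )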
pages/lastimev2.py ADDED
@@ -0,0 +1,346 @@
+ import streamlit as st
+ import geopandas as gpd
+ import folium
+ from folium import plugins
+ import requests
+ from PIL import Image, ImageDraw, ImageFont
+ from io import BytesIO
+ import imageio
+ import tempfile
+ import os
+ import zipfile
+ from datetime import datetime
+ from streamlit_folium import folium_static
+ import base64
+ import asyncio
+ import aiohttp
+ import logging
+
+ # Logging configuration
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+ logger = logging.getLogger(__name__)
+
+ # Streamlit page configuration
+ st.set_page_config(layout="wide")
+
+ # List of available dates
+ AVAILABLE_DATES = [
+     18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231, 18971231, 18981231, 18991231,
+     19001231, 19011231, 19021231, 19031231, 19041231, 19051231, 19061231, 19071231, 19081231, 19091231,
+     19101231, 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231, 19181231, 19191231,
+     19201231, 19211231, 19221231, 19231231, 19241231, 19251231, 19261231, 19271231, 19281231, 19291231,
+     19301231, 19311231, 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231, 19391231,
+     19401231, 19411231, 19421231, 19431231, 19441231, 19451231, 19461231, 19471231, 19481231, 19491231,
+     19501231, 19511231, 19521231, 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
+     19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231, 19671231, 19681231, 19691231,
+     19701231, 19711231, 19721231, 19731231, 19741231, 19751231, 19761231, 19771231, 19781231, 19791231,
+     19801231, 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231, 19881231, 19891231,
+     19901231, 19911231, 19921231, 19931231, 19941231, 19951231, 19961231, 19971231, 19981231, 19991231,
+     20001231, 20011231, 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231, 20091231,
+     20101231, 20111231, 20121231, 20131231, 20141231, 20151231, 20161231, 20171231, 20181231, 20191231,
+     20201231, 20211231
+ ]
+
+ @st.cache_data
+ def uploaded_file_to_gdf(data):
+     """
+     Convert an uploaded file to a GeoDataFrame.
+
+     Args:
+         data (UploadedFile): The file uploaded by the user.
+
+     Returns:
+         GeoDataFrame: The GeoDataFrame created from the file.
+     """
+     import tempfile
+     import os
+     import uuid
+
+     _, file_extension = os.path.splitext(data.name)
+     file_id = str(uuid.uuid4())
+     file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
+
+     with open(file_path, "wb") as file:
+         file.write(data.getbuffer())
+
+     if file_path.lower().endswith(".kml"):
+         gdf = gpd.read_file(file_path, driver="KML")
+     else:
+         gdf = gpd.read_file(file_path)
+
+     return gdf
+
+ @st.cache_data
+ def get_wms_url(bbox, width, height, time):
+     """
+     Generate the URL for the WMS request.
+
+     Args:
+         bbox (tuple): The bounding box coordinates.
+         width (int): The image width.
+         height (int): The image height.
+         time (int): The map date.
+
+     Returns:
+         str: The full URL for the WMS request.
+     """
+     url = "https://wms.geo.admin.ch/"
+     params = {
+         "SERVICE": "WMS",
+         "REQUEST": "GetMap",
+         "VERSION": "1.3.0",
+         "LAYERS": "ch.swisstopo.zeitreihen",
+         "STYLES": "default",
+         "CRS": "EPSG:2056",
+         "BBOX": ",".join(map(str, bbox)),
+         "WIDTH": str(width),
+         "HEIGHT": str(height),
+         "FORMAT": "image/png",
+         "TIME": str(time)
+     }
+     return url + "?" + "&".join(f"{k}={v}" for k, v in params.items())
+
+ def add_date_to_image(image, date):
+     """
+     Add the date to the image.
+
+     Args:
+         image (PIL.Image): The image to modify.
+         date (int): The date to add.
+
+     Returns:
+         PIL.Image: The image with the date added.
+     """
+     draw = ImageDraw.Draw(image)
+     font = ImageFont.load_default()
+     text = str(date // 10000)  # Keep only the year
+
+     bbox = draw.textbbox((0, 0), text, font=font)
+     textwidth = bbox[2] - bbox[0]
+     textheight = bbox[3] - bbox[1]
+
+     margin = 10
+     x = image.width - textwidth - margin
+     y = image.height - textheight - margin
+     draw.rectangle((x-5, y-5, x+textwidth+5, y+textheight+5), fill="black")
+     draw.text((x, y), text, font=font, fill="white")
+     return image
+
+ async def fetch_image(session, url, date, semaphore):
+     """
+     Download an image asynchronously.
+
+     Args:
+         session (aiohttp.ClientSession): The HTTP session.
+         url (str): The image URL.
+         date (int): The image date.
+         semaphore (asyncio.Semaphore): The semaphore limiting concurrent requests.
+
+     Returns:
+         PIL.Image or None: The downloaded image, or None on error.
+     """
+     async with semaphore:
+         try:
+             async with session.get(url) as response:
+                 if response.status == 200:
+                     data = await response.read()
+                     img = Image.open(BytesIO(data))
+                     return add_date_to_image(img, date)
+         except Exception as e:
+             logger.error(f"Error fetching image for date {date}: {str(e)}")
+         return None
+
+ async def download_images(bbox, width, height, available_years, max_concurrent_requests):
+     """
+     Download all images asynchronously.
+
+     Args:
+         bbox (tuple): The bounding box coordinates.
+         width (int): The image width.
+         height (int): The image height.
+         available_years (list): The available years.
+         max_concurrent_requests (int): The maximum number of concurrent requests.
+
+     Returns:
+         list: The downloaded images.
+     """
+     semaphore = asyncio.Semaphore(max_concurrent_requests)
+     async with aiohttp.ClientSession() as session:
+         tasks = [fetch_image(session, get_wms_url(bbox, width, height, date), date, semaphore) for date in available_years]
+         return await asyncio.gather(*tasks)
+
+ def create_timelapse(images, format_option, speed, temp_dir):
+     """
+     Create the timelapse in the selected formats.
+
+     Args:
+         images (list): The list of images.
+         format_option (list): The selected format options.
+         speed (int): Timelapse speed (FPS).
+         temp_dir (str): Temporary directory for the files.
+
+     Returns:
+         dict: Paths of the created timelapse files.
+     """
+     images = [img for img in images if img is not None]  # Drop failed downloads
+     results = {}
+
+     if "GIF" in format_option:
+         gif_path = os.path.join(temp_dir, "timelapse.gif")
+         imageio.mimsave(gif_path, images, fps=speed)
+         results["GIF"] = gif_path
+
+     if "MP4" in format_option:
+         mp4_path = os.path.join(temp_dir, "timelapse.mp4")
+         imageio.mimsave(mp4_path, images, fps=speed, format='FFMPEG', quality=8)
+         results["MP4"] = mp4_path
+
+     if "Individual Images (ZIP)" in format_option:
+         zip_path = os.path.join(temp_dir, "images.zip")
+         with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
+             for i, img in enumerate(images):
+                 img_path = os.path.join(temp_dir, f"image_{i}.png")
+                 img.save(img_path)
+                 zipf.write(img_path, os.path.basename(img_path))
+                 os.unlink(img_path)
+         results["ZIP"] = zip_path
+
+     return results
+
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
+     """
+     Generate an HTML link for downloading a binary file.
+
+     Args:
+         bin_file (str): Path to the binary file.
+         file_label (str): Label for the download link.
+
+     Returns:
+         str: HTML code for the download link.
+     """
+     with open(bin_file, 'rb') as f:
+         data = f.read()
+     bin_str = base64.b64encode(data).decode()
+     href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">Download {file_label}</a>'
+     return href
+
+ def app():
+     """
+     Main function of the Streamlit app.
+     """
+     st.title("Swiss Historical Timelapse Generator")
+
+     st.markdown(
+         """
+         An interactive web app for creating historical timelapses of Switzerland using WMS-Time.
+         """
+     )
+
+     row1_col1, row1_col2 = st.columns([2, 1])
+
+     with row1_col1:
+         m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
+         folium.TileLayer(
+             tiles="https://wmts.geo.admin.ch/1.0.0/ch.swisstopo.pixelkarte-farbe/default/current/3857/{z}/{x}/{y}.jpeg",
+             attr="© swisstopo",
+             name="swisstopo",
+             overlay=False,
+             control=True
+         ).add_to(m)
+
+         draw = plugins.Draw(export=True)
+         draw.add_to(m)
+
+         folium.LayerControl().add_to(m)
+
+         folium_static(m, height=400)
+
+     with row1_col2:
+         data = st.file_uploader(
+             "Upload a GeoJSON file to use as an ROI. Customize timelapse parameters and then click the Submit button 😇👇",
+             type=["geojson", "kml", "zip"],
+         )
+
+         with st.form("submit_form"):
+             start_year = st.selectbox("Select start year:", [date // 10000 for date in AVAILABLE_DATES])
+             end_year = st.selectbox("Select end year:", [date // 10000 for date in AVAILABLE_DATES], index=len(AVAILABLE_DATES)-1)
+
+             size_options = {
+                 "HD (720p)": (1280, 720),
+                 "Full HD (1080p)": (1920, 1080),
+                 "2K": (2560, 1440),
+                 "4K": (3840, 2160),
+                 "Custom": None
+             }
+
+             size_choice = st.selectbox("Choose image size:", list(size_options.keys()))
+
+             if size_choice == "Custom":
+                 col1, col2 = st.columns(2)
+                 with col1:
+                     width = st.number_input("Width:", min_value=100, max_value=4000, value=800)
+                 with col2:
+                     height = st.number_input("Height:", min_value=100, max_value=4000, value=600)
+             else:
+                 width, height = size_options[size_choice]
+
+             if width * height > 4000 * 4000:
+                 st.warning("Warning: The image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
+
+             speed = st.slider("Frames per second:", 1, 30, 5)
+
+             format_option = st.multiselect("Choose output format(s):", ["GIF", "MP4", "Individual Images (ZIP)"], default=["GIF", "MP4", "Individual Images (ZIP)"])
+
+             with st.expander("Advanced Options"):
+                 max_concurrent_requests = st.slider("Max Concurrent Requests", 10, 50, 20)
+
+             submitted = st.form_submit_button("Generate Timelapse")
+
+         if submitted:
+             if data is None:
+                 st.warning("Please upload a GeoJSON file.")
+             elif width * height > 4000 * 4000:
+                 st.error("Image size exceeds the maximum allowed by swisstopo (4000x4000 pixels). Please reduce the width or height.")
+             else:
+                 gdf = uploaded_file_to_gdf(data)
+                 gdf_2056 = gdf.to_crs(epsg=2056)
+                 bbox = tuple(gdf_2056.total_bounds)
+
+                 available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
+
+                 total_requests = len(available_years)
+
+                 if total_requests > 500:
+                     st.warning(f"You are requesting {total_requests} images. This exceeds the limit of 500 requests per second set by swisstopo. The process may take longer than expected.")
+
+                 progress_bar = st.progress(0)
+
+                 with st.spinner('Downloading images...'):
+                     images = asyncio.run(download_images(bbox, width, height, available_years, max_concurrent_requests))
+                     progress_bar.progress(50)
+
+                 if images:
+                     logger.info(f"Retrieved {len(images)} images successfully")
+                     with tempfile.TemporaryDirectory() as temp_dir:
+                         with st.spinner('Processing images...'):
+                             results = create_timelapse(images, format_option, speed, temp_dir)
+                             progress_bar.progress(100)
+
+                         for format, path in results.items():
+                             if os.path.exists(path):
+                                 if format == "GIF":
+                                     st.success("GIF Timelapse created successfully!")
+                                     st.image(path)
+                                 elif format == "MP4":
+                                     st.success("MP4 Timelapse created successfully!")
+                                     st.video(path)
+                                 elif format == "ZIP":
+                                     st.success("Individual images saved successfully!")
+                                 st.markdown(get_binary_file_downloader_html(path, f'Timelapse {format}'), unsafe_allow_html=True)
+                             else:
+                                 st.error(f"{format} file was not created successfully.")
+                 else:
+                     logger.error("No images were retrieved")
+                     st.error("Failed to create timelapse. No images were generated.")
+
+ if __name__ == "__main__":
+     app()
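
All of these pages encode dates as YYYYMMDD integers, so floor division by 10000 recovers the year; the start/end selectboxes and the range filter rely on this. A worked example of the filter:

# Worked example of the year-range filter over YYYYMMDD-encoded dates.
dates = [18641231, 19301231, 19801231, 20211231]
start_year, end_year = 1900, 2000

assert 19301231 // 10000 == 1930
selected = [d for d in dates if start_year <= d // 10000 <= end_year]
print(selected)  # [19301231, 19801231]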
pages/lastimev3.py ADDED
@@ -0,0 +1,352 @@
1
+ import streamlit as st
2
+ import geopandas as gpd
3
+ import folium
4
+ from folium import plugins
5
+ import requests
6
+ from PIL import Image, ImageDraw, ImageFont
7
+ from io import BytesIO
8
+ import imageio
9
+ import tempfile
10
+ import os
11
+ import zipfile
12
+ from datetime import datetime
13
+ from streamlit_folium import folium_static
14
+ import base64
15
+ import asyncio
16
+ import aiohttp
17
+ from concurrent.futures import ThreadPoolExecutor
18
+ from functools import lru_cache
19
+ import logging
20
+ import numpy as np
21
+
22
+ # Configuration du logging
23
+ logging.basicConfig(level=logging.INFO)
24
+ logger = logging.getLogger(__name__)
25
+
26
+ # Configuration de la page Streamlit
27
+ st.set_page_config(layout="wide")
28
+
29
+ # Liste des dates disponibles
30
+ AVAILABLE_DATES = [
31
+ 18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231, 18971231, 18981231, 18991231,
32
+ 19001231, 19011231, 19021231, 19031231, 19041231, 19051231, 19061231, 19071231, 19081231, 19091231,
33
+ 19101231, 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231, 19181231, 19191231,
34
+ 19201231, 19211231, 19221231, 19231231, 19241231, 19251231, 19261231, 19271231, 19281231, 19291231,
35
+ 19301231, 19311231, 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231, 19391231,
36
+ 19401231, 19411231, 19421231, 19431231, 19441231, 19451231, 19461231, 19471231, 19481231, 19491231,
37
+ 19501231, 19511231, 19521231, 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
38
+ 19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231, 19671231, 19681231, 19691231,
39
+ 19701231, 19711231, 19721231, 19731231, 19741231, 19751231, 19761231, 19771231, 19781231, 19791231,
40
+ 19801231, 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231, 19881231, 19891231,
41
+ 19901231, 19911231, 19921231, 19931231, 19941231, 19951231, 19961231, 19971231, 19981231, 19991231,
42
+ 20001231, 20011231, 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231, 20091231,
43
+ 20101231, 20111231, 20121231, 20131231, 20141231, 20151231, 20161231, 20171231, 20181231, 20191231,
44
+ 20201231, 20211231
45
+ ]
46
+
47
+ @st.cache_data
48
+ def uploaded_file_to_gdf(data):
49
+ """
50
+ Convertit le fichier uploadé en GeoDataFrame.
51
+
52
+ Args:
53
+ data: Le fichier uploadé par l'utilisateur.
54
+
55
+ Returns:
56
+ GeoDataFrame: Le GeoDataFrame créé à partir du fichier uploadé.
57
+ """
58
+ import tempfile
59
+ import os
60
+ import uuid
61
+
62
+ _, file_extension = os.path.splitext(data.name)
63
+ file_id = str(uuid.uuid4())
64
+ file_path = os.path.join(tempfile.gettempdir(), f"{file_id}{file_extension}")
65
+
66
+ with open(file_path, "wb") as file:
67
+ file.write(data.getbuffer())
68
+
69
+ if file_path.lower().endswith(".kml"):
70
+ gdf = gpd.read_file(file_path, driver="KML")
71
+ else:
72
+ gdf = gpd.read_file(file_path)
73
+
74
+ return gdf
75
+
76
+ @lru_cache(maxsize=128)
77
+ def get_wms_url(bbox, width, height, time):
78
+ """
79
+ Génère l'URL pour la requête WMS.
80
+
81
+ Args:
82
+ bbox: La bounding box de la zone d'intérêt.
83
+ width: La largeur de l'image.
84
+ height: La hauteur de l'image.
85
+ time: La date pour laquelle récupérer l'image.
86
+
87
+ Returns:
88
+ str: L'URL complète pour la requête WMS.
89
+ """
90
+ url = "https://wms.geo.admin.ch/"
91
+ params = {
92
+ "SERVICE": "WMS",
93
+ "REQUEST": "GetMap",
94
+ "VERSION": "1.3.0",
95
+ "LAYERS": "ch.swisstopo.zeitreihen",
96
+ "STYLES": "",
97
+ "CRS": "EPSG:2056",
98
+ "BBOX": ",".join(map(str, bbox)),
99
+ "WIDTH": str(width),
100
+ "HEIGHT": str(height),
101
+ "FORMAT": "image/png",
102
+ "TIME": str(time),
103
+ "TILED": "true"
104
+ }
105
+ return url + "?" + "&".join(f"{k}={v}" for k, v in params.items())
106
+
107
+ def add_date_to_image(image, date):
108
+ """
109
+ Ajoute la date à l'image.
110
+
111
+ Args:
112
+ image: L'image à modifier.
113
+ date: La date à ajouter à l'image.
114
+
115
+ Returns:
116
+ Image: L'image modifiée avec la date ajoutée.
117
+ """
118
+ draw = ImageDraw.Draw(image)
119
+ font = ImageFont.load_default()
120
+ text = str(date // 10000) # Extraire seulement l'année
121
+
122
+ bbox = draw.textbbox((0, 0), text, font=font)
123
+ textwidth = bbox[2] - bbox[0]
124
+ textheight = bbox[3] - bbox[1]
125
+
126
+ margin = 10
127
+ x = image.width - textwidth - margin
128
+ y = image.height - textheight - margin
129
+ draw.rectangle((x-5, y-5, x+textwidth+5, y+textheight+5), fill="black")
130
+ draw.text((x, y), text, font=font, fill="white")
131
+ return image
132
+
133
+ async def fetch_image(session, url, date, semaphore):
134
+ """
135
+ Récupère une image de manière asynchrone.
136
+
137
+ Args:
138
+ session: La session aiohttp.
139
+ url: L'URL de l'image à récupérer.
140
+ date: La date correspondant à l'image.
141
+ semaphore: Le sémaphore pour limiter les requêtes simultanées.
142
+
143
+ Returns:
144
+ Image: L'image récupérée et modifiée, ou None en cas d'erreur.
145
+ """
146
+ async with semaphore:
147
+ try:
148
+ async with session.get(url) as response:
149
+ if response.status == 200:
150
+ data = await response.read()
151
+ img = Image.open(BytesIO(data))
152
+ return add_date_to_image(img, date)
153
+ except Exception as e:
154
+ logger.error(f"Erreur lors de la récupération de l'image pour la date {date}: {str(e)}")
155
+ return None
156
+
157
+ async def download_images(bbox, width, height, available_years):
158
+ """
159
+ Télécharge les images de manière asynchrone.
160
+
161
+ Args:
162
+ bbox: La bounding box de la zone d'intérêt.
163
+ width: La largeur des images.
164
+ height: La hauteur des images.
165
+ available_years: Les années disponibles pour le téléchargement.
166
+
167
+ Returns:
168
+ list: Une liste des images téléchargées.
169
+ """
170
+ semaphore = asyncio.Semaphore(20) # Limité à 20 requêtes simultanées
171
+ async with aiohttp.ClientSession() as session:
172
+ tasks = [fetch_image(session, get_wms_url(bbox, width, height, date), date, semaphore) for date in available_years]
173
+ return await asyncio.gather(*tasks)
174
+
175
+ def process_images_stream(images, format_option, speed, temp_dir):
176
+ """
177
+ Traite les images en utilisant une approche par flux pour optimiser l'utilisation de la mémoire.
178
+
179
+ Args:
180
+ images: La liste des images à traiter.
181
+ format_option: Les options de format choisies par l'utilisateur.
182
+ speed: La vitesse de lecture des images (en FPS).
183
+ temp_dir: Le répertoire temporaire pour stocker les fichiers générés.
184
+
185
+ Returns:
186
+ dict: Un dictionnaire contenant les chemins des fichiers générés.
187
+ """
188
+ results = {}
189
+
190
+ if "GIF" in format_option:
191
+ gif_path = os.path.join(temp_dir, "timelapse.gif")
192
+ with imageio.get_writer(gif_path, mode='I', fps=speed, loop=0) as writer:
193
+ for img in images:
194
+ if img is not None:
195
+ writer.append_data(np.array(img))
196
+ results["GIF"] = gif_path
197
+
198
+ if "MP4" in format_option:
199
+ mp4_path = os.path.join(temp_dir, "timelapse.mp4")
200
+ with imageio.get_writer(mp4_path, fps=speed, quality=9) as writer:
201
+ for img in images:
202
+ if img is not None:
203
+ writer.append_data(np.array(img))
204
+ results["MP4"] = mp4_path
205
+
206
+ if "Individual Images (ZIP)" in format_option:
207
+ zip_path = os.path.join(temp_dir, "images.zip")
208
+ with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
209
+ for i, img in enumerate(images):
210
+ if img is not None:
211
+ img_path = os.path.join(temp_dir, f"image_{i}.png")
212
+ img.save(img_path)
213
+ zipf.write(img_path, os.path.basename(img_path))
214
+ os.unlink(img_path)
215
+ results["ZIP"] = zip_path
216
+
217
+ return results
218
+
219
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
220
+ """
221
+ Génère le HTML pour le téléchargement d'un fichier binaire.
222
+
223
+ Args:
224
+ bin_file: Le chemin du fichier binaire.
225
+ file_label: Le label à afficher pour le lien de téléchargement.
226
+
227
+ Returns:
228
+ str: Le HTML pour le lien de téléchargement.
229
+ """
230
+ with open(bin_file, 'rb') as f:
231
+ data = f.read()
232
+ bin_str = base64.b64encode(data).decode()
233
+ href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">Télécharger {file_label}</a>'
234
+ return href
235
+
236
+ def app():
237
+ """
238
+ Fonction principale de l'application Streamlit.
239
+ """
240
+ st.title("Générateur de Timelapse Historique Suisse")
241
+
242
+ st.markdown(
243
+ """
244
+ Une application web interactive pour créer des timelapses historiques de la Suisse en utilisant WMS-Time.
245
+ """
246
+ )
247
+
248
+ row1_col1, row1_col2 = st.columns([2, 1])
249
+
250
+ with row1_col1:
251
+ m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)
252
+ folium.TileLayer(
253
+ tiles="https://wmts.geo.admin.ch/1.0.0/ch.swisstopo.pixelkarte-farbe/default/current/3857/{z}/{x}/{y}.jpeg",
254
+ attr="© swisstopo",
255
+ name="swisstopo",
256
+ overlay=False,
257
+ control=True
258
+ ).add_to(m)
259
+
260
+ draw = plugins.Draw(export=True)
261
+ draw.add_to(m)
262
+
263
+ folium.LayerControl().add_to(m)
264
+
265
+ folium_static(m, height=400)
266
+
+     with row1_col2:
+         data = st.file_uploader(
+             "Téléchargez un fichier GeoJSON à utiliser comme ROI. Personnalisez les paramètres du timelapse puis cliquez sur le bouton Soumettre 😇👇",
+             type=["geojson", "kml", "zip"],
+         )
+
+         with st.form("submit_form"):
+             # Deduplicate and sort the years derived from the YYYYMMDD dates
+             years = sorted({date // 10000 for date in AVAILABLE_DATES})
+             start_year = st.selectbox("Sélectionnez l'année de début:", years)
+             end_year = st.selectbox("Sélectionnez l'année de fin:", years, index=len(years) - 1)
+
+             size_options = {
+                 "HD (720p)": (1280, 720),
+                 "Full HD (1080p)": (1920, 1080),
+                 "2K": (2560, 1440),
+                 "4K": (3840, 2160),
+                 "Personnalisé": None
+             }
+
+             size_choice = st.selectbox("Choisissez la taille de l'image:", list(size_options.keys()))
+
+             if size_choice == "Personnalisé":
+                 col1, col2 = st.columns(2)
+                 with col1:
+                     width = st.number_input("Largeur:", min_value=100, max_value=4000, value=800)
+                 with col2:
+                     height = st.number_input("Hauteur:", min_value=100, max_value=4000, value=600)
+             else:
+                 width, height = size_options[size_choice]
+
+             # swisstopo's stated limit is 4000 pixels per dimension
+             if width > 4000 or height > 4000:
+                 st.warning("Attention: La taille de l'image dépasse le maximum autorisé par swisstopo (4000x4000 pixels). Veuillez réduire la largeur ou la hauteur.")
+
+             speed = st.slider("Images par seconde:", 1, 30, 5)
+
+             format_option = st.multiselect("Choisissez le(s) format(s) de sortie:", ["GIF", "MP4", "Images individuelles (ZIP)"], default=["GIF", "MP4", "Images individuelles (ZIP)"])
+
+             submitted = st.form_submit_button("Générer le Timelapse")
+     if submitted:
+         if data is None:
+             st.warning("Veuillez télécharger un fichier GeoJSON.")
+         elif width > 4000 or height > 4000:
+             st.error("La taille de l'image dépasse le maximum autorisé par swisstopo (4000x4000 pixels). Veuillez réduire la largeur ou la hauteur.")
+         else:
+             gdf = uploaded_file_to_gdf(data)
+             gdf_2056 = gdf.to_crs(epsg=2056)
+             bbox = tuple(gdf_2056.total_bounds)
+
+             available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
+
+             total_requests = len(available_years)
+
+             if total_requests > 500:
+                 st.warning(f"Vous demandez {total_requests} images. Cela dépasse la limite de 500 requêtes par seconde fixée par swisstopo. Le processus peut prendre plus de temps que prévu.")
+
+             progress_bar = st.progress(0)
+
+             images = asyncio.run(download_images(bbox, width, height, available_years))
+
+             progress_bar.progress(100)
+
+             if images:
+                 logger.info(f"Récupération réussie de {len(images)} images")
+                 with tempfile.TemporaryDirectory() as temp_dir:
+                     with st.spinner('Traitement des images en cours... Cela peut prendre un certain temps pour les grandes images.'):
+                         results = process_images_stream(images, format_option, speed, temp_dir)
+
+                     for fmt, path in results.items():
+                         if os.path.exists(path):
+                             if fmt == "GIF":
+                                 st.success("Timelapse GIF créé avec succès!")
+                                 st.image(path)
+                             elif fmt == "MP4":
+                                 st.success("Timelapse MP4 créé avec succès!")
+                                 st.video(path)
+                             elif fmt == "ZIP":
+                                 st.success("Images individuelles sauvegardées avec succès!")
+                             st.markdown(get_binary_file_downloader_html(path, f'Timelapse {fmt}'), unsafe_allow_html=True)
+                         else:
+                             st.error(f"Le fichier {fmt} n'a pas été créé avec succès.")
+             else:
+                 logger.error("Aucune image n'a été récupérée")
+                 st.error("Échec de la création du timelapse. Aucune image n'a été générée.")
+
+ if __name__ == "__main__":
+     app()
pages/photo2timelapse.py ADDED
@@ -0,0 +1,121 @@
+ import streamlit as st
+ import cv2
+ import tempfile
+ import os
+ from PIL import Image
+ import io
+ import base64
+ from concurrent.futures import ThreadPoolExecutor
+ from moviepy.editor import ImageSequenceClip
+ import logging
+ from tqdm import tqdm
+
+ # Logging configuration
+ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
+
+ # Compresses an image using one of two methods
+ def compress_image(image_path, quality=85, method='pillow'):
+     if method == 'pillow':
+         with Image.open(image_path) as img:
+             img_byte_arr = io.BytesIO()
+             # Convert to RGB so RGBA/palette images can be encoded as JPEG
+             img.convert('RGB').save(img_byte_arr, format='JPEG', quality=quality, optimize=True)
+             return Image.open(img_byte_arr)
+     elif method == 'cv2':
+         img = cv2.imread(image_path)
+         encode_param = [int(cv2.IMWRITE_JPEG_QUALITY), quality]
+         _, encimg = cv2.imencode('.jpg', img, encode_param)
+         return cv2.imdecode(encimg, 1)
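+ # Caution: the two branches return different types: 'pillow' yields a PIL.Image,
+ # while 'cv2' yields a BGR numpy array. cv2.imread also requires a filesystem
+ # path, so the callers in this page stick to the 'pillow' method.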
+
+ # Creates a size-constrained GIF
+ def create_optimized_gif(image_paths, output_path, fps=10, max_size_mb=8):
+     frames = []
+     for img_path in tqdm(image_paths, desc="Traitement des images pour GIF"):
+         with Image.open(img_path) as img:
+             frames.append(img.copy())
+
+     # Pillow expects per-frame durations in milliseconds
+     durations = [1000 // fps] * len(frames)
+
+     # Pillow's GIF encoder has no JPEG-style quality knob, so shrink the frames
+     # step by step until the file fits under the size limit.
+     for scale in (s / 10 for s in range(10, 0, -1)):
+         resized = [f.resize((max(1, int(f.width * scale)), max(1, int(f.height * scale)))) for f in frames]
+         with io.BytesIO() as buffer:
+             resized[0].save(buffer, format="GIF", save_all=True, append_images=resized[1:],
+                             optimize=True, duration=durations, loop=0)
+             if buffer.tell() <= max_size_mb * 1024 * 1024:
+                 with open(output_path, "wb") as f:
+                     f.write(buffer.getvalue())
+                 logging.info(f"GIF créé avec succès. Échelle: {scale:.1f}")
+                 return
+
+     logging.warning("Impossible de créer un GIF sous la taille maximale spécifiée.")
+
+ # Creates an optimized MP4 video
+ def create_optimized_video(image_paths, output_path, fps=30, quality='high'):
+     clip = ImageSequenceClip(image_paths, fps=fps)
+
+     # Map the quality preset to an x264 bitrate
+     bitrates = {'high': "8000k", 'medium': "4000k", 'low': "2000k"}
+     clip.write_videofile(output_path, codec='libx264', audio=False, fps=fps,
+                          bitrate=bitrates.get(quality, "2000k"))
+
+     logging.info(f"Vidéo MP4 créée avec succès. Qualité: {quality}")
+
+ # Processes a single uploaded image (run in parallel by the main app).
+ # `uploaded_file` is a Streamlit UploadedFile; PIL can open it directly.
+ def process_image(args):
+     uploaded_file, temp_dir, compress = args
+     if compress:
+         img = compress_image(uploaded_file, quality=85, method='pillow')
+         output_path = os.path.join(temp_dir, os.path.splitext(uploaded_file.name)[0] + ".jpg")
+         img.save(output_path, "JPEG", quality=85)
+     else:
+         # Write the raw upload to disk so downstream tools receive a real file path
+         output_path = os.path.join(temp_dir, uploaded_file.name)
+         with open(output_path, "wb") as f:
+             f.write(uploaded_file.getvalue())
+     return output_path
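+ # Usage sketch (hypothetical objects): given a Streamlit UploadedFile `up`,
+ #
+ #     path = process_image((up, "/tmp/frames", True))
+ #
+ # writes a compressed JPEG into /tmp/frames and returns its path.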
+
+ # Generates a download link
+ def get_binary_file_downloader_html(bin_file, file_label='File'):
+     with open(bin_file, 'rb') as f:
+         data = f.read()
+     bin_str = base64.b64encode(data).decode()
+     href = f'<a href="data:application/octet-stream;base64,{bin_str}" download="{os.path.basename(bin_file)}">{file_label}</a>'
+     return href
+
+ # Main Streamlit application
+ def main():
+     st.title("Générateur de Timelapse Suprême")
+
+     uploaded_files = st.file_uploader("Uploadez vos images", type=["png", "jpg", "jpeg"], accept_multiple_files=True)
+
+     if uploaded_files:
+         output_format = st.radio("Choisissez le format de sortie", ["GIF optimisé", "Vidéo MP4"])
+
+         if output_format == "Vidéo MP4":
+             quality = st.select_slider("Qualité vidéo", options=['low', 'medium', 'high'], value='medium')
+
+         fps = st.slider("Images par seconde", min_value=1, max_value=60, value=30)
+         compress_images = st.checkbox("Compresser les images avant traitement", value=True)
+
+         if st.button("Générer Timelapse"):
+             with st.spinner("Génération du timelapse en cours..."):
+                 with tempfile.TemporaryDirectory() as temp_dir:
+                     # Process the images in parallel while preserving the upload order
+                     with ThreadPoolExecutor() as executor:
+                         processed_images = list(executor.map(
+                             process_image,
+                             [(file, temp_dir, compress_images) for file in uploaded_files]))
+
+                     if output_format == "GIF optimisé":
+                         output_path = os.path.join(temp_dir, "optimized_timelapse.gif")
+                         create_optimized_gif(processed_images, output_path, fps=fps)
+                         if os.path.exists(output_path):
+                             st.success("GIF optimisé généré!")
+                             st.image(output_path)
+                             st.markdown(get_binary_file_downloader_html(output_path, 'Télécharger le GIF'), unsafe_allow_html=True)
+                         else:
+                             st.error("Impossible de créer un GIF sous la taille maximale spécifiée.")
+
+                     elif output_format == "Vidéo MP4":
+                         output_path = os.path.join(temp_dir, "timelapse.mp4")
+                         create_optimized_video(processed_images, output_path, fps=fps, quality=quality)
+                         st.success("Vidéo MP4 générée!")
+                         st.video(output_path)
+                         st.markdown(get_binary_file_downloader_html(output_path, 'Télécharger la vidéo MP4'), unsafe_allow_html=True)
+
+ if __name__ == "__main__":
+     main()
requirements.txt ADDED
@@ -0,0 +1,46 @@
+ # Core dependencies
+ streamlit
+ folium
+ streamlit-folium
+
+ # Geospatial processing
+ GDAL
+ geopandas
+ fiona
+ pyproj
+ rasterio
+
+ # Visualization and mapping
+ leafmap>=0.35.2
+ plotly
+ palettable
+ # cartopy  # Commented out because it can cause installation problems
+
+ # Image and video processing
+ Pillow
+ imageio[ffmpeg]
+ opencv-python-headless
+ moviepy
+ ffmpeg-python
+
+ # Other utilities
+ numpy
+ owslib
+ trimesh
+
+ # GitHub dependencies
+ git+https://github.com/giswqs/geemap
+
+ # Optional dependencies (uncomment if needed)
+ # ipywidgets<8.0.5
+ # jupyter-server-proxy
+ # keplergl
+ # localtileserver
+ # nbserverproxy
+ # streamlit-bokeh-events
+ # streamlit-keplergl
+ # tropycal
+ # altair<5
+
+ --find-links=https://girder.github.io/large_image_wheels GDAL
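+
+ # Install everything with: pip install -r requirements.txt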