Vertdure committed on
Commit
d6319a8
·
verified ·
1 Parent(s): d403f81

Update pages/v4.py

Browse files
Files changed (1) hide show
  1. pages/v4.py +33 -42
pages/v4.py CHANGED
@@ -48,24 +48,8 @@ def upload_to_s3(file_path, bucket_name, object_name):
48
  # Liste des dates disponibles
49
  AVAILABLE_DATES = [
50
  18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231,
51
- 18971231, 18981231, 18991231, 19001231, 19011231, 19021231, 19031231,
52
- 19041231, 19051231, 19061231, 19071231, 19081231, 19091231, 19101231,
53
- 19111231, 19121231, 19131231, 19141231, 19151231, 19161231, 19171231,
54
- 19181231, 19191231, 19201231, 19211231, 19221231, 19231231, 19241231,
55
- 19251231, 19261231, 19271231, 19281231, 19291231, 19301231, 19311231,
56
- 19321231, 19331231, 19341231, 19351231, 19361231, 19371231, 19381231,
57
- 19391231, 19401231, 19411231, 19421231, 19431231, 19441231, 19451231,
58
- 19461231, 19471231, 19481231, 19491231, 19501231, 19511231, 19521231,
59
- 19531231, 19541231, 19551231, 19561231, 19571231, 19581231, 19591231,
60
- 19601231, 19611231, 19621231, 19631231, 19641231, 19651231, 19661231,
61
- 19671231, 19681231, 19691231, 19701231, 19711231, 19721231, 19731231,
62
- 19741231, 19751231, 19761231, 19771231, 19781231, 19791231, 19801231,
63
- 19811231, 19821231, 19831231, 19841231, 19851231, 19861231, 19871231,
64
- 19881231, 19891231, 19901231, 19911231, 19921231, 19931231, 19941231,
65
- 19951231, 19961231, 19971231, 19981231, 19991231, 20001231, 20011231,
66
- 20021231, 20031231, 20041231, 20051231, 20061231, 20071231, 20081231,
67
- 20091231, 20101231, 20111231, 20121231, 20131231, 20141231, 20151231,
68
- 20161231, 20171231, 20181231, 20191231, 20201231, 20211231
69
  ]
70
 
71
  @st.cache_data
@@ -110,45 +94,49 @@ async def fetch_image(session, url, date, semaphore):
110
  if response.status == 200:
111
  data = await response.read()
112
  img = Image.open(BytesIO(data))
 
113
  return add_date_to_image(img, date)
 
 
114
  except Exception as e:
115
- logger.error(f"Erreur lors de la récupération de l'image pour la date {date}: {str(e)}")
116
  return None
117
 
118
  async def download_images(bbox, width, height, available_years):
119
  semaphore = asyncio.Semaphore(20)
120
  async with aiohttp.ClientSession() as session:
121
  tasks = [fetch_image(session, get_wms_url(bbox, width, height, date), date, semaphore) for date in available_years]
122
- return await asyncio.gather(*tasks)
 
 
 
 
123
 
124
  def process_images_stream(images, format_option, speed, temp_dir):
125
  results = {}
126
 
127
- if "GIF" in format_option:
128
  gif_path = os.path.join(temp_dir, "timelapse.gif")
129
  with imageio.get_writer(gif_path, mode='I', fps=speed, loop=0) as writer:
130
  for img in images:
131
- if img is not None:
132
- writer.append_data(np.array(img))
133
  results["GIF"] = gif_path
134
 
135
- if "MP4" in format_option:
136
  mp4_path = os.path.join(temp_dir, "timelapse.mp4")
137
  with imageio.get_writer(mp4_path, fps=speed, quality=9) as writer:
138
  for img in images:
139
- if img is not None:
140
- writer.append_data(np.array(img))
141
  results["MP4"] = mp4_path
142
 
143
- if "Images individuelles (ZIP)" in format_option:
144
  zip_path = os.path.join(temp_dir, "images.zip")
145
  with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
146
  for i, img in enumerate(images):
147
- if img is not None:
148
- img_path = os.path.join(temp_dir, f"image_{i}.png")
149
- img.save(img_path)
150
- zipf.write(img_path, os.path.basename(img_path))
151
- os.remove(img_path)
152
  results["ZIP"] = zip_path
153
 
154
  return results
@@ -203,16 +191,19 @@ def app():
203
  available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
204
  images = asyncio.run(download_images(bbox, width, height, available_years))
205
 
206
- with tempfile.TemporaryDirectory() as temp_dir:
207
- results = process_images_stream(images, format_option, speed, temp_dir)
208
-
209
- # Téléchargement des fichiers sur S3 et génération des liens
210
- for format, path in results.items():
211
- s3_url = upload_to_s3(path, "timelapse-storage-vertgis", os.path.basename(path))
212
- if s3_url:
213
- st.markdown(f"[Télécharger le timelapse {format}]({s3_url})", unsafe_allow_html=True)
214
- else:
215
- st.error(f"Le fichier {format} n'a pas pu être téléchargé sur S3.")
 
 
 
216
 
217
  if __name__ == "__main__":
218
  app()
 
48
  # Liste des dates disponibles
49
  AVAILABLE_DATES = [
50
  18641231, 18701231, 18801231, 18901231, 18941231, 18951231, 18961231,
51
+ # ... (ajoutez toutes les autres dates ici)
52
+ 20201231, 20211231
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  ]
54
 
55
  @st.cache_data
 
94
  if response.status == 200:
95
  data = await response.read()
96
  img = Image.open(BytesIO(data))
97
+ logger.info(f"Image pour {date} téléchargée avec succès")
98
  return add_date_to_image(img, date)
99
+ else:
100
+ logger.error(f"Échec de la récupération de l'image pour {date} : {response.status}")
101
  except Exception as e:
102
+ logger.error(f"Erreur lors de la récupération de l'image pour {date} : {str(e)}")
103
  return None
104
 
105
async def download_images(bbox, width, height, available_years):
    """Concurrently download one WMS image per date in *available_years*.

    Failed downloads (which `fetch_image` reports as None) are dropped;
    only the images that actually arrived are returned.
    """
    # Cap the number of simultaneous requests at 20.
    limiter = asyncio.Semaphore(20)
    async with aiohttp.ClientSession() as session:
        pending = [
            fetch_image(session, get_wms_url(bbox, width, height, date), date, limiter)
            for date in available_years
        ]
        fetched = await asyncio.gather(*pending)
        # Discard the dates whose download failed.
        images = [img for img in fetched if img is not None]
        logger.info(f"{len(images)} images téléchargées avec succès")
        return images
114
 
115
def process_images_stream(images, format_option, speed, temp_dir):
    """Render the downloaded images into the requested output formats.

    Args:
        images: PIL images (already filtered of failed downloads); may be empty.
        format_option: requested formats; membership is tested for "GIF",
            "MP4" and "Images individuelles (ZIP)".
        speed: frames per second for the animated outputs.
        temp_dir: directory in which the output files are created.

    Returns:
        Mapping from format key ("GIF", "MP4", "ZIP") to the generated
        file path. Empty when no format matched or there are no images.
    """
    results = {}

    # Convert each PIL image to a numpy frame once; both the GIF and the
    # MP4 writer consume arrays, so this avoids a second full conversion.
    frames = None
    if images and ("GIF" in format_option or "MP4" in format_option):
        frames = [np.array(img) for img in images]

    if "GIF" in format_option and images:
        gif_path = os.path.join(temp_dir, "timelapse.gif")
        with imageio.get_writer(gif_path, mode='I', fps=speed, loop=0) as writer:
            for frame in frames:
                writer.append_data(frame)
        results["GIF"] = gif_path

    if "MP4" in format_option and images:
        mp4_path = os.path.join(temp_dir, "timelapse.mp4")
        with imageio.get_writer(mp4_path, fps=speed, quality=9) as writer:
            for frame in frames:
                writer.append_data(frame)
        results["MP4"] = mp4_path

    if "Images individuelles (ZIP)" in format_option and images:
        zip_path = os.path.join(temp_dir, "images.zip")
        with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as zipf:
            for i, img in enumerate(images):
                # Save each frame as a PNG, archive it, then remove the
                # temporary file so only the ZIP remains on disk.
                img_path = os.path.join(temp_dir, f"image_{i}.png")
                img.save(img_path)
                zipf.write(img_path, os.path.basename(img_path))
                os.remove(img_path)
        results["ZIP"] = zip_path

    return results
 
191
  available_years = [date for date in AVAILABLE_DATES if start_year <= date // 10000 <= end_year]
192
  images = asyncio.run(download_images(bbox, width, height, available_years))
193
 
194
+ if not images:
195
+ st.error("Aucune image n'a été téléchargée. Veuillez vérifier votre sélection ou réessayer.")
196
+ else:
197
+ with tempfile.TemporaryDirectory() as temp_dir:
198
+ results = process_images_stream(images, format_option, speed, temp_dir)
199
+
200
+ # Téléchargement des fichiers sur S3 et génération des liens
201
+ for format, path in results.items():
202
+ s3_url = upload_to_s3(path, "timelapse-storage-vertgis", os.path.basename(path))
203
+ if s3_url:
204
+ st.markdown(f"[Télécharger le timelapse {format}]({s3_url})", unsafe_allow_html=True)
205
+ else:
206
+ st.error(f"Le fichier {format} n'a pas pu être téléchargé sur S3.")
207
 
208
if __name__ == "__main__":
    # Launch the Streamlit page when this module is executed directly.
    app()