Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,3 +1,4 @@
|
|
|
|
|
| 1 |
from flask import Flask, render_template, request, jsonify, Response
|
| 2 |
import requests
|
| 3 |
from bs4 import BeautifulSoup
|
|
@@ -11,6 +12,10 @@ app.config['CACHE_TYPE'] = 'SimpleCache'
|
|
| 11 |
app.config['CACHE_DEFAULT_TIMEOUT'] = 300 # Cache responses for 5 minutes
|
| 12 |
cache = Cache(app)
|
| 13 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 14 |
# Internal mapping of crops to pests (for the form)
|
| 15 |
CROP_TO_PESTS = {
|
| 16 |
"Sorgum": ["FallArmyWorm"],
|
|
@@ -103,8 +108,8 @@ def index():
|
|
| 103 |
# Build the external image URL (using HTTP)
|
| 104 |
base_url = f"http://www.icar-crida.res.in:8080/naip/gisimages/{crop}/{year}/{pest}_"
|
| 105 |
external_image_url = f"{base_url}{param}{week}.jpg"
|
| 106 |
-
# Build our proxy URL
|
| 107 |
-
image_url = f"/
|
| 108 |
|
| 109 |
return render_template('index.html',
|
| 110 |
crops=list(CROP_TO_PESTS.keys()),
|
|
@@ -118,7 +123,6 @@ def index():
|
|
| 118 |
selected_param=param,
|
| 119 |
image_url=image_url)
|
| 120 |
|
| 121 |
-
# Cache this route based on its query string.
|
| 122 |
@app.route('/fetch_weeks')
|
| 123 |
@cache.cached(timeout=300, query_string=True)
|
| 124 |
def fetch_weeks():
|
|
@@ -149,19 +153,48 @@ def fetch_weeks():
|
|
| 149 |
weeks = [str(i) for i in range(1, 53)]
|
| 150 |
return jsonify({"weeks": weeks})
|
| 151 |
|
| 152 |
-
#
|
| 153 |
-
@app.route('/
|
| 154 |
-
|
| 155 |
-
|
|
|
|
| 156 |
external_url = request.args.get('url')
|
| 157 |
-
if
|
| 158 |
-
return
|
| 159 |
-
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
|
| 164 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 165 |
|
| 166 |
if __name__ == '__main__':
|
| 167 |
app.run(debug=True)
|
|
|
|
| 1 |
+
import threading, time
|
| 2 |
from flask import Flask, render_template, request, jsonify, Response
|
| 3 |
import requests
|
| 4 |
from bs4 import BeautifulSoup
|
|
|
|
| 12 |
app.config['CACHE_DEFAULT_TIMEOUT'] = 300 # Cache responses for 5 minutes
|
| 13 |
cache = Cache(app)
|
| 14 |
|
| 15 |
+
# Global variables for pre-fetched image data.
# Written by the background prefetch thread (prefetch_image_task) and read
# by the /fast-proxy route.
# NOTE(review): no lock is taken around these — this relies on CPython's
# atomic simple assignments; confirm that is acceptable for the deployment.
cached_image = None      # raw bytes of the last successfully prefetched image, or None
cached_mimetype = None   # Content-Type reported for cached_image, or None
|
| 18 |
+
|
| 19 |
# Internal mapping of crops to pests (for the form)
|
| 20 |
CROP_TO_PESTS = {
|
| 21 |
"Sorgum": ["FallArmyWorm"],
|
|
|
|
| 108 |
# Build the external image URL (using HTTP)
|
| 109 |
base_url = f"http://www.icar-crida.res.in:8080/naip/gisimages/{crop}/{year}/{pest}_"
|
| 110 |
external_image_url = f"{base_url}{param}{week}.jpg"
|
| 111 |
+
# Build our proxy URL so that the image is served via our app
|
| 112 |
+
image_url = f"/fast-proxy?url={external_image_url}"
|
| 113 |
|
| 114 |
return render_template('index.html',
|
| 115 |
crops=list(CROP_TO_PESTS.keys()),
|
|
|
|
| 123 |
selected_param=param,
|
| 124 |
image_url=image_url)
|
| 125 |
|
|
|
|
| 126 |
@app.route('/fetch_weeks')
|
| 127 |
@cache.cached(timeout=300, query_string=True)
|
| 128 |
def fetch_weeks():
|
|
|
|
| 153 |
weeks = [str(i) for i in range(1, 53)]
|
| 154 |
return jsonify({"weeks": weeks})
|
| 155 |
|
| 156 |
+
# This endpoint serves a background-prefetched image when it matches the
# requested URL, and falls back to a direct fetch otherwise.
@app.route('/fast-proxy')
def fast_proxy():
    """Proxy an external image, preferring the prefetched in-memory copy.

    Query parameters:
        url: the external image URL to serve.

    Returns:
        A Flask ``Response`` containing the image bytes, a
        ``("Missing ...", 400)`` tuple when no ``url`` is supplied, or
        ``(error message, 500)`` when the direct fetch fails.
    """
    global cached_image, cached_mimetype
    external_url = request.args.get('url')
    # Robustness: without this guard, a missing "url" parameter fell through
    # to requests.get(None), which raised and produced a confusing 500.
    if not external_url:
        return "Missing 'url' query parameter", 400
    # BUG FIX: the original compared against `prefetch_image.cached_url`,
    # but the prefetch function is named `prefetch_image_task` (it stores
    # the attribute as `prefetch_image_task.cached_url`), so this branch
    # raised NameError on every request once an image was cached.
    if cached_image and external_url == prefetch_image_task.cached_url:
        return Response(cached_image, mimetype=cached_mimetype)
    # No matching pre-fetched image is available: fetch directly.
    try:
        resp = requests.get(external_url, timeout=10)
        return Response(resp.content,
                        mimetype=resp.headers.get('Content-Type', 'image/jpeg'))
    except Exception as e:
        # Best-effort proxy: surface the fetch error to the client.
        return str(e), 500
|
| 171 |
+
|
| 172 |
+
# Background prefetch loop. It periodically downloads one known image into
# the module-level cache and records which URL the cached bytes came from.
def prefetch_image_task():
    """Refresh the global image cache every 5 minutes.

    On a successful download the fetched URL is stored on the function
    itself (``prefetch_image_task.cached_url``) so that ``/fast-proxy``
    can tell whether its requested URL matches the cached bytes.

    NOTE: only one hard-coded demo URL is prefetched; supporting
    dynamically chosen images would require a URL -> bytes mapping.
    """
    global cached_image, cached_mimetype
    # The target URL never changes, so hoist it out of the loop.
    url = "http://www.icar-crida.res.in:8080/naip/gisimages/7/2024-25/74_Maxt1.jpg"
    while True:
        try:
            resp = requests.get(url, timeout=10)
            if resp.status_code == 200:
                cached_image = resp.content
                cached_mimetype = resp.headers.get('Content-Type', 'image/jpeg')
                prefetch_image_task.cached_url = url
                print("Prefetched image from", url)
            else:
                print("Failed to prefetch, status code:", resp.status_code)
        except Exception as err:
            # Best-effort: log and keep the loop alive for the next cycle.
            print("Prefetch error:", err)
        time.sleep(300)  # refresh every 5 minutes


# Seed the attribute so /fast-proxy can read it before the first fetch lands.
prefetch_image_task.cached_url = ""

# Run the loop as a daemon thread so it never blocks interpreter shutdown.
threading.Thread(target=prefetch_image_task, daemon=True).start()
|
| 198 |
|
| 199 |
if __name__ == '__main__':
    # NOTE(review): debug=True enables the Werkzeug reloader, which imports
    # the module twice and would therefore start two prefetch threads —
    # confirm this is intended outside local development.
    app.run(debug=True)
|