krushimitravit commited on
Commit
f82953c
·
verified ·
1 Parent(s): e5f5db0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +229 -162
app.py CHANGED
@@ -1,162 +1,229 @@
1
- from flask import Flask, render_template, request, jsonify, Response
2
- import requests
3
- from bs4 import BeautifulSoup
4
- from flask import stream_with_context
5
-
6
- app = Flask(__name__)
7
-
8
- # Internal mapping of crops to pests (for the form)
9
- CROP_TO_PESTS = {
10
- "Sorgum": ["FallArmyWorm"],
11
- "Maize": ["FallArmyWorm"],
12
- "Rice": ["Blast", "GallMidge", "YSB", "PlantHopper", "BlueBeetle", "BacterialLeafBlight"],
13
- "Cotton": ["Thrips", "Whitefly", "PinkBollworm", "Jassid", "BollRot", "AmericanBollworm"],
14
- "Soybean": ["Girdlebeetle", "H.armigera", "Semilooper", "Spodoptera", "StemFLy"],
15
- "Tur": ["Wilt", "Webbed_Leaves", "Pod_damage"],
16
- "Sugarcane": ["FallArmyGrub", "WhiteGrub"],
17
- "Gram": ["H.armigera", "Wilt"]
18
- }
19
-
20
- # Fixed year options for the form
21
- YEARS = ["2024-25", "2023-24", "2022-23", "2021-22"]
22
-
23
- # Map our internal crop names to the external page's crop values.
24
- CROP_MAPPING = {
25
- "Cotton": "1",
26
- "Gram": "4",
27
- "Maize": "7",
28
- "Rice": "3",
29
- "Sorghum": "6",
30
- "Soybean": "2",
31
- "Sugarcane": "8",
32
- "Tur": "5",
33
- "Sorgum": "6" # Adjust if needed
34
- }
35
-
36
- # Map our internal pest names to external page values per crop.
37
- PEST_MAPPING = {
38
- "Cotton": {
39
- "FallArmyWorm": "71"
40
- },
41
- "Gram": {
42
- "H.armigera": "72",
43
- "Wilt": "73"
44
- },
45
- "Maize": {
46
- "FallArmyWorm": "74"
47
- },
48
- "Rice": {
49
- "Blast": "75",
50
- "GallMidge": "76",
51
- "YSB": "77",
52
- "PlantHopper": "78",
53
- "BlueBeetle": "79",
54
- "BacterialLeafBlight": "80"
55
- },
56
- "Soybean": {
57
- "Girdlebeetle": "81",
58
- "H.armigera": "82",
59
- "Semilooper": "83",
60
- "Spodoptera": "84",
61
- "StemFLy": "85"
62
- },
63
- "Tur": {
64
- "Wilt": "86",
65
- "Webbed_Leaves": "87",
66
- "Pod_damage": "88"
67
- },
68
- "Sugarcane": {
69
- "FallArmyGrub": "89",
70
- "WhiteGrub": "90"
71
- },
72
- "Sorgum": {
73
- "FallArmyWorm": "91"
74
- }
75
- }
76
-
77
- # Parameter codes and labels for the final image URL
78
- PARAMS = {
79
- "Mint": "Min Temperature",
80
- "Maxt": "Max Temperature",
81
- "RH": "Relative Humidity",
82
- "RF": "Rainfall",
83
- "PR": "Pest Report"
84
- }
85
-
86
- @app.route('/')
87
- def index():
88
- # Read query parameters (if provided)
89
- crop = request.args.get('crop', '')
90
- pest = request.args.get('pest', '')
91
- year = request.args.get('year', '')
92
- week = request.args.get('week', '')
93
- param = request.args.get('param', '')
94
-
95
- image_url = ""
96
- if crop and pest and year and week and param:
97
- # Build the external image URL (using HTTP)
98
- base_url = f"http://www.icar-crida.res.in:8080/naip/gisimages/{crop}/{year}/{pest}_"
99
- external_image_url = f"{base_url}{param}{week}.jpg"
100
- # Instead of using the external HTTP URL directly, we build our proxy URL
101
- image_url = f"/proxy-image?url={external_image_url}"
102
-
103
- return render_template('index.html',
104
- crops=list(CROP_TO_PESTS.keys()),
105
- crop_to_pests=CROP_TO_PESTS,
106
- years=YEARS,
107
- params=PARAMS,
108
- selected_crop=crop,
109
- selected_pest=pest,
110
- selected_year=year,
111
- selected_week=week,
112
- selected_param=param,
113
- image_url=image_url)
114
-
115
- @app.route('/fetch_weeks')
116
- def fetch_weeks():
117
- crop = request.args.get('crop', '')
118
- pest = request.args.get('pest', '')
119
- year = request.args.get('year', '')
120
-
121
- ext_crop = CROP_MAPPING.get(crop, '')
122
- ext_pest = ""
123
- if crop in PEST_MAPPING and pest in PEST_MAPPING[crop]:
124
- ext_pest = PEST_MAPPING[crop][pest]
125
-
126
- payload = {
127
- "country": ext_crop,
128
- "city": ext_pest,
129
- "sowing": year
130
- }
131
-
132
- weeks = []
133
- try:
134
- response = requests.get("http://www.icar-crida.res.in:8080/naip/gismaps.jsp", params=payload, timeout=10)
135
- soup = BeautifulSoup(response.text, 'html.parser')
136
- week_options = soup.select('select[name="week"] option')
137
- weeks = [opt.get('value') for opt in week_options if opt.get('value') and "Select" not in opt.get('value')]
138
- if not weeks:
139
- weeks = [str(i) for i in range(1, 53)]
140
- except Exception as e:
141
- weeks = [str(i) for i in range(1, 53)]
142
- return jsonify({"weeks": weeks})
143
-
144
- @app.route('/proxy-image')
145
- def proxy_image():
146
- external_url = request.args.get('url')
147
- if not external_url:
148
- return "Missing URL", 400
149
-
150
- try:
151
- # Use streaming so that the response is sent in chunks
152
- resp = requests.get(external_url, timeout=10, stream=True)
153
- return Response(
154
- stream_with_context(resp.iter_content(chunk_size=1024)),
155
- mimetype=resp.headers.get('Content-Type', 'image/jpeg')
156
- )
157
- except Exception as e:
158
- return str(e), 500
159
-
160
-
161
- if __name__ == '__main__':
162
- app.run(debug=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
from concurrent.futures import ThreadPoolExecutor
from urllib.parse import quote

import requests
from bs4 import BeautifulSoup
from flask import Flask, Response, jsonify, render_template, request, stream_with_context
5
# WSGI application instance; all route handlers below attach to it.
app = Flask(__name__)
7
+
8
# Internal mapping of crops to pests (for the form).
# Keys are the crop names offered in the crop dropdown; values are the pest
# options rendered in the dependent pest dropdown for that crop.
# NOTE(review): "Sorgum" looks like a misspelling of "Sorghum" (CROP_MAPPING
# carries both spellings), and the Cotton list here does not match
# PEST_MAPPING["Cotton"], which only knows "FallArmyWorm" — confirm against
# the upstream ICAR page before changing either side.
CROP_TO_PESTS = {
    "Sorgum": ["FallArmyWorm"],
    "Maize": ["FallArmyWorm"],
    "Rice": ["Blast", "GallMidge", "YSB", "PlantHopper", "BlueBeetle", "BacterialLeafBlight"],
    "Cotton": ["Thrips", "Whitefly", "PinkBollworm", "Jassid", "BollRot", "AmericanBollworm"],
    "Soybean": ["Girdlebeetle", "H.armigera", "Semilooper", "Spodoptera", "StemFLy"],
    "Tur": ["Wilt", "Webbed_Leaves", "Pod_damage"],
    "Sugarcane": ["FallArmyGrub", "WhiteGrub"],
    "Gram": ["H.armigera", "Wilt"]
}
19
+
20
# Fixed agricultural-year options for the form, most recent first.
# Passed verbatim as the "sowing" field to the upstream gismaps.jsp page.
YEARS = ["2024-25", "2023-24", "2022-23", "2021-22"]
22
+
23
# Map our internal crop names to the external page's crop values
# (numeric codes sent as the "country" field of gismaps.jsp).
# NOTE(review): both spellings "Sorghum" and "Sorgum" map to "6", so either
# form resolves; the form itself uses "Sorgum" — confirm which spelling the
# upstream page expects, then drop the redundant entry.
CROP_MAPPING = {
    "Cotton": "1",
    "Gram": "4",
    "Maize": "7",
    "Rice": "3",
    "Sorghum": "6",
    "Soybean": "2",
    "Sugarcane": "8",
    "Tur": "5",
    "Sorgum": "6"
}
35
+
36
# Map our internal pest names to external page values per crop.
# Outer key: internal crop name; inner mapping: internal pest name ->
# numeric pest code sent as the "city" field of gismaps.jsp.
# NOTE(review): entries do not fully line up with CROP_TO_PESTS — e.g.
# Cotton offers Thrips/Whitefly/... in the form but only "FallArmyWorm"
# has a code here, so fetch_weeks resolves those pests to '' — verify the
# missing codes against the upstream page.
PEST_MAPPING = {
    "Cotton": {
        "FallArmyWorm": "71"
    },
    "Gram": {
        "H.armigera": "72",
        "Wilt": "73"
    },
    "Maize": {
        "FallArmyWorm": "74"
    },
    "Rice": {
        "Blast": "75",
        "GallMidge": "76",
        "YSB": "77",
        "PlantHopper": "78",
        "BlueBeetle": "79",
        "BacterialLeafBlight": "80"
    },
    "Soybean": {
        "Girdlebeetle": "81",
        "H.armigera": "82",
        "Semilooper": "83",
        "Spodoptera": "84",
        "StemFLy": "85"
    },
    "Tur": {
        "Wilt": "86",
        "Webbed_Leaves": "87",
        "Pod_damage": "88"
    },
    "Sugarcane": {
        "FallArmyGrub": "89",
        "WhiteGrub": "90"
    },
    "Sorgum": {
        "FallArmyWorm": "91"
    }
}
76
+
77
# Parameter codes and labels for the final image URL.
# Key: short code embedded in the remote image filename (see
# build_image_url); value: human-readable label shown in the form.
PARAMS = {
    "Mint": "Min Temperature",
    "Maxt": "Max Temperature",
    "RH": "Relative Humidity",
    "RF": "Rainfall",
    "PR": "Pest Report"
}
85
+
86
+
87
def build_image_url(crop, year, pest, param, week):
    """Compose the external ICAR-CRIDA GIS image URL for one selection.

    The upstream server lays files out as
    ``gisimages/<crop>/<year>/<pest>_<param><week>.jpg``.
    """
    return (
        "http://www.icar-crida.res.in:8080/naip/gisimages/"
        f"{crop}/{year}/{pest}_{param}{week}.jpg"
    )
91
+
92
+
93
def image_exists(url):
    """Return True if *url* serves an image (HTTP 200 + image/* content type).

    Tries a cheap HEAD first. Some servers reject HEAD (403/405); in that
    case fall back to a streamed GET so only the headers are fetched.
    Any network error or timeout is treated as "not available".
    """
    try:
        resp = requests.head(url, timeout=6, allow_redirects=True)
        if resp.status_code == 200:
            return resp.headers.get("Content-Type", "").startswith("image/")
        if resp.status_code in (405, 403):
            # HEAD unsupported/blocked: probe with a streamed GET instead.
            resp = requests.get(url, timeout=8, stream=True)
            try:
                ct = resp.headers.get("Content-Type", "")
                return resp.status_code == 200 and ct.startswith("image/")
            finally:
                # Fix: the streamed response was previously never closed,
                # leaking the pooled connection on every fallback probe.
                resp.close()
    except Exception:
        pass
    return False
108
+
109
+
110
@app.route('/')
def index():
    """Render the selection form and, when a complete selection is given,
    the proxied pest-surveillance map image.

    Query-string parameters (all optional): crop, pest, year, week, param.
    The image is only resolved when all five are present; if the remote
    image does not exist, ``data_available`` is set False so the template
    can show a "no data" notice instead of a broken image.
    """
    crop = request.args.get('crop', '')
    pest = request.args.get('pest', '')
    year = request.args.get('year', '')
    week = request.args.get('week', '')
    param = request.args.get('param', '')

    image_url = ""
    data_available = True

    if crop and pest and year and week and param:
        external_image_url = build_image_url(crop, year, pest, param, week)
        if image_exists(external_image_url):
            # Fix: percent-encode the upstream URL so it is carried safely
            # as a single query-string value ('/', ':' etc. were previously
            # embedded raw, which is fragile if the URL ever gains a query).
            image_url = f"/proxy-image?url={quote(external_image_url, safe='')}"
        else:
            data_available = False

    return render_template('index.html',
                           crops=list(CROP_TO_PESTS.keys()),
                           crop_to_pests=CROP_TO_PESTS,
                           years=YEARS,
                           params=PARAMS,
                           selected_crop=crop,
                           selected_pest=pest,
                           selected_year=year,
                           selected_week=week,
                           selected_param=param,
                           image_url=image_url,
                           data_available=data_available)
140
+
141
+
142
@app.route('/fetch_weeks')
def fetch_weeks():
    """Return only weeks for which at least one param image actually exists.

    Query-string parameters: crop, pest, year (all required; an empty week
    list is returned when any is missing). Candidate weeks are scraped from
    the upstream gismaps.jsp page, falling back to 1-52 on any failure,
    then filtered to weeks with at least one available image.
    """
    crop = request.args.get('crop', '')
    pest = request.args.get('pest', '')
    year = request.args.get('year', '')

    if not (crop and pest and year):
        return jsonify({"weeks": []})

    ext_crop = CROP_MAPPING.get(crop, '')
    ext_pest = PEST_MAPPING.get(crop, {}).get(pest, '')

    # Try to scrape available weeks from the upstream page first.
    candidate_weeks = []
    try:
        payload = {"country": ext_crop, "city": ext_pest, "sowing": year}
        response = requests.get(
            "http://www.icar-crida.res.in:8080/naip/gismaps.jsp",
            params=payload, timeout=10
        )
        soup = BeautifulSoup(response.text, 'html.parser')
        week_options = soup.select('select[name="week"] option')
        candidate_weeks = [
            opt.get('value') for opt in week_options
            if opt.get('value') and "Select" not in opt.get('value', '')
        ]
    except Exception:
        pass  # fall through to the 1-52 default below

    if not candidate_weeks:
        candidate_weeks = [str(i) for i in range(1, 53)]

    def _week_has_data(week):
        # A week counts as available as soon as any one param image exists;
        # any() short-circuits so remaining params are not probed.
        return any(
            image_exists(build_image_url(crop, year, pest, code, week))
            for code in PARAMS
        )

    # Fix: the previous sequential scan issued up to 52 weeks x 5 params
    # = 260 HTTP probes one after another (minutes at worst-case timeout).
    # Fan the per-week checks out across a small thread pool; requests is
    # blocking I/O, so threads overlap the waits. pool.map preserves order.
    with ThreadPoolExecutor(max_workers=8) as pool:
        flags = list(pool.map(_week_has_data, candidate_weeks))

    available_weeks = [w for w, ok in zip(candidate_weeks, flags) if ok]
    return jsonify({"weeks": available_weeks})
186
+
187
+
188
@app.route('/check_availability')
def check_availability():
    """Report which params have an image for a crop/pest/year/week combo.

    Responds with ``{"availability": {param_code: bool, ...}}`` so the
    frontend can disable options with no data; an empty mapping is
    returned when any selector is missing.
    """
    selection = request.args
    crop = selection.get('crop', '')
    pest = selection.get('pest', '')
    year = selection.get('year', '')
    week = selection.get('week', '')

    if not all((crop, pest, year, week)):
        return jsonify({"availability": {}})

    availability = {
        code: image_exists(build_image_url(crop, year, pest, code, week))
        for code in PARAMS
    }
    return jsonify({"availability": availability})
208
+
209
+
210
# Only proxy content from the known upstream host. Without this check the
# endpoint is an open proxy: any caller could make this server fetch
# arbitrary URLs (SSRF), including internal addresses.
ALLOWED_PROXY_PREFIX = "http://www.icar-crida.res.in:8080/"


@app.route('/proxy-image')
def proxy_image():
    """Stream a remote ICAR image to the client over this (HTTPS-capable) app.

    Query-string parameter: ``url`` — the upstream image URL; must point
    at the ICAR host. Returns 400 when missing, 403 when off-host, 404
    when upstream has no image, 500 on network errors.
    """
    external_url = request.args.get('url')
    if not external_url:
        return "Missing URL", 400
    if not external_url.startswith(ALLOWED_PROXY_PREFIX):
        return "Forbidden URL", 403

    try:
        # Stream so the image is relayed in chunks rather than buffered whole.
        resp = requests.get(external_url, timeout=10, stream=True)
        if resp.status_code != 200 or not resp.headers.get('Content-Type', '').startswith('image/'):
            resp.close()  # release the streamed connection on the error path
            return "Image not available", 404
        return Response(
            stream_with_context(resp.iter_content(chunk_size=1024)),
            mimetype=resp.headers.get('Content-Type', 'image/jpeg')
        )
    except Exception as e:
        return str(e), 500
226
+
227
+
228
# Run the built-in dev server when executed directly. Binding 0.0.0.0:7860
# is presumably the Hugging Face Spaces convention (the commit page hosts
# this file there) — confirm before changing; debug is off for deployment.
if __name__ == '__main__':
    app.run(host='0.0.0.0', port=7860, debug=False)