shaheerawan3 commited on
Commit
97b3f03
·
verified ·
1 Parent(s): 6e703d5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +72 -72
app.py CHANGED
@@ -201,81 +201,81 @@ class ImageScraper:
201
  ]
202
 
203
def get_images(self, query: str, num_images: int = 15) -> Dict[str, List[Dict[str, str]]]:
    """Fetch images from the Pixabay API and bucket them by relevance.

    Args:
        query: Search term; a blank/whitespace query falls back to
            "digital security".
        num_images: Maximum number of images to request and categorize.

    Returns:
        Dict with 'primary', 'secondary', and 'general' lists of image
        records ({'url', 'keyword', 'relevance', 'tags'}). On any failure
        (network error, non-200 response, no hits) stock images are
        returned under 'general' and the other buckets are empty, so
        callers can always index all three keys.
    """

    def _stock_fallback() -> Dict[str, List[Dict[str, str]]]:
        # Always include all three buckets so callers never hit KeyError.
        # (The original fallback returned only 'general'.)
        return {
            'primary': [],
            'secondary': [],
            'general': [
                {'url': url, 'keyword': 'technology',
                 'relevance': 'Fallback', 'tags': 'technology'}
                for url in self.get_stock_images()
            ],
        }

    try:
        # Ensure query is not empty
        if not query.strip():
            query = "digital security"

        base_url = "https://pixabay.com/api/"
        params = {
            'key': self.PIXABAY_API_KEY,
            'q': query,
            'image_type': 'photo',
            'per_page': num_images,
            # Pixabay expects the lowercase string "true"; Python's bool
            # True would be sent as "True".
            'safesearch': 'true',
            'lang': 'en'
        }

        print(f"Querying Pixabay API with params: {params}")  # Debug log
        # Timeout prevents a stalled connection from hanging the app.
        response = requests.get(base_url, params=params, timeout=10)
        print(f"API Response status: {response.status_code}")  # Debug log

        if response.status_code != 200:
            # Original code implicitly returned None here — that was a bug;
            # fall back to stock images instead.
            return _stock_fallback()

        data = response.json()
        hits = data.get('hits', [])
        print(f"Found {len(hits)} images")  # Debug log

        if not hits:
            # If no results, try backup keywords
            backup_keywords = ["digital security", "technology",
                               "cyber security", "business technology"]
            for keyword in backup_keywords:
                params['q'] = keyword
                backup_response = requests.get(base_url, params=params, timeout=10)
                if backup_response.status_code == 200:
                    backup_data = backup_response.json()
                    hits.extend(backup_data.get('hits', []))
                    if hits:
                        break

        # Ensure we have at least some images
        if not hits:
            return _stock_fallback()

        # Structure response: first third of hits go to 'primary',
        # next third to 'secondary', remainder to 'general'.
        result: Dict[str, List[Dict[str, str]]] = {
            'primary': [],
            'secondary': [],
            'general': []
        }
        for hit in hits[:num_images]:
            image_data = {
                'url': hit['largeImageURL'],
                'keyword': query,
                'relevance': 'Primary match',
                'tags': hit.get('tags', '')
            }

            if len(result['primary']) < num_images // 3:
                result['primary'].append(image_data)
            elif len(result['secondary']) < num_images // 3:
                result['secondary'].append(image_data)
            else:
                result['general'].append(image_data)

        return result

    except Exception as e:
        print(f"Error in get_images: {str(e)}")  # Debug log
        # Return stock images as fallback
        return _stock_fallback()
 
 
 
 
 
 
 
 
278
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
279
 
280
  def get_fallback_keywords(self) -> List[Dict[str, str]]:
281
  """Return fallback keywords if AI extraction fails"""
 
201
  ]
202
 
203
def get_images(self, query: str, num_images: int = 15) -> Dict[str, List[Dict[str, str]]]:
    """Search Pixabay for *query* and group the hits into buckets.

    Returns a dict with 'primary', 'secondary', and 'general' lists of
    image records. When the search yields nothing, a set of backup
    keywords is tried; when everything fails, bundled stock images are
    returned under 'general'.
    """
    try:
        # Blank queries default to a sensible topic.
        if not query.strip():
            query = "digital security"

        api_endpoint = "https://pixabay.com/api/"
        search_params = dict(
            key=self.PIXABAY_API_KEY,
            q=query,
            image_type='photo',
            per_page=num_images,
            safesearch=True,
            lang='en',
        )

        print(f"Querying Pixabay API with params: {search_params}")  # Debug log
        api_response = requests.get(api_endpoint, params=search_params)
        print(f"API Response status: {api_response.status_code}")  # Debug log

        if api_response.status_code == 200:
            payload = api_response.json()
            hits = payload.get('hits', [])
            print(f"Found {len(hits)} images")  # Debug log

            if not hits:
                # Retry with progressively broader backup keywords.
                for fallback_term in ("digital security", "technology",
                                      "cyber security", "business technology"):
                    search_params['q'] = fallback_term
                    retry = requests.get(api_endpoint, params=search_params)
                    if retry.status_code == 200:
                        hits.extend(retry.json().get('hits', []))
                        if hits:
                            break

            buckets: Dict[str, List[Dict[str, str]]] = {
                'primary': [],
                'secondary': [],
                'general': [],
            }

            # Still nothing — hand back stock images under 'general'.
            if not hits:
                buckets['general'] = [
                    {'url': url, 'keyword': 'technology',
                     'relevance': 'Fallback', 'tags': 'technology'}
                    for url in self.get_stock_images()
                ]
                return buckets

            # First third of the hits land in 'primary', the next third
            # in 'secondary', and the remainder in 'general'.
            bucket_cap = num_images // 3
            for hit in hits[:num_images]:
                record = {
                    'url': hit['largeImageURL'],
                    'keyword': query,
                    'relevance': 'Primary match',
                    'tags': hit.get('tags', ''),
                }
                if len(buckets['primary']) < bucket_cap:
                    buckets['primary'].append(record)
                elif len(buckets['secondary']) < bucket_cap:
                    buckets['secondary'].append(record)
                else:
                    buckets['general'].append(record)

            return buckets

    except Exception as e:
        print(f"Error in get_images: {str(e)}")  # Debug log
        # Return stock images as fallback
        return {
            'general': [
                {'url': url, 'keyword': 'technology',
                 'relevance': 'Fallback', 'tags': 'technology'}
                for url in self.get_stock_images()
            ]
        }
278
+
279
 
280
  def get_fallback_keywords(self) -> List[Dict[str, str]]:
281
  """Return fallback keywords if AI extraction fails"""