doniramdani820 committed on
Commit
14d4429
·
verified ·
1 Parent(s): d14495b

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +51 -48
app.py CHANGED
@@ -8,7 +8,10 @@ Features:
8
  - Fuzzy label matching
9
  - Memory-efficient model loading
10
  - ONNX CPU optimization
11
- - Request caching for performance
 
 
 
12
  """
13
 
14
  import os
@@ -146,10 +149,10 @@ MODEL_ROUTING = [
146
  (['spiral', 'galaxy'], 'spiral_galaxy')
147
  ]
148
 
149
- # Global cache untuk models dan responses
150
  LOADED_MODELS: Dict[str, Dict[str, Any]] = {}
151
- RESPONSE_CACHE: Dict[str, Dict[str, Any]] = {}
152
- CACHE_MAX_SIZE = 100
153
 
154
  class ModelManager:
155
  """Manager untuk loading dan caching models"""
@@ -348,35 +351,39 @@ def get_button_index(x_center: float, y_center: float, img_width: int, img_heigh
348
  return button_index
349
 
350
  # =================================================================
351
- # CACHING SYSTEM
352
  # =================================================================
353
 
354
- def get_cache_key(request_data: dict) -> str:
355
- """Generate cache key dari request data"""
356
- cache_string = f"{request_data.get('challenge_type')}_{request_data.get('target_label')}_{request_data.get('image_b64', '')[:100]}"
357
- return hashlib.md5(cache_string.encode()).hexdigest()
358
-
359
- def get_cached_response(cache_key: str) -> Optional[dict]:
360
- """Get response dari cache jika ada"""
361
- return RESPONSE_CACHE.get(cache_key)
362
-
363
- def cache_response(cache_key: str, response: dict):
364
- """Cache response dengan size limit"""
365
- if len(RESPONSE_CACHE) >= CACHE_MAX_SIZE:
366
- # Remove oldest entry
367
- oldest_key = next(iter(RESPONSE_CACHE))
368
- del RESPONSE_CACHE[oldest_key]
369
-
370
- RESPONSE_CACHE[cache_key] = response
 
371
 
372
  # =================================================================
373
  # CHALLENGE HANDLERS
374
  # =================================================================
375
 
376
  async def handle_pick_the_challenge(data: dict) -> dict:
377
- """Handle 'pick the' challenges dengan fuzzy matching"""
378
  start_time = datetime.now()
379
 
 
 
 
380
  target_label_original = data['target_label']
381
  image_b64 = data['image_b64']
382
  target_label = target_label_original
@@ -552,9 +559,12 @@ async def handle_pick_the_challenge(data: dict) -> dict:
552
  }
553
 
554
  async def handle_upright_challenge(data: dict) -> dict:
555
- """Handle 'upright' challenges"""
556
  start_time = datetime.now()
557
 
 
 
 
558
  try:
559
  image_b64 = data['image_b64']
560
  model_data = await ModelManager.get_model('upright')
@@ -708,7 +718,7 @@ async def root():
708
  "/docs": "GET - API documentation"
709
  },
710
  "models_loaded": len(LOADED_MODELS),
711
- "cache_size": len(RESPONSE_CACHE)
712
  }
713
 
714
  @app.get("/health")
@@ -737,27 +747,27 @@ async def health_check():
737
  "tensorflow_available": TF_AVAILABLE,
738
  "models_loaded": len(LOADED_MODELS),
739
  "available_models": list(CONFIGS.keys()),
740
- "cache_entries": len(RESPONSE_CACHE),
 
741
  "warnings": warnings
742
  }
743
 
744
  @app.post("/clear-cache")
745
  async def clear_cache(authenticated: bool = Depends(verify_api_key)):
746
- """πŸ—‘οΈ Clear model and response caches - useful after fixes"""
747
  try:
748
  models_cleared = len(LOADED_MODELS)
749
- cache_cleared = len(RESPONSE_CACHE)
750
 
751
  LOADED_MODELS.clear()
752
- RESPONSE_CACHE.clear()
753
 
754
- logger.info(f"πŸ—‘οΈ Cache cleared: {models_cleared} models, {cache_cleared} responses")
755
 
756
  return {
757
  "status": "success",
758
- "message": "Cache cleared successfully",
759
  "models_cleared": models_cleared,
760
- "cache_entries_cleared": cache_cleared
761
  }
762
  except Exception as e:
763
  logger.error(f"❌ Error clearing cache: {e}")
@@ -769,7 +779,7 @@ async def solve_funcaptcha(
769
  authenticated: bool = Depends(verify_api_key)
770
  ) -> FunCaptchaResponse:
771
  """
772
- 🧩 Solve FunCaptcha challenges
773
 
774
  Supports:
775
  - pick_the: Pick specific objects dari images
@@ -777,19 +787,14 @@ async def solve_funcaptcha(
777
 
778
  Features:
779
  - Fuzzy label matching
780
- - Response caching
781
  - Multi-model support
782
  """
783
 
784
- # Generate cache key
785
  request_dict = request.dict()
786
- cache_key = get_cache_key(request_dict)
787
 
788
- # Check cache first
789
- cached_response = get_cached_response(cache_key)
790
- if cached_response:
791
- logger.info(f"Cache hit for challenge: {request.challenge_type}")
792
- return FunCaptchaResponse(**cached_response)
793
 
794
  # Process request
795
  if request.challenge_type == 'pick_the':
@@ -801,10 +806,8 @@ async def solve_funcaptcha(
801
  else:
802
  raise HTTPException(status_code=400, detail=f"Unsupported challenge type: {request.challenge_type}")
803
 
804
- # Cache response
805
- cache_response(cache_key, result)
806
-
807
- logger.info(f"Challenge solved: {request.challenge_type} -> {result['status']}")
808
 
809
  return FunCaptchaResponse(**result)
810
 
@@ -849,11 +852,11 @@ async def shutdown_event():
849
  """Cleanup saat shutdown"""
850
  logger.info("πŸ›‘ Shutting down FunCaptcha Solver API...")
851
 
852
- # Clear caches
853
  LOADED_MODELS.clear()
854
- RESPONSE_CACHE.clear()
855
 
856
- logger.info("βœ… Cleanup completed")
857
 
858
  # =================================================================
859
  # DEVELOPMENT SERVER
 
8
  - Fuzzy label matching
9
  - Memory-efficient model loading
10
  - ONNX CPU optimization
11
+ - NO RESPONSE CACHING for fresh/accurate predictions
12
+ - Model caching only (for performance)
13
+
14
+ πŸ”„ IMPORTANT: Response caching DISABLED untuk memastikan prediksi selalu fresh dan akurat
15
  """
16
 
17
  import os
 
149
  (['spiral', 'galaxy'], 'spiral_galaxy')
150
  ]
151
 
152
+ # Global cache untuk models saja (response cache DISABLED untuk prediksi fresh)
153
  LOADED_MODELS: Dict[str, Dict[str, Any]] = {}
154
+ # RESPONSE_CACHE: Dict[str, Dict[str, Any]] = {} # ❌ DISABLED - No response caching
155
+ # CACHE_MAX_SIZE = 100 # ❌ DISABLED
156
 
157
  class ModelManager:
158
  """Manager untuk loading dan caching models"""
 
351
  return button_index
352
 
353
  # =================================================================
354
+ # CACHING SYSTEM - DISABLED FOR FRESH PREDICTIONS
355
  # =================================================================
356
 
357
+ # ❌ CACHE FUNCTIONS DISABLED - No response caching for fresh predictions
358
+ # def get_cache_key(request_data: dict) -> str:
359
+ # """Generate cache key dari request data"""
360
+ # cache_string = f"{request_data.get('challenge_type')}_{request_data.get('target_label')}_{request_data.get('image_b64', '')[:100]}"
361
+ # return hashlib.md5(cache_string.encode()).hexdigest()
362
+
363
+ # def get_cached_response(cache_key: str) -> Optional[dict]:
364
+ # """Get response dari cache jika ada"""
365
+ # return RESPONSE_CACHE.get(cache_key)
366
+
367
+ # def cache_response(cache_key: str, response: dict):
368
+ # """Cache response dengan size limit"""
369
+ # if len(RESPONSE_CACHE) >= CACHE_MAX_SIZE:
370
+ # # Remove oldest entry
371
+ # oldest_key = next(iter(RESPONSE_CACHE))
372
+ # del RESPONSE_CACHE[oldest_key]
373
+ #
374
+ # RESPONSE_CACHE[cache_key] = response
375
 
376
  # =================================================================
377
  # CHALLENGE HANDLERS
378
  # =================================================================
379
 
380
  async def handle_pick_the_challenge(data: dict) -> dict:
381
+ """Handle 'pick the' challenges dengan fuzzy matching - ALWAYS FRESH PREDICTIONS"""
382
  start_time = datetime.now()
383
 
384
+ # πŸ”„ ALWAYS FRESH - No response caching for accurate pick_the predictions
385
+ logger.info(f"πŸ”„ Processing FRESH pick_the prediction (no response cache)")
386
+
387
  target_label_original = data['target_label']
388
  image_b64 = data['image_b64']
389
  target_label = target_label_original
 
559
  }
560
 
561
  async def handle_upright_challenge(data: dict) -> dict:
562
+ """Handle 'upright' challenges - ALWAYS FRESH PREDICTIONS"""
563
  start_time = datetime.now()
564
 
565
+ # πŸ”„ ALWAYS FRESH - No response caching for accurate upright predictions
566
+ logger.info(f"πŸ”„ Processing FRESH upright prediction (no response cache)")
567
+
568
  try:
569
  image_b64 = data['image_b64']
570
  model_data = await ModelManager.get_model('upright')
 
718
  "/docs": "GET - API documentation"
719
  },
720
  "models_loaded": len(LOADED_MODELS),
721
+ "response_caching": "disabled" # ❌ No response caching for fresh predictions
722
  }
723
 
724
  @app.get("/health")
 
747
  "tensorflow_available": TF_AVAILABLE,
748
  "models_loaded": len(LOADED_MODELS),
749
  "available_models": list(CONFIGS.keys()),
750
+ "response_caching": "disabled", # ❌ No response caching for fresh predictions
751
+ "cache_entries": 0, # Always 0 since response cache disabled
752
  "warnings": warnings
753
  }
754
 
755
  @app.post("/clear-cache")
756
  async def clear_cache(authenticated: bool = Depends(verify_api_key)):
757
+ """πŸ—‘οΈ Clear model cache only (response cache disabled for fresh predictions)"""
758
  try:
759
  models_cleared = len(LOADED_MODELS)
 
760
 
761
  LOADED_MODELS.clear()
762
+ # RESPONSE_CACHE.clear() # ❌ DISABLED - No response caching
763
 
764
+ logger.info(f"πŸ—‘οΈ Model cache cleared: {models_cleared} models (response cache disabled)")
765
 
766
  return {
767
  "status": "success",
768
+ "message": "Model cache cleared successfully (response cache disabled for fresh predictions)",
769
  "models_cleared": models_cleared,
770
+ "response_caching": "disabled"
771
  }
772
  except Exception as e:
773
  logger.error(f"❌ Error clearing cache: {e}")
 
779
  authenticated: bool = Depends(verify_api_key)
780
  ) -> FunCaptchaResponse:
781
  """
782
+ 🧩 Solve FunCaptcha challenges - ALWAYS FRESH PREDICTIONS
783
 
784
  Supports:
785
  - pick_the: Pick specific objects dari images
 
787
 
788
  Features:
789
  - Fuzzy label matching
790
+ - NO response caching (always fresh predictions)
791
  - Multi-model support
792
  """
793
 
 
794
  request_dict = request.dict()
 
795
 
796
+ # ❌ NO CACHING - Always process fresh for accurate results
797
+ logger.info(f"πŸ”„ Processing FRESH prediction for challenge: {request.challenge_type} (no cache)")
 
 
 
798
 
799
  # Process request
800
  if request.challenge_type == 'pick_the':
 
806
  else:
807
  raise HTTPException(status_code=400, detail=f"Unsupported challenge type: {request.challenge_type}")
808
 
809
+ # ❌ NO CACHING - Direct return for fresh results
810
+ logger.info(f"βœ… Fresh challenge solved: {request.challenge_type} -> {result['status']}")
 
 
811
 
812
  return FunCaptchaResponse(**result)
813
 
 
852
  """Cleanup saat shutdown"""
853
  logger.info("πŸ›‘ Shutting down FunCaptcha Solver API...")
854
 
855
+ # Clear model cache only (response cache disabled)
856
  LOADED_MODELS.clear()
857
+ # RESPONSE_CACHE.clear() # ❌ DISABLED - No response caching
858
 
859
+ logger.info("βœ… Cleanup completed (response cache disabled)")
860
 
861
  # =================================================================
862
  # DEVELOPMENT SERVER