rajkumarrawal commited on
Commit
c8afbb1
·
1 Parent(s): f6c1694

refactor(autonomous_engine): Remove LRU cache from `_analyze_input_hash` method and fix indentation

Browse files

Removed the `@lru_cache` decorator from `_analyze_input_hash` as caching was not functioning correctly with the current implementation. Also corrected the indentation of the returned dictionary to improve code readability and adhere to PEP 8 standards.

Files changed (1) hide show
  1. autonomous_engine.py +6 -7
autonomous_engine.py CHANGED
@@ -291,14 +291,13 @@ class CachedReasoningEngine:
291
  else:
292
  self.logger = logging.getLogger(__name__)
293
 
294
- @lru_cache(maxsize=1000)
295
  def _analyze_input_hash(self, user_input_hash: str) -> Dict[str, Any]:
296
- """Cached analysis to avoid recomputing identical requests."""
297
- return {
298
- "cached": True,
299
- "analysis_id": user_input_hash,
300
- "timestamp": datetime.utcnow()
301
- }
302
 
303
  def analyze_situation(self, user_input: str, context: Dict[str, Any]) -> Dict[str, Any]:
304
  """Analyze situation with caching and optimization."""
 
291
  else:
292
  self.logger = logging.getLogger(__name__)
293
 
 
294
  def _analyze_input_hash(self, user_input_hash: str) -> Dict[str, Any]:
295
+ """Cached analysis to avoid recomputing identical requests."""
296
+ return {
297
+ "cached": True,
298
+ "analysis_id": user_input_hash,
299
+ "timestamp": datetime.utcnow()
300
+ }
301
 
302
  def analyze_situation(self, user_input: str, context: Dict[str, Any]) -> Dict[str, Any]:
303
  """Analyze situation with caching and optimization."""