MogensR committed on
Commit
f77b2f9
·
1 Parent(s): 8468955

Rename cache_cleaner.py to utils/cache/cache_cleaner.py

Browse files
cache_cleaner.py → utils/cache/cache_cleaner.py RENAMED
@@ -1,6 +1,7 @@
1
- # ============================================================================ #
2
- # HARD CACHE CLEANER + WORKING SAM2 LOADER FOR HUGGINGFACE SPACES
3
- # ============================================================================ #
 
4
 
5
  import os
6
  import gc
@@ -25,7 +26,7 @@ def clean_all_caches(verbose: bool = True):
25
  """Clean all caches that might interfere with SAM2 loading"""
26
 
27
  if verbose:
28
- logger.info("🧹 Starting comprehensive cache cleanup...")
29
 
30
  # 1. Clean Python module cache
31
  HardCacheCleaner._clean_python_cache(verbose)
@@ -46,7 +47,7 @@ def clean_all_caches(verbose: bool = True):
46
  HardCacheCleaner._force_gc_cleanup(verbose)
47
 
48
  if verbose:
49
- logger.info("Cache cleanup completed")
50
 
51
  @staticmethod
52
  def _clean_python_cache(verbose: bool = True):
@@ -56,7 +57,7 @@ def _clean_python_cache(verbose: bool = True):
56
  sam2_modules = [key for key in sys.modules.keys() if 'sam2' in key.lower()]
57
  for module in sam2_modules:
58
  if verbose:
59
- logger.info(f"🗑️ Removing cached module: {module}")
60
  del sys.modules[module]
61
 
62
  # Clear __pycache__ directories
@@ -65,7 +66,7 @@ def _clean_python_cache(verbose: bool = True):
65
  if dir_name == "__pycache__":
66
  cache_path = os.path.join(root, dir_name)
67
  if verbose:
68
- logger.info(f"🗑️ Removing __pycache__: {cache_path}")
69
  shutil.rmtree(cache_path, ignore_errors=True)
70
  dirs.remove(dir_name)
71
 
@@ -76,9 +77,14 @@ def _clean_python_cache(verbose: bool = True):
76
  def _clean_huggingface_cache(verbose: bool = True):
77
  """Clean HuggingFace model cache"""
78
  try:
 
 
 
 
79
  cache_paths = [
80
  os.path.expanduser("~/.cache/huggingface/"),
81
  os.path.expanduser("~/.cache/torch/"),
 
82
  "./checkpoints/",
83
  "./.cache/",
84
  ]
@@ -86,7 +92,7 @@ def _clean_huggingface_cache(verbose: bool = True):
86
  for cache_path in cache_paths:
87
  if os.path.exists(cache_path):
88
  if verbose:
89
- logger.info(f"🗑️ Cleaning cache directory: {cache_path}")
90
 
91
  # Remove SAM2 specific files
92
  for root, dirs, files in os.walk(cache_path):
@@ -96,7 +102,7 @@ def _clean_huggingface_cache(verbose: bool = True):
96
  try:
97
  os.remove(file_path)
98
  if verbose:
99
- logger.info(f"🗑️ Removed cached file: {file_path}")
100
  except:
101
  pass
102
 
@@ -106,7 +112,7 @@ def _clean_huggingface_cache(verbose: bool = True):
106
  try:
107
  shutil.rmtree(dir_path, ignore_errors=True)
108
  if verbose:
109
- logger.info(f"🗑️ Removed cached directory: {dir_path}")
110
  dirs.remove(dir_name)
111
  except:
112
  pass
@@ -122,7 +128,7 @@ def _clean_pytorch_cache(verbose: bool = True):
122
  if torch.cuda.is_available():
123
  torch.cuda.empty_cache()
124
  if verbose:
125
- logger.info("🗑️ Cleared PyTorch CUDA cache")
126
  except Exception as e:
127
  logger.warning(f"PyTorch cache cleanup failed: {e}")
128
 
@@ -130,7 +136,16 @@ def _clean_pytorch_cache(verbose: bool = True):
130
  def _clean_temp_directories(verbose: bool = True):
131
  """Clean temporary directories"""
132
  try:
133
- temp_dirs = [tempfile.gettempdir(), "/tmp", "./tmp", "./temp"]
 
 
 
 
 
 
 
 
 
134
 
135
  for temp_dir in temp_dirs:
136
  if os.path.exists(temp_dir):
@@ -143,7 +158,7 @@ def _clean_temp_directories(verbose: bool = True):
143
  elif os.path.isdir(item_path):
144
  shutil.rmtree(item_path, ignore_errors=True)
145
  if verbose:
146
- logger.info(f"🗑️ Removed temp item: {item_path}")
147
  except:
148
  pass
149
 
@@ -160,7 +175,7 @@ def _clear_import_cache(verbose: bool = True):
160
  importlib.invalidate_caches()
161
 
162
  if verbose:
163
- logger.info("🗑️ Cleared Python import cache")
164
 
165
  except Exception as e:
166
  logger.warning(f"Import cache cleanup failed: {e}")
@@ -171,7 +186,7 @@ def _force_gc_cleanup(verbose: bool = True):
171
  try:
172
  collected = gc.collect()
173
  if verbose:
174
- logger.info(f"🗑️ Garbage collection freed {collected} objects")
175
  except Exception as e:
176
  logger.warning(f"Garbage collection failed: {e}")
177
 
@@ -189,7 +204,7 @@ def load_sam2_transformers_approach(device: str = "cuda", model_size: str = "lar
189
  This method works reliably on HuggingFace Spaces
190
  """
191
  try:
192
- logger.info("🤖 Loading SAM2 via HuggingFace Transformers...")
193
 
194
  # Model size mapping
195
  model_map = {
@@ -212,7 +227,7 @@ def load_sam2_transformers_approach(device: str = "cuda", model_size: str = "lar
212
  device=0 if device == "cuda" else -1
213
  )
214
 
215
- logger.info("SAM2 loaded successfully via Transformers pipeline")
216
  return sam2_pipeline
217
 
218
  except Exception as e:
@@ -225,7 +240,7 @@ def load_sam2_transformers_approach(device: str = "cuda", model_size: str = "lar
225
  processor = Sam2Processor.from_pretrained(model_id)
226
  model = Sam2Model.from_pretrained(model_id).to(device)
227
 
228
- logger.info("SAM2 loaded successfully via Transformers classes")
229
  return {"model": model, "processor": processor}
230
 
231
  except Exception as e:
@@ -237,7 +252,7 @@ def load_sam2_transformers_approach(device: str = "cuda", model_size: str = "lar
237
 
238
  predictor = SAM2ImagePredictor.from_pretrained(model_id)
239
 
240
- logger.info("SAM2 loaded successfully via official from_pretrained")
241
  return predictor
242
 
243
  except Exception as e:
@@ -255,7 +270,7 @@ def load_sam2_fallback_approach(device: str = "cuda") -> Optional[Any]:
255
  Fallback approach using direct model loading
256
  """
257
  try:
258
- logger.info("🔄 Trying fallback SAM2 loading approach...")
259
 
260
  # Try the simplest possible approach
261
  from huggingface_hub import hf_hub_download
@@ -286,10 +301,6 @@ def load_sam2_fallback_approach(device: str = "cuda") -> Optional[Any]:
286
  return None
287
 
288
 
289
- # ============================================================================ #
290
- # INTEGRATED MODEL LOADER WITH CACHE CLEANING
291
- # ============================================================================ #
292
-
293
  def load_sam2_with_cache_cleanup(
294
  device: str = "cuda",
295
  model_size: str = "large",
@@ -308,57 +319,32 @@ def load_sam2_with_cache_cleanup(
308
  try:
309
  # Step 1: Clean caches if requested
310
  if force_cache_clean:
311
- status_messages.append("🧹 Cleaning caches...")
312
  HardCacheCleaner.clean_all_caches(verbose=verbose)
313
- status_messages.append("Cache cleanup completed")
314
 
315
  # Step 2: Try primary loading method
316
- status_messages.append("🤖 Loading SAM2 (primary method)...")
317
  model = WorkingSAM2Loader.load_sam2_transformers_approach(device, model_size)
318
 
319
  if model is not None:
320
- status_messages.append("SAM2 loaded successfully!")
321
  return model, "\n".join(status_messages)
322
 
323
  # Step 3: Try fallback method
324
- status_messages.append("🔄 Trying fallback loading method...")
325
  model = WorkingSAM2Loader.load_sam2_fallback_approach(device)
326
 
327
  if model is not None:
328
- status_messages.append("SAM2 loaded successfully (fallback)!")
329
  return model, "\n".join(status_messages)
330
 
331
  # Step 4: All methods failed
332
- status_messages.append("All SAM2 loading methods failed")
333
  return None, "\n".join(status_messages)
334
 
335
  except Exception as e:
336
- error_msg = f"Critical error in SAM2 loading: {e}"
337
  logger.error(f"{error_msg}\n{traceback.format_exc()}")
338
  status_messages.append(error_msg)
339
- return None, "\n".join(status_messages)
340
-
341
-
342
- # ============================================================================ #
343
- # USAGE EXAMPLE
344
- # ============================================================================ #
345
-
346
- if __name__ == "__main__":
347
- # Clean example usage
348
- print("Testing SAM2 loader with cache cleanup...")
349
-
350
- # Load SAM2 with full cache cleanup
351
- model, status = load_sam2_with_cache_cleanup(
352
- device="cuda",
353
- model_size="large",
354
- force_cache_clean=True,
355
- verbose=True
356
- )
357
-
358
- print("Status:", status)
359
-
360
- if model is not None:
361
- print("SAM2 loaded successfully!")
362
- print("Model type:", type(model))
363
- else:
364
- print("SAM2 loading failed completely")
 
1
+ """
2
+ Cache Management and SAM2 Loading Utilities
3
+ Comprehensive cache cleaning system to resolve model loading issues on HF Spaces
4
+ """
5
 
6
  import os
7
  import gc
 
26
  """Clean all caches that might interfere with SAM2 loading"""
27
 
28
  if verbose:
29
+ logger.info("Starting comprehensive cache cleanup...")
30
 
31
  # 1. Clean Python module cache
32
  HardCacheCleaner._clean_python_cache(verbose)
 
47
  HardCacheCleaner._force_gc_cleanup(verbose)
48
 
49
  if verbose:
50
+ logger.info("Cache cleanup completed")
51
 
52
  @staticmethod
53
  def _clean_python_cache(verbose: bool = True):
 
57
  sam2_modules = [key for key in sys.modules.keys() if 'sam2' in key.lower()]
58
  for module in sam2_modules:
59
  if verbose:
60
+ logger.info(f"Removing cached module: {module}")
61
  del sys.modules[module]
62
 
63
  # Clear __pycache__ directories
 
66
  if dir_name == "__pycache__":
67
  cache_path = os.path.join(root, dir_name)
68
  if verbose:
69
+ logger.info(f"Removing __pycache__: {cache_path}")
70
  shutil.rmtree(cache_path, ignore_errors=True)
71
  dirs.remove(dir_name)
72
 
 
77
  def _clean_huggingface_cache(verbose: bool = True):
78
  """Clean HuggingFace model cache"""
79
  try:
80
+ # Get config for cache directories
81
+ from config.app_config import get_config
82
+ config = get_config()
83
+
84
  cache_paths = [
85
  os.path.expanduser("~/.cache/huggingface/"),
86
  os.path.expanduser("~/.cache/torch/"),
87
+ config.model_cache_dir,
88
  "./checkpoints/",
89
  "./.cache/",
90
  ]
 
92
  for cache_path in cache_paths:
93
  if os.path.exists(cache_path):
94
  if verbose:
95
+ logger.info(f"Cleaning cache directory: {cache_path}")
96
 
97
  # Remove SAM2 specific files
98
  for root, dirs, files in os.walk(cache_path):
 
102
  try:
103
  os.remove(file_path)
104
  if verbose:
105
+ logger.info(f"Removed cached file: {file_path}")
106
  except:
107
  pass
108
 
 
112
  try:
113
  shutil.rmtree(dir_path, ignore_errors=True)
114
  if verbose:
115
+ logger.info(f"Removed cached directory: {dir_path}")
116
  dirs.remove(dir_name)
117
  except:
118
  pass
 
128
  if torch.cuda.is_available():
129
  torch.cuda.empty_cache()
130
  if verbose:
131
+ logger.info("Cleared PyTorch CUDA cache")
132
  except Exception as e:
133
  logger.warning(f"PyTorch cache cleanup failed: {e}")
134
 
 
136
  def _clean_temp_directories(verbose: bool = True):
137
  """Clean temporary directories"""
138
  try:
139
+ from config.app_config import get_config
140
+ config = get_config()
141
+
142
+ temp_dirs = [
143
+ config.temp_dir,
144
+ tempfile.gettempdir(),
145
+ "/tmp",
146
+ "./tmp",
147
+ "./temp"
148
+ ]
149
 
150
  for temp_dir in temp_dirs:
151
  if os.path.exists(temp_dir):
 
158
  elif os.path.isdir(item_path):
159
  shutil.rmtree(item_path, ignore_errors=True)
160
  if verbose:
161
+ logger.info(f"Removed temp item: {item_path}")
162
  except:
163
  pass
164
 
 
175
  importlib.invalidate_caches()
176
 
177
  if verbose:
178
+ logger.info("Cleared Python import cache")
179
 
180
  except Exception as e:
181
  logger.warning(f"Import cache cleanup failed: {e}")
 
186
  try:
187
  collected = gc.collect()
188
  if verbose:
189
+ logger.info(f"Garbage collection freed {collected} objects")
190
  except Exception as e:
191
  logger.warning(f"Garbage collection failed: {e}")
192
 
 
204
  This method works reliably on HuggingFace Spaces
205
  """
206
  try:
207
+ logger.info("Loading SAM2 via HuggingFace Transformers...")
208
 
209
  # Model size mapping
210
  model_map = {
 
227
  device=0 if device == "cuda" else -1
228
  )
229
 
230
+ logger.info("SAM2 loaded successfully via Transformers pipeline")
231
  return sam2_pipeline
232
 
233
  except Exception as e:
 
240
  processor = Sam2Processor.from_pretrained(model_id)
241
  model = Sam2Model.from_pretrained(model_id).to(device)
242
 
243
+ logger.info("SAM2 loaded successfully via Transformers classes")
244
  return {"model": model, "processor": processor}
245
 
246
  except Exception as e:
 
252
 
253
  predictor = SAM2ImagePredictor.from_pretrained(model_id)
254
 
255
+ logger.info("SAM2 loaded successfully via official from_pretrained")
256
  return predictor
257
 
258
  except Exception as e:
 
270
  Fallback approach using direct model loading
271
  """
272
  try:
273
+ logger.info("Trying fallback SAM2 loading approach...")
274
 
275
  # Try the simplest possible approach
276
  from huggingface_hub import hf_hub_download
 
301
  return None
302
 
303
 
 
 
 
 
304
  def load_sam2_with_cache_cleanup(
305
  device: str = "cuda",
306
  model_size: str = "large",
 
319
  try:
320
  # Step 1: Clean caches if requested
321
  if force_cache_clean:
322
+ status_messages.append("Cleaning caches...")
323
  HardCacheCleaner.clean_all_caches(verbose=verbose)
324
+ status_messages.append("Cache cleanup completed")
325
 
326
  # Step 2: Try primary loading method
327
+ status_messages.append("Loading SAM2 (primary method)...")
328
  model = WorkingSAM2Loader.load_sam2_transformers_approach(device, model_size)
329
 
330
  if model is not None:
331
+ status_messages.append("SAM2 loaded successfully!")
332
  return model, "\n".join(status_messages)
333
 
334
  # Step 3: Try fallback method
335
+ status_messages.append("Trying fallback loading method...")
336
  model = WorkingSAM2Loader.load_sam2_fallback_approach(device)
337
 
338
  if model is not None:
339
+ status_messages.append("SAM2 loaded successfully (fallback)!")
340
  return model, "\n".join(status_messages)
341
 
342
  # Step 4: All methods failed
343
+ status_messages.append("All SAM2 loading methods failed")
344
  return None, "\n".join(status_messages)
345
 
346
  except Exception as e:
347
+ error_msg = f"Critical error in SAM2 loading: {e}"
348
  logger.error(f"{error_msg}\n{traceback.format_exc()}")
349
  status_messages.append(error_msg)
350
+ return None, "\n".join(status_messages)