rdz-falcon committed on
Commit
6893647
·
verified ·
1 Parent(s): e75c717

Update src/rag.py

Browse files
Files changed (1) hide show
  1. src/rag.py +17 -1
src/rag.py CHANGED
@@ -64,7 +64,23 @@ def load_emotion_classifier(api_base_url="http://127.0.0.1:1234/v1"):
64
  Returns:
65
  ChatOpenAI: A LangChain ChatOpenAI instance configured for the API.
66
  """
67
- print(f"=== CONFIGURING LLM CLIENT FOR API: {api_base_url} ===")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
68
 
69
  from llama_cpp import Llama
70
 
 
64
  Returns:
65
  ChatOpenAI: A LangChain ChatOpenAI instance configured for the API.
66
  """
67
+ model_snapshot_dir = "/app/.cache_app/huggingface_hub/hub/models--rdz-falcon--llma_fine-tuned/snapshots/7bd0f3b7ab734b69313ae09898904d57a1c9ac00"
68
+ # More general, remove the whole model repo cache
69
+ model_repo_dir = "/app/.cache_app/huggingface_hub/hub/models--rdz-falcon--llma_fine-tuned"
70
+
71
+
72
+ # Choose one of the directories to remove (model_repo_dir is more thorough for this model)
73
+ dir_to_remove = model_repo_dir # Or model_snapshot_dir
74
+
75
+ if os.path.exists(dir_to_remove):
76
+ print(f"Attempting to remove cached directory: {dir_to_remove}")
77
+ try:
78
+ shutil.rmtree(dir_to_remove)
79
+ print(f"Successfully removed {dir_to_remove}. Model will be re-downloaded.")
80
+ except Exception as e:
81
+ print(f"Error removing directory {dir_to_remove}: {e}")
82
+ else:
83
+ print(f"Cache directory {dir_to_remove} not found, model will be downloaded.")
84
 
85
  from llama_cpp import Llama
86