NavyDevilDoc committed on
Commit
9ea268c
·
verified ·
1 Parent(s): 9ceba29

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -28
app.py CHANGED
@@ -17,59 +17,60 @@ META_FILE = "navy_metadata.pkl"
17
 
18
  st.set_page_config(page_title="Navy Policy Architect", layout="wide", page_icon="⚓")
19
 
20
- # --- CLOUD SYNC MANAGER (UPGRADED) ---
21
  class SyncManager:
22
  """Handles downloading/uploading the Database & Index to Hugging Face"""
23
 
24
  @staticmethod
25
  def get_remote_dbs():
26
- """Scans the Hugging Face Repo for available .db files"""
27
  if not HF_TOKEN: return []
28
  try:
29
  api = HfApi(token=HF_TOKEN)
30
  files = api.list_repo_files(repo_id=DATASET_REPO_ID, repo_type="dataset")
31
- # Filter for .db files (excluding potential system files)
32
  dbs = [f for f in files if f.endswith(".db")]
33
  return dbs
34
  except Exception as e:
35
- print(f"Error listing DBs: {e}")
36
  return []
37
 
38
  @staticmethod
39
  def pull_data(db_filename):
40
- if not HF_TOKEN: return False
 
 
41
  try:
42
- # Download Specific SQLite DB
43
- if not os.path.exists(db_filename):
44
- hf_hub_download(repo_id=DATASET_REPO_ID, filename=db_filename, local_dir=".", token=HF_TOKEN)
 
 
 
 
 
 
 
 
 
 
 
 
 
45
 
46
- # Download FAISS Index (Note: We use one shared index for simplicity in this demo,
47
- # but ideally you'd have 'navy_index_eng.faiss', etc.)
48
- if not os.path.exists(INDEX_FILE):
49
- try:
50
- hf_hub_download(repo_id=DATASET_REPO_ID, filename=INDEX_FILE, local_dir=".", token=HF_TOKEN)
51
- hf_hub_download(repo_id=DATASET_REPO_ID, filename=META_FILE, local_dir=".", token=HF_TOKEN)
52
- except:
53
- pass # It's okay if index doesn't exist yet
54
  return True
55
  except Exception as e:
56
- st.error(f"Sync Error (Pull): {e}")
57
- return False
58
 
59
  @staticmethod
60
  def push_data(db_filename):
61
  if not HF_TOKEN: return
62
  api = HfApi(token=HF_TOKEN)
63
  try:
64
- # Upload Specific SQLite DB
65
  api.upload_file(path_or_fileobj=db_filename, path_in_repo=db_filename, repo_id=DATASET_REPO_ID, repo_type="dataset")
66
- # Upload FAISS Index
67
  api.upload_file(path_or_fileobj=INDEX_FILE, path_in_repo=INDEX_FILE, repo_id=DATASET_REPO_ID, repo_type="dataset")
68
  api.upload_file(path_or_fileobj=META_FILE, path_in_repo=META_FILE, repo_id=DATASET_REPO_ID, repo_type="dataset")
69
  st.toast("Cloud Sync Complete!", icon="☁️")
70
  except Exception as e:
71
  st.error(f"Sync Error (Push): {e}")
72
-
73
  # --- SIDEBAR: KNOWLEDGE BASE SELECTOR ---
74
  with st.sidebar:
75
  st.header("🗄️ Knowledge Base")
@@ -96,14 +97,27 @@ with st.sidebar:
96
  # --- INITIALIZATION (Dynamic based on selection) ---
97
  # If the DB has changed or isn't loaded, load it now
98
  if 'current_db_name' not in st.session_state or st.session_state.current_db_name != selected_db:
 
 
 
 
99
  with st.spinner(f"Loading {selected_db} from Cloud..."):
100
- SyncManager.pull_data(selected_db)
101
- st.session_state.db = DatabaseManager(selected_db)
102
- st.session_state.search_engine = SearchEngine() # This resets the search engine for the new DB
103
- st.session_state.current_db_name = selected_db
104
- st.rerun() # Refresh to ensure everything is synced
105
-
106
- st.divider()
 
 
 
 
 
 
 
 
 
107
 
108
  # 3. Upload Section
109
  if "uploader_key" not in st.session_state:
 
17
 
18
  st.set_page_config(page_title="Navy Policy Architect", layout="wide", page_icon="⚓")
19
 
20
# --- CLOUD SYNC MANAGER (ROBUST) ---
class SyncManager:
    """Handles downloading/uploading the Database & Index to Hugging Face.

    All methods are static; configuration comes from the module-level
    HF_TOKEN, DATASET_REPO_ID, INDEX_FILE and META_FILE constants.
    """

    @staticmethod
    def get_remote_dbs():
        """Return the list of ``*.db`` files available in the dataset repo.

        Returns:
            list[str]: repo-relative filenames ending in ``.db``; an empty
            list when no token is configured or the listing fails.
        """
        if not HF_TOKEN:
            return []
        try:
            api = HfApi(token=HF_TOKEN)
            files = api.list_repo_files(repo_id=DATASET_REPO_ID, repo_type="dataset")
            return [f for f in files if f.endswith(".db")]
        except Exception as e:
            # Best effort: log to the server console instead of failing the
            # UI; the caller treats [] as "nothing remote yet".
            print(f"Error listing DBs: {e}")
            return []

    @staticmethod
    def pull_data(db_filename):
        """Download ``db_filename`` (plus FAISS index/metadata) from the repo.

        Returns:
            True on success, False when HF_TOKEN is missing, or the error
            message string on failure — callers must check ``result is True``.
        """
        if not HF_TOKEN:
            st.error("HF_TOKEN missing.")
            return False
        try:
            # Always go through hf_hub_download (no os.path.exists guard) so
            # it can validate the local copy; force_download=False means it
            # only re-downloads when the cloud version is newer.
            hf_hub_download(
                repo_id=DATASET_REPO_ID,
                filename=db_filename,
                local_dir=".",
                token=HF_TOKEN,
                force_download=False,
            )

            # Index + metadata are best effort: a brand-new repo may not
            # contain them yet.  FIX: narrow the bare `except:` so system
            # exits/keyboard interrupts are not swallowed.
            try:
                hf_hub_download(repo_id=DATASET_REPO_ID, filename=INDEX_FILE, local_dir=".", token=HF_TOKEN)
                hf_hub_download(repo_id=DATASET_REPO_ID, filename=META_FILE, local_dir=".", token=HF_TOKEN)
            except Exception:
                pass  # It's okay if the index doesn't exist yet
            return True
        except Exception as e:
            # Return the actual error message so the UI can show it permanently.
            return str(e)

    @staticmethod
    def push_data(db_filename):
        """Upload the SQLite DB plus index/metadata back to the dataset repo.

        No-op when HF_TOKEN is missing; shows a toast on success and a
        Streamlit error on failure (never raises to the caller).
        """
        if not HF_TOKEN:
            return
        api = HfApi(token=HF_TOKEN)
        try:
            api.upload_file(path_or_fileobj=db_filename, path_in_repo=db_filename, repo_id=DATASET_REPO_ID, repo_type="dataset")
            api.upload_file(path_or_fileobj=INDEX_FILE, path_in_repo=INDEX_FILE, repo_id=DATASET_REPO_ID, repo_type="dataset")
            api.upload_file(path_or_fileobj=META_FILE, path_in_repo=META_FILE, repo_id=DATASET_REPO_ID, repo_type="dataset")
            st.toast("Cloud Sync Complete!", icon="☁️")
        except Exception as e:
            st.error(f"Sync Error (Push): {e}")
 
74
  # --- SIDEBAR: KNOWLEDGE BASE SELECTOR ---
75
  with st.sidebar:
76
  st.header("🗄️ Knowledge Base")
 
97
# --- INITIALIZATION (Dynamic based on selection) ---
# (Re)load the knowledge base on first run or whenever the selection changes.
if 'current_db_name' not in st.session_state or st.session_state.current_db_name != selected_db:

    # Empty placeholder that will hold the error message if the pull fails.
    error_container = st.empty()

    with st.spinner(f"Loading {selected_db} from Cloud..."):

        # 1. Attempt the pull.  pull_data returns True on success, False when
        #    HF_TOKEN is missing, or an error-message string on failure.
        result = SyncManager.pull_data(selected_db)

        # 2. Check the result.
        if result is True:
            # Success! Initialize normally.
            st.session_state.db = DatabaseManager(selected_db)
            st.session_state.search_engine = SearchEngine()  # fresh engine for the new DB
            st.session_state.current_db_name = selected_db
            st.rerun()
        else:
            # Failure! Show the error and STOP.
            # Do NOT create an empty database. Do NOT rerun.
            # FIX: pull_data returns False (not a message) when the token is
            # missing — render a readable detail instead of "Details: False".
            detail = result if isinstance(result, str) else "HF_TOKEN missing or sync unavailable."
            error_container.error(f"CRITICAL ERROR: Could not load database.\n\nDetails: {detail}")
            st.stop()
121
 
122
  # 3. Upload Section
123
  if "uploader_key" not in st.session_state: