# utils/storage.py

import json
import pickle
import sqlite3
from datetime import datetime
from pathlib import Path
from sqlite3 import Error
from typing import Optional

import faiss
import streamlit as st
from langchain_community.docstore.in_memory import InMemoryDocstore
from langchain_community.vectorstores import FAISS

class PersistentStorage:
    """Handles persistent storage for the application."""
    
    def __init__(self):
        # Base paths
        self.base_path = Path("/data")
        
        # Create necessary subdirectories
        self.db_path = self.base_path / "database"
        self.files_path = self.base_path / "files"
        self.vectorstore_path = self.base_path / "vectorstore"
        self.metadata_path = self.base_path / "metadata"
        
        # Ensure directories exist
        self._create_directories()
    
    def _create_directories(self):
        """Create necessary directory structure."""
        for path in [self.db_path, self.files_path, self.vectorstore_path, self.metadata_path]:
            path.mkdir(parents=True, exist_ok=True)
    
    def get_db_path(self) -> str:
        """Get the path to the SQLite database file."""
        return str(self.db_path / "rfp_analysis.db")
    
    def save_uploaded_file(self, uploaded_file, collection_id: Optional[int] = None) -> Path:
        """Save an uploaded file to persistent storage."""
        # Create collection subdirectory if needed
        if collection_id is not None:
            save_dir = self.files_path / str(collection_id)
            save_dir.mkdir(parents=True, exist_ok=True)
        else:
            save_dir = self.files_path
            
        # Create timestamped filename
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"{timestamp}_{uploaded_file.name}"
        file_path = save_dir / filename
        
        # Save file
        with file_path.open("wb") as f:
            f.write(uploaded_file.getbuffer())
            
        # Save metadata
        metadata = {
            "original_name": uploaded_file.name,
            "upload_time": timestamp,
            "collection_id": collection_id,
            "size": uploaded_file.size,
            "type": uploaded_file.type
        }
        self._save_metadata(file_path.stem, metadata)
        
        return file_path
    
    def _save_metadata(self, file_id: str, metadata: dict):
        """Save metadata for a file."""
        metadata_file = self.metadata_path / f"{file_id}.json"
        with metadata_file.open("w") as f:
            json.dump(metadata, f)
    
    def save_vectorstore(self, vectorstore, collection_id: Optional[int] = None):
        """Save FAISS vector store to persistent storage."""
        # Determine save path
        if collection_id is not None:
            save_path = self.vectorstore_path / f"collection_{collection_id}"
        else:
            save_path = self.vectorstore_path / "main"

        save_path.mkdir(parents=True, exist_ok=True)
        
        # Save the index
        faiss.write_index(vectorstore.index, str(save_path / "index.faiss"))
        
        # Save the documents and metadata
        with (save_path / "store.pkl").open("wb") as f:
            store_data = {
                "documents": vectorstore.docstore._dict,
                "index_to_docstore_id": vectorstore.index_to_docstore_id
            }
            pickle.dump(store_data, f)
    
    def load_vectorstore(self, collection_id: Optional[int] = None):
        """Load FAISS vector store from persistent storage."""
        # Determine load path
        if collection_id is not None:
            load_path = self.vectorstore_path / f"collection_{collection_id}"
        else:
            load_path = self.vectorstore_path / "main"
            
        if not load_path.exists():
            return None
            
        try:
            # Load the index
            index = faiss.read_index(str(load_path / "index.faiss"))
            
            # Load the documents and metadata
            with (load_path / "store.pkl").open("rb") as f:
                store_data = pickle.load(f)
            
            # Reconstruct the langchain FAISS wrapper; the pickled dict must be
            # wrapped back in an InMemoryDocstore. get_embeddings_model() is
            # assumed to be defined elsewhere in the app (a sketch follows the
            # class).
            vectorstore = FAISS(
                embedding_function=get_embeddings_model(),
                index=index,
                docstore=InMemoryDocstore(store_data["documents"]),
                index_to_docstore_id=store_data["index_to_docstore_id"]
            )
            
            return vectorstore
        except Exception as e:
            st.error(f"Error loading vector store: {e}")
            return None
    
    def get_file_path(self, file_id: str, collection_id: Optional[int] = None) -> Optional[Path]:
        """Get the path to a stored file."""
        if collection_id is not None:
            file_path = self.files_path / str(collection_id) / file_id
        else:
            file_path = self.files_path / file_id
            
        return file_path if file_path.exists() else None
    
    def cleanup_old_files(self, max_age_days: int = 30):
        """Clean up files older than specified days."""
        current_time = datetime.now()
        
        for file_path in self.files_path.rglob("*"):
            if file_path.is_file():
                file_age = current_time - datetime.fromtimestamp(file_path.stat().st_mtime)
                if file_age.days > max_age_days:
                    file_path.unlink()
                    
                    # Remove associated metadata
                    metadata_file = self.metadata_path / f"{file_path.stem}.json"
                    if metadata_file.exists():
                        metadata_file.unlink()

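# load_vectorstore() above calls get_embeddings_model(), which the original
# code assumes is defined elsewhere in the app. A minimal sketch of one
# possible implementation, assuming the langchain-openai package and an
# OPENAI_API_KEY environment variable (both are assumptions, not part of the
# original code):
from langchain_openai import OpenAIEmbeddings  # assumed dependency

def get_embeddings_model():
    """Return the embeddings model used to build and reload the FAISS store."""
    return OpenAIEmbeddings(model="text-embedding-3-small")
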
# Update database.py to use persistent storage
def create_connection(storage):
    """Create database connection using persistent storage."""
    try:
        conn = sqlite3.connect(storage.get_db_path(), check_same_thread=False)
        return conn
    except Error as e:
        st.error(f"Failed to connect to database: {e}")
        return None

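# handle_document_upload() below also relies on helpers that live elsewhere:
# process_document() from the existing processing pipeline (not sketched here),
# plus insert_document() and add_document_to_collection() from database.py and
# process_chunks_to_vectorstore(). Minimal sketches of the latter three follow,
# assuming simple documents/collection_documents tables and langchain Document
# chunks (assumptions; the real schema and pipeline may differ).
def insert_document(conn, name: str, content: str) -> int:
    """Insert a document row and return its rowid (sketch; schema assumed)."""
    cur = conn.execute(
        "INSERT INTO documents (name, content) VALUES (?, ?)", (name, content)
    )
    conn.commit()
    return cur.lastrowid

def add_document_to_collection(conn, doc_id: int, collection_id: int):
    """Link a document to a collection (sketch; schema assumed)."""
    conn.execute(
        "INSERT INTO collection_documents (document_id, collection_id) VALUES (?, ?)",
        (doc_id, collection_id),
    )
    conn.commit()

def process_chunks_to_vectorstore(chunks):
    """Embed chunks into a fresh FAISS store (sketch; assumes langchain Documents)."""
    return FAISS.from_documents(chunks, get_embeddings_model())
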
# Update document handling to use persistent storage
def handle_document_upload(uploaded_files, **kwargs):
    try:
        storage = PersistentStorage()
        collection_id = kwargs.get('collection_id')
        
        for uploaded_file in uploaded_files:
            # Save file to persistent storage
            file_path = storage.save_uploaded_file(uploaded_file, collection_id)
            
            # Process document
            chunks, content = process_document(str(file_path))
            
            # Store in database
            doc_id = insert_document(st.session_state.db_conn, uploaded_file.name, content)
            
            # Add to collection if specified
            if collection_id is not None:
                add_document_to_collection(st.session_state.db_conn, doc_id, collection_id)
            
            # Update vector store
            vector_store = process_chunks_to_vectorstore(chunks)
            storage.save_vectorstore(vector_store, collection_id)
            
        return True
    except Exception as e:
        st.error(f"Error processing documents: {e}")
        return False
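
# Example wiring in a Streamlit page (hypothetical; the real app layout may
# differ, and the page function, collection_id, and widget labels here are
# illustrative only):
def render_upload_page():
    """Minimal upload flow tying together storage, database, and indexing."""
    storage = PersistentStorage()
    if "db_conn" not in st.session_state:
        st.session_state.db_conn = create_connection(storage)

    uploaded = st.file_uploader("Upload RFP documents", accept_multiple_files=True)
    if uploaded and handle_document_upload(uploaded, collection_id=1):
        st.success("Documents stored and indexed.")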