File size: 11,243 Bytes
982ec2b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
"""
Database module for the Construction Intelligence Hub.
Handles SQLite database creation, CRUD operations, and queries.
"""

import sqlite3
import json
import os
from datetime import datetime
from typing import Optional, List, Dict, Any
from contextlib import contextmanager

DATABASE_PATH = os.getenv("DATABASE_PATH", "/data/construction_hub.db")


def get_db_path() -> str:
    """Return the SQLite database file path (DATABASE_PATH env var or default)."""
    return DATABASE_PATH


@contextmanager
def get_connection():
    """Context manager yielding a configured SQLite connection.

    Ensures the database's parent directory exists, enables WAL journaling
    and foreign-key enforcement, and sets ``sqlite3.Row`` as the row factory
    so callers can access columns by name. Commits on clean exit, rolls back
    (and re-raises) on any exception, and always closes the connection.

    Yields:
        sqlite3.Connection: an open connection ready for queries.
    """
    db_path = get_db_path()
    # os.path.dirname returns "" for a bare filename (e.g. DATABASE_PATH set
    # to "construction_hub.db"), and os.makedirs("") raises FileNotFoundError
    # — only create the directory when the path actually has one.
    parent_dir = os.path.dirname(db_path)
    if parent_dir:
        os.makedirs(parent_dir, exist_ok=True)
    conn = sqlite3.connect(db_path)
    conn.row_factory = sqlite3.Row
    conn.execute("PRAGMA journal_mode=WAL")
    conn.execute("PRAGMA foreign_keys=ON")
    try:
        yield conn
        conn.commit()
    except Exception:
        conn.rollback()
        raise
    finally:
        conn.close()


def init_database():
    """Initialize the database schema.

    Idempotent: uses CREATE TABLE/INDEX IF NOT EXISTS, so it is safe to call
    on every startup. Creates two tables:

    - ``emails``: one row per ingested email, including AI-extracted fields
      (project_name, document_type, status, ...) and a ``processing_status``
      lifecycle column ('pending' -> 'processed' / 'failed').
    - ``attachments``: child rows keyed by ``email_id`` with ON DELETE CASCADE,
      so deleting an email removes its attachments.

    Also creates indexes on the columns used by the dashboard filters and
    the message-id dedup lookup.
    """
    with get_connection() as conn:
        # executescript runs the whole multi-statement DDL batch at once.
        conn.executescript("""
            CREATE TABLE IF NOT EXISTS emails (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                message_id TEXT UNIQUE NOT NULL,
                subject TEXT,
                sender TEXT,
                recipients TEXT,
                date_received TIMESTAMP,
                date_processed TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                body_text TEXT,
                body_html TEXT,
                project_name TEXT,
                document_type TEXT,
                document_reference_number TEXT,
                status TEXT,
                consultant_comments TEXT,
                action_required TEXT,
                assigned_discipline TEXT,
                priority TEXT DEFAULT 'Normal',
                ai_raw_response TEXT,
                processing_status TEXT DEFAULT 'pending',
                processing_error TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            );

            CREATE TABLE IF NOT EXISTS attachments (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                email_id INTEGER NOT NULL,
                filename TEXT NOT NULL,
                mime_type TEXT,
                file_size INTEGER,
                file_path TEXT,
                processed_by_ai BOOLEAN DEFAULT 0,
                processing_error TEXT,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (email_id) REFERENCES emails(id) ON DELETE CASCADE
            );

            CREATE INDEX IF NOT EXISTS idx_emails_message_id ON emails(message_id);
            CREATE INDEX IF NOT EXISTS idx_emails_document_type ON emails(document_type);
            CREATE INDEX IF NOT EXISTS idx_emails_status ON emails(status);
            CREATE INDEX IF NOT EXISTS idx_emails_date_received ON emails(date_received);
            CREATE INDEX IF NOT EXISTS idx_emails_processing_status ON emails(processing_status);
            CREATE INDEX IF NOT EXISTS idx_attachments_email_id ON attachments(email_id);
        """)


def email_exists(message_id: str) -> bool:
    """Return True if an email with this message_id is already stored."""
    with get_connection() as conn:
        row = conn.execute(
            "SELECT 1 FROM emails WHERE message_id = ?", (message_id,)
        ).fetchone()
    return row is not None


def insert_email(email_data: Dict[str, Any]) -> int:
    """Insert a new email record and return its autoincremented row ID.

    ``message_id`` is required; all other fields are optional and default
    to NULL. The row starts in processing_status 'pending'.
    """
    optional_columns = (
        "subject",
        "sender",
        "recipients",
        "date_received",
        "body_text",
        "body_html",
    )
    row_values = (
        email_data["message_id"],
        *(email_data.get(column) for column in optional_columns),
        "pending",
    )
    with get_connection() as conn:
        cursor = conn.execute("""
            INSERT INTO emails (
                message_id, subject, sender, recipients, date_received,
                body_text, body_html, processing_status
            ) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
        """, row_values)
        return cursor.lastrowid


def update_email_ai_results(email_id: int, ai_results: Dict[str, Any]):
    """Write AI-extracted fields onto an email row and mark it processed.

    The full ``ai_results`` dict is also serialized into ai_raw_response
    for auditing/debugging.
    """
    extracted_fields = (
        "project_name",
        "document_type",
        "document_reference_number",
        "status",
        "consultant_comments",
        "action_required",
        "assigned_discipline",
    )
    bind_values = [ai_results.get(field) for field in extracted_fields]
    bind_values.append(ai_results.get("priority", "Normal"))  # default priority
    bind_values.append(json.dumps(ai_results))
    bind_values.append(email_id)
    with get_connection() as conn:
        conn.execute("""
            UPDATE emails SET
                project_name = ?,
                document_type = ?,
                document_reference_number = ?,
                status = ?,
                consultant_comments = ?,
                action_required = ?,
                assigned_discipline = ?,
                priority = ?,
                ai_raw_response = ?,
                processing_status = 'processed',
                updated_at = CURRENT_TIMESTAMP
            WHERE id = ?
        """, bind_values)


def mark_email_failed(email_id: int, error_message: str):
    """Flag an email row as failed and record the error text."""
    update_args = (error_message, email_id)
    with get_connection() as conn:
        conn.execute("""
            UPDATE emails SET
                processing_status = 'failed',
                processing_error = ?,
                updated_at = CURRENT_TIMESTAMP
            WHERE id = ?
        """, update_args)


def insert_attachment(email_id: int, attachment_data: Dict[str, Any]) -> int:
    """Store one attachment row for an email and return its new row ID.

    ``filename`` is required; mime_type, file_size and file_path are
    optional and default to NULL.
    """
    row_values = (
        email_id,
        attachment_data["filename"],
        attachment_data.get("mime_type"),
        attachment_data.get("file_size"),
        attachment_data.get("file_path"),
    )
    with get_connection() as conn:
        cursor = conn.execute("""
            INSERT INTO attachments (
                email_id, filename, mime_type, file_size, file_path
            ) VALUES (?, ?, ?, ?, ?)
        """, row_values)
        return cursor.lastrowid


def mark_attachment_processed(attachment_id: int, success: bool, error: Optional[str] = None):
    """Mark an attachment as processed or failed.

    Args:
        attachment_id: Primary key of the attachments row to update.
        success: True stores processed_by_ai = 1, False stores 0.
        error: Optional error text stored in processing_error; passing
            None clears the column to NULL.
    """
    # Note: annotation fixed from the implicit-Optional `error: str = None`
    # to `Optional[str]` per PEP 484; behavior is unchanged.
    with get_connection() as conn:
        conn.execute("""
            UPDATE attachments SET
                processed_by_ai = ?,
                processing_error = ?
            WHERE id = ?
        """, (1 if success else 0, error, attachment_id))


def get_all_emails(
    document_type: Optional[str] = None,
    status: Optional[str] = None,
    discipline: Optional[str] = None,
    search_query: Optional[str] = None,
    limit: int = 500,
    offset: int = 0
) -> List[Dict[str, Any]]:
    """Return processed emails as dicts, newest first, with optional filters.

    Equality filters (document_type, status, discipline) and a substring
    search over subject / reference number / comments can be combined.
    Results are paginated via limit/offset. All values are bound as SQL
    parameters; only fixed column names are interpolated.
    """
    fragments = ["SELECT * FROM emails WHERE processing_status = 'processed'"]
    bind_params: List[Any] = []

    # Exact-match filters: (column, value) pairs applied only when truthy.
    for column, value in (
        ("document_type", document_type),
        ("status", status),
        ("assigned_discipline", discipline),
    ):
        if value:
            fragments.append(f" AND {column} = ?")
            bind_params.append(value)

    if search_query:
        fragments.append(
            " AND (subject LIKE ? OR document_reference_number LIKE ? OR consultant_comments LIKE ?)"
        )
        pattern = f"%{search_query}%"
        bind_params += [pattern, pattern, pattern]

    fragments.append(" ORDER BY date_received DESC LIMIT ? OFFSET ?")
    bind_params += [limit, offset]

    with get_connection() as conn:
        records = conn.execute("".join(fragments), bind_params).fetchall()
    return [dict(record) for record in records]


def get_email_by_id(email_id: int) -> Optional[Dict[str, Any]]:
    """Fetch one email row by primary key as a dict, or None if absent."""
    with get_connection() as conn:
        record = conn.execute(
            "SELECT * FROM emails WHERE id = ?", (email_id,)
        ).fetchone()
    if record is None:
        return None
    return dict(record)


def get_attachments_for_email(email_id: int) -> List[Dict[str, Any]]:
    """Return every attachment row for the given email as a list of dicts."""
    with get_connection() as conn:
        records = conn.execute(
            "SELECT * FROM attachments WHERE email_id = ?", (email_id,)
        ).fetchall()
    return [dict(record) for record in records]


def get_dashboard_stats() -> Dict[str, Any]:
    """Collect aggregate counts for the dashboard.

    Returns a dict with keys: total_processed, by_document_type, by_status,
    by_discipline, pending, failed, and recent_activity (processed emails
    per day over the last 7 days, keyed by date).
    """
    with get_connection() as conn:
        def one_count(sql: str) -> int:
            # Run a single-row COUNT(*) query and return the scalar.
            return conn.execute(sql).fetchone()["cnt"]

        def count_by(sql: str, label: str) -> Dict[Any, int]:
            # Run a GROUP BY query and map each group label to its count.
            return {row[label]: row["cnt"] for row in conn.execute(sql).fetchall()}

        by_type_sql = """
            SELECT document_type, COUNT(*) as cnt
            FROM emails WHERE processing_status = 'processed'
            GROUP BY document_type
        """
        by_status_sql = """
            SELECT status, COUNT(*) as cnt
            FROM emails WHERE processing_status = 'processed'
            GROUP BY status
        """
        by_discipline_sql = """
            SELECT assigned_discipline, COUNT(*) as cnt
            FROM emails WHERE processing_status = 'processed'
            GROUP BY assigned_discipline
        """
        activity_sql = """
            SELECT DATE(date_received) as day, COUNT(*) as cnt
            FROM emails WHERE processing_status = 'processed'
            AND date_received >= DATE('now', '-7 days')
            GROUP BY DATE(date_received)
            ORDER BY day
        """

        return {
            "total_processed": one_count(
                "SELECT COUNT(*) as cnt FROM emails WHERE processing_status = 'processed'"
            ),
            "by_document_type": count_by(by_type_sql, "document_type"),
            "by_status": count_by(by_status_sql, "status"),
            "by_discipline": count_by(by_discipline_sql, "assigned_discipline"),
            "pending": one_count(
                "SELECT COUNT(*) as cnt FROM emails WHERE processing_status = 'pending'"
            ),
            "failed": one_count(
                "SELECT COUNT(*) as cnt FROM emails WHERE processing_status = 'failed'"
            ),
            "recent_activity": count_by(activity_sql, "day"),
        }


def get_all_emails_for_export(
    document_type: Optional[str] = None,
    status: Optional[str] = None,
    discipline: Optional[str] = None,
) -> List[Dict[str, Any]]:
    """Return ALL processed emails (export columns only) for CSV/Excel.

    Unlike get_all_emails this applies no LIMIT/OFFSET; only the optional
    equality filters are supported. Newest emails come first.
    """
    fragments = ["""
        SELECT
            date_received, subject, sender, project_name, document_type,
            document_reference_number, status, assigned_discipline,
            consultant_comments, action_required, priority
        FROM emails
        WHERE processing_status = 'processed'
    """]
    bind_params: List[Any] = []

    # Same (column, value) filter pattern as get_all_emails.
    for column, value in (
        ("document_type", document_type),
        ("status", status),
        ("assigned_discipline", discipline),
    ):
        if value:
            fragments.append(f" AND {column} = ?")
            bind_params.append(value)

    fragments.append(" ORDER BY date_received DESC")

    with get_connection() as conn:
        records = conn.execute("".join(fragments), bind_params).fetchall()
    return [dict(record) for record in records]