File size: 12,038 Bytes
1acc8d7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
"""
ResearchRadar — SQLite wrapper with migrations.

All write operations use parameterised queries exclusively.
Never format SQL strings with user or API data.
"""

from __future__ import annotations

import functools
import json
import logging
import os
import sqlite3
import time
from datetime import date, datetime
from typing import List, Optional

from app.core.config import DB_VERSION
from app.core.models import Digest, Paper

logger = logging.getLogger(__name__)

# ---------------------------------------------------------------------------
# Schema DDL (Version 1)
# ---------------------------------------------------------------------------

# Baseline schema, executed via Connection.executescript() in initialize().
# Every statement uses CREATE TABLE IF NOT EXISTS, so re-running it against
# an existing database is a no-op; later schema changes are applied as
# versioned steps in run_migrations() instead of editing this DDL.
_SCHEMA_V1 = """
CREATE TABLE IF NOT EXISTS meta (
    key   TEXT PRIMARY KEY,
    value TEXT NOT NULL
);

CREATE TABLE IF NOT EXISTS papers (
    paper_id        TEXT PRIMARY KEY,
    source          TEXT NOT NULL,
    title           TEXT NOT NULL,
    abstract        TEXT NOT NULL,
    summary_llm     TEXT,
    authors         TEXT NOT NULL,
    published_date  TEXT NOT NULL,
    categories      TEXT NOT NULL,
    app_category    TEXT NOT NULL,
    pdf_url         TEXT,
    abstract_url    TEXT NOT NULL,
    citation_count  INTEGER DEFAULT 0,
    relevance_score REAL    DEFAULT 0.0,
    composite_score REAL    DEFAULT 0.0,
    fetched_at      TEXT NOT NULL,
    is_bookmarked   INTEGER DEFAULT 0,
    is_read         INTEGER DEFAULT 0
);

CREATE TABLE IF NOT EXISTS digests (
    digest_id     TEXT PRIMARY KEY,
    week_start    TEXT NOT NULL,
    generated_at  TEXT NOT NULL,
    total_fetched INTEGER,
    total_ranked  INTEGER,
    fetch_errors  TEXT,
    videos_json   TEXT
);

CREATE TABLE IF NOT EXISTS digest_papers (
    digest_id  TEXT NOT NULL,
    paper_id   TEXT NOT NULL,
    rank_order INTEGER NOT NULL,
    PRIMARY KEY (digest_id, paper_id),
    FOREIGN KEY (digest_id) REFERENCES digests(digest_id),
    FOREIGN KEY (paper_id)  REFERENCES papers(paper_id)
);
"""

# ---------------------------------------------------------------------------
# Connection
# ---------------------------------------------------------------------------

# Retry policy used by the _retry_on_locked decorator below: up to
# _DB_RETRY_MAX attempts, sleeping _DB_RETRY_SLEEP seconds between them,
# whenever SQLite raises 'database is locked'.
_DB_RETRY_MAX = 3
_DB_RETRY_SLEEP = 0.5


def get_connection(db_path: str) -> sqlite3.Connection:
    """Open *db_path* and return a connection configured for this app.

    Rows come back as :class:`sqlite3.Row` mappings (column access by
    name), write-ahead logging is requested for better reader/writer
    concurrency, and foreign-key enforcement is switched on.
    """
    connection = sqlite3.connect(db_path)
    connection.row_factory = sqlite3.Row
    for pragma in ('PRAGMA journal_mode=WAL', 'PRAGMA foreign_keys=ON'):
        connection.execute(pragma)
    return connection


def _retry_on_locked(func):
    """Decorator: retry *func* when SQLite reports 'database is locked'.

    Up to ``_DB_RETRY_MAX`` attempts are made, sleeping
    ``_DB_RETRY_SLEEP`` seconds between attempts.  Any other
    ``sqlite3.OperationalError`` — or a lock that persists through the
    final attempt — is re-raised unchanged.
    """
    # functools.wraps preserves func's __name__/__doc__ so log records,
    # tracebacks and introspection show the real function, not 'wrapper'.
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        for attempt in range(_DB_RETRY_MAX):
            try:
                return func(*args, **kwargs)
            except sqlite3.OperationalError as exc:
                if 'database is locked' in str(exc) and attempt < _DB_RETRY_MAX - 1:
                    logger.warning('DB locked — retrying (%d/%d)', attempt + 1, _DB_RETRY_MAX)
                    time.sleep(_DB_RETRY_SLEEP)
                else:
                    raise
    return wrapper


# ---------------------------------------------------------------------------
# Initialisation & Migrations
# ---------------------------------------------------------------------------

def initialize(db_path: str) -> None:
    """Ensure the schema exists and the database is at DB_VERSION.

    Creates all tables from the V1 DDL (idempotent), then either stamps
    a fresh database with the current version or runs the pending
    migrations on an older one.
    """
    conn = get_connection(db_path)
    try:
        conn.executescript(_SCHEMA_V1)
        version_row = conn.execute(
            "SELECT value FROM meta WHERE key = 'db_version'"
        ).fetchone()
        if version_row is not None:
            stored_version = int(version_row['value'])
            if stored_version < DB_VERSION:
                run_migrations(conn, stored_version, DB_VERSION)
        else:
            # Fresh database: record the current version.
            conn.execute(
                "INSERT INTO meta (key, value) VALUES ('db_version', ?)",
                (str(DB_VERSION),),
            )
        conn.commit()
    finally:
        conn.close()


def run_migrations(conn: sqlite3.Connection, current: int, target: int) -> None:
    """Apply sequential migrations from *current* to *target* version.

    Each step is idempotent: an ALTER TABLE that fails because the
    column already exists is treated as already applied.  The stored
    db_version is bumped to *target* at the end (commit is the
    caller's responsibility).
    """
    logger.info('Migrating DB from v%d to v%d', current, target)

    def _add_column(ddl: str, message: str) -> None:
        # Tolerate re-runs: 'duplicate column name' means this step is done.
        try:
            conn.execute(ddl)
            logger.info(message)
        except sqlite3.OperationalError as exc:
            if 'duplicate column name' not in str(exc).lower():
                raise

    if current < 2:
        _add_column(
            "ALTER TABLE papers ADD COLUMN summary_llm TEXT",
            'V2 Migration: Added summary_llm column to papers table.',
        )

    if current < 3:
        _add_column(
            "ALTER TABLE digests ADD COLUMN videos_json TEXT",
            'V3 Migration: Added videos_json column to digests table.',
        )

    conn.execute(
        "UPDATE meta SET value = ? WHERE key = 'db_version'",
        (str(target),),
    )


# ---------------------------------------------------------------------------
# Paper helpers
# ---------------------------------------------------------------------------

def _paper_to_row(paper: Paper) -> tuple:
    """Flatten a Paper into the positional tuple used by the 17-column
    INSERT in save_digest().

    The order here must match that INSERT's column list exactly.
    Lists are stored as JSON text, dates as ISO-8601 strings, and
    booleans as 0/1 integers.
    """
    return (
        paper.paper_id,
        paper.source,
        paper.title,
        paper.abstract,
        paper.summary_llm,
        json.dumps(paper.authors),           # list -> JSON text
        paper.published_date.isoformat(),    # date -> ISO string
        json.dumps(paper.categories),        # list -> JSON text
        paper.app_category,
        paper.pdf_url,
        paper.abstract_url,
        paper.citation_count,
        paper.relevance_score,
        paper.composite_score,
        paper.fetched_at.isoformat(),        # datetime -> ISO string
        int(paper.is_bookmarked),            # bool -> 0/1
        int(paper.is_read),                  # bool -> 0/1
    )


def _row_to_paper(row: sqlite3.Row) -> Paper:
    """Hydrate a Paper model from a papers-table row.

    Inverse of _paper_to_row(): JSON text columns are decoded back to
    lists, ISO strings back to date/datetime, and 0/1 flags to bools.
    """
    return Paper(
        paper_id=row['paper_id'],
        source=row['source'],
        title=row['title'],
        abstract=row['abstract'],
        summary_llm=row['summary_llm'],
        authors=json.loads(row['authors']),
        published_date=date.fromisoformat(row['published_date']),
        categories=json.loads(row['categories']),
        app_category=row['app_category'],
        pdf_url=row['pdf_url'],
        abstract_url=row['abstract_url'],
        citation_count=row['citation_count'],
        relevance_score=row['relevance_score'],
        composite_score=row['composite_score'],
        fetched_at=datetime.fromisoformat(row['fetched_at']),
        is_bookmarked=bool(row['is_bookmarked']),
        is_read=bool(row['is_read']),
    )


# ---------------------------------------------------------------------------
# CRUD Operations
# ---------------------------------------------------------------------------

@_retry_on_locked
def save_digest(db_path: str, digest: Digest) -> None:
    """Persist *digest* and all of its papers in a single transaction.

    Rows are written with INSERT OR REPLACE, so saving the same digest
    twice is idempotent.  On any failure the transaction is rolled
    back and the exception re-raised.
    """
    conn = get_connection(db_path)
    try:
        conn.execute('BEGIN')

        digest_values = (
            digest.digest_id,
            digest.week_start.isoformat(),
            digest.generated_at.isoformat(),
            digest.total_fetched,
            digest.total_ranked,
            json.dumps(digest.fetch_errors),
            json.dumps(digest.videos),
        )
        conn.execute(
            """INSERT OR REPLACE INTO digests
               (digest_id, week_start, generated_at, total_fetched,
                total_ranked, fetch_errors, videos_json)
               VALUES (?, ?, ?, ?, ?, ?, ?)""",
            digest_values,
        )

        # Papers get a global rank 1..N in category iteration order.
        rank = 0
        for paper_list in digest.papers.values():
            for paper in paper_list:
                conn.execute(
                    """INSERT OR REPLACE INTO papers
                       (paper_id, source, title, abstract, summary_llm, authors,
                        published_date, categories, app_category, pdf_url,
                        abstract_url, citation_count, relevance_score,
                        composite_score, fetched_at, is_bookmarked, is_read)
                       VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)""",
                    _paper_to_row(paper),
                )
                rank += 1
                conn.execute(
                    """INSERT OR REPLACE INTO digest_papers
                       (digest_id, paper_id, rank_order) VALUES (?, ?, ?)""",
                    (digest.digest_id, paper.paper_id, rank),
                )

        conn.commit()
        logger.info('Saved digest %s with %d papers', digest.digest_id, rank)
    except Exception:
        conn.rollback()
        logger.exception('Failed to save digest — rolled back')
        raise
    finally:
        conn.close()


@_retry_on_locked
def get_latest_digest(db_path: str) -> Optional[Digest]:
    """Load the most recently generated digest, or None if none exist.

    Papers linked to the digest are loaded in rank order and grouped by
    app category into ``digest.papers``.
    """
    conn = get_connection(db_path)
    try:
        row = conn.execute(
            'SELECT * FROM digests ORDER BY generated_at DESC LIMIT 1'
        ).fetchone()
        if row is None:
            return None

        # BUG FIX: sqlite3.Row has no .get() method — the previous
        # row.get('videos_json', '[]') raised AttributeError.  Check
        # keys() so a pre-V3 database without the column still loads,
        # and fall back to '[]' when the column is NULL.
        videos_raw = row['videos_json'] if 'videos_json' in row.keys() else None

        digest = Digest(
            digest_id=row['digest_id'],
            week_start=date.fromisoformat(row['week_start']),
            generated_at=datetime.fromisoformat(row['generated_at']),
            total_fetched=row['total_fetched'],
            total_ranked=row['total_ranked'],
            fetch_errors=json.loads(row['fetch_errors'] or '[]'),
            videos=json.loads(videos_raw or '[]'),
        )

        # Load papers linked to this digest in their stored rank order.
        paper_rows = conn.execute(
            """SELECT p.* FROM papers p
               INNER JOIN digest_papers dp ON p.paper_id = dp.paper_id
               WHERE dp.digest_id = ?
               ORDER BY dp.rank_order""",
            (digest.digest_id,),
        ).fetchall()

        papers_by_cat: dict = {}
        for pr in paper_rows:
            paper = _row_to_paper(pr)
            papers_by_cat.setdefault(paper.app_category, []).append(paper)
        digest.papers = papers_by_cat
        return digest
    finally:
        conn.close()


@_retry_on_locked
def get_papers(db_path: str, category: str, limit: int = 10) -> List[Paper]:
    """Return up to *limit* papers in *category*, best composite score first."""
    connection = get_connection(db_path)
    try:
        cursor = connection.execute(
            """SELECT * FROM papers
               WHERE app_category = ?
               ORDER BY composite_score DESC
               LIMIT ?""",
            (category, limit),
        )
        return [_row_to_paper(record) for record in cursor.fetchall()]
    finally:
        connection.close()


@_retry_on_locked
def toggle_bookmark(db_path: str, paper_id: str) -> bool:
    """Flip a paper's bookmark flag and return the resulting state.

    Returns False when *paper_id* does not exist (the UPDATE matches
    no row and the follow-up SELECT finds nothing).
    """
    connection = get_connection(db_path)
    try:
        # Toggle in SQL so the flip is atomic within this statement.
        connection.execute(
            """UPDATE papers
               SET is_bookmarked = CASE WHEN is_bookmarked = 0 THEN 1 ELSE 0 END
               WHERE paper_id = ?""",
            (paper_id,),
        )
        connection.commit()
        result = connection.execute(
            'SELECT is_bookmarked FROM papers WHERE paper_id = ?',
            (paper_id,),
        ).fetchone()
        if result is None:
            return False
        return bool(result['is_bookmarked'])
    finally:
        connection.close()


@_retry_on_locked
def mark_read(db_path: str, paper_id: str) -> None:
    """Set the is_read flag on *paper_id* (no-op for unknown ids)."""
    connection = get_connection(db_path)
    try:
        connection.execute(
            'UPDATE papers SET is_read = 1 WHERE paper_id = ?',
            (paper_id,),
        )
        connection.commit()
    finally:
        connection.close()


@_retry_on_locked
def get_bookmarked_papers(db_path: str) -> List[Paper]:
    """Return every bookmarked paper, best composite score first."""
    connection = get_connection(db_path)
    try:
        cursor = connection.execute(
            """SELECT * FROM papers
               WHERE is_bookmarked = 1
               ORDER BY composite_score DESC"""
        )
        return [_row_to_paper(record) for record in cursor.fetchall()]
    finally:
        connection.close()