File size: 13,138 Bytes
6aa12cc
 
 
c7bcb60
 
 
2bfaee6
b028028
6aa12cc
1e04309
6aa12cc
 
 
0a899b3
 
 
 
6aa12cc
 
 
 
 
 
c7bcb60
 
 
 
 
2bfaee6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7bcb60
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b028028
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7bcb60
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6aa12cc
 
 
 
 
c7bcb60
b028028
 
 
 
 
 
c7bcb60
 
b028028
 
c7bcb60
b028028
c7bcb60
 
b028028
c7bcb60
 
cfcca52
2bfaee6
 
 
cfcca52
2bfaee6
 
 
 
 
b028028
c7bcb60
 
 
 
 
 
 
 
 
 
2bfaee6
c7bcb60
 
 
 
cfcca52
2bfaee6
c7bcb60
 
 
 
 
 
 
6aa12cc
 
 
 
 
 
c7bcb60
6aa12cc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7bcb60
6aa12cc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
b028028
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
6aa12cc
c7bcb60
6aa12cc
 
 
 
c7bcb60
b028028
 
 
 
 
 
 
 
c7bcb60
 
 
cab9dc9
1e04309
cab9dc9
b028028
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
c7bcb60
cab9dc9
 
 
 
 
 
 
 
 
 
 
 
 
 
c7bcb60
 
 
 
 
cab9dc9
 
c7bcb60
 
6aa12cc
 
 
 
 
 
c7bcb60
3e016a0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
0383b17
 
 
b028028
0383b17
 
 
 
 
b028028
0383b17
b028028
 
 
 
 
 
 
0383b17
 
 
b028028
 
0383b17
b028028
 
 
0383b17
b028028
0383b17
 
c7bcb60
 
 
6aa12cc
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
import os
import secrets
import urllib.parse
import sqlite3
import json
import time
from datetime import datetime, timezone
from flask import Flask, redirect, request, session, render_template, url_for, g, jsonify
import requests

app = Flask(__name__)
# NOTE(review): the fallback to a freshly generated random key means every
# session is invalidated on process restart — set SECRET_KEY in production.
app.secret_key = os.environ.get("SECRET_KEY", secrets.token_hex(32))

# Configure session cookies for iframe compatibility
# (SameSite=None requires Secure; needed when the app is embedded in an iframe)
app.config["SESSION_COOKIE_SAMESITE"] = "None"
app.config["SESSION_COOKIE_SECURE"] = True

# OAuth configuration from HF Space environment
OAUTH_CLIENT_ID = os.environ.get("OAUTH_CLIENT_ID")
OAUTH_CLIENT_SECRET = os.environ.get("OAUTH_CLIENT_SECRET")
OPENID_PROVIDER_URL = os.environ.get("OPENID_PROVIDER_URL", "https://huggingface.co")
SPACE_HOST = os.environ.get("SPACE_HOST", "localhost:7860")

# Cache settings
CACHE_TTL = 300  # 5 minutes
DB_PATH = "cache.db"  # SQLite file holding both the cache and jobs tables


def relative_time(iso_timestamp):
    """Convert an ISO-8601 timestamp into a compact age string.

    Args:
        iso_timestamp: ISO-8601 string (a trailing "Z" is accepted and
            treated as UTC). Falsy or unparsable input yields "".

    Returns:
        "now" for ages under a minute, then "5m", "3h", "2d", "1w", "6mo",
        "2y" style strings, or "" when the input cannot be parsed.
    """
    if not iso_timestamp:
        return ""
    try:
        dt = datetime.fromisoformat(iso_timestamp.replace("Z", "+00:00"))
        seconds = (datetime.now(timezone.utc) - dt).total_seconds()
    except (AttributeError, TypeError, ValueError):
        # AttributeError: non-string input; TypeError: naive datetime mixed
        # with aware "now"; ValueError: malformed ISO string.
        return ""

    if seconds < 60:
        return "now"
    # (upper bound in seconds, divisor, unit suffix), smallest unit first.
    _UNITS = (
        (3600, 60, "m"),          # < 1 hour  -> minutes
        (86400, 3600, "h"),       # < 1 day   -> hours
        (604800, 86400, "d"),     # < 1 week  -> days
        (2592000, 604800, "w"),   # < 30 days -> weeks
        (31536000, 2592000, "mo"),  # < 1 year -> months (30-day months)
    )
    for limit, divisor, suffix in _UNITS:
        if seconds < limit:
            return f"{int(seconds // divisor)}{suffix}"
    return f"{int(seconds // 31536000)}y"


def get_db():
    """Return the request-scoped SQLite connection, opening it on first use.

    The connection is stored on Flask's ``g`` so each request reuses a single
    handle; ``close_db`` tears it down when the app context ends.
    """
    if "db" not in g:
        connection = sqlite3.connect(DB_PATH)
        connection.row_factory = sqlite3.Row
        g.db = connection
    return g.db


@app.teardown_appcontext
def close_db(exception):
    """Close the per-request SQLite connection, if one was ever opened."""
    connection = g.pop("db", None)
    if connection is not None:
        connection.close()


def init_db():
    """Create the cache and jobs tables if they do not exist (idempotent)."""
    ddl_statements = (
        """
            CREATE TABLE IF NOT EXISTS cache (
                key TEXT PRIMARY KEY,
                value TEXT,
                expires_at REAL
            )
        """,
        """
            CREATE TABLE IF NOT EXISTS jobs (
                id TEXT PRIMARY KEY,
                type TEXT,
                user_id TEXT,
                status TEXT DEFAULT 'pending',
                progress_current INTEGER DEFAULT 0,
                progress_total INTEGER DEFAULT 0,
                progress_stage TEXT DEFAULT '',
                result TEXT,
                error TEXT,
                created_at REAL,
                updated_at REAL
            )
        """,
    )
    with sqlite3.connect(DB_PATH) as conn:
        for ddl in ddl_statements:
            conn.execute(ddl)
        conn.commit()


def cache_get(key):
    """Return the cached JSON-decoded value for ``key``, or None.

    Returns None both on a miss and when the entry has expired. Expired rows
    are deleted on read — the original left them in place forever, so the
    cache table grew without bound.
    """
    db = get_db()
    row = db.execute(
        "SELECT value, expires_at FROM cache WHERE key = ?", (key,)
    ).fetchone()
    if row is None:
        return None
    if row["expires_at"] > time.time():
        return json.loads(row["value"])
    # Stale entry: purge it so the table does not accumulate dead rows.
    db.execute("DELETE FROM cache WHERE key = ?", (key,))
    db.commit()
    return None


def cache_set(key, value, ttl=CACHE_TTL):
    """Serialize ``value`` as JSON and store it under ``key`` for ``ttl`` seconds."""
    connection = get_db()
    payload = json.dumps(value)
    connection.execute(
        "INSERT OR REPLACE INTO cache (key, value, expires_at) VALUES (?, ?, ?)",
        (key, payload, time.time() + ttl),
    )
    connection.commit()


def get_base_url(host=None):
    """Return the scheme + host prefix for building absolute URLs.

    Args:
        host: ``host[:port]`` to use; defaults to the SPACE_HOST environment
            setting, which keeps the original zero-argument call working.

    Returns:
        ``http://<host>`` for local development hosts, ``https://<host>``
        otherwise (HF Spaces serve over HTTPS).
    """
    if host is None:
        host = SPACE_HOST
    scheme = "http" if ("localhost" in host or "127.0.0.1" in host) else "https"
    return f"{scheme}://{host}"


# Import jobs module after app is defined
from jobs import (
    create_job, get_job, update_job_progress, complete_job,
    start_job_thread, run_initial_load_job, run_wake_job
)
from hf_api import HuggingFaceAPI


def get_discussions_feed(spaces, discussions_map, logged_in_user=None):
    """Build a ranked, flattened discussions feed from cached data.

    Args:
        spaces: list of space dicts, each expected to carry an "id" key.
        discussions_map: mapping of space id -> list of discussion dicts
            (HF discussions API shape — assumed from usage; confirm upstream).
        logged_in_user: username used to down-rank the user's own threads.

    Returns:
        List of flat dicts sorted by descending "score". The score is
        comments + 2 * reactions, penalized by 100 when the repo owner has
        already participated and by 1000 for the logged-in user's own
        discussions, so unanswered threads from other users surface first.
    """
    all_discussions = []

    for space in spaces:
        space_id = space.get("id", "")
        for d in discussions_map.get(space_id, []):
            owner_responded = d.get("repoOwner", {}).get("isParticipating", False)
            discussion_author = d.get("author", {}).get("name", "")
            # bool(...) so "is_own" is always True/False. The original stored
            # the short-circuited falsy operand (None or "") when no user was
            # given, which leaked an inconsistent type into the feed dicts.
            is_own_discussion = bool(
                logged_in_user
                and discussion_author.lower() == logged_in_user.lower()
            )

            score = d.get("numComments", 0) + d.get("numReactionUsers", 0) * 2
            if owner_responded:
                score -= 100
            if is_own_discussion:
                score -= 1000

            all_discussions.append({
                "space_id": space_id,
                # split("/")[-1] already returns the whole string when there
                # is no "/", so no conditional is needed.
                "space_name": space_id.split("/")[-1],
                "num": d.get("num"),
                "title": d.get("title"),
                "status": d.get("status"),
                "is_pr": d.get("isPullRequest", False),
                "author": d.get("author", {}).get("name", "unknown"),
                "author_avatar": d.get("author", {}).get("avatarUrl", ""),
                "created_at": d.get("createdAt", ""),
                "relative_time": relative_time(d.get("createdAt", "")),
                "num_comments": d.get("numComments", 0),
                "num_reactions": d.get("numReactionUsers", 0),
                "top_reactions": d.get("topReactions", []),
                "score": score,
                "owner_responded": owner_responded,
                "is_own": is_own_discussion,
                "url": f"https://huggingface.co/spaces/{space_id}/discussions/{d.get('num')}",
            })

    all_discussions.sort(key=lambda x: x["score"], reverse=True)
    return all_discussions


@app.route("/")
def index():
    """Landing page: authenticated users go straight to the dashboard."""
    if "user" not in session:
        return render_template("index.html")
    return redirect(url_for("dashboard"))


@app.route("/login")
def login():
    """Start the Hugging Face OAuth authorization-code flow."""
    if not OAUTH_CLIENT_ID:
        return "OAuth not configured. Make sure hf_oauth: true is set in your Space's README.md", 500

    # CSRF protection: random state value round-tripped via the provider and
    # checked in the callback.
    csrf_state = secrets.token_urlsafe(32)
    session["oauth_state"] = csrf_state

    query = urllib.parse.urlencode({
        "client_id": OAUTH_CLIENT_ID,
        "redirect_uri": f"{get_base_url()}/login/callback",
        "scope": "openid profile",
        "response_type": "code",
        "state": csrf_state,
    })
    return redirect(f"{OPENID_PROVIDER_URL}/oauth/authorize?{query}")


@app.route("/login/callback")
def callback():
    """Handle the OAuth redirect from the provider.

    Validates the CSRF state, exchanges the authorization code for tokens,
    fetches the user profile, stores both in the session, and kicks off the
    background initial-load job before redirecting to the loading page.
    """
    state = request.args.get("state")
    if state != session.get("oauth_state"):
        return "Invalid state parameter", 400

    code = request.args.get("code")
    if not code:
        error = request.args.get("error", "Unknown error")
        return f"Authorization failed: {error}", 400

    redirect_uri = f"{get_base_url()}/login/callback"
    token_url = f"{OPENID_PROVIDER_URL}/oauth/token"

    # timeout added: requests has no default timeout, so a slow or
    # unreachable provider would hang this worker indefinitely.
    token_response = requests.post(
        token_url,
        data={
            "client_id": OAUTH_CLIENT_ID,
            "client_secret": OAUTH_CLIENT_SECRET,
            "code": code,
            "grant_type": "authorization_code",
            "redirect_uri": redirect_uri,
        },
        headers={
            "Content-Type": "application/x-www-form-urlencoded",
        },
        timeout=15,
    )

    if token_response.status_code != 200:
        return f"Token exchange failed: {token_response.text}", 400

    tokens = token_response.json()
    access_token = tokens.get("access_token")
    if not access_token:
        # Defensive: a 200 response without a token would otherwise surface
        # later as a confusing userinfo failure.
        return "Token exchange failed: no access_token in response", 400

    userinfo_url = f"{OPENID_PROVIDER_URL}/oauth/userinfo"
    userinfo_response = requests.get(
        userinfo_url,
        headers={"Authorization": f"Bearer {access_token}"},
        timeout=15,
    )

    if userinfo_response.status_code != 200:
        return f"Failed to get user info: {userinfo_response.text}", 400

    user_info = userinfo_response.json()

    session["user"] = {
        "sub": user_info.get("sub"),
        "username": user_info.get("preferred_username"),
        "name": user_info.get("name"),
        "email": user_info.get("email"),
        "avatar_url": user_info.get("picture"),
    }
    session["access_token"] = access_token
    session.pop("oauth_state", None)

    # Start background job to load data
    username = session["user"]["username"]
    job_id = create_job("initial_load", username)
    session["loading_job_id"] = job_id

    start_job_thread(run_initial_load_job, job_id, username, access_token)

    return redirect(url_for("loading"))


@app.route("/loading")
def loading():
    """Progress page shown while the initial data load job runs."""
    if "user" not in session:
        return redirect(url_for("index"))

    job_id = session.get("loading_job_id")
    if not job_id:
        # Nothing in flight — go straight to the dashboard.
        return redirect(url_for("dashboard"))

    current = get_job(job_id)
    finished = current is not None and current["status"] == "completed"
    if finished:
        session.pop("loading_job_id", None)
        return redirect(url_for("dashboard"))

    return render_template("loading.html", job_id=job_id)


@app.route("/api/job/<job_id>")
def get_job_status(job_id):
    """Return a background job's status as JSON (polled by the loading page).

    NOTE(review): there is no authentication or ownership check here — any
    caller who knows a job id can read its status; confirm that is intended.
    """
    record = get_job(job_id)
    if record:
        return jsonify(record)
    return jsonify({"error": "Job not found"}), 404


@app.route("/dashboard")
def dashboard():
    """Main view: the user's spaces plus a ranked feed of their discussions."""
    if "user" not in session:
        return redirect(url_for("index"))

    # A background load may still be running — keep showing the loading page
    # until it completes or fails.
    pending_job_id = session.get("loading_job_id")
    if pending_job_id:
        pending = get_job(pending_job_id)
        if pending and pending["status"] not in ("completed", "failed"):
            return redirect(url_for("loading"))
        session.pop("loading_job_id", None)

    username = session["user"]["username"]
    token = session.get("access_token")
    sort_by = request.args.get("sort", "score")
    filter_status = request.args.get("status", "open")

    spaces = cache_get(f"spaces:{username}")
    if spaces is None:
        # Cache miss: kick off a reload and park the user on the loading page.
        reload_job_id = create_job("initial_load", username)
        session["loading_job_id"] = reload_job_id
        start_job_thread(run_initial_load_job, reload_job_id, username, token)
        return redirect(url_for("loading"))

    # Gather whatever per-space discussion lists are still cached.
    discussions_map = {}
    for space in spaces:
        space_id = space.get("id", "")
        cached = cache_get(f"discussions:{space_id}")
        if cached is not None:
            discussions_map[space_id] = cached

    discussions = get_discussions_feed(spaces, discussions_map, logged_in_user=username)

    if filter_status == "open":
        discussions = [d for d in discussions if d["status"] == "open"]
    elif filter_status == "closed":
        discussions = [d for d in discussions if d["status"] in ("closed", "merged")]

    # Map the sort query-param onto the feed field; "score" is the fallback.
    sort_field = {"comments": "num_comments", "reactions": "num_reactions"}.get(
        sort_by, "score"
    )
    discussions.sort(key=lambda d: d[sort_field], reverse=True)

    return render_template(
        "dashboard.html",
        user=session["user"],
        spaces=spaces,
        discussions=discussions,
        sort_by=sort_by,
        filter_status=filter_status,
    )


@app.route("/logout")
def logout():
    """Drop all session state (user, token, job ids) and return to landing."""
    session.clear()
    return redirect(url_for("index"))


@app.route("/api/refresh", methods=["POST"])
def force_refresh():
    """Invalidate cached data and start a fresh background load.

    Note: the discussions/space_detail patterns are not scoped to the current
    user, mirroring the cache layout (those keys are per-space, not per-user).
    """
    if "user" not in session:
        return jsonify({"error": "Not authenticated"}), 401

    username = session["user"]["username"]
    token = session.get("access_token")
    if not token:
        return jsonify({"error": "No access token"}), 401

    # Purge all cache entries the reload job will repopulate.
    patterns = (f"spaces:{username}%", f"discussions:%", f"space_detail:%")
    db = get_db()
    for pattern in patterns:
        db.execute("DELETE FROM cache WHERE key LIKE ?", (pattern,))
    db.commit()

    # Reload everything in the background; the client polls the job id.
    job_id = create_job("initial_load", username)
    start_job_thread(run_initial_load_job, job_id, username, token)

    return jsonify({"job_id": job_id})


@app.route("/api/wake-all", methods=["POST"])
def wake_all_spaces():
    """Queue a background job that wakes every sleeping space."""
    if "user" not in session:
        return jsonify({"error": "Not authenticated"}), 401

    username = session["user"]["username"]
    token = session.get("access_token")
    if not token:
        return jsonify({"error": "No access token"}), 401

    cached_spaces = cache_get(f"spaces:{username}")
    if not cached_spaces:
        return jsonify({"error": "No spaces cached, please refresh"}), 400

    # Collect ids of spaces whose runtime stage reports SLEEPING.
    asleep = []
    for entry in cached_spaces:
        stage = entry.get("runtime", {}).get("stage", "")
        if stage.upper() == "SLEEPING":
            asleep.append(entry.get("id", ""))

    if not asleep:
        return jsonify({"job_id": None, "total": 0, "message": "No sleeping spaces"})

    job_id = create_job("wake_spaces", username)
    start_job_thread(run_wake_job, job_id, username, token, asleep)

    return jsonify({"job_id": job_id, "total": len(asleep)})


# Initialize database on startup (runs at import time so the tables exist
# before the first request, including under a WSGI server).
init_db()

if __name__ == "__main__":
    # NOTE(review): debug=True enables the Werkzeug debugger and reloader —
    # confirm this entry point is only used for local development.
    app.run(host="0.0.0.0", port=7860, debug=True)