Nymbo committed on
Commit
22b3c95
·
verified ·
1 Parent(s): 71b8a5a

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +698 -0
app.py ADDED
@@ -0,0 +1,698 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Reddit Finder: Discover new subreddits and grow your outreach list.
2
+
3
+ This app connects to Reddit through PRAW and lets you search for subreddits,
4
+ apply filters, and curate a master list that you can export or copy into other
5
+ tools. It is designed as a companion to the Reddit Poster workflow.
6
+ """
7
+
8
from __future__ import annotations

from dataclasses import dataclass
from datetime import datetime, timezone
from typing import Iterable, List, Optional

import gradio as gr

try:  # pragma: no cover - handled gracefully in the UI
    import praw  # type: ignore
except Exception:  # pragma: no cover
    praw = None  # type: ignore

try:  # pragma: no cover
    from prawcore import Forbidden, OAuthException, ResponseException  # type: ignore
except Exception:  # pragma: no cover
    Forbidden = OAuthException = ResponseException = None  # type: ignore
25
+
26
+
27
@dataclass(slots=True)
class SearchFilters:
    """Client-side filter settings applied to subreddit search results.

    ``None`` (or ``False`` for the booleans) means "no constraint" for the
    corresponding field. All matching logic lives in ``SubredditInfo.matches``.
    """

    min_subscribers: Optional[int] = None  # inclusive lower bound on subscriber count
    max_subscribers: Optional[int] = None  # inclusive upper bound on subscriber count
    min_active_users: Optional[int] = None  # minimum active users; unknown counts fail this check
    include_nsfw: bool = False  # keep over-18 communities when True
    require_images: bool = False  # drop subreddits whose allow_images flag is False
    require_videos: bool = False  # drop subreddits whose allow_videos flag is False
    language_whitelist: Optional[set[str]] = None  # lowercase language codes to keep
    include_keywords: Optional[List[str]] = None  # every keyword must appear in the keyword text
    exclude_keywords: Optional[List[str]] = None  # any keyword occurrence rejects the subreddit
    skip_existing: bool = True  # hide subreddits already on the master list
39
+
40
+
41
+ @dataclass(slots=True)
42
+ class SubredditInfo:
43
+ name: str
44
+ title: str
45
+ subscribers: int
46
+ active_user_count: Optional[int]
47
+ over18: bool
48
+ allow_images: bool
49
+ allow_videos: bool
50
+ language: Optional[str]
51
+ created_utc: float
52
+ public_description: str
53
+ primary_topic: Optional[str]
54
+ url: str
55
+
56
+ @property
57
+ def created_iso(self) -> str:
58
+ return datetime.utcfromtimestamp(self.created_utc).strftime("%Y-%m-%d")
59
+
60
+ @property
61
+ def keyword_text(self) -> str:
62
+ return " ".join(
63
+ filter(
64
+ none_or_nonempty,
65
+ [
66
+ self.name,
67
+ self.title,
68
+ self.public_description,
69
+ self.primary_topic or "",
70
+ ],
71
+ ),
72
+ ).lower()
73
+
74
+ def matches(self, filters: SearchFilters, existing_norm: set[str]) -> bool:
75
+ if filters.min_subscribers is not None and self.subscribers < filters.min_subscribers:
76
+ return False
77
+ if filters.max_subscribers is not None and self.subscribers > filters.max_subscribers:
78
+ return False
79
+ if filters.min_active_users is not None:
80
+ if self.active_user_count is None or self.active_user_count < filters.min_active_users:
81
+ return False
82
+ if not filters.include_nsfw and self.over18:
83
+ return False
84
+ if filters.require_images and not self.allow_images:
85
+ return False
86
+ if filters.require_videos and not self.allow_videos:
87
+ return False
88
+ if filters.language_whitelist and self.language:
89
+ if self.language.lower() not in filters.language_whitelist:
90
+ return False
91
+ if filters.include_keywords:
92
+ kw_text = self.keyword_text
93
+ if not all(keyword in kw_text for keyword in filters.include_keywords):
94
+ return False
95
+ if filters.exclude_keywords:
96
+ kw_text = self.keyword_text
97
+ if any(keyword in kw_text for keyword in filters.exclude_keywords):
98
+ return False
99
+ if filters.skip_existing and normalize_name(self.name) in existing_norm:
100
+ return False
101
+ return True
102
+
103
+ def to_row(self) -> List[str]:
104
+ return [
105
+ self.name,
106
+ self.title,
107
+ f"{self.subscribers:,}",
108
+ "?" if self.active_user_count is None else f"{self.active_user_count:,}",
109
+ "NSFW" if self.over18 else "Safe",
110
+ "Yes" if self.allow_images else "No",
111
+ "Yes" if self.allow_videos else "No",
112
+ self.language or "",
113
+ self.primary_topic or "",
114
+ self.created_iso,
115
+ self.url,
116
+ ]
117
+
118
+
119
def none_or_nonempty(value: Optional[str]) -> bool:
    """Predicate for ``filter()``: keep strings that are neither None nor empty."""
    return value is not None and value != ""
121
+
122
+
123
def normalize_name(name: str) -> str:
    """Normalize a subreddit reference for deduplication and comparison.

    Strips surrounding whitespace and a leading ``r/`` or ``/r/`` in any
    letter case, then casefolds, so ``"r/Python"``, ``"/R/python"`` and
    ``"Python"`` all normalize to ``"python"``.
    """
    # Casefold BEFORE stripping so uppercase prefixes like "R/" are removed
    # too (stripping first missed them, leaving "r/..." in the result).
    return name.strip().casefold().removeprefix("/r/").removeprefix("r/")
125
+
126
+
127
class _State:
    """Process-wide mutable state shared by all UI callbacks.

    The attributes are declared at class level and accessed only through the
    single module-level ``_STATE`` instance, so they act as app-global storage:
    the Reddit client and curated lists persist for the lifetime of the
    process (and are shared across sessions of the same server).
    """

    reddit = None  # praw.Reddit | None — active client, set by ui_connect
    latest_results: List[SubredditInfo] = []  # records from the most recent search
    master_list: List[str] = []  # curated subreddit names, kept sorted
    connected_user: Optional[str] = None  # resolved username when known
    authenticated: bool = False  # True when script (username/password) login succeeded


# Singleton instance used by every callback in this module.
_STATE = _State()
136
+
137
+
138
def _ensure_praw_available() -> Optional[str]:
    """Return a user-facing error message when PRAW failed to import, else None."""
    if praw is not None:
        return None
    return (
        "PRAW is not available. Please install dependencies via requirements.txt "
        "and restart the app."
    )
145
+
146
+
147
def _is_unauthorized(exc: Exception) -> bool:
    """Return True when *exc* looks like a Reddit 401 / OAuth failure.

    The prawcore exception classes may be None when the import failed, so each
    isinstance check is guarded.
    """
    if ResponseException and isinstance(exc, ResponseException):
        response = getattr(exc, "response", None)
        code = getattr(response, "status_code", None)
        return code == 401
    return bool(OAuthException and isinstance(exc, OAuthException))
154
+
155
+
156
def _response_status(exc: Exception) -> Optional[int]:
    """Extract an HTTP status code from a prawcore exception, if available."""
    if ResponseException and isinstance(exc, ResponseException):
        # Reach through to the underlying HTTP response, if any.
        return getattr(getattr(exc, "response", None), "status_code", None)
    if Forbidden and isinstance(exc, Forbidden):
        return 403
    return None
163
+
164
+
165
def create_reddit(
    client_id: str,
    client_secret: str,
    user_agent: str,
    username: Optional[str] = None,
    password: Optional[str] = None,
) -> tuple[object, Optional[str], bool]:
    """Build a ``praw.Reddit`` client and probe its authentication level.

    Returns ``(reddit, resolved_username_or_None, authenticated)`` where
    ``authenticated`` is True only when username/password (script) login was
    used. Raises ValueError for missing or partial credentials. For app-only
    clients whose identity probe is rejected as unauthorized, the client is
    downgraded to read-only instead of raising; script-login failures re-raise.
    """
    # Validate the always-required application credentials first.
    base_missing = [
        k
        for k, v in {
            "client_id": client_id,
            "client_secret": client_secret,
            "user_agent": user_agent,
        }.items()
        if not v
    ]
    if base_missing:
        raise ValueError(f"Missing required field(s): {', '.join(base_missing)}")

    username = (username or "").strip()
    password = (password or "").strip()
    # Script login needs both halves; supplying only one is a user error.
    if username or password:
        if not (username and password):
            raise ValueError("Provide both username and password for authenticated login.")

    kwargs = dict(client_id=client_id, client_secret=client_secret, user_agent=user_agent)
    if username and password:
        kwargs.update(username=username, password=password)

    reddit = praw.Reddit(**kwargs)
    try:
        # Probe identity: None for app-only credentials, a Redditor otherwise.
        me = reddit.user.me()
        authenticated = bool(username and password)
        if me is None:
            if authenticated:
                # Script creds were supplied but no identity came back;
                # trust the supplied username.
                return reddit, (username or None), True
            reddit.read_only = True
            return reddit, None, False
        user_name = getattr(me, "name", username or None)
        return reddit, user_name, authenticated
    except Exception as exc:
        if _is_unauthorized(exc):
            if username and password:
                # Bad script credentials — surface the failure to the caller.
                raise
            # App-only client rejected by the identity probe: fall back to
            # read-only mode rather than failing the connection.
            reddit.read_only = True
            return reddit, None, False
        raise
212
+
213
+
214
def _build_info(sr) -> SubredditInfo:
    """Map a PRAW subreddit object onto the local ``SubredditInfo`` record.

    Uses ``getattr`` with defaults throughout because PRAW loads attributes
    lazily and some fields are absent on certain subreddit types.
    """
    return SubredditInfo(
        name=sr.display_name,
        title=sr.title or sr.display_name,
        subscribers=int(getattr(sr, "subscribers", 0) or 0),
        active_user_count=getattr(sr, "active_user_count", None),
        over18=bool(getattr(sr, "over18", False)),
        allow_images=bool(getattr(sr, "allow_images", False)),
        allow_videos=bool(getattr(sr, "allow_videos", False)),
        language=(getattr(sr, "lang", None) or getattr(sr, "language", None) or None),
        # datetime.utcnow() is deprecated; fall back to an aware UTC "now"
        # timestamp when Reddit does not report a creation time.
        created_utc=float(getattr(sr, "created_utc", datetime.now(timezone.utc).timestamp())),
        public_description=(getattr(sr, "public_description", "") or ""),
        primary_topic=getattr(sr, "primary_topic", None),
        url=f"https://reddit.com{sr.url}",
    )
229
+
230
+
231
+ def _parse_keywords(text: str) -> Optional[List[str]]:
232
+ words = [w.strip().lower() for w in text.split(",") if w.strip()]
233
+ return words or None
234
+
235
+
236
+ def _parse_language_codes(text: str) -> Optional[set[str]]:
237
+ codes = {part.strip().lower() for part in text.split(",") if part.strip()}
238
+ return codes or None
239
+
240
+
241
def _filters_from_inputs(
    min_subs: Optional[float],
    max_subs: Optional[float],
    min_active: Optional[float],
    include_nsfw: bool,
    require_images: bool,
    require_videos: bool,
    language_codes_text: str,
    include_keywords_text: str,
    exclude_keywords_text: str,
    skip_existing: bool,
) -> SearchFilters:
    """Translate raw Gradio widget values into a ``SearchFilters`` record."""

    def _as_limit(value: Optional[float]) -> Optional[int]:
        # Gradio Numbers arrive as floats or None; falsy values mean "no limit".
        return int(value) if value else None

    return SearchFilters(
        min_subscribers=_as_limit(min_subs),
        max_subscribers=_as_limit(max_subs),
        min_active_users=_as_limit(min_active),
        include_nsfw=bool(include_nsfw),
        require_images=bool(require_images),
        require_videos=bool(require_videos),
        language_whitelist=_parse_language_codes(language_codes_text),
        include_keywords=_parse_keywords(include_keywords_text),
        exclude_keywords=_parse_keywords(exclude_keywords_text),
        skip_existing=bool(skip_existing),
    )
265
+
266
+
267
def search_subreddits(
    reddit,
    query: str,
    limit: int,
    sort: str,
    time_filter: str,
    filters: SearchFilters,
) -> tuple[List[SubredditInfo], Optional[str]]:
    """Search Reddit for subreddits matching *query* and filter client-side.

    Args:
        reddit: A connected ``praw.Reddit`` instance.
        query: Free-text search query; must be non-empty.
        limit: Result cap, clamped to at most 100 (0/None means uncapped).
        sort: ``relevance``/``activity``/``hot``/``new``/``top``; approximated
            locally since the subreddit-search endpoint does not sort for us.
        time_filter: Accepted for interface compatibility; the subreddit
            search path does not use it.
        filters: Client-side filter settings.

    Returns:
        ``(results, note)`` — filtered/sorted records plus an optional
        advisory note describing any fallback that was taken.

    Raises:
        ValueError: If the query is blank.
        RuntimeError: On 401 responses or when every search strategy fails.
    """
    if not query.strip():
        raise ValueError("Search query must not be empty.")

    results: Optional[List[SubredditInfo]] = None
    note: Optional[str] = None
    # Python deletes the `except ... as exc` name when the handler exits, so
    # the primary failure must be stashed explicitly for use in the fallback
    # note below (referencing `exc` there raised NameError).
    primary_error: Optional[Exception] = None
    limit_cap = max(0, min(int(limit) if limit else 0, 100))
    max_limit = limit_cap or None

    def _execute_search(include_nsfw_flag: bool) -> List[SubredditInfo]:
        # Keep params minimal; extra params can trip certain PRAW builds.
        params = {
            "include_over_18": "true" if include_nsfw_flag else "false",
            "type": "sr",
        }
        generator = reddit.subreddits.search(
            query=query,
            limit=max_limit,
            params=params,
        )
        return [_build_info(sr) for sr in generator]

    try:
        results = _execute_search(filters.include_nsfw)
    except Exception as exc:
        if _is_unauthorized(exc):
            raise RuntimeError(
                "Reddit returned 401 Unauthorized. Check credentials or reconnect with script login."
            ) from exc
        primary_error = exc
        status = _response_status(exc)
        if status == 403 and filters.include_nsfw:
            # A 403 often means NSFW results are blocked for this client;
            # retry once with NSFW excluded before falling back entirely.
            try:
                results = _execute_search(False)
                note = (
                    "Primary search returned 403 (likely NSFW restricted). Retried without NSFW communities. "
                    "Authenticate with script credentials and enable NSFW if you need adult results. "
                    f"Original error: {exc}"
                )
            except Exception as inner:
                if _is_unauthorized(inner):
                    raise RuntimeError("Reddit returned 401 Unauthorized during retry search.") from inner
                primary_error = inner
                results = None

    if results is None:
        # Last resort: name-based search (Reddit caps this at roughly 10 hits).
        try:
            found = reddit.subreddits.search_by_name(query, include_nsfw=filters.include_nsfw, exact=False)
        except Exception as inner:
            if _is_unauthorized(inner):
                raise RuntimeError("Reddit returned 401 Unauthorized during fallback search.") from inner
            raise RuntimeError(f"Reddit search failed: {inner}") from inner
        note = (
            "Primary search failed; fell back to name-only search (Reddit limits this to ~10 results). "
            f"Error: {primary_error}"
        )
        results = [_build_info(sr) for sr in found[:limit_cap]]

    # Client-side filtering (NSFW, media requirements, language, keywords,
    # and master-list deduplication).
    existing_norm = {normalize_name(name) for name in _STATE.master_list}
    filtered = [info for info in results if info.matches(filters, existing_norm)]

    # Approximate the requested sort locally; "relevance" keeps Reddit's order.
    def sort_key(info: SubredditInfo):
        if sort == "top":
            return (-(info.subscribers or 0),)
        if sort in ("activity", "hot"):
            return (-(info.active_user_count or 0),)
        if sort == "new":
            return (-(info.created_utc or 0.0),)
        return (0,)

    if sort and sort != "relevance":
        filtered.sort(key=sort_key)

    # Re-apply the cap after filtering/sorting.
    return filtered[: limit_cap or None], note
350
+
351
+
352
+ def _format_results_table(infos: Iterable[SubredditInfo]) -> List[List[str]]:
353
+ return [info.to_row() for info in infos]
354
+
355
+
356
def _master_text() -> str:
    """Serialize the master list as newline-separated subreddit names."""
    entries = list(_STATE.master_list)
    return "\n".join(entries)
358
+
359
def _master_urls_text() -> str:
    """Render the master list as full reddit.com URLs, one per line.

    Leading "r/" or "/r/" prefixes (any case) are stripped while the rest of
    the name keeps its original casing.
    """
    rendered: List[str] = []
    for raw in _STATE.master_list:
        entry = raw.strip()
        folded = entry.lower()
        if folded.startswith("/r/"):
            entry = entry[3:]
        elif folded.startswith("r/"):
            entry = entry[2:]
        rendered.append(f"https://www.reddit.com/r/{entry}")
    return "\n".join(rendered)
370
+
371
def ui_connect(client_id: str, client_secret: str, user_agent: str, username: Optional[str] = None, password: Optional[str] = None):
    """Gradio callback: create a Reddit client and report connection status.

    Stores the client and auth details on ``_STATE`` and returns two
    ``gr.update`` payloads for the status and details textboxes. Never raises;
    failures are rendered as ❌ messages instead.
    """
    err = _ensure_praw_available()
    if err:
        return gr.update(value=f"⚠️ {err}"), gr.update(value="")
    try:
        reddit, resolved_user, authenticated = create_reddit(
            client_id,
            client_secret,
            user_agent,
            username=username,
            password=password,
        )
        _STATE.reddit = reddit
        _STATE.connected_user = resolved_user
        _STATE.authenticated = authenticated
        if authenticated:
            # Prefer the identity Reddit reported, then the typed username.
            who = resolved_user or (username.strip() if isinstance(username, str) else None) or "authenticated user"
            status = f"✅ Connected as {who}"
            mode_note = "Authenticated mode with script credentials."
        else:
            status = "✅ Connected in read-only mode"
            mode_note = (
                "Public search only. Provide username & password for script-level access (recommended for 401 issues)."
            )
        info = (
            "Credentials stored in memory for this session only. "
            "You can now search for subreddits. "
            f"{mode_note}"
        )
        return gr.update(value=status), gr.update(value=info)
    except Exception as exc:  # pragma: no cover - auth errors
        # Reset all connection state so a half-configured client is never used.
        _STATE.reddit = None
        _STATE.connected_user = None
        _STATE.authenticated = False
        if _is_unauthorized(exc):
            message = (
                "❌ Authorization failed (401). Double-check client ID/secret and ensure the app is enabled for "
                "script access. If you're using read-only credentials, Reddit may be rejecting them due to "
                "missing scopes—consider adding username/password or refreshing the credentials."
            )
        else:
            message = f"❌ Connection failed: {exc}"
        return gr.update(value=message), gr.update(value="")
414
+
415
+
416
def ui_search(
    query: str,
    limit: int,
    sort: str,
    time_filter: str,
    min_subs: Optional[float],
    max_subs: Optional[float],
    min_active: Optional[float],
    include_nsfw: bool,
    require_images: bool,
    require_videos: bool,
    language_codes_text: str,
    include_keywords_text: str,
    exclude_keywords_text: str,
    skip_existing: bool,
):
    """Gradio callback: run a subreddit search and refresh the result widgets.

    Returns three ``gr.update`` payloads: status markdown, dataframe rows, and
    the checkbox-group choices. Requires a prior successful ``ui_connect``;
    errors are rendered in the status area rather than raised.
    """
    if _STATE.reddit is None:
        # No client yet — prompt the user instead of raising.
        return (
            gr.update(value="Please connect with Reddit credentials first."),
            gr.update(value=[]),
            gr.update(choices=[], value=[]),
        )
    filters = _filters_from_inputs(
        min_subs,
        max_subs,
        min_active,
        include_nsfw,
        require_images,
        require_videos,
        language_codes_text,
        include_keywords_text,
        exclude_keywords_text,
        skip_existing,
    )
    try:
        found, note = search_subreddits(
            _STATE.reddit,
            query=query,
            limit=limit,
            sort=sort,
            time_filter=time_filter,
            filters=filters,
        )
        _STATE.latest_results = found
        table = _format_results_table(found)
        choices = [info.name for info in found]
        status = (
            f"Found {len(found)} subreddit(s) matching filters. "
            "Use the checklist below to add to your master list."
        )
        if note:
            # Surface fallback/retry notes as italic markdown under the status.
            status += f"\n\n*{note}*"
        return gr.update(value=status), gr.update(value=table), gr.update(choices=choices, value=[])
    except Exception as exc:
        # Report the failure and clear the result widgets.
        return gr.update(value=f"❌ Search failed: {exc}"), gr.update(value=[]), gr.update(choices=[], value=[])
471
+
472
+
473
def ui_add_to_master(selected: List[str], existing_text: str):
    """Add the checked subreddits to the master list, skipping duplicates.

    ``existing_text`` mirrors the master-list textbox for the Gradio wiring;
    the in-memory ``_STATE.master_list`` is the source of truth.
    """
    if not selected:
        return (
            gr.update(value=_master_text()),
            gr.update(value="Nothing selected."),
            gr.update(value=[]),
            gr.update(value=_master_urls_text()),
        )
    by_norm = {normalize_name(entry): entry for entry in _STATE.master_list}
    new_count = 0
    for candidate in selected:
        key = normalize_name(candidate)
        if key in by_norm:
            continue
        by_norm[key] = candidate
        new_count += 1
    _STATE.master_list = sorted(by_norm.values(), key=str.casefold)
    feedback = f"Added {new_count} subreddit(s) to master list." if new_count else "No new subreddits added."
    return (
        gr.update(value=_master_text()),
        gr.update(value=feedback),
        gr.update(value=[]),
        gr.update(value=_master_urls_text()),
    )
495
+
496
+
497
def ui_remove_from_master(remove_text: str, status: str):
    """Remove the listed subreddit names (one per line) from the master list."""
    targets = {normalize_name(row) for row in (remove_text or "").splitlines() if row.strip()}
    if not targets:
        return (
            gr.update(value=_master_text()),
            gr.update(value="Provide subreddit names (one per line) to remove."),
            gr.update(value=""),
            gr.update(value=_master_urls_text()),
        )
    kept = [entry for entry in _STATE.master_list if normalize_name(entry) not in targets]
    dropped = len(_STATE.master_list) - len(kept)
    _STATE.master_list = kept
    return (
        gr.update(value=_master_text()),
        gr.update(value=f"Removed {dropped} subreddit(s)."),
        gr.update(value=""),
        gr.update(value=_master_urls_text()),
    )
516
+
517
+
518
def ui_load_master(text: str):
    """Replace the master list with the deduplicated contents of the textbox."""
    deduped: dict[str, str] = {}
    for raw in (text or "").splitlines():
        entry = raw.strip()
        if entry:
            # Last occurrence of a duplicate wins, matching dict-comprehension behavior.
            deduped[normalize_name(entry)] = entry
    _STATE.master_list = sorted(deduped.values(), key=str.casefold)
    return (
        gr.update(value=_master_text()),
        gr.update(value=f"Loaded {len(_STATE.master_list)} unique subreddit(s)."),
        gr.update(value=""),
        gr.update(value=_master_urls_text()),
    )
528
+
529
+
530
def ui_clear_master():
    """Empty the master list and reset the related widgets."""
    _STATE.master_list = []
    blank = gr.update(value="")
    return (
        blank,
        gr.update(value="Master list cleared."),
        gr.update(value=""),
        gr.update(value=_master_urls_text()),
    )
538
+
539
+
540
def ui_download_master():
    """Write the master list to a temporary file and return its path.

    ``gr.DownloadButton`` expects its click handler to return a file path;
    the previous ``gr.FileData(data=..., file_name=...)`` call does not match
    Gradio's API (FileData carries a ``path``, not raw bytes) and failed at
    download time.
    """
    import tempfile

    with tempfile.NamedTemporaryFile(
        mode="w",
        encoding="utf-8",
        prefix="reddit_master_list_",
        suffix=".txt",
        delete=False,
    ) as handle:
        handle.write(_master_text())
        return handle.name
543
+
544
+
545
# Top-level UI definition: four accordions (credentials, search, results,
# master list) plus the event wiring that connects buttons to the ui_* callbacks.
with gr.Blocks(title="Reddit Finder", fill_width=True, theme="Nymbo/Nymbo_Theme") as demo:
    gr.Markdown(
        """
        # Reddit Finder
        Explore Reddit and build a curated list of subreddits for future outreach.
        1. Connect with your Reddit app credentials (script or installed app).
        2. Search for subreddits, tweak filters, and review the results table.
        3. Add selected communities to your master list and export when ready.
        """
    )

    with gr.Accordion("1) Credentials", open=True):
        with gr.Row():
            client_id = gr.Textbox(label="Client ID", placeholder="Your Reddit app client_id")
            client_secret = gr.Textbox(label="Client Secret", type="password")
            user_agent = gr.Textbox(label="User Agent", value="reddit-finder by u/yourname")
        with gr.Row():
            username_tb = gr.Textbox(label="Username (optional)", placeholder="reddit_username")
            password_tb = gr.Textbox(label="Password (optional)", type="password")
        gr.Markdown(
            "Provide username & password for script applications (recommended). Leave blank to use read-only app-only access."
        )
        connect_btn = gr.Button("Connect", variant="primary")
        conn_status = gr.Textbox(label="Status", interactive=False)
        conn_info = gr.Textbox(label="Details", interactive=False)

    with gr.Accordion("2) Search", open=True):
        query = gr.Textbox(label="Search query", placeholder="e.g. productivity apps, cozy coding")
        with gr.Row():
            limit = gr.Slider(label="Max results", minimum=5, maximum=100, step=5, value=25)
            sort = gr.Radio(label="Sort", choices=["relevance", "activity", "hot", "new", "top"], value="relevance")
            time_filter = gr.Radio(label="Time filter", choices=["hour", "day", "week", "month", "year", "all"], value="all")
        with gr.Row():
            min_subs = gr.Number(label="Min subscribers", value=None)
            max_subs = gr.Number(label="Max subscribers", value=None)
            min_active = gr.Number(label="Min active users", value=None)
        with gr.Row():
            include_nsfw = gr.Checkbox(label="Include NSFW", value=False)
            require_images = gr.Checkbox(label="Require images enabled", value=False)
            require_videos = gr.Checkbox(label="Require videos enabled", value=False)
        with gr.Row():
            language_codes = gr.Textbox(label="Language codes (comma separated)", placeholder="en, es, fr")
            include_keywords = gr.Textbox(label="Include keywords (comma separated)", placeholder="mechanical, diy")
            exclude_keywords = gr.Textbox(label="Exclude keywords (comma separated)", placeholder="nsfw, politics")
        skip_existing = gr.Checkbox(label="Skip subreddits already in master list", value=True)
        search_btn = gr.Button("Search", variant="primary")
        search_status = gr.Markdown()

    with gr.Accordion("3) Results", open=True):
        # Column order must match SubredditInfo.to_row().
        results_table = gr.Dataframe(
            headers=[
                "subreddit",
                "title",
                "subscribers",
                "active",
                "nsfw",
                "images",
                "videos",
                "lang",
                "topic",
                "created",
                "url",
            ],
            interactive=False,
            wrap=True,
        )
        selection = gr.CheckboxGroup(label="Select subreddit(s) to add", choices=[])
        add_btn = gr.Button("Add to master list", variant="secondary")
        result_feedback = gr.Textbox(label="Result feedback", interactive=False)

    with gr.Accordion("4) Master List", open=True):
        master_text = gr.Textbox(label="Master list", lines=12, interactive=True, placeholder="Subreddit names, one per line")
        master_urls = gr.Textbox(
            label="Master list as URLs",
            lines=12,
            interactive=False,
            show_copy_button=True,
            value=_master_urls_text(),
        )
        gr.Markdown("This URL view updates automatically when the master list changes.")
        with gr.Row():
            load_btn = gr.Button("Load from textbox")
            remove_box = gr.Textbox(label="Remove (one per line)")
            remove_btn = gr.Button("Remove from list")
        with gr.Row():
            clear_btn = gr.Button("Clear all", variant="stop")
            download_btn = gr.DownloadButton("Download master list", variant="primary")
        master_status = gr.Textbox(label="Master status", interactive=False)

    # --- Event wiring ---
    connect_btn.click(
        fn=ui_connect,
        inputs=[client_id, client_secret, user_agent, username_tb, password_tb],
        outputs=[conn_status, conn_info],
        api_name="connect",
    )

    search_btn.click(
        fn=ui_search,
        inputs=[
            query,
            limit,
            sort,
            time_filter,
            min_subs,
            max_subs,
            min_active,
            include_nsfw,
            require_images,
            require_videos,
            language_codes,
            include_keywords,
            exclude_keywords,
            skip_existing,
        ],
        outputs=[search_status, results_table, selection],
        api_name="search",
        concurrency_limit=1,
    )

    add_btn.click(
        fn=ui_add_to_master,
        inputs=[selection, master_text],
        outputs=[master_text, result_feedback, selection, master_urls],
    )

    load_btn.click(
        fn=ui_load_master,
        inputs=[master_text],
        outputs=[master_text, master_status, remove_box, master_urls],
    )

    remove_btn.click(
        fn=ui_remove_from_master,
        inputs=[remove_box, master_status],
        outputs=[master_text, master_status, remove_box, master_urls],
    )

    clear_btn.click(
        fn=ui_clear_master,
        inputs=None,
        outputs=[master_text, master_status, remove_box, master_urls],
    )

    download_btn.click(
        fn=ui_download_master,
        inputs=None,
        outputs=download_btn,
    )

# Serialize event handlers; the module-level _STATE is not thread-safe.
demo.queue(default_concurrency_limit=1)
695
+
696
+
697
if __name__ == "__main__":
    # Launch the Gradio server only when executed directly (not on import).
    demo.launch()