GodsDevProject committed on
Commit
ef5f53e
·
verified ·
1 Parent(s): 596e23c

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +63 -36
app.py CHANGED
@@ -1,27 +1,25 @@
1
  import gradio as gr
2
  import hashlib
3
- import zipfile
4
  import io
 
5
  from datetime import datetime
6
  from urllib.parse import quote_plus, urlparse
7
  from collections import Counter
8
 
9
  import plotly.graph_objects as go
10
- from reportlab.platypus import SimpleDocTemplate, Paragraph, Spacer
11
- from reportlab.lib.styles import getSampleStyleSheet
12
 
13
  from bluebook import bluebook_full
14
- from semantic import SemanticIndex, FAISS_AVAILABLE
15
 
16
  # ======================================================
17
  # CONFIG
18
  # ======================================================
19
 
20
  ENABLE_AI = True
21
- ENABLE_JOURNALIST_ZIP = True
22
 
23
  # ======================================================
24
- # FOIA ADAPTER
25
  # ======================================================
26
 
27
  class FOIAAdapter:
@@ -44,15 +42,14 @@ class FBI(FOIAAdapter):
44
  agency = "FBI"
45
  search_url = "https://vault.fbi.gov/search?SearchableText={q}"
46
 
47
- LIVE_ADAPTERS = [CIA(), FBI()]
48
 
49
  # ======================================================
50
  # STATE
51
  # ======================================================
52
 
53
  RESULTS = []
54
- SEMANTIC = None
55
- SELECTED = None
56
 
57
  # ======================================================
58
  # SEARCH
@@ -63,64 +60,94 @@ def run_search(query):
63
  RESULTS = []
64
  rows = []
65
 
66
- for a in LIVE_ADAPTERS:
67
  for r in a.search(query):
68
  r["hash"] = hashlib.sha256(r["url"].encode()).hexdigest()[:16]
69
  RESULTS.append(r)
70
  rows.append([r["agency"], r["title"], r["url"], r["hash"]])
71
 
72
- return rows
73
 
74
  # ======================================================
75
- # AI ASK
76
  # ======================================================
77
 
78
- def ask_ai(opt_in, question):
79
- if not opt_in or not SELECTED:
80
- return "AI disabled or no document selected."
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
81
 
82
- r = SELECTED
83
- return (
84
- f"AI ANALYSIS\n\n"
85
- f"{r['title']}\n{r['url']}\n\n"
86
- f"Question:\n{question}\n\n"
87
- f"Analysis is informational only.\n\n"
88
- "AI DISCLOSURE:\n"
89
- "• User-initiated\n"
90
- " Public FOIA documents only\n"
91
- " Verify against original source\n"
92
- )
93
 
94
  # ======================================================
95
- # EXPORT
96
  # ======================================================
97
 
98
  def journalist_zip():
99
  buf = io.BytesIO()
100
  with zipfile.ZipFile(buf, "w") as z:
101
  z.writestr("citations.txt", "\n".join(bluebook_full(r) for r in RESULTS))
 
 
 
 
 
102
  buf.seek(0)
103
  return buf
104
 
 
 
 
105
  # ======================================================
106
  # UI
107
  # ======================================================
108
 
109
  with gr.Blocks(title="Federal FOIA Intelligence Search") as app:
 
 
 
 
 
110
  with gr.Tabs():
111
- with gr.Tab("Search"):
112
- q = gr.Textbox()
113
  btn = gr.Button("Search")
114
  table = gr.Dataframe(headers=["Agency","Title","URL","Hash"])
115
- btn.click(run_search, q, table)
 
 
 
 
 
 
116
 
117
- with gr.Tab("AI Ask"):
118
- opt = gr.Checkbox(label="Enable AI (Opt-In)")
119
- question = gr.Textbox(lines=4)
120
- answer = gr.Textbox(lines=12)
121
- gr.Button("Ask AI").click(ask_ai, [opt, question], answer)
122
 
123
- with gr.Tab("Exports"):
124
  gr.Button("Journalist ZIP").click(journalist_zip, outputs=gr.File())
125
 
126
  app.launch()
 
1
  import gradio as gr
2
  import hashlib
 
3
  import io
4
+ import zipfile
5
  from datetime import datetime
6
  from urllib.parse import quote_plus, urlparse
7
  from collections import Counter
8
 
9
  import plotly.graph_objects as go
 
 
10
 
11
  from bluebook import bluebook_full
12
+ from appendix import build_litigation_appendix
13
 
14
  # ======================================================
15
  # CONFIG
16
  # ======================================================
17
 
18
  ENABLE_AI = True
19
+ ENABLE_PUBLIC_SHARE = True
20
 
21
  # ======================================================
22
+ # ADAPTER
23
  # ======================================================
24
 
25
  class FOIAAdapter:
 
42
  agency = "FBI"
43
  search_url = "https://vault.fbi.gov/search?SearchableText={q}"
44
 
45
+ LIVE = [CIA(), FBI()]
46
 
47
  # ======================================================
48
  # STATE
49
  # ======================================================
50
 
51
  RESULTS = []
52
+ SHARES = {}
 
53
 
54
  # ======================================================
55
  # SEARCH
 
60
  RESULTS = []
61
  rows = []
62
 
63
+ for a in LIVE:
64
  for r in a.search(query):
65
  r["hash"] = hashlib.sha256(r["url"].encode()).hexdigest()[:16]
66
  RESULTS.append(r)
67
  rows.append([r["agency"], r["title"], r["url"], r["hash"]])
68
 
69
+ return rows, render_cards()
70
 
71
  # ======================================================
72
+ # CARDS
73
  # ======================================================
74
 
75
def render_cards(results=None):
    """Render search results as a vertical list of HTML cards.

    Parameters:
        results: optional list of result dicts with ``agency``, ``title``
            and ``url`` keys; defaults to the module-level RESULTS.

    Returns:
        str: concatenated HTML, one card per result ("" for no results).
    """
    import html  # local import: escape untrusted document metadata

    records = RESULTS if results is None else results
    cards = []
    for i, r in enumerate(records):
        # FOIA agency/title/url values are untrusted text; escaping them
        # prevents markup injection when the string is shown via gr.HTML.
        agency = html.escape(str(r["agency"]))
        title = html.escape(str(r["title"]))
        url = html.escape(str(r["url"]), quote=True)
        cards.append(f"""
<div style="border:1px solid #ddd;border-radius:12px;padding:14px;margin-bottom:16px">
<b>{agency}</b><br>
{title}<br><br>
<a href="{url}" target="_blank">View</a> |
<a href="{url}" download>Download</a> |
<a href="#" onclick="share({i})">Share</a> |
<i>Ask AI (opt-in)</i>
</div>
""")
    return "".join(cards)
89
+
90
+ # ======================================================
91
+ # SHARE PAGE
92
+ # ======================================================
93
 
94
def create_share():
    """Publish the current RESULTS under a short, content-derived share ID.

    The ID is the first 12 hex characters of the SHA-256 digest of the
    result list's string form, so an identical result set always maps to
    the same ID. A snapshot copy of RESULTS is stored in SHARES.

    Returns:
        str: Markdown announcing the new share ID.
    """
    digest = hashlib.sha256(str(RESULTS).encode()).hexdigest()
    sid = digest[:12]
    SHARES[sid] = RESULTS.copy()
    return f"Share ID: `{sid}`"
98
+
99
def load_share(sid):
    """Resolve a share ID to its saved results, rendered as citations.

    Parameters:
        sid: share ID previously issued by create_share().

    Returns:
        str: one Bluebook citation per line, or an error message when the
        ID is unknown (or maps to an empty result set).
    """
    saved = SHARES.get(sid)
    if not saved:
        return "Invalid share ID"
    citations = [bluebook_full(r) for r in saved]
    return "\n".join(citations)
 
104
 
105
  # ======================================================
106
+ # EXPORTS
107
  # ======================================================
108
 
109
def journalist_zip():
    """Bundle the current RESULTS into an in-memory ZIP for journalists.

    The archive contains:
      * citations.txt — one Bluebook citation per result
      * links.csv     — agency,title,url rows

    Returns:
        io.BytesIO: the ZIP archive, rewound to offset 0.
    """
    import csv  # local import: correct quoting for links.csv

    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as z:
        z.writestr("citations.txt", "\n".join(bluebook_full(r) for r in RESULTS))
        # Build links.csv with the csv module so titles containing commas,
        # quotes or newlines remain valid CSV (a plain f-string join does not).
        csv_buf = io.StringIO()
        writer = csv.writer(csv_buf, lineterminator="\n")
        writer.writerow(["agency", "title", "url"])
        for r in RESULTS:
            writer.writerow([r["agency"], r["title"], r["url"]])
        z.writestr("links.csv", csv_buf.getvalue())
    buf.seek(0)
    return buf
120
 
121
def appendix_pdf():
    """Build and return the litigation appendix PDF for the current RESULTS."""
    report = build_litigation_appendix(RESULTS)
    return report
123
+
124
  # ======================================================
125
  # UI
126
  # ======================================================
127
 
128
# ------------------------------------------------------------------
# Gradio UI: header, four tabs wired to the handlers above, then launch.
# ------------------------------------------------------------------
with gr.Blocks(title="Federal FOIA Intelligence Search") as app:
    gr.Markdown("""
# 🏛️ Federal FOIA Intelligence Search
**Public FOIA Electronic Reading Rooms**
""")

    with gr.Tabs():
        # Search tab: query box feeds run_search, which fills both the
        # tabular view and the HTML card gallery.
        with gr.Tab("🔍 Search"):
            q = gr.Textbox(label="Search FOIA Libraries")
            btn = gr.Button("Search")
            table = gr.Dataframe(headers=["Agency", "Title", "URL", "Hash"])
            gallery = gr.HTML()
            btn.click(run_search, q, [table, gallery])

        # Share tab: create a share ID for the current results, or load
        # a previously created one.
        with gr.Tab("📤 Share"):
            gr.Button("Create Share Page").click(create_share, outputs=gr.Textbox())
            sid = gr.Textbox(label="Load Share ID")
            gr.Button("Load").click(load_share, sid, gr.Textbox(lines=10))

        # Litigation tab: one-click appendix PDF.
        with gr.Tab("⚖️ Litigation"):
            gr.Button("Generate Appendix PDF").click(appendix_pdf, outputs=gr.File())

        # Exports tab: downloadable journalist bundle.
        with gr.Tab("🗂 Exports"):
            gr.Button("Journalist ZIP").click(journalist_zip, outputs=gr.File())

app.launch()