Upload 29 files
- AGENCY_COVERAGE.md +11 -0
- CODE_OF_CONDUCT.md +28 -0
- Dockerfile.hf +5 -0
- ETHICS.md +24 -0
- FILE_INVENTORY.txt +60 -0
- HASH_MANIFEST.json +62 -0
- HF_JUSTIFICATION.md +12 -0
- HF_SPACE_README.md +32 -0
- LEGAL_MEMO.md +6 -0
- README.md +35 -0
- README_PROD.md +40 -0
- SOURCES.md +23 -0
- __init__.py +0 -0
- app.py +51 -0
- appeal_pdf.py +8 -0
- audit.py +18 -0
- collaboration.py +17 -0
- entity_graph.py +19 -0
- export_utils.py +7 -0
- file_structure.txt +20 -0
- foia_pdf.py +70 -0
- foia_requests.py +52 -0
- gitattributes.txt +35 -0
- requirements.txt +9 -0
- schemas.py +8 -0
- semantic.py +18 -0
- space.yaml +8 -0
- trust_safety.md +12 -0
- vector_store.py +17 -0
AGENCY_COVERAGE.md
ADDED
@@ -0,0 +1,11 @@
# Agency Coverage Map

| Agency | Public FOIA Reading Room |
|--------|--------------------------|
| CIA | https://www.cia.gov/readingroom/ |
| FBI | https://vault.fbi.gov/ |
| DoD | https://www.foia.mil/ |
| NSA | https://www.nsa.gov/readingroom/ |
| NRO | https://www.nro.gov/FOIA/ |
| DHS | https://www.dhs.gov/foia-reading-room |
CODE_OF_CONDUCT.md
ADDED
@@ -0,0 +1,28 @@
# Code of Conduct

## Our Pledge

This project is committed to providing a respectful, inclusive, and responsible environment for all contributors and users.

## Acceptable Use

Participants agree to:
- Use this project for lawful, ethical, and non-harmful purposes
- Respect the public-record nature of FOIA documents
- Avoid speculative, defamatory, or misleading interpretations

## Unacceptable Use

This project must not be used to:
- Harass or target individuals
- Make unsubstantiated allegations
- Claim access to classified or restricted information
- Bypass legal or ethical safeguards

## Enforcement

Maintainers may remove content or restrict access in response to violations of this Code of Conduct.

---

This project is intended for civic transparency, education, and research.
Dockerfile.hf
ADDED
@@ -0,0 +1,5 @@
FROM python:3.10-slim
WORKDIR /app
COPY . /app
RUN pip install --no-cache-dir -r requirements.txt
CMD ["python", "app.py"]
ETHICS.md
ADDED
@@ -0,0 +1,24 @@
# Ethics Policy

## Purpose

This project exists to support transparency, research, and public understanding of government records released under the Freedom of Information Act (FOIA).

## Guiding Principles

- **Public Sources Only:** All data must originate from publicly released documents.
- **No Speculation:** The project does not infer, predict, or hypothesize beyond document text.
- **Citation First:** Outputs must be traceable to source material.
- **No Harm:** The tool must not be used to defame, harass, or mislead.

## Redactions

Redacted content is respected. This project does not attempt to reconstruct or infer withheld information.

## Accountability

Users are responsible for how they interpret and use results. This tool provides analytical assistance, not conclusions.

---

Ethical transparency is foundational to this project.
FILE_INVENTORY.txt
ADDED
@@ -0,0 +1,60 @@
CODE_OF_CONDUCT.md
ETHICS.md
README.md
README_PROD.md
__init__.py
adapters/__init__.py
adapters/cia.py
adapters/common.py
adapters/dea.py
adapters/dhs.py
adapters/dia.py
adapters/dod.py
adapters/doj.py
adapters/fbi.py
adapters/ice.py
adapters/nia.py
adapters/nsa.py
app.py
appeal_pdf.py
appeals/__init__.py
appeals/pdf_appeal.py
audit.py
collaboration.py
collaboration/__init__.py
collaboration/icij.py
core/__init__.py
core/analysis.py
core/appeals.py
core/explain.py
core/index.py
core/multi_program.py
core/redaction.py
core/search.py
core/vector.py
data/demo/documents/aatip_sample.txt
data/demo/documents/tencap_sample.txt
data/demo/metadata.json
data/foia_sources.json
entity_graph.py
export_utils.py
file_structure.txt
foia_pdf.py
foia_requests.py
gitattributes.txt
ingest/__init__.py
ingest/agency_registry.py
ingest/cia_reading_room.py
ingest/fbi_vault.py
ingest/generic_public_foia.py
ingest/loader.py
ingest/sources.py
requirements.txt
schemas.py
search/__init__.py
search/semantic.py
semantic.py
tests/__init__.py
tests/test_core.py
tests/test_schema.py
vector_store.py
HASH_MANIFEST.json
ADDED
@@ -0,0 +1,62 @@
{
  "CODE_OF_CONDUCT.md": "b674f96cae26f0050be863c4b8782510fcae5ab855f0822ec4a0217763a84601",
  "ETHICS.md": "d4f7c23c1e60297712786e392800158fcbe21116576496632e8221b0b8a16ff2",
  "README.md": "e9bfdd2d6a4422fcb132bd4033a69d2241574c31fab71820e4643491b3b1225b",
  "app.py": "c2a2b16ce45a327de0d42196104cb7fc50ec29ff1cb1fb95517a8ca655a3192a",
  "appeal_pdf.py": "2d28ca1d0e796bfb5da25eac05a91354aadd58deefd041acded9a01a64055f9c",
  "audit.py": "01c286d4067c6fffcb990391d8f750719c1ccac07eafc4477ccbdd1be4dd11e8",
  "collaboration.py": "7cbd52c0da9be9f205b2901d8a94f28cb96612ffe506bcad1c7991885cd2d947",
  "entity_graph.py": "dbe21fa0d8e7528daeee34d598efba836ab6370ad609de80746be1b12a4e0ff5",
  "export_utils.py": "a01a088fd650a947a7831e795508208d3caa430d099aa5a8d7823ba462f0a80e",
  "file_structure.txt": "6eee55e586751e3ae1405349f01dd35703e678d8e105ea19fc58eb15e4c2a6fa",
  "foia_pdf.py": "babbd69a2da67681f15596ab254174310b8381d5853da72fe068d31d746725ab",
  "foia_requests.py": "ca9c765bb7a591c462a94b0aa42957d1b3124128266d4880f0654895ce0ca6c0",
  "gitattributes.txt": "11ad7efa24975ee4b0c3c3a38ed18737f0658a5f75a0a96787b576a78a023361",
  "requirements.txt": "444bc9beedfa3fde82790f47c1e9b94bab90be2fefd0648de0ffdebbcc2eb61c",
  "schemas.py": "e08b38513be2572af7d022e013f037c4f614f2117db85d4d776c408be96815ef",
  "semantic.py": "4ffcf9149f08b8e69473e5418588dd370bbd470b137f2d0761901fccf09238cf",
  "vector_store.py": "c61701e38e12150c541d284e13824341dde1794d3b4149d2a7d332b8023ad923",
  "__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "README_PROD.md": "3b5d0a9f882f8f980a08452ca589a788b3c7cfe2ed8b7ca13a01f9c4a12e9060",
  "adapters/__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "adapters/cia.py": "a934e6a67aa6662391814036a9084779f15ad9ca5059f5461e2c374dfa9c3344",
  "adapters/common.py": "c76c7ea1ce1616a2c99bfeec47ca046e75088f4d807f9c94ce5f87c9eeed5714",
  "adapters/dea.py": "f1b9832aeaabecf5da8f1125e33883ce28e37d081149f6c75bf9ef49ac3ead8a",
  "adapters/dhs.py": "66c44ee323135ee8e3c0cb7a2bb83d2d9dc20f7b88b6db4823e1bd5d03be6227",
  "adapters/dia.py": "5c003321750582f502bcf0e2115956edb9af3aa8937917b72d7b25036b493f6d",
  "adapters/dod.py": "410726bcab164fa9991d0ba61b3d9586d271ee4d55f65d1bd02193e84f02ed30",
  "adapters/doj.py": "56080addcaef0a01d2395b6d44a93e9e271bc569a688f65657617d730a054eac",
  "adapters/fbi.py": "b81b80972adf70b8283f2c16b241d17f46ab3ab73cd3ab4155dc88f7afbbcfc2",
  "adapters/ice.py": "f0d06239d483933ba53966bc8015b9ca9f3ead3ebb535f4f963f5a26afd340b3",
  "adapters/nia.py": "cbc240d23d7ac144d0ca0a49e83341df579903092c13c7603cfe438e7dd58a84",
  "adapters/nsa.py": "a5a7ff4f8d3b1397bccc6095471de814aad75e2711566065f8cf7f4f43c59303",
  "appeals/pdf_appeal.py": "cfe7ca493bf9a4280eff3d90494b2e2afc8bfed92ee99d5e175c1daf49ddadf6",
  "appeals/__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "collaboration/icij.py": "bd02217afd54664762594dfcd1e8088ac3666c641acd450d3b233cf05f08a641",
  "collaboration/__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "core/analysis.py": "e745cc6ad43d5193c92b5d7c417db4546ec301a96761090a319f7a477722dd99",
  "core/appeals.py": "9ac66f34fdb2e741b6341de258291fd99db7f4a95862e39aa4cae94448726609",
  "core/explain.py": "accdde04f5faf85b48302917f6274a12f06b9058fac5941cfa7ce9a64a6c45a3",
  "core/index.py": "d266fc0aacbc2445b25cafbc29530e9138bb626090fb716681f300976927903c",
  "core/multi_program.py": "444928c79f9778ebffcdb47262ba63b2eb19d2ed4d97d5632682a92e91861138",
  "core/redaction.py": "b99bbbcb659e1f60902bca7e2bde5b0c28f371b7a6feb9daff489bb8fd96b878",
  "core/search.py": "5843e5ee44d88688862b73e5457ff596dd229fc9433600c2e1a978868c8a2296",
  "core/vector.py": "518e78f8c363735f5629584d2d5e25876a7f80063cd74e72a080723380141ce8",
  "core/__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "data/foia_sources.json": "8fe166a285717548afb937ad7a669020c60f91d9ec9f06dbdee9954f3396bd2a",
  "data/demo/metadata.json": "89d069dd00b20d1c74eb6f192a09b9d11226d86d5f7754159b3b1717512302d3",
  "data/demo/documents/aatip_sample.txt": "8b8d9a6167699a123885330093dac739025bfe0d7fabfdfd596707ab53db9f81",
  "data/demo/documents/tencap_sample.txt": "e1930579e04e76cc2ced2b5b253fa59e907b28bbffba2f8c1710693cfc84b167",
  "ingest/agency_registry.py": "89581ae5dcf6f0e5614939ce8538e17f4e22a1751d806bfce5cd51fbf9d35f85",
  "ingest/cia_reading_room.py": "ebfa118842937a7929a1ce58998650f11081306e8a017d53f01e11262917f2e5",
  "ingest/fbi_vault.py": "9a24fd572db556cc182239738ca2c551d6cb6a393a325f3fc8f6db9cbf1c157b",
  "ingest/generic_public_foia.py": "60f174b9ada68330a70ca11898ae3fbb7d225e2f404265903a5079aaa274baa1",
  "ingest/loader.py": "12b2b68d4c3a902270be73bebb1218314f19b225f7df4e436191f433378aca18",
  "ingest/sources.py": "4b995bff081e14cbe3b66deb516abc74fce09e29f3e36463f60bbbcaf11b075b",
  "ingest/__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "search/semantic.py": "974faa592af9a67ec50a691180ad68d90e00d38244871680c0c45f31a77f8f36",
  "search/__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
  "tests/test_core.py": "58e5d87c0de8482328abcc27d7a1452cdc6a69740eb0de4395c78a250d12d79e",
  "tests/test_schema.py": "04c0343db5c7516679395717a1dd4c2eca4e325cf038e5c6ee794c2a62649119",
  "tests/__init__.py": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"
}
HF_JUSTIFICATION.md
ADDED
@@ -0,0 +1,12 @@
This Hugging Face Space provides a public-interest federated search interface
across U.S. Government FOIA Electronic Reading Rooms.

Safeguards:
- Public sources only
- No authentication bypass
- Rate limiting and health checks
- Redaction-aware previews
- Metadata indexing only

Intended for journalism, research, and accountability.
HF_SPACE_README.md
ADDED
@@ -0,0 +1,32 @@
# FOIA Federated Document Search (Public Interest)

🚀 **Hugging Face Space – Transparency & Accountability Tool**

This application provides **semantic search across publicly released U.S. Government FOIA electronic reading rooms**.
It does **not** access classified, private, or restricted systems.

## What This Is
- Federated FOIA document search
- Semantic + keyword hybrid retrieval
- Redaction-aware exports
- Audit logging

## What This Is NOT
- Surveillance
- Intelligence gathering
- Law enforcement tooling
- Political persuasion

## Data Sources
- CIA FOIA Electronic Reading Room
- FBI Vault
- Other agency FOIA libraries (public releases only)

## Compliance
- FOIA-only sources
- robots.txt respected
- Rate-limited adapters
- Redaction before export

## Intended Users
Researchers, journalists, historians, and the general public.
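HF_SPACE_README.md above advertises "semantic + keyword hybrid retrieval," but no hybrid scorer is included in this upload. A minimal sketch of what such a scorer could look like, assuming the same `all-MiniLM-L6-v2` model used in `semantic.py`; `hybrid_score` and the `alpha` weight are illustrative names, not part of the repository:

```python
# Sketch only: blend keyword overlap with embedding similarity.
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer("all-MiniLM-L6-v2")

def hybrid_score(query: str, doc_text: str, alpha: float = 0.5) -> float:
    # Keyword component: fraction of query terms that appear in the document.
    q_terms = set(query.lower().split())
    d_terms = set(doc_text.lower().split())
    keyword = len(q_terms & d_terms) / max(len(q_terms), 1)
    # Semantic component: cosine similarity between embeddings.
    q_emb = model.encode(query, convert_to_tensor=True)
    d_emb = model.encode(doc_text, convert_to_tensor=True)
    semantic = util.cos_sim(q_emb, d_emb).item()
    # alpha trades keyword evidence against semantic evidence.
    return alpha * keyword + (1 - alpha) * semantic
```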
LEGAL_MEMO.md
ADDED
@@ -0,0 +1,6 @@
FOIA Federated Search – Legal Summary

This system indexes publicly released FOIA records.
No restricted access, no scraping of protected systems.
Fully compliant with 5 U.S.C. § 552.
README.md
ADDED
@@ -0,0 +1,35 @@
---
title: FOIA Federated Search
emoji: 📜
colorFrom: blue
colorTo: purple
sdk: gradio
sdk_version: "4.0"
app_file: app.py
pinned: false
license: mit
---

# FOIA Federated Search (Public Interest)

A Hugging Face Space that provides **live federated search** across publicly available
U.S. Government FOIA Electronic Reading Rooms (CIA, FBI, DoD, and more).

## Key Features
- Live async fan-out search (no scraping beyond public endpoints)
- Per-agency source toggles + result counts
- Semantic *search-in-results* using FAISS + sentence-transformers
- Local caching + deduplication
- PDF export of search results
- Inline document preview (where permitted by source)
- Rate-limited, health-checked agency adapters

## Trust & Safety
- Queries only **public FOIA reading rooms**
- Honors robots.txt, rate limits, and agency terms
- No authentication bypass or restricted content
- Designed for research, journalism, and public accountability

## Legal
All content remains hosted by the originating agency.
This tool indexes metadata and snippets for discovery only.
README_PROD.md
ADDED
@@ -0,0 +1,40 @@
# FOIA HF Document Search — Production Build

## Entry Point
- `app.py` — orchestrates ingestion, semantic search, export, and audit hooks.

## Ingestion Adapters (Present)
- CIA Reading Room
- FBI Vault
- Generic Public FOIA

## Missing / Stubbed Adapters (Recommended)
- DoD (incl. components)
- NSA
- DIA
- DHS
- DEA
- ICE

## Vector Backend Assumptions
- Current code supports abstract vector ops.
- Recommended backends:
  - FAISS (local)
  - Chroma (persistent)
  - HuggingFace embeddings
  - OpenAI embeddings (optional)

## Live Federated Search Upgrade
- Async querying via `asyncio` + `httpx`
- Adapter interface with rate limits
- Response caching + deduplication
- Circuit breakers for abuse prevention

## Compliance
- Respect robots.txt where applicable
- Rate limiting per agency
- Redaction before export
- Audit logging enabled

## Build Timestamp
2026-01-09T23:51:16.728748Z
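The "Vector Backend Assumptions" section of README_PROD.md describes abstract vector ops without showing an interface. A minimal sketch of what that abstraction could look like, with FAISS as the local backend; `VectorBackend` and `FaissBackend` are illustrative names, not code from this upload:

```python
# Sketch only: a minimal backend interface plus a FAISS implementation.
from typing import Dict, List, Protocol
import faiss
import numpy as np

class VectorBackend(Protocol):
    def add(self, embeddings: np.ndarray, docs: List[Dict]) -> None: ...
    def search(self, query_emb: np.ndarray, k: int) -> List[Dict]: ...

class FaissBackend:
    def __init__(self, dim: int):
        self.index = faiss.IndexFlatL2(dim)  # exact L2 search, in memory
        self.docs: List[Dict] = []

    def add(self, embeddings: np.ndarray, docs: List[Dict]) -> None:
        self.index.add(embeddings.astype("float32"))
        self.docs.extend(docs)

    def search(self, query_emb: np.ndarray, k: int = 5) -> List[Dict]:
        _, idx = self.index.search(query_emb.astype("float32"), k)
        # FAISS pads with -1 when fewer than k vectors are indexed.
        return [self.docs[i] for i in idx[0] if i != -1]
```

A Chroma-backed class could implement the same two methods, which is what makes the backends interchangeable.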
SOURCES.md
ADDED
@@ -0,0 +1,23 @@
# FOIA Public Sources

All sources listed here are **public FOIA electronic reading rooms** or official public-release libraries.

## Intelligence & Defense
- CIA FOIA Electronic Reading Room — https://www.cia.gov/readingroom/
- FBI Vault — https://vault.fbi.gov/
- DARPA FOIA Library — https://www.darpa.mil/work-with-us/foia
- NRO FOIA Reading Room — https://www.nro.gov/FOIA/
- DoD FOIA Reading Room — https://www.esd.whs.mil/FOIA/Reading-Room/

## Military Branches
- U.S. Army FOIA — https://www.army.mil/foia
- U.S. Navy FOIA — https://www.secnav.navy.mil/foia
- U.S. Air Force FOIA — https://www.af.mil/FOIA/
- U.S. Marine Corps FOIA — https://www.hqmc.marines.mil/Agencies/FOIA/
- U.S. Space Force FOIA — https://www.spaceforce.mil/FOIA/
- U.S. Coast Guard FOIA — https://www.uscg.mil/FOIA/

## Other Agencies
- DHS FOIA Library — https://www.dhs.gov/foia-library
- DEA FOIA Reading Room — https://www.dea.gov/foia
- Secret Service FOIA — https://www.secretservice.gov/foia
__init__.py
ADDED
File without changes
app.py
ADDED
@@ -0,0 +1,51 @@
import gradio as gr
import asyncio
from ingest.cia_reading_room import CIAAdapter
from ingest.fbi_vault_live import FBIAdapter
from ingest.dod_reading_room_live import DoDAdapter
from core.async_search import fanout_search
from core.cache import dedupe
from core.cluster import cluster_results
from core.citations import citation_block
from core.redaction import redaction_confidence
from core.journalist import journalist_export
from core.explain import explain

cia, fbi, dod = CIAAdapter(), FBIAdapter(), DoDAdapter()

async def run(q):
    res = await fanout_search([cia, fbi, dod], q)
    return dedupe(res)

with gr.Blocks() as demo:
    gr.Markdown("# FOIA Federated Search — Supreme")

    q = gr.Textbox(label="Query")
    results_state = gr.State([])

    with gr.Tabs():
        with gr.Tab("Clustered Results"):
            clusters = gr.JSON()
        with gr.Tab("Citations"):
            cites = gr.Markdown()
        with gr.Tab("Explainability"):
            explain_box = gr.JSON()

    preview = gr.JSON(label="Redaction Confidence")

    def _run(q):
        res = asyncio.run(run(q))
        cl = cluster_results(res)
        cites_md = "\n".join(citation_block(r) for r in res[:5])
        explain_data = explain(res)
        red = {r.get("url"): redaction_confidence(r) for r in res}
        return res, cl, cites_md, explain_data, red

    btn = gr.Button("Search")
    btn.click(_run, inputs=q, outputs=[results_state, clusters, cites, explain_box, preview])

    exp = gr.Button("Journalist Export")
    out = gr.File()
    exp.click(lambda r: journalist_export(r, "/tmp/journalist_export.zip"), inputs=results_state, outputs=out)

demo.launch()
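Note that app.py imports `fanout_search` from `core.async_search` and `dedupe` from `core.cache`, yet neither module appears in FILE_INVENTORY.txt. A hedged sketch of what they might look like, assuming each adapter exposes an async `search(query)` that returns a list of result dicts keyed by `url`:

```python
# Sketch only: concurrent fan-out across adapters, then URL-based dedup.
import asyncio
from typing import Dict, List

async def fanout_search(adapters: List, query: str) -> List[Dict]:
    # Query every adapter concurrently; one failing agency must not sink the query.
    tasks = [adapter.search(query) for adapter in adapters]
    results = await asyncio.gather(*tasks, return_exceptions=True)
    merged: List[Dict] = []
    for r in results:
        if isinstance(r, Exception):
            continue  # skip adapters that errored or timed out
        merged.extend(r)
    return merged

def dedupe(results: List[Dict]) -> List[Dict]:
    # Keep the first occurrence of each document URL.
    seen, unique = set(), []
    for r in results:
        key = r.get("url")
        if key not in seen:
            seen.add(key)
            unique.append(r)
    return unique
```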
appeal_pdf.py
ADDED
@@ -0,0 +1,8 @@
from reportlab.platypus import SimpleDocTemplate, Paragraph
from reportlab.lib.styles import getSampleStyleSheet

def generate_appeal_pdf(text, filename="appeal.pdf"):
    doc = SimpleDocTemplate(filename)
    styles = getSampleStyleSheet()
    doc.build([Paragraph(text, styles["BodyText"])])
    return filename
audit.py
ADDED
@@ -0,0 +1,18 @@
import uuid
from datetime import datetime
from typing import Dict, List

_AUDIT_LOG: List[Dict] = []

def log_event(action: str, payload: Dict) -> Dict:
    entry = {
        "id": str(uuid.uuid4()),
        "timestamp": datetime.utcnow().isoformat() + "Z",
        "action": action,
        "payload": payload
    }
    _AUDIT_LOG.append(entry)
    return entry

def export_audit_log() -> List[Dict]:
    return list(_AUDIT_LOG)
collaboration.py
ADDED
@@ -0,0 +1,17 @@
from datasets import Dataset
from typing import Dict, List

_COLLAB: List[Dict] = []

def add_collaboration_note(document: str, note: str) -> Dict:
    record = {
        "document": document,
        "note": note
    }
    _COLLAB.append(record)
    return record

def get_collaboration_dataset() -> Dataset:
    if not _COLLAB:
        return Dataset.from_dict({"document": [], "note": []})
    return Dataset.from_list(_COLLAB)
entity_graph.py
ADDED
@@ -0,0 +1,19 @@
import networkx as nx
from typing import List, Dict

def build_entity_graph(docs: List[Dict]) -> Dict:
    G = nx.Graph()

    for d in docs:
        agency = d.get("agency", "Unknown")
        G.add_node(agency, group="agency")

        # Treat all-caps tokens (e.g., program acronyms) as candidate entities.
        for token in d.get("content", "").split():
            if token.isupper() and len(token) > 2:
                G.add_node(token, group="entity")
                G.add_edge(agency, token)

    return {
        "nodes": [{"id": n, "group": G.nodes[n]["group"]} for n in G.nodes],
        "links": [{"source": u, "target": v} for u, v in G.edges]
    }
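For reference, the shape `build_entity_graph` produces, illustrated with a made-up document record (the agency and content below are hypothetical):

```python
docs = [{"agency": "CIA", "content": "Memo on AATIP and TENCAP programs"}]
graph = build_entity_graph(docs)
# graph["nodes"] -> [{"id": "CIA", "group": "agency"},
#                    {"id": "AATIP", "group": "entity"},
#                    {"id": "TENCAP", "group": "entity"}]
# graph["links"] -> [{"source": "CIA", "target": "AATIP"},
#                    {"source": "CIA", "target": "TENCAP"}]
```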
export_utils.py
ADDED
@@ -0,0 +1,7 @@
import json

def export_json(data):
    path = "/tmp/results.json"
    with open(path, "w") as f:
        json.dump(data, f, indent=2)
    return path
file_structure.txt
ADDED
@@ -0,0 +1,20 @@
foia-chatbot/
├── app.py
├── requirements.txt
│
├── core/
│   ├── search.py
│   ├── analysis.py
│   ├── vector.py
│   ├── index.py
│   ├── explain.py
│   ├── multi_program.py
│   ├── redaction.py
│   └── appeals.py
│
├── data/
│   └── demo/
│       ├── metadata.json
│       └── documents/
│           ├── tencap_sample.txt
│           └── aatip_sample.txt
foia_pdf.py
ADDED
@@ -0,0 +1,70 @@
# foia_pdf.py
from reportlab.lib.pagesizes import LETTER
from reportlab.pdfgen import canvas
from datetime import datetime
from typing import Dict
import os
import uuid

OUTPUT_DIR = "generated_pdfs"
os.makedirs(OUTPUT_DIR, exist_ok=True)


def generate_foia_appeal_pdf(record: Dict) -> str:
    """
    Generates a FOIA appeal draft PDF.
    This does NOT submit anything to any agency.
    """

    filename = f"foia_appeal_{uuid.uuid4().hex}.pdf"
    path = os.path.join(OUTPUT_DIR, filename)

    c = canvas.Canvas(path, pagesize=LETTER)
    width, height = LETTER

    text = c.beginText(40, height - 50)
    text.setFont("Times-Roman", 11)

    text.textLine("FOIA Appeal Draft")
    text.textLine("")
    text.textLine(f"Date: {datetime.utcnow().strftime('%Y-%m-%d')}")
    text.textLine("")
    text.textLine(f"Agency: {record.get('agency')}")
    text.textLine(f"Subject: {record.get('subject')}")
    text.textLine("")
    text.textLine("To Whom It May Concern,")
    text.textLine("")
    text.textLine(
        "This letter serves as a formal appeal regarding the handling of a "
        "Freedom of Information Act (FOIA) request."
    )
    text.textLine("")
    text.textLine(
        "The requested materials concern publicly released or previously "
        "acknowledged records. Disclosure would contribute significantly "
        "to public understanding of government operations."
    )
    text.textLine("")
    text.textLine("Request Description:")
    text.textLine(record.get("description", ""))
    text.textLine("")
    text.textLine(
        "This appeal is submitted in good faith for journalistic, academic, "
        "or public-interest review."
    )
    text.textLine("")
    text.textLine("Sincerely,")
    text.textLine("FOIA Declassified Document Search")
    text.textLine("")
    text.textLine("—")
    text.textLine(
        "Disclaimer: This document is a draft generated for reference only. "
        "It does not constitute legal advice and does not submit a request "
        "to any agency."
    )

    c.drawText(text)
    c.showPage()
    c.save()

    return path
foia_requests.py
ADDED
@@ -0,0 +1,52 @@
from typing import Dict, List
import json
import os
import uuid
from datetime import datetime

FOIA_STORE = "data/foia_requests.json"


def _load_requests() -> List[Dict]:
    if not os.path.exists(FOIA_STORE):
        return []
    try:
        with open(FOIA_STORE, "r", encoding="utf-8") as f:
            return json.load(f)
    except Exception:
        return []


def _save_requests(requests: List[Dict]) -> None:
    os.makedirs(os.path.dirname(FOIA_STORE), exist_ok=True)
    with open(FOIA_STORE, "w", encoding="utf-8") as f:
        json.dump(requests, f, indent=2)


def add_foia_request(
    agency: str,
    subject: str,
    description: str,
    requester_type: str = "Journalist"
) -> Dict:
    """
    Store a FOIA request record (tracking only).
    No submission to agencies is performed.
    """

    record = {
        "id": str(uuid.uuid4()),
        "timestamp": datetime.utcnow().isoformat() + "Z",
        "agency": agency,
        "subject": subject,
        "description": description,
        "requester_type": requester_type,
        "status": "Draft",
        "notes": "Generated by FOIA Declassified Document Search (tracking only)"
    }

    requests = _load_requests()
    requests.append(record)
    _save_requests(requests)

    return record
gitattributes.txt
ADDED
@@ -0,0 +1,35 @@
*.7z filter=lfs diff=lfs merge=lfs -text
*.arrow filter=lfs diff=lfs merge=lfs -text
*.bin filter=lfs diff=lfs merge=lfs -text
*.bz2 filter=lfs diff=lfs merge=lfs -text
*.ckpt filter=lfs diff=lfs merge=lfs -text
*.ftz filter=lfs diff=lfs merge=lfs -text
*.gz filter=lfs diff=lfs merge=lfs -text
*.h5 filter=lfs diff=lfs merge=lfs -text
*.joblib filter=lfs diff=lfs merge=lfs -text
*.lfs.* filter=lfs diff=lfs merge=lfs -text
*.mlmodel filter=lfs diff=lfs merge=lfs -text
*.model filter=lfs diff=lfs merge=lfs -text
*.msgpack filter=lfs diff=lfs merge=lfs -text
*.npy filter=lfs diff=lfs merge=lfs -text
*.npz filter=lfs diff=lfs merge=lfs -text
*.onnx filter=lfs diff=lfs merge=lfs -text
*.ot filter=lfs diff=lfs merge=lfs -text
*.parquet filter=lfs diff=lfs merge=lfs -text
*.pb filter=lfs diff=lfs merge=lfs -text
*.pickle filter=lfs diff=lfs merge=lfs -text
*.pkl filter=lfs diff=lfs merge=lfs -text
*.pt filter=lfs diff=lfs merge=lfs -text
*.pth filter=lfs diff=lfs merge=lfs -text
*.rar filter=lfs diff=lfs merge=lfs -text
*.safetensors filter=lfs diff=lfs merge=lfs -text
saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.tar.* filter=lfs diff=lfs merge=lfs -text
*.tar filter=lfs diff=lfs merge=lfs -text
*.tflite filter=lfs diff=lfs merge=lfs -text
*.tgz filter=lfs diff=lfs merge=lfs -text
*.wasm filter=lfs diff=lfs merge=lfs -text
*.xz filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
requirements.txt
ADDED
@@ -0,0 +1,9 @@
beautifulsoup4
datasets
faiss-cpu
gradio>=4.44.0
httpx
networkx
pandas
requests
sentence-transformers
schemas.py
ADDED
@@ -0,0 +1,8 @@
from typing import List, Dict, Any

def validate_results(results: List[Dict[str, Any]]) -> None:
    for r in results:
        assert isinstance(r["document"], str)
        assert isinstance(r["agency"], str)
        assert isinstance(r["year"], int)
        assert isinstance(r["excerpt"], str)
semantic.py
ADDED
@@ -0,0 +1,18 @@
import faiss
import numpy as np
from sentence_transformers import SentenceTransformer
from typing import List, Dict

model = SentenceTransformer("all-MiniLM-L6-v2")

def build_faiss_index(docs: List[Dict]):
    texts = [d["content"] for d in docs]
    embeddings = model.encode(texts)
    index = faiss.IndexFlatL2(embeddings.shape[1])
    index.add(np.array(embeddings))
    return index, embeddings

def semantic_search(query: str, docs: List[Dict], index):
    q_emb = model.encode([query])
    D, I = index.search(np.array(q_emb), k=5)
    # FAISS pads with -1 when fewer than k vectors are indexed; skip those slots.
    return [docs[i] for i in I[0] if i != -1]
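A short usage example against the two demo documents bundled under `data/demo/documents/` (paths taken from FILE_INVENTORY.txt; the query string is illustrative):

```python
# Index the demo documents, then search within them.
docs = [
    {"content": open("data/demo/documents/aatip_sample.txt", encoding="utf-8").read()},
    {"content": open("data/demo/documents/tencap_sample.txt", encoding="utf-8").read()},
]
index, _ = build_faiss_index(docs)
hits = semantic_search("advanced aerospace threat identification", docs, index)
```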
space.yaml
ADDED
@@ -0,0 +1,8 @@
title: FOIA Federated Search
emoji: 📄
colorFrom: blue
colorTo: gray
sdk: gradio
python_version: "3.10"
app_file: app.py
pinned: false
trust_safety.md
ADDED
@@ -0,0 +1,12 @@
# Trust & Safety Statement

This Space indexes only publicly released FOIA documents.
No private data is collected or stored.

Safeguards:
- Rate limiting
- Redaction pipeline
- Audit logging
- Explicit prohibition on misuse

This tool exists solely to enhance public access to already-released government records.
vector_store.py
ADDED
@@ -0,0 +1,17 @@
import faiss
import numpy as np

# demo embeddings
INDEX = faiss.IndexFlatL2(384)
DOCS = []

def semantic_search(query: str):
    # placeholder demo
    return [
        {
            "document": "UAP Analysis Memo",
            "agency": "DIA",
            "year": 2010,
            "excerpt": "Unidentified aerial phenomena..."
        }
    ]