#!/usr/bin/env python3
"""
PROJECT: I.O.S. D.F.I.R. // ENHANCEMENT SKELETON
Streamlit Application for iOS DFIR Documentation
"""
import streamlit as st
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, List, Optional, Tuple
from dataclasses import dataclass, field
import hashlib
import json
import logging
# Page configuration
# Must be the first Streamlit call in the script; sets the browser tab
# title/icon and opens the app full-width with the sidebar collapsed.
st.set_page_config(
    page_title="DFIR iOS Enhancement",
    page_icon="🔍",
    layout="wide",
    initial_sidebar_state="collapsed"
)
# Custom CSS for cyberpunk aesthetic
# NOTE(review): currently an empty stylesheet placeholder — styles TBD.
CUSTOM_CSS = """
"""
# Inject custom CSS
st.markdown(CUSTOM_CSS, unsafe_allow_html=True)
# Initialize session state for navigation
# Default landing section is 'overview' (the "Manifesto" page).
if 'current_section' not in st.session_state:
    st.session_state.current_section = 'overview'
# Header
def render_header():
    """Render the page header banner (HTML placeholder markup)."""
    banner_html = """
"""
    st.markdown(banner_html, unsafe_allow_html=True)
# Navigation
def render_navigation():
    """Render the horizontal section-navigation bar.

    Draws one equally-sized button per section. Clicking a button stores
    the section key in ``st.session_state.current_section`` and triggers
    ``st.rerun()`` so ``main()`` renders the newly selected section.
    """
    sections = [
        ('overview', 'Manifesto'),
        ('modules', 'Modules'),
        ('pipeline', 'Pipeline'),
        ('schema', 'Data Structs'),
        ('source', 'Source Code')
    ]
    cols = st.columns(len(sections))
    # zip() pairs each column with its (key, label); clearer than
    # enumerate-and-index. The previously computed `active_class` local
    # was dead code and has been removed.
    for col, (key, label) in zip(cols, sections):
        with col:
            # The active section's button is styled "primary" so the user
            # can see where they are.
            is_active = st.session_state.current_section == key
            if st.button(
                label,
                key=f"nav_{key}",
                use_container_width=True,
                type="primary" if is_active else "secondary"
            ):
                st.session_state.current_section = key
                st.rerun()
# Section: Overview
def render_overview():
    """Render the 'Manifesto' section: intro markup, two status cards, and
    a simplified pipeline flow diagram."""
    st.markdown("""
""", unsafe_allow_html=True)
    objective_card = """
Primary Objective
CORE
Upgrade simple SQLite-backed iOS backup extractors into a structured DFIR pipeline. Preserve analyst visibility, logging, and artifact hashing.
"""
    status_card = """
Current Status
DEV
This file is intentionally verbose. Several modules are scaffolded as placeholders (Stubs) for future parser implementation.
"""
    # Two side-by-side info cards.
    for col, card in zip(st.columns(2), (objective_card, status_card)):
        with col:
            st.markdown(card, unsafe_allow_html=True)
    st.markdown("", unsafe_allow_html=True)
    st.markdown("""
""", unsafe_allow_html=True)
    # High-level data-flow diagram for the extraction pipeline.
    st.markdown("""
CLI_ARGS
→
MANIFEST_DB
→
PARSER_MODULES
→
TIMELINE_CSV
""", unsafe_allow_html=True)
# Section: Modules
def render_modules():
    """Render the 'Modules' section: one card per parser module, four across."""
    st.markdown("""
""", unsafe_allow_html=True)
    # Card markup for each parser module, in display order (left to right).
    module_cards = [
        """
CORE
CONTACTS
AddressBook Parser
export_contacts()
- Modern & Legacy Schema
- Phone Normalization
- CSV/JSON Export
""",
        """
CORE
MESSAGING
SMS / iMessage
extract_sms()
- Handle Resolution
- AttributedBody Stub
- Timeline Event Gen
""",
        """
GEO
LOCATION
LocationD / Cache
extract_locations()
- RTCL Location MO
- Visit MO Parsing
- Cell Tower Logs
""",
        """
STUB
FUTURE
Safari / Notes
extract_safari_stub()
- History / Downloads
- Rich Notes
- App Containers
""",
    ]
    # One column per card; data-driven loop instead of four copy-pasted blocks.
    for col, card in zip(st.columns(len(module_cards)), module_cards):
        with col:
            st.markdown(card, unsafe_allow_html=True)
# Section: Pipeline
def render_pipeline():
    """Render the 'Pipeline' section: the three processing stages with status."""
    section_header = """
"""
    st.markdown(section_header, unsafe_allow_html=True)
    # Stage list: init -> manifest indexing -> extraction/export.
    stage_markup = """
STEP 1
Initialization
DONE Setup Logging. Validate Backup Root. Parse CLI Arguments.
STEP 2
Manifest Indexing
ACTIVE Load `Manifest.db`. Search for `sms.db`, `AddressBook`, and Location artifacts.
STEP 3
Extraction & Export
NEXT Run Parser Modules. Generate CSV/JSON. Build Unified Timeline.
"""
    st.markdown(stage_markup, unsafe_allow_html=True)
# Section: Schema
def render_schema():
    """Render the 'Data Structs' section: the dataclass models as markup."""
    st.markdown("""
""", unsafe_allow_html=True)
    # Config / metadata dataclasses as displayed to the analyst.
    config_markup = """
Config & Metadata
@dataclass
class CaseMetadata:
case_name: str = "UNSPECIFIED_CASE"
examiner: str = "UNSPECIFIED_EXAMINER"
evidence_id: str = "UNSPECIFIED_EVIDENCE"
@dataclass
class AppConfig:
backup_root: Path
manifest_db: Path
output_root: Path
query_terms: List[str] = field(default_factory=list)
verbose: bool = False
hash_exports: bool = True
copy_raw_files: bool = True
export_csv: bool = True
export_jsonl: bool = True
"""
    st.markdown(config_markup, unsafe_allow_html=True)
    st.markdown("", unsafe_allow_html=True)
    # Unified timeline event model.
    event_markup = """
Timeline Event Model
@dataclass
class TimelineEvent:
timestamp: Optional[str]
artifact_type: str
source_file: str
summary: str
attributes: Dict[str, Any] = field(default_factory=dict)
"""
    st.markdown(event_markup, unsafe_allow_html=True)
# Section: Source Code
def render_source():
    """Render the 'Source Code' section: display the full standalone CLI
    skeleton with syntax highlighting via ``st.code``."""
    st.markdown("""
""", unsafe_allow_html=True)
    # The complete CLI script embedded verbatim as a string literal; it is
    # display data only and is never executed by this app.
    source_code = '''#!/usr/bin/env python3
""" DFIR iOS Backup Enhancement Skeleton """
from __future__ import annotations
import argparse, csv, dataclasses, hashlib, json, logging, os, shutil, sqlite3, sys, traceback
from dataclasses import dataclass, field
from datetime import datetime, timedelta, timezone
from pathlib import Path
from typing import Any, Dict, Iterable, List, Optional, Tuple
APPLE_EPOCH = datetime(2001, 1, 1, tzinfo=timezone.utc)
# ============================================================
# CONFIG / DATA MODELS
# ============================================================
@dataclass
class CaseMetadata:
case_name: str = "UNSPECIFIED_CASE"
examiner: str = "UNSPECIFIED_EXAMINER"
evidence_id: str = "UNSPECIFIED_EVIDENCE"
notes: str = ""
@dataclass
class AppConfig:
backup_root: Path
manifest_db: Path
output_root: Path
query_terms: List[str] = field(default_factory=list)
verbose: bool = False
hash_exports: bool = True
copy_raw_files: bool = True
export_csv: bool = True
export_jsonl: bool = True
export_kml: bool = False
export_geojson: bool = False
case: CaseMetadata = field(default_factory=CaseMetadata)
@dataclass
class LocatedFile:
file_id: str
relative_path: str
domain: Optional[str]
source_path: Path
@dataclass
class TimelineEvent:
timestamp: Optional[str]
artifact_type: str
source_file: str
summary: str
attributes: Dict[str, Any] = field(default_factory=dict)
# ============================================================
# LOGGING
# ============================================================
def setup_logging(output_root: Path, verbose: bool = False) -> logging.Logger:
output_root.mkdir(parents=True, exist_ok=True)
log_path = output_root / "dfir_run.log"
logger = logging.getLogger("dfir_ios")
logger.setLevel(logging.DEBUG if verbose else logging.INFO)
logger.handlers.clear()
fmt = logging.Formatter("%(asctime)s | %(levelname)s | %(message)s")
fh = logging.FileHandler(log_path, encoding="utf-8")
fh.setLevel(logging.DEBUG)
fh.setFormatter(fmt)
logger.addHandler(fh)
sh = logging.StreamHandler(sys.stdout)
sh.setLevel(logging.DEBUG if verbose else logging.INFO)
sh.setFormatter(fmt)
logger.addHandler(sh)
logger.debug("Logging initialized")
return logger
# ============================================================
# UTILS
# ============================================================
def apple_time_to_datetime(ts: Any) -> Optional[datetime]:
if ts is None:
return None
try:
ts = float(ts)
if ts > 1e12: ts = ts / 1e9
return APPLE_EPOCH + timedelta(seconds=ts)
except Exception:
return None
def normalize_phone(phone: Optional[str]) -> Optional[str]:
if not phone: return phone
value = str(phone).replace("+1", "").replace(" ", "").replace("-", "").replace("(", "").replace(")", "")
return value.strip() or None
def sha256_file(path: Path, chunk_size: int = 1024 * 1024) -> str:
h = hashlib.sha256()
with path.open("rb") as f:
while True:
chunk = f.read(chunk_size)
if not chunk: break
h.update(chunk)
return h.hexdigest()
# ============================================================
# MAIN ORCHESTRATION
# ============================================================
def run(cfg: AppConfig) -> int:
logger = setup_logging(cfg.output_root, cfg.verbose)
logger.info("Starting DFIR iOS backup extraction")
logger.info(f"Case: {cfg.case.case_name}")
logger.info(f"Examiner: {cfg.case.examiner}")
logger.info(f"Evidence ID: {cfg.case.evidence_id}")
# Validate backup root
if not cfg.backup_root.exists():
logger.error(f"Backup root does not exist: {cfg.backup_root}")
return 1
# Load manifest database
if not cfg.manifest_db.exists():
logger.error(f"Manifest database does not exist: {cfg.manifest_db}")
return 1
logger.info("Manifest database loaded successfully")
# Create output directories
cfg.output_root.mkdir(parents=True, exist_ok=True)
(cfg.output_root / "exports").mkdir(exist_ok=True)
(cfg.output_root / "raw").mkdir(exist_ok=True)
(cfg.output_root / "timeline").mkdir(exist_ok=True)
logger.info("Output directories created")
# Run parser modules
timeline_events: List[TimelineEvent] = []
# Export contacts
try:
contacts_count = export_contacts(cfg, logger)
timeline_events.append(TimelineEvent(
timestamp=datetime.now(timezone.utc).isoformat(),
artifact_type="CONTACTS",
source_file="AddressBook.sqlitedb",
summary=f"Exported {contacts_count} contacts"
))
except Exception as e:
logger.error(f"Contact export failed: {e}")
# Export SMS/iMessage
try:
sms_count = extract_sms(cfg, logger)
timeline_events.append(TimelineEvent(
timestamp=datetime.now(timezone.utc).isoformat(),
artifact_type="MESSAGES",
source_file="sms.db",
summary=f"Exported {sms_count} messages"
))
except Exception as e:
logger.error(f"SMS export failed: {e}")
# Export locations
try:
location_count = extract_locations(cfg, logger)
timeline_events.append(TimelineEvent(
timestamp=datetime.now(timezone.utc).isoformat(),
artifact_type="LOCATIONS",
source_file="LocationD/Cache.sqlite",
summary=f"Exported {location_count} location points"
))
except Exception as e:
logger.error(f"Location export failed: {e}")
# Generate timeline
if cfg.export_csv:
generate_timeline_csv(timeline_events, cfg.output_root / "timeline" / "unified_timeline.csv")
logger.info("Timeline CSV generated")
if cfg.export_jsonl:
generate_timeline_jsonl(timeline_events, cfg.output_root / "timeline" / "unified_timeline.jsonl")
logger.info("Timeline JSONL generated")
logger.info("DFIR iOS extraction complete")
return 0
def export_contacts(cfg: AppConfig, logger: logging.Logger) -> int:
"""Export contacts from AddressBook database"""
logger.info("Exporting contacts...")
# Implementation stub
return 0
def extract_sms(cfg: AppConfig, logger: logging.Logger) -> int:
"""Extract SMS/iMessage data"""
logger.info("Extracting SMS/iMessage data...")
# Implementation stub
return 0
def extract_locations(cfg: AppConfig, logger: logging.Logger) -> int:
"""Extract location data from LocationD"""
logger.info("Extracting location data...")
# Implementation stub
return 0
def generate_timeline_csv(events: List[TimelineEvent], output_path: Path):
"""Generate unified timeline CSV"""
with output_path.open("w", newline="", encoding="utf-8") as f:
writer = csv.writer(f)
writer.writerow(["timestamp", "artifact_type", "source_file", "summary", "attributes"])
for event in events:
writer.writerow([
event.timestamp,
event.artifact_type,
event.source_file,
event.summary,
json.dumps(event.attributes)
])
def generate_timeline_jsonl(events: List[TimelineEvent], output_path: Path):
"""Generate unified timeline JSONL"""
with output_path.open("w", encoding="utf-8") as f:
for event in events:
f.write(json.dumps(dataclasses.asdict(event)) + "\\n")
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="DFIR iOS Backup Enhancement")
parser.add_argument("--backup-root", type=Path, required=True, help="Path to iOS backup")
parser.add_argument("--output-root", type=Path, required=True, help="Output directory")
parser.add_argument("--case-name", type=str, default="UNSPECIFIED_CASE")
parser.add_argument("--examiner", type=str, default="UNSPECIFIED_EXAMINER")
parser.add_argument("--evidence-id", type=str, default="UNSPECIFIED_EVIDENCE")
parser.add_argument("--verbose", "-v", action="store_true")
args = parser.parse_args()
manifest_db = args.backup_root / "Manifest.db"
cfg = AppConfig(
backup_root=args.backup_root,
manifest_db=manifest_db,
output_root=args.output_root,
verbose=args.verbose,
case=CaseMetadata(
case_name=args.case_name,
examiner=args.examiner,
evidence_id=args.evidence_id
)
)
sys.exit(run(cfg))'''
    # NOTE(review): the embedded script's indentation appears to have been
    # stripped upstream — the listing as stored is not runnable Python.
    # Restore the original indentation before shipping; confirm against
    # the canonical dfir_ios_enhancement.py.
    # File banner shown above the code listing.
    st.markdown("""
dfir_ios_enhancement.py
READY
""", unsafe_allow_html=True)
    st.code(source_code, language="python")
# Main app
def main():
    """Application entry point: header, navigation bar, then the section
    currently selected in session state."""
    render_header()
    render_navigation()
    st.markdown("", unsafe_allow_html=True)
    # Dispatch table: section key -> renderer function.
    renderers = {
        'overview': render_overview,
        'modules': render_modules,
        'pipeline': render_pipeline,
        'schema': render_schema,
        'source': render_source,
    }
    renderer = renderers.get(st.session_state.current_section)
    # Unknown keys render nothing, matching the original if/elif chain.
    if renderer is not None:
        renderer()
# Standard script entry guard so the app only runs when executed directly.
if __name__ == "__main__":
    main()