RFTSystems committed on
Commit
e17a541
·
verified ·
1 Parent(s): 1b97147

Create rft_flightrecorder.py

Browse files
Files changed (1) hide show
  1. rft_flightrecorder.py +657 -0
rft_flightrecorder.py ADDED
@@ -0,0 +1,657 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+ import os
3
+ import uuid
4
+ import hashlib
5
+ import zipfile
6
+ from datetime import datetime, timezone
7
+ from typing import Any, Dict, List, Optional, Tuple
8
+
9
+ from cryptography.hazmat.primitives.asymmetric.ed25519 import (
10
+ Ed25519PrivateKey,
11
+ Ed25519PublicKey,
12
+ )
13
+ from cryptography.hazmat.primitives import serialization
14
+
15
+
16
+ EVENT_SPEC = "rft-flight-event-v0"
17
+ ROOT_SPEC = "rft-flight-session-root-v0"
18
+ DEFAULT_LOG_PATH = "flightlog.jsonl"
19
+
20
+
21
+ # ============================================================
22
+ # Canonical JSON + hashing
23
+ # ============================================================
24
+
25
def canon(obj) -> bytes:
    """Encode *obj* as canonical JSON bytes (stable across machines).

    Keys are sorted and separators fixed so the same logical value always
    produces the same byte sequence (and therefore the same hash).
    """
    text = json.dumps(obj, ensure_ascii=False, sort_keys=True, separators=(",", ":"))
    return text.encode("utf-8")
33
+
34
+
35
def sha256_hex(b: bytes) -> str:
    """Return the hex-encoded SHA-256 digest of *b*."""
    digest = hashlib.sha256(b)
    return digest.hexdigest()
37
+
38
+
39
def now_utc_iso() -> str:
    """Return the current UTC time as a timezone-aware ISO-8601 string."""
    stamp = datetime.now(timezone.utc)
    return stamp.isoformat()
41
+
42
+
43
def short(h: str, n: int = 12) -> str:
    """Return at most the first *n* characters of *h* (None-safe, stripped)."""
    cleaned = (h or "").strip()
    # Slicing already handles strings shorter than n.
    return cleaned[:n]
46
+
47
+
48
+ # ============================================================
49
+ # Key management (Ed25519)
50
+ # ============================================================
51
+
52
def gen_keys() -> Tuple[str, str]:
    """Generate a fresh Ed25519 keypair; returns (private_hex, public_hex)."""
    private_key = Ed25519PrivateKey.generate()

    raw_private = private_key.private_bytes(
        encoding=serialization.Encoding.Raw,
        format=serialization.PrivateFormat.Raw,
        encryption_algorithm=serialization.NoEncryption(),
    )
    raw_public = private_key.public_key().public_bytes(
        encoding=serialization.Encoding.Raw,
        format=serialization.PublicFormat.Raw,
    )
    return raw_private.hex(), raw_public.hex()
66
+
67
+
68
def load_sk(sk_hex: str) -> Ed25519PrivateKey:
    """Reconstruct an Ed25519 private key from its raw hex encoding."""
    raw = bytes.fromhex(sk_hex.strip())
    return Ed25519PrivateKey.from_private_bytes(raw)
70
+
71
+
72
def load_pk(pk_hex: str) -> Ed25519PublicKey:
    """Reconstruct an Ed25519 public key from its raw hex encoding."""
    raw = bytes.fromhex(pk_hex.strip())
    return Ed25519PublicKey.from_public_bytes(raw)
74
+
75
+
76
+ # ============================================================
77
+ # Payload parsing
78
+ # ============================================================
79
+
80
def parse_payload_text(payload_text: str) -> Dict[str, Any]:
    """Parse user-supplied payload text into a dict.

    JSON objects are returned as-is; other JSON values are wrapped under
    "_value"; non-JSON text is wrapped under "_text"; empty/None input
    yields an empty dict.
    """
    stripped = (payload_text or "").strip()
    if not stripped:
        return {}
    try:
        parsed = json.loads(stripped)
    except Exception:
        return {"_text": stripped}
    if isinstance(parsed, dict):
        return parsed
    return {"_value": parsed}
92
+
93
+
94
+ # ============================================================
95
+ # Log IO
96
+ # ============================================================
97
+
98
def read_jsonl(path: str) -> Tuple[List[Dict[str, Any]], int]:
    """Read a JSONL file; returns (dict_records, corrupt_line_count).

    Blank lines are skipped. Lines that are not valid JSON objects are
    counted as corrupt instead of raising, so one bad line never takes
    the whole log down. A missing file reads as empty.
    """
    if not os.path.exists(path):
        return [], 0

    records: List[Dict[str, Any]] = []
    bad = 0
    with open(path, "r", encoding="utf-8") as fh:
        for raw in fh:
            raw = raw.strip()
            if not raw:
                continue
            try:
                parsed = json.loads(raw)
            except Exception:
                bad += 1
                continue
            if isinstance(parsed, dict):
                records.append(parsed)
            else:
                # Valid JSON but not an object (e.g. a bare list) counts
                # as corrupt too.
                bad += 1
    return records, bad
119
+
120
+
121
def append_jsonl(path: str, obj: Dict[str, Any]) -> None:
    """Append *obj* to *path* as one JSON line (file is created if absent)."""
    line = json.dumps(obj, ensure_ascii=False)
    with open(path, "a", encoding="utf-8") as fh:
        fh.write(line + "\n")
124
+
125
+
126
+ # ============================================================
127
+ # Core event model
128
+ # ============================================================
129
+
130
def make_event_core(
    session_id: str,
    seq: int,
    event_type: str,
    payload: Dict[str, Any],
    parent_event_hash: str,
    prev_event_hash: str,
    meta: Dict[str, Any],
) -> Dict[str, Any]:
    """Build the hashable core of an event (everything except hash/signature).

    A falsy prev hash is normalised to 64 zeros (chain genesis); a missing
    event type defaults to "note"; non-dict payload/meta are coerced so the
    stored record is always JSON-object shaped.
    """
    prev = prev_event_hash.strip() if prev_event_hash else "0" * 64
    core: Dict[str, Any] = {
        "spec": EVENT_SPEC,
        "ts_utc": now_utc_iso(),
        "session_id": session_id,
        "seq": int(seq),
        "event_type": (event_type or "note").strip(),
        "parent_event_hash_sha256": (parent_event_hash or "").strip(),
        "prev_event_hash_sha256": prev,
    }
    core["payload"] = payload if isinstance(payload, dict) else {"_payload": payload}
    core["meta"] = meta if isinstance(meta, dict) else {}
    return core
150
+
151
+
152
def events_for_session(all_events: List[Dict[str, Any]], session_id: str) -> List[Dict[str, Any]]:
    """Filter *all_events* down to hashed flight events for one session."""
    sid = (session_id or "").strip()
    if not sid:
        return []
    matched: List[Dict[str, Any]] = []
    for ev in all_events:
        if not isinstance(ev, dict):
            continue
        if ev.get("session_id") != sid or ev.get("spec") != EVENT_SPEC:
            continue
        # Only events that were actually sealed with a hash count.
        if "event_hash_sha256" not in ev:
            continue
        matched.append(ev)
    return matched
163
+
164
+
165
def last_event_for_session(all_events: List[Dict[str, Any]], session_id: str) -> Optional[Dict[str, Any]]:
    """Return the most recently appended event for *session_id*, if any."""
    matching = events_for_session(all_events, session_id)
    if not matching:
        return None
    return matching[-1]
168
+
169
+
170
def next_seq(all_events: List[Dict[str, Any]], session_id: str) -> int:
    """Return the sequence number the next event in this session should use."""
    tail = last_event_for_session(all_events, session_id)
    if tail is None:
        return 1
    try:
        return int(tail.get("seq", 0)) + 1
    except Exception:
        # A garbled seq on the tail event restarts numbering rather than crashing.
        return 1
178
+
179
+
180
+ # ============================================================
181
+ # Append events (hash-chained; optional signature)
182
+ # ============================================================
183
+
184
def append_event(
    log_path: str,
    session_id: str,
    event_type: str,
    payload_text: str,
    parent_event_hash: str,
    sign_event: bool,
    sk_hex: str,
    model_id: str,
    run_mode: str,
) -> Tuple[Optional[Dict[str, Any]], str]:
    """Append one hash-chained (optionally Ed25519-signed) event to the log.

    The event's prev-hash links it to the session tail read from *log_path*;
    when no explicit parent is given the parent defaults to the previous
    event (linear causality). Returns (event, message); event is None on
    failure.
    """
    sid = (session_id or "").strip()
    if not sid:
        return None, "Missing session_id."

    existing, _corrupt = read_jsonl(log_path)

    meta = {
        "model_id": (model_id or "unknown").strip(),
        "run_mode": (run_mode or "unknown").strip(),
    }
    payload = parse_payload_text(payload_text)

    tail = last_event_for_session(existing, sid)
    prev_hash = tail.get("event_hash_sha256") if tail else "0" * 64
    seq = next_seq(existing, sid)

    # Default the causal parent to the previous event when the caller
    # did not name one explicitly (never for the first event).
    parent = (parent_event_hash or "").strip()
    if seq > 1 and not parent:
        parent = prev_hash

    core = make_event_core(
        session_id=sid,
        seq=seq,
        event_type=event_type,
        payload=payload,
        parent_event_hash=parent,
        prev_event_hash=prev_hash,
        meta=meta,
    )

    # The hash covers the canonical core only; hash and signature fields
    # are added on top.
    event_hash = sha256_hex(canon(core))
    event = dict(core, event_hash_sha256=event_hash)

    if sign_event:
        if not (sk_hex and sk_hex.strip()):
            return None, "Signing enabled but private key is missing."
        try:
            signature = load_sk(sk_hex).sign(bytes.fromhex(event_hash))
        except Exception:
            return None, "Failed to sign event (invalid private key?)."
        event["signature_ed25519"] = signature.hex()

    append_jsonl(log_path, event)
    return event, f"OK. Appended event #{seq} ({event_type}). Hash: {event_hash}"
242
+
243
+
244
def start_session(
    log_path: str,
    model_id: str,
    run_mode: str,
    notes: str,
    sign_start: bool,
    sk_hex: str,
) -> Tuple[str, str]:
    """Open a new session: mint an id and append its 'session_start' event.

    Returns (session_id, message); session_id is "" when the start event
    could not be appended.
    """
    sid = uuid.uuid4().hex
    start_payload = json.dumps({"notes": (notes or "").strip()}, ensure_ascii=False)

    event, message = append_event(
        log_path=log_path,
        session_id=sid,
        event_type="session_start",
        payload_text=start_payload,
        parent_event_hash="",
        sign_event=sign_start,
        sk_hex=sk_hex,
        model_id=model_id,
        run_mode=run_mode,
    )
    if event is None:
        return "", message
    return sid, f"OK. Session started: {sid} (first hash: {event['event_hash_sha256']})"
269
+
270
+
271
def finalise_session(
    log_path: str,
    session_id: str,
    sign_anchor: bool,
    sk_hex: str,
    model_id: str,
    run_mode: str,
) -> Tuple[Optional[Dict[str, Any]], str]:
    """Close a session by appending a 'session_end' event carrying an anchor.

    The anchor summarises the chain as it exists BEFORE the session_end
    event is appended (first hash, last hash, event count), hashed into a
    root hash and optionally Ed25519-signed. Returns (anchor, message);
    anchor is None on failure.
    """
    sid = (session_id or "").strip()
    if not sid:
        return None, "Missing session_id."

    all_events, _corrupt = read_jsonl(log_path)
    evs = events_for_session(all_events, sid)
    if not evs:
        return None, "No events found for this session."

    # Chain summary over the events currently in the log; the session_end
    # event appended below is NOT included in these figures.
    first_hash = evs[0]["event_hash_sha256"]
    last_hash = evs[-1]["event_hash_sha256"]
    count = len(evs)

    root_core = {
        "spec": ROOT_SPEC,
        "session_id": sid,
        "first_event_hash_sha256": first_hash,
        "last_event_hash_sha256": last_hash,
        "event_count": count,
    }
    root_hash = sha256_hex(canon(root_core))

    # The anchor extends the hashed core with provenance metadata; only
    # root_core participates in root_hash.
    anchor = dict(root_core)
    anchor["root_hash_sha256"] = root_hash
    anchor["created_utc"] = now_utc_iso()
    anchor["model_id"] = (model_id or "unknown").strip()
    anchor["run_mode"] = (run_mode or "unknown").strip()

    if sign_anchor:
        if not sk_hex or not sk_hex.strip():
            return None, "Anchor signing enabled but private key is missing."
        try:
            sk = load_sk(sk_hex)
            # The signature is over the raw root-hash bytes, matching how
            # append_event signs event hashes.
            sig = sk.sign(bytes.fromhex(root_hash))
            anchor["signature_ed25519"] = sig.hex()
        except Exception:
            return None, "Failed to sign anchor (invalid private key?)."

    # Record the anchor on-chain as the session's final event; its parent
    # is pinned to the last pre-finalisation event hash.
    payload_text = json.dumps({"anchor": anchor}, ensure_ascii=False)
    ev, msg = append_event(
        log_path=log_path,
        session_id=sid,
        event_type="session_end",
        payload_text=payload_text,
        parent_event_hash=last_hash,
        sign_event=sign_anchor,
        sk_hex=sk_hex,
        model_id=model_id,
        run_mode=run_mode,
    )
    if not ev:
        return None, msg

    return anchor, f"OK. Session finalised. Root hash: {root_hash} (last event hash: {ev['event_hash_sha256']})"
333
+
334
+
335
+ # ============================================================
336
+ # Verification
337
+ # ============================================================
338
+
339
def verify_session_from_events(
    evs: List[Dict[str, Any]],
    session_id: str,
    pk_hex: str = "",
    require_signatures: bool = False,
) -> Tuple[str, bool, str]:
    """Verify one session's hash chain (and optionally its signatures).

    *evs* must be the session's events sorted by seq. Returns
    (status, ok, report): status is "PASS"/"FAIL" (or an argument-error
    message), ok is the boolean verdict, report is a line-per-check log.
    """
    sid = (session_id or "").strip()
    if not sid:
        return "Missing session_id.", False, ""

    if not evs:
        return "No events found for this session.", False, ""

    report: List[str] = []
    ok = True

    pk = None
    if require_signatures:
        if not pk_hex or not pk_hex.strip():
            return "Public key required to verify signatures.", False, ""
        try:
            pk = load_pk(pk_hex)
        except Exception:
            return "Invalid public key.", False, ""

    expected_prev = "0" * 64
    expected_seq = 1

    for i, e in enumerate(evs):
        # An event missing required fields cannot be checked further; report
        # every missing field and skip to the next event. (Bug fix: the old
        # `continue` only skipped the field loop, so a malformed event fell
        # through and crashed the verifier with a KeyError below.)
        missing = [
            k
            for k in (
                "spec", "ts_utc", "session_id", "seq", "event_type",
                "prev_event_hash_sha256", "payload", "meta", "event_hash_sha256",
            )
            if k not in e
        ]
        if missing:
            ok = False
            for k in missing:
                report.append(f"[FAIL] Event {i+1}: missing field '{k}'.")
            continue

        if e.get("spec") != EVENT_SPEC:
            ok = False
            report.append(f"[FAIL] Bad spec at seq {e.get('seq')}.")
            continue

        # Tolerate a tampered non-integer seq: report a mismatch instead of
        # crashing on int().
        try:
            seq_val = int(e.get("seq", -1))
        except Exception:
            seq_val = -1
        if seq_val != expected_seq:
            ok = False
            report.append(f"[FAIL] Seq mismatch: got {e.get('seq')} expected {expected_seq}.")
        expected_seq += 1

        if e.get("prev_event_hash_sha256") != expected_prev:
            ok = False
            report.append(
                f"[FAIL] Chain broken at seq {e.get('seq')}: prev {short(e.get('prev_event_hash_sha256'))} expected {short(expected_prev)}."
            )

        # Recompute the event hash over the canonical core fields (must
        # mirror make_event_core's layout exactly).
        core = {
            "spec": e["spec"],
            "ts_utc": e["ts_utc"],
            "session_id": e["session_id"],
            "seq": seq_val,
            "event_type": e["event_type"],
            "parent_event_hash_sha256": e.get("parent_event_hash_sha256", ""),
            "prev_event_hash_sha256": e["prev_event_hash_sha256"],
            "payload": e["payload"],
            "meta": e["meta"],
        }
        h = sha256_hex(canon(core))
        if h != e["event_hash_sha256"]:
            ok = False
            report.append(f"[FAIL] Hash mismatch at seq {e.get('seq')}: stored {short(e['event_hash_sha256'])} recomputed {short(h)}.")

        if require_signatures:
            sig_hex = (e.get("signature_ed25519") or "").strip()
            if not sig_hex:
                ok = False
                report.append(f"[FAIL] Missing signature at seq {e.get('seq')}.")
            else:
                try:
                    # Signatures cover the raw event-hash bytes.
                    pk.verify(bytes.fromhex(sig_hex), bytes.fromhex(e["event_hash_sha256"]))
                except Exception:
                    ok = False
                    report.append(f"[FAIL] Bad signature at seq {e.get('seq')}.")

        expected_prev = e["event_hash_sha256"]

    # Anchor check: the anchor written by finalise_session summarises the
    # chain as it existed BEFORE the session_end event was appended, so the
    # root must be recomputed over the events PRECEDING session_end.
    # (Bug fix: the old code included session_end itself, so every properly
    # finalised session failed its own anchor check.)
    end_indices = [i for i, e in enumerate(evs) if e.get("event_type") == "session_end"]
    if end_indices:
        end_idx = end_indices[-1]
        se_payload = evs[end_idx].get("payload")
        anchor = se_payload.get("anchor") if isinstance(se_payload, dict) else None
        if isinstance(anchor, dict):
            chain = evs[:end_idx]
            if chain:
                root_core = {
                    "spec": ROOT_SPEC,
                    "session_id": sid,
                    "first_event_hash_sha256": chain[0].get("event_hash_sha256"),
                    "last_event_hash_sha256": chain[-1].get("event_hash_sha256"),
                    "event_count": len(chain),
                }
                root_hash = sha256_hex(canon(root_core))
                if anchor.get("root_hash_sha256") != root_hash:
                    ok = False
                    report.append("[FAIL] Session anchor root hash does not match current event chain.")
                else:
                    report.append("[OK] Session anchor matches current event chain.")
            else:
                ok = False
                report.append("[FAIL] session_end is the only event; anchor cannot cover an empty chain.")
        else:
            report.append("[WARN] session_end found but anchor payload is missing/invalid.")

    if ok:
        report.insert(0, f"[PASS] Session verified: {len(evs)} events, chain intact.")
    else:
        report.insert(0, f"[FAIL] Session verification failed: {len(evs)} events checked.")

    return ("PASS" if ok else "FAIL"), ok, "\n".join(report)
451
+
452
+
453
def verify_session(log_path: str, session_id: str, pk_hex: str, require_signatures: bool) -> Tuple[str, bool, str]:
    """Load a session's events from the log, order them by seq, and verify."""
    records, _corrupt = read_jsonl(log_path)
    session_events = events_for_session(records, session_id)
    # Seq order must hold even if the file's lines were rearranged.
    session_events.sort(key=lambda ev: int(ev.get("seq", 0)))
    return verify_session_from_events(
        session_events, session_id, pk_hex=pk_hex, require_signatures=require_signatures
    )
459
+
460
+
461
+ # ============================================================
462
+ # Timeline + listing
463
+ # ============================================================
464
+
465
def session_timeline_rows(log_path: str, session_id: str) -> Tuple[List[List[Any]], str]:
    """Return display rows (one per event, in seq order) for a session.

    Columns: seq, timestamp, type, model, run mode, parent hash, prev hash,
    event hash, signed yes/no.
    """
    sid = (session_id or "").strip()
    if not sid:
        return [], "Missing session_id."

    records, _corrupt = read_jsonl(log_path)
    session_events = events_for_session(records, sid)
    if not session_events:
        return [], "No events found."

    session_events.sort(key=lambda ev: int(ev.get("seq", 0)))

    rows: List[List[Any]] = [
        [
            int(ev.get("seq", 0)),
            ev.get("ts_utc", ""),
            ev.get("event_type", ""),
            ev.get("meta", {}).get("model_id", ""),
            ev.get("meta", {}).get("run_mode", ""),
            ev.get("parent_event_hash_sha256", ""),
            ev.get("prev_event_hash_sha256", ""),
            ev.get("event_hash_sha256", ""),
            "yes" if ev.get("signature_ed25519") else "no",
        ]
        for ev in session_events
    ]
    return rows, f"Loaded {len(rows)} events."
491
+
492
+
493
def get_event_by_hash(log_path: str, session_id: str, event_hash: str) -> Tuple[Optional[Dict[str, Any]], str]:
    """Look up a single event in a session by its exact event hash."""
    sid = (session_id or "").strip()
    wanted = (event_hash or "").strip()
    if not sid or not wanted:
        return None, "Missing session_id or event hash."

    records, _corrupt = read_jsonl(log_path)
    match = next(
        (ev for ev in events_for_session(records, sid) if ev.get("event_hash_sha256") == wanted),
        None,
    )
    if match is None:
        return None, "Not found."
    return match, "OK."
505
+
506
+
507
def list_sessions(log_path: str) -> Tuple[List[str], str]:
    """Return the sorted session ids present in the log, plus a summary message."""
    records, corrupt = read_jsonl(log_path)
    seen: Dict[str, int] = {}
    for rec in records:
        if not (isinstance(rec, dict) and rec.get("spec") == EVENT_SPEC):
            continue
        sid = rec.get("session_id")
        if sid:
            seen[sid] = seen.get(sid, 0) + 1
    ordered = sorted(seen)
    return ordered, f"Found {len(ordered)} sessions. Corrupt lines ignored: {corrupt}."
518
+
519
+
520
def diagnostics(log_path: str) -> Dict[str, Any]:
    """Summarise the log: file size, event/session counts, signature coverage."""
    records, corrupt = read_jsonl(log_path)
    exists = os.path.exists(log_path)
    size = os.path.getsize(log_path) if exists else 0

    session_ids = set()
    signed = 0
    total = 0
    for rec in records:
        if not (isinstance(rec, dict) and rec.get("spec") == EVENT_SPEC):
            continue
        total += 1
        if rec.get("session_id"):
            session_ids.add(rec["session_id"])
        if rec.get("signature_ed25519"):
            signed += 1

    return {
        "log_path": log_path,
        "exists": exists,
        "bytes": size,
        "total_events": total,
        "sessions": len(session_ids),
        "signed_events": signed,
        "corrupt_lines_ignored": corrupt,
    }
542
+
543
+
544
+ # ============================================================
545
+ # Export bundle
546
+ # ============================================================
547
+
548
def export_session_bundle(log_path: str, session_id: str) -> Tuple[Optional[str], str]:
    """Export one session as a zip bundle (events JSONL + verification report).

    The bundle is written to the current working directory as
    rft_flight_bundle_<session_id>.zip. Returns (zip_path, message);
    zip_path is None on failure.
    """
    sid = (session_id or "").strip()
    if not sid:
        return None, "Missing session_id."

    all_events, _corrupt = read_jsonl(log_path)
    evs = events_for_session(all_events, sid)
    if not evs:
        return None, "No events found."

    evs.sort(key=lambda x: int(x.get("seq", 0)))

    # Include a chain-verification report so recipients can see the state
    # as of export time (signatures are not checked here).
    status, ok, report = verify_session_from_events(evs, sid, pk_hex="", require_signatures=False)

    events_text = "".join(json.dumps(e, ensure_ascii=False) + "\n" for e in evs)
    report_text = (
        f"session_id: {sid}\n"
        f"status: {status}\n"
        f"ok: {ok}\n\n"
        f"{report}\n"
    )

    zip_name = f"rft_flight_bundle_{sid}.zip"
    # Write members directly into the archive. The previous implementation
    # staged them on disk in a tmp_export/ directory and never cleaned up,
    # leaking per-session files on every export.
    with zipfile.ZipFile(zip_name, "w", compression=zipfile.ZIP_DEFLATED) as z:
        z.writestr(f"{sid}_events.jsonl", events_text)
        z.writestr(f"{sid}_verify_report.txt", report_text)

    return zip_name, f"OK. Exported {zip_name} ({len(evs)} events)."
584
+
585
+
586
+ # ============================================================
587
+ # Import bundle (third-party verification)
588
+ # ============================================================
589
+
590
+ def _read_jsonl_from_zip(z: zipfile.ZipFile, member: str) -> List[Dict[str, Any]]:
591
+ out: List[Dict[str, Any]] = []
592
+ raw_text = z.read(member).decode("utf-8", errors="replace")
593
+ for raw in raw_text.splitlines():
594
+ raw = raw.strip()
595
+ if not raw:
596
+ continue
597
+ try:
598
+ obj = json.loads(raw)
599
+ if isinstance(obj, dict):
600
+ out.append(obj)
601
+ except Exception:
602
+ continue
603
+ return out
604
+
605
+
606
def import_bundle_verify(
    bundle_path: str,
    pk_hex: str = "",
    require_signatures: bool = False,
    store_into_log: bool = False,
    log_path: str = DEFAULT_LOG_PATH,
) -> Tuple[str, bool, str, Optional[str]]:
    """Verify an exported bundle; optionally merge its events into a local log.

    Returns (status, ok, report, stored_msg). status is "PASS"/"FAIL" from
    verification, or an error message when the bundle cannot be read;
    stored_msg is None unless events were stored.
    """
    if not bundle_path or not os.path.exists(bundle_path):
        return "Missing bundle file.", False, "", None

    try:
        with zipfile.ZipFile(bundle_path, "r") as z:
            members = z.namelist()
            # Prefer the canonical "<sid>_events.jsonl" member written by
            # export_session_bundle; fall back to any .jsonl for hand-built
            # bundles.
            events_member = next((m for m in members if m.endswith("_events.jsonl")), None)
            if not events_member:
                events_member = next((m for m in members if m.endswith(".jsonl")), None)
            if not events_member:
                return "No .jsonl events file found in bundle.", False, "", None

            evs = _read_jsonl_from_zip(z, events_member)
    except Exception:
        return "Failed to read bundle (invalid zip?).", False, "", None

    # A bundle is expected to hold exactly one session: adopt the first
    # valid event's session id and drop anything else.
    sid = ""
    for e in evs:
        if e.get("spec") == EVENT_SPEC and e.get("session_id"):
            sid = e["session_id"]
            break
    if not sid:
        return "Bundle contains no valid flight events.", False, "", None

    evs = [e for e in evs if e.get("spec") == EVENT_SPEC and e.get("session_id") == sid]
    evs.sort(key=lambda x: int(x.get("seq", 0)))

    status, ok, report = verify_session_from_events(
        evs, sid, pk_hex=pk_hex, require_signatures=require_signatures
    )

    stored_msg = None
    if store_into_log and ok:
        # NOTE(review): events are appended blindly; re-importing the same
        # bundle duplicates them in the local log.
        for e in evs:
            append_jsonl(log_path, e)
        # Report the actual destination (the old message always claimed
        # "flightlog.jsonl" even when a custom log_path was supplied).
        stored_msg = f"Stored into {log_path}."

    return status, ok, report, stored_msg