File size: 2,878 Bytes
ce847d4 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 |
"""Find all chunk checksums and their positions in the .onemodel file."""
import struct, json
# Load the model file and the recorded crypto trace, then pair each
# decrypt operation with the SHA-256 preimage that produced its AES key.
with open("ocr_data/oneocr.onemodel", "rb") as f:
    fdata = f.read()

# Fix: open the log via a context manager — the original used
# json.load(open(...)), which leaks the file handle until GC.
with open("temp/crypto_log.json") as log_file:
    log = json.load(log_file)

# Map sha256 output -> input so each AES key's preimage can be recovered.
sha_map = {s["output"]: s["input"] for s in log if s["op"] == "sha256"}
decrypts = [op for op in log if op["op"] == "decrypt"]

print(f"File size: {len(fdata)} bytes")
print("Payload starts at: 22684")

# For each decrypt (skipping dec#00, the DX key), extract the chunk
# checksum and sizes from the SHA-256 preimage.  Layout assumed from the
# unpack below — TODO confirm against the chunk-header format:
#   bytes 0..7   = size1 (little-endian u64)
#   bytes 8..15  = size2 (little-endian u64)
#   bytes 16..31 = 16-byte chunk checksum
results = []
for i, d in enumerate(decrypts[1:], 1):  # skip DX (dec#00)
    sha_inp = bytes.fromhex(sha_map[d["aes_key"]])
    if len(sha_inp) < 32:
        continue  # preimage too short to hold sizes + checksum
    chk = sha_inp[16:32]
    s1, s2 = struct.unpack_from("<QQ", sha_inp, 0)
    pos = fdata.find(chk)  # -1 when the checksum does not occur in the file
    results.append({
        "dec_idx": i,
        "chk_file_offset": pos,
        "chk_hex": chk.hex(),
        "size1": s1,
        "size2": s2,
        "enc_size": d["input_size"],
    })
# Order chunks by where their checksum appears in the file, then render
# a fixed-width table of computed data offsets and extents.
results.sort(key=lambda rec: rec["chk_file_offset"])

print(f"\n{'dec#':>5} {'chk_offset':>12} {'data_offset':>12} {'enc_size':>10} {'end_offset':>12} {'size1':>10} {'size2':>10}")
print("-" * 90)
for rec in results:
    off = rec["chk_file_offset"]
    if off < 0:
        # Checksum was never located in the file — nothing to compute.
        print(f" {rec['dec_idx']:3d}    NOT FOUND {rec['enc_size']:10d} {rec['size1']:10d} {rec['size2']:10d}")
        continue
    # Chunk header layout: 4 bytes + 16-byte checksum + 8-byte size1 +
    # 8-byte size2 = 36 bytes, so data begins 32 bytes past the checksum.
    data_start = off + 32
    data_end = data_start + rec["enc_size"]
    print(f" {rec['dec_idx']:3d} {off:12d} {data_start:12d} {rec['enc_size']:10d} {data_end:12d} {rec['size1']:10d} {rec['size2']:10d}")
# Check that the located chunks tile the file contiguously: each chunk's
# header (starting 4 bytes before its checksum) should begin exactly
# where the previous chunk's encrypted data ended.
print("\n=== Chunk continuity check ===")
prev_end = None
for rec in results:
    if rec["chk_file_offset"] < 0:
        continue  # checksum not found in file; skip
    header_start = rec["chk_file_offset"] - 4  # 4 bytes before checksum
    data_start = rec["chk_file_offset"] + 32
    if prev_end is not None:
        gap = header_start - prev_end
        if gap != 0:
            print(f" Gap between chunks: {gap} bytes (prev_end={prev_end}, next_header={header_start})")
            if gap > 0:
                # Dump the stray bytes between the two chunks.
                print(f" Gap bytes: {fdata[prev_end:header_start].hex()}")
    prev_end = data_start + rec["enc_size"]
print(f"\nExpected file end: {prev_end}")
print(f"Actual file end: {len(fdata)}")
# Dump the 4 bytes immediately preceding each checksum — presumably the
# leading field of the 36-byte chunk header — for the first few chunks.
print("\n=== 4 bytes before each checksum ===")
for rec in results[:10]:
    off = rec["chk_file_offset"]
    if off >= 4:
        prefix = fdata[off - 4:off]
        as_u32 = struct.unpack_from("<I", prefix)[0]
        print(f" dec#{rec['dec_idx']:02d}: pre_bytes={prefix.hex()} ({as_u32})")
|