import os
import json
import time
import socket
import threading
import gc
import ctypes
import multiprocessing as mp
from pathlib import Path
import numpy as np
import pyarrow.parquet as pq
from tokenizers import Tokenizer
# ββ Config βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
HF_TOKEN = os.environ.get("HF_TOKEN")  # not used in this file — presumably consumed by a downloader elsewhere; TODO confirm
STATE_FILE = "/data/state.json"  # shared work-queue state; replaced atomically by save_state()
RAW_DIR = "/data/raw"  # incoming parquet shards waiting to be tokenized
OUT_DIR = "/data/tokenized"  # finished flat-uint16 .bin token files
TOK_PATH = "/data/tokenizer.json"  # HF `tokenizers`-format tokenizer file
WORKER_ID = socket.gethostname()  # identifies this worker in claims, logs, and temp-file names
POLL_INTERVAL = 15  # seconds between state polls when idle
os.makedirs(OUT_DIR, exist_ok=True)
# ββ Keep-alive ββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
def serve():
    """Minimal HTTP responder so the host's health checks see this worker as live.

    Binds 0.0.0.0:7860 and answers every connection with a static 200 "OK".
    Runs forever; intended to run in a daemon thread.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    s.bind(("0.0.0.0", 7860))
    s.listen(5)
    print(f"β [{WORKER_ID}] Listening on port 7860")
    while True:
        conn, _ = s.accept()
        try:
            # sendall(): plain send() may write fewer bytes than requested,
            # truncating the response; sendall retries until complete.
            conn.sendall(b"HTTP/1.1 200 OK\r\nContent-Length: 2\r\n\r\nOK")
        except OSError:
            # Client hung up mid-response — don't let one bad connection
            # kill the keep-alive thread.
            pass
        finally:
            conn.close()
# ββ State βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
def load_state():
    """Read and parse the shared state file; raises if missing or malformed."""
    return json.loads(Path(STATE_FILE).read_text())
def save_state(state):
    """Persist *state* atomically: write a worker-unique temp file, then os.replace.

    The per-worker temp name avoids two workers clobbering each other's
    half-written temp file; os.replace makes the final swap atomic.
    """
    tmp_name = f"{STATE_FILE}.tmp.{WORKER_ID}"
    with open(tmp_name, "w") as fh:
        fh.write(json.dumps(state, indent=2))
    os.replace(tmp_name, STATE_FILE)
# ββ Claim a pending shard βββββββββββββββββββββββββββββββββββββββββββββββββββββ
def claim_shard(state):
    """Claim the first pending shard whose raw file is already on disk.

    Marks it claimed (worker + timestamp), saves state, and returns
    (shard_name, raw_path).  Returns (None, None) when nothing is ready.
    """
    for shard_name, meta in state["shards"].items():
        if meta["status"] != "pending":
            continue
        path = Path(RAW_DIR) / shard_name
        if not path.exists():
            continue  # registered but not yet downloaded
        meta["status"] = "claimed"
        meta["worker"] = WORKER_ID
        meta["claimed_at"] = time.time()
        save_state(state)
        return shard_name, path
    return None, None
# ββ Tokenizer subprocess ββββββββββββββββββββββββββββββββββββββββββββββββββββββ
# Per-process tokenizer handle; populated by init_worker() in each pool process.
_worker_tokenizer = None
def init_worker(tok_path):
    """Pool initializer: load the tokenizer once per worker process."""
    global _worker_tokenizer
    _worker_tokenizer = Tokenizer.from_file(tok_path)
def tokenize_chunk(texts):
    """Tokenize a list of strings with the process-local tokenizer.

    Returns one id-list per input, dropping sequences shorter than 2 tokens.
    Must only run inside a pool process initialized by init_worker().
    """
    kept = []
    for enc in _worker_tokenizer.encode_batch(texts):
        ids = enc.ids
        if len(ids) >= 2:
            kept.append(ids)
    return kept
# ββ Process shard βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
def process_shard(name, raw_path, pool):
    """Tokenize one parquet shard into a flat uint16 ``.bin`` file.

    Streams the parquet in 5k-row batches, tokenizes each batch on *pool*
    (split into two halves, one task per pool process), and appends token ids
    to a temp file that is renamed into place only on full success.

    Returns (True, None) on success, (False, reason) on failure.  On a read
    failure the raw file is deleted (assumed corrupt, so it can be
    re-downloaded); on tokenize/write failure the raw file is left for retry.
    """
    print(f" [{WORKER_ID}] Processing: {name}")
    out_name = name.replace(".parquet", ".bin")
    out_path = Path(OUT_DIR) / out_name
    tmp_path = Path(OUT_DIR) / f"{out_name}.tmp"
    total_tokens = 0
    try:
        pf = pq.ParquetFile(raw_path)
    except Exception as e:
        # Unreadable parquet: drop it so the downloader can re-fetch.
        raw_path.unlink(missing_ok=True)
        return False, f"read_failed: {e}"
    try:
        with open(tmp_path, "wb") as f:
            for batch in pf.iter_batches(batch_size=5_000, columns=["text"]):
                texts = batch.column("text").to_pylist()
                mid = len(texts) // 2
                try:
                    results = pool.map(tokenize_chunk, [texts[:mid], texts[mid:]])
                except Exception as e:
                    tmp_path.unlink(missing_ok=True)
                    return False, f"tokenize_failed: {e}"
                # Flatten the whole batch and write it in one call: identical
                # bytes to a per-document tofile() loop, far fewer syscalls
                # and numpy allocations.
                flat = [tid for ids in results[0] + results[1] for tid in ids]
                # NOTE(review): uint16 silently wraps ids >= 65536 — assumes
                # the tokenizer vocab fits in 16 bits; verify against TOK_PATH.
                np.array(flat, dtype=np.uint16).tofile(f)
                total_tokens += len(flat)
                del texts, results, flat
                gc.collect()
    except Exception as e:
        tmp_path.unlink(missing_ok=True)
        return False, f"write_failed: {e}"
    tmp_path.rename(out_path)  # β atomic, only visible when complete
    print(f" β [{WORKER_ID}] {out_name} | {total_tokens:,} tokens")
    return True, None
# ββ Force full memory flush βββββββββββββββββββββββββββββββββββββββββββββββββββ
def flush_memory():
    """Run a full GC pass and ask glibc to return freed arenas to the OS."""
    gc.collect()
    try:
        libc = ctypes.CDLL("libc.so.6")
        libc.malloc_trim(0)
    except Exception:
        # Non-glibc platform (musl, macOS, ...) — trimming is best-effort.
        pass
# ββ Worker loop βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
def worker_loop():
    """Main worker loop: poll shared state, claim shards, tokenize, report back.

    Runs forever.  Coordination relies on save_state()'s atomic file replace
    (last-writer-wins; no locking — NOTE(review): there is a read-modify-write
    race window between load_state and save_state if multiple workers share
    /data; confirm this is acceptable for the deployment).
    """
    os.makedirs(OUT_DIR, exist_ok=True)
    print(f"β [{WORKER_ID}] Loading tokenizer...")
    # Loaded once here only to validate the file and log the vocab size; the
    # pool processes load their own copies via init_worker below.
    tok = Tokenizer.from_file(TOK_PATH)
    print(f"β [{WORKER_ID}] Tokenizer ready | vocab: {tok.get_vocab_size():,}")
    del tok
    flush_memory()
    pool = mp.Pool(processes=2, initializer=init_worker, initargs=(TOK_PATH,))
    print(f"β [{WORKER_ID}] Worker pool ready")
    try:
        while True:
            # Wait for the coordinator to publish state.json.
            if not os.path.exists(STATE_FILE):
                print(f" [{WORKER_ID}] Waiting for state.json...")
                time.sleep(POLL_INTERVAL)
                continue
            try:
                state = load_state()
            except Exception as e:
                # Likely caught mid-replace or corrupt; retry next poll.
                print(f" [{WORKER_ID}] State read error: {e}")
                time.sleep(POLL_INTERVAL)
                continue
            # "queue" presumably holds shards not yet registered in "shards",
            # so done == total only once the queue has drained AND every
            # registered shard is done — TODO confirm against the coordinator.
            total = len(state["shards"]) + len(state.get("queue", []))
            done = sum(1 for v in state["shards"].values() if v["status"] == "done")
            if total > 0 and done == total:
                print(f" [{WORKER_ID}] All done. Sleeping.")
                time.sleep(300)
                continue
            name, raw_path = claim_shard(state)
            if not name:
                print(f" [{WORKER_ID}] Nothing ready β polling in {POLL_INTERVAL}s")
                time.sleep(POLL_INTERVAL)
                continue
            print(f" [{WORKER_ID}] Claimed: {name}")
            success, error = process_shard(name, raw_path, pool)
            # Re-read state to pick up other workers' updates before writing
            # our result; on read failure fall back to the in-memory copy.
            try:
                state = load_state()
            except Exception:
                pass
            if success:
                state["shards"][name]["status"] = "done"
                state["shards"][name]["error"] = None
                save_state(state)
                # Raw shard no longer needed once the .bin is in place.
                try:
                    raw_path.unlink()
                    print(f" [{WORKER_ID}] Deleted: {raw_path.name}")
                except Exception as e:
                    print(f" [{WORKER_ID}] Delete failed: {e}")
            else:
                # Release the claim so another worker (or this one) can retry.
                state["shards"][name]["status"] = "pending"
                state["shards"][name]["worker"] = None
                state["shards"][name]["claimed_at"] = None
                state["shards"][name]["error"] = error
                save_state(state)
                print(f" [{WORKER_ID}] Failed ({error}) β left on disk for retry: {name}")
            flush_memory()
            time.sleep(5)
    finally:
        # terminate() rather than close(): don't wait for in-flight tasks
        # when the loop is torn down.
        pool.terminate()
        pool.join()
# ββ Entry point βββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββββ
if __name__ == "__main__":
    # Health-check server and worker loop both run as daemon threads; the
    # main thread just idles so the process stays alive (daemons exit with it).
    threading.Thread(target=serve, daemon=True).start()
    threading.Thread(target=worker_loop, daemon=True).start()
    while True:
        time.sleep(60)