"""Launch a Feather training job on Hugging Face Jobs with an embedded hotpatch.

A small Python hotpatch script is base64-encoded and executed inside the job
container *before* the normal entrypoint, rewriting several source files
in-place to work around known bugs in the deployed image (see the numbered
sections inside ``hotpatch_script``).
"""
import base64
import os
import sys

import requests

# Credentials / target space. HF_TOKEN must be set in the caller's environment.
token = os.environ.get("HF_TOKEN")
if not token:
    # Fail fast: without a token the API call below would send
    # "Authorization: Bearer None" and die with an opaque 401.
    sys.exit("HF_TOKEN environment variable is not set")

namespace = "GAInTech"
space_id = "GAInTech/feather-a10g-gt80k-runtime-public"

# Python source executed inside the container before the entrypoint runs.
# NOTE(review): the literal match strings below (old_sig, the patch() targets,
# the member-injection replacement) may have lost their original leading
# whitespace in this copy of the file -- verify them against the actual files
# in the image before relying on them.
hotpatch_script = """
import os
import sys
from pathlib import Path

def patch(path, old, new):
    # Replace `old` with `new` in `path`; silently skip missing files or
    # files where the target text is absent (keeps re-runs idempotent).
    p = Path(path)
    if not p.exists():
        return
    s = p.read_text()
    if old in s:
        p.write_text(s.replace(old, new))
        print(f'[hotpatch] patched {path}')

# 1. training.py fix: make the MDLM mask id overridable via env var.
patch('/workspace/feather/hydra/training.py',
      'mdlm_mask_id = MDLM_MASK_ID if MDLM_MASK_ID >= 0 else (vocab_size - 1)',
      'mdlm_mask_id = int(os.environ.get("HYDRA_MDLM_MASK_ID", str(vocab_size - 1)))')

# 2. htm.py stub
# Fixed: return type and indices for HTM stub
p_htm = Path('/workspace/feather/subsystems/htm.py')
if p_htm.exists():
    s = p_htm.read_text()
    if 'class _StubRegion' not in s:
        stub = "\\nclass _StubRegion:\\n def __init__(self, *a, **k): self.n_columns=2048\\n def step(self, *a, **k): import numpy as np; return (np.zeros(2048), None, None, 1.0)\\n def step_many(self, sdr, *a): import numpy as np; T=sdr.shape[0]; return (np.zeros((T,2048)), np.ones(T, dtype=np.float32))\\n def reset(self): pass\\n"
        s = s.replace('import htm_rust', 'import htm_rust' + stub)
        s = s.replace('_HTM_REGION_CLS = getattr(htm_rust, "HTMRegion", None)', '_HTM_REGION_CLS = _StubRegion')
        p_htm.write_text(s)
        print('[hotpatch] patched htm.py stub')

# 3. stream fix: force remote shard streaming instead of local-only shards.
patch('/workspace/feather/prepare_nemotron.py',
      'local_only = os.environ.get("HYDRA_LOCAL_SHARDS_ONLY", "1") == "1"',
      'local_only = False')

# 4. sdr_semantic.py fix
p_sem = Path('/workspace/feather/subsystems/sdr_semantic.py')
if p_sem.exists():
    s = p_sem.read_text()
    # Fix signature: add the hebbian_alpha / learnable kwargs callers expect.
    old_sig = ' som_alpha: float = 0.05,\\n contrastive_rank: int = 64,\\n ) -> None:'
    new_sig = ' som_alpha: float = 0.05,\\n contrastive_rank: int = 64,\\n hebbian_alpha: float = 0.01,\\n learnable: bool | None = None,\\n ) -> None:'
    if old_sig in s:
        s = s.replace(old_sig, new_sig)
    # Fix hebbian_alpha member
    if 'self.hebbian_alpha = float(hebbian_alpha)' not in s:
        s = s.replace('self.som_alpha = float(som_alpha)', 'self.som_alpha = float(som_alpha)\\n self.hebbian_alpha = 0.01 # Hotpatched')
    # Fix _apply (THE CRASH CAUSE): guard the optional attribute lookup.
    old_apply = 'self._retina_indices = fn(self._retina_indices)'
    new_apply = 'if hasattr(self, "_retina_indices"): self._retina_indices = fn(self._retina_indices)'
    s = s.replace(old_apply, new_apply)
    p_sem.write_text(s)
    print('[hotpatch] patched sdr_semantic.py')

# 5. sdr_retina.py cache repo fix
patch('/workspace/feather/subsystems/sdr_retina.py',
      '"icarus112/feather-retina-cache"',
      '"GAInTech/feather-retina-cache"')
"""

# Base64 sidesteps all shell-quoting issues when inlining the script into
# `bash -c` -- the payload is pure [A-Za-z0-9+/=], safe inside single quotes.
encoded = base64.b64encode(hotpatch_script.encode()).decode()
command = [
    "/bin/bash",
    "-c",
    f"python3 -c 'import base64; exec(base64.b64decode(\"{encoded}\"))' && python /app/entrypoint.py",
]
env = {
    "FEATHER_RUNTIME_MODE": "job",
    "HYDRA_BATCH_SIZE": "96",
    "HYDRA_TOTAL_BATCH": "196608",
    "HYDRA_USE_NEMOTRON": "1",
    "HYDRA_TARGET_SHARDS": "0",
    "HYDRA_FORCE_HTM_CPU": "1",
    "HYDRA_INERT_MAMBA": "1",
    "HYDRA_FASTPATH": "1",
    "PYTHONUNBUFFERED": "1",
}
payload = {
    "spaceId": space_id,
    "command": command,
    "environment": env,
    "secrets": {"HF_TOKEN": token},
    "flavor": "a10g-large",
    "timeout": "12h",
}
url = f"https://huggingface.co/api/jobs/{namespace}"
headers = {"Authorization": f"Bearer {token}"}

print(f"Submitting SUPREME-HOTPATCH launch to {url}...")
# timeout= keeps a dead/unreachable endpoint from hanging the launcher forever.
r = requests.post(url, json=payload, headers=headers, timeout=60)
if r.ok:  # accept any 2xx -- creation endpoints may answer 200 or 201
    print(f"Success! Job ID: {r.json()['id']}")
else:
    print(f"Error {r.status_code}: {r.text}")
    sys.exit(1)  # non-zero exit so wrapping scripts/CI notice the failure