Spaces:
Paused
Paused
AdriBat1
Add Deep-NanoGPT experiment (Phase 1 & 2): resumable training, inference, 72-layer models
671ce97 | import os | |
| import sys | |
| from antigravity_sdk.client import RemoteGPU | |
| RETRIEVE_CODE = """ | |
| import os | |
| import glob | |
| import base64 | |
| storage_dir = "/home/user/app/storage/deep_experiment" | |
| files = [] | |
| if os.path.exists(storage_dir): | |
| for fpath in glob.glob(os.path.join(storage_dir, '*')): | |
| fname = os.path.basename(fpath) | |
| with open(fpath, 'rb') as f: | |
| encoded = base64.b64encode(f.read()).decode('utf-8') | |
| files.append({ | |
| "name": fname, | |
| "data": encoded, | |
| "mime": "application/octet-stream" | |
| }) | |
| print(f"Found {len(files)} files in storage.") | |
| else: | |
| print(f"Directory {storage_dir} not found.") | |
| # The server logic expects files list in the return dictionary, | |
| # but RemoteGPU helper saves files from the response 'files' key. | |
| # Since we can't easily inject into that key from pure python execution code without the special JSON protocol, | |
| # we will rely on the server's file detection? | |
| # NO, the server only detects files in the WORK_DIR. | |
| # So we must COPY the persistent files to the WORK_DIR (cwd). | |
| # That solves everything! | |
| import shutil | |
| if os.path.exists(storage_dir): | |
| for fpath in glob.glob(os.path.join(storage_dir, '*')): | |
| shutil.copy(fpath, '.') | |
| print(f"Staged {os.path.basename(fpath)} for download.") | |
| """ | |
def main():
    """Fetch persisted experiment artifacts from the remote GPU host.

    Executes RETRIEVE_CODE remotely (staging persistent files into the
    remote working directory) with ``download_files=True`` so the SDK
    pulls them locally, then reports which expected artifacts arrived.
    """
    # NOTE(review): original status glyphs were mojibake ("π‘", "β");
    # replaced with readable ASCII markers.
    print("[retrieve] Retrieving Persistent Experiment Results...")
    gpu = RemoteGPU()
    result = gpu.run(RETRIEVE_CODE, download_files=True)
    print(result.output)

    # Check each expected artifact explicitly; warn instead of staying
    # silent when a file failed to come back.
    for fname in ("comparison_loss.png", "generation_sample.txt"):
        if os.path.exists(fname):
            print(f"[ok] Retrieved: {fname}")
        else:
            print(f"[warn] Missing expected file: {fname}")


if __name__ == "__main__":
    main()