# BULMA / src/utils/env_report.py
# Author: Ton Nom
# Commit: "Add full BULMA pipeline, data, code and results" (9f9fb84)
import gpustat, humanize
def _get_gpu_info():
try:
g = gpustat.GPUStatCollection.new_query()
g = g.jsonify()["gpus"]
name = g[0]["name"]; mem_used = g[0]["memory.used"]; mem_total = g[0]["memory.total"]
return name, mem_used*1024**2, mem_total*1024**2
except Exception:
return "N/A", None, None
def env_report() -> Dict[str, Any]:
    """Snapshot the runtime environment (OS, Python, key libraries, GPU).

    Returns:
        Dict with version strings and hardware info; VRAM keys are added
        only when a GPU memory total was successfully queried.
    """
    gpu_name, vram_used_b, vram_total_b = _get_gpu_info()
    cuda_ok = torch.cuda.is_available()
    report: Dict[str, Any] = {
        "python": platform.python_version(),
        "os": platform.platform(),
        "cpu_count": psutil.cpu_count(),
        "ram_total": humanize.naturalsize(psutil.virtual_memory().total, binary=True),
        "gpu_name": gpu_name,
        "torch": torch.__version__,
        "cuda_available": cuda_ok,
        "cudnn": torch.backends.cudnn.version() if cuda_ok else None,
        "numpy": np.__version__,
        "pandas": pd.__version__,
    }
    # These packages are only needed for their version string here.
    for pkg in ("sklearn", "rdkit", "transformers", "econml", "dowhy"):
        report[pkg] = __import__(pkg).__version__
    if vram_total_b:
        report["vram_used"] = humanize.naturalsize(vram_used_b, binary=True)
        report["vram_total"] = humanize.naturalsize(vram_total_b, binary=True)
    return report
# Emit the environment snapshot at import/run time so every run's
# software/hardware context is captured in the logs (reproducibility aid).
# NOTE(review): `rep` is a module-level name — it may be referenced later
# in the file, so it is kept bound rather than inlined.
rep = env_report()
print(json.dumps(rep, indent=2))
# Global matplotlib defaults: clean, readable figures (no seaborn — stays portable).
_RC_DEFAULTS = {
    "figure.figsize": (5.5, 4.2),
    "axes.titlesize": 12,
    "axes.labelsize": 11,
    "xtick.labelsize": 9,
    "ytick.labelsize": 9,
    "legend.fontsize": 9,
    "figure.dpi": 160,       # on-screen resolution
    "savefig.dpi": 300,      # publication-quality export
    "axes.spines.top": False,
    "axes.spines.right": False,
    "font.family": "DejaVu Sans",
}
mpl.rcParams.update(_RC_DEFAULTS)
def savefig(path: Path, bbox=True):
    """Save the current matplotlib figure to *path*, creating parent dirs.

    Args:
        path: Destination file; suffix selects the format.
        bbox: When True, crop to a tight bounding box around the content.
    """
    path.parent.mkdir(parents=True, exist_ok=True)
    plt.tight_layout()
    crop = "tight" if bbox else None
    plt.savefig(path, bbox_inches=crop)
import contextlib, time
def save_json(obj, path: Path):
    """Serialize *obj* as pretty-printed JSON to *path*, creating parent dirs.

    Args:
        obj: Any json.dump-serializable object.
        path: Destination file (str or Path accepted).
    """
    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)
    # Explicit encoding: the original relied on the locale default, which is
    # not UTF-8 on some platforms (e.g. Windows cp1252) and breaks portability.
    with open(path, "w", encoding="utf-8") as f:
        json.dump(obj, f, indent=2)
@contextlib.contextmanager
def timer(msg: str):
    """Context manager that prints *msg*, runs the block, then the elapsed time.

    Uses time.perf_counter() (monotonic, high resolution) instead of
    time.time(), which can jump under NTP/clock adjustments and corrupt
    short duration measurements.

    Args:
        msg: Label printed before the timed block starts.
    """
    t0 = time.perf_counter()
    print(f"⏱️ {msg} ...", end="", flush=True)
    yield
    dt = time.perf_counter() - t0
    print(f" done in {dt:.2f}s")
def peek(df: pd.DataFrame, n: int = 5):
    """Render the first *n* rows and a compact shape/column summary.

    Args:
        df: DataFrame to preview.
        n: Number of head rows to show.
    """
    display(df.head(n))
    cols = list(df.columns)
    ellipsis = "..." if df.shape[1] > 6 else ""
    print(f"shape={df.shape} | cols={cols[:6]}{ellipsis}")
def require_schema(df: pd.DataFrame, required_cols):
    """Validate that *df* contains every column in *required_cols*.

    Args:
        df: DataFrame to check.
        required_cols: Iterable of column names that must be present.

    Raises:
        ValueError: Listing the missing columns, if any.
    """
    present = set(df.columns)
    missing = [col for col in required_cols if col not in present]
    if missing:
        raise ValueError(f"Missing required columns: {missing}")