"""
eden_fix_missing_repos.py
Creates the missing CIFAR-10 repos for classic models (test2/test3)
that were skipped during the initial README generation, then re-uploads
their CSV logs.
"""
from huggingface_hub import HfApi, create_repo, upload_file
import glob
import os
TOKEN = os.environ.get("HF_TOKEN", "")
USER = "Shanmuk4622"
BASE = os.path.dirname(os.path.abspath(__file__))
api = HfApi(token=TOKEN)
print(f"Logged in as: {api.whoami()['name']}\n")
HARDWARE = {
    "gpu": "NVIDIA GeForce GTX 1080 Ti (11 GB VRAM, 250 W TDP)",
    "cpu": "Intel Xeon W-2125 (4 cores / 8 threads @ 4.00 GHz)",
    "ram": "63.66 GB System RAM",
}
# ── Missing repos: classic models on CIFAR-10 ─────────────────────────────────
MISSING = [
    ("AlexNet",     "CIFAR-10", "test2", "AlexNet_CIFAR10.py"),
    ("DenseNet121", "CIFAR-10", "test2", "DenseNet121_CIFAR10.py"),
    ("InceptionV3", "CIFAR-10", "test2", "InceptionV3_CIFAR10.py"),
    ("ResNet18",    "CIFAR-10", "test2", "ResNet18_CIFAR10.py"),
    ("ResNet50",    "CIFAR-10", "test2", "ResNet50_CIFAR10.py"),
]
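# Each tuple becomes a model repo named f"{USER}/EDEN-{arch}-{dataset}",
# e.g. Shanmuk4622/EDEN-ResNet18-CIFAR-10; the trailing script filename is
# not used below (it is unpacked into "_" in STEP 1).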
def make_readme(arch, dataset, folder):
    phase = "Baseline – Standard Full Training (Reference Study)"
    technique = (
        "Standard full fine-tuning used as the **Brute-Force Baseline** for "
        "energy comparison. All layers trained from epoch 1 with a fixed learning "
        "rate and no gradient accumulation. Included for transparent EAG benchmarking."
    )
    arch_tag = arch.lower()
    ds_size = "60,000 images – 10 classes (32×32 px)"
    return f"""---
language: en
license: apache-2.0
tags:
- image-classification
- green-ai
- energy-efficiency
- computer-vision
- {arch_tag}
- eden-framework
- reference-study
- sustainable-ai
datasets:
- cifar10
metrics:
- accuracy
---
# EDEN-{arch}-{dataset} – *{phase}*
> **Primary KPI:** EAG (Energy-to-Accuracy Gradient) – see Green Delta Table below.
## Abstract
This model is part of **Project EDEN (Energy-Driven Evolution of Networks)**.
It serves as the **Brute-Force Baseline** for the {arch} architecture on {dataset},
providing a transparent energy reference for EAG benchmarking against EDEN-optimized models.
**Applied Technique:** {phase}
## Profiling Environment
| Component | Specification |
|---|---|
| **GPU** | {HARDWARE['gpu']} |
| **CPU** | {HARDWARE['cpu']} |
| **RAM** | {HARDWARE['ram']} |
| **Dataset** | {dataset} – {ds_size} |
## 🟢 Green Delta Table
*This is the reference baseline. Compare against EDEN-optimized models for EAG.*
| Metric | {arch} Baseline | EDEN Optimized | Δ |
|---|---|---|---|
| Accuracy | See CSV log | See SOTA repo | – |
| Total Energy (J) | See CSV log | See SOTA repo | – |
| **EAG Score** | – | See SOTA repo | ΔAcc/ΔJoules |
## E2AM Algorithm – Applied Phase
{technique}
## Cite This Research
```bibtex
@misc{{eden2025,
title = {{Project EDEN: Energy-Driven Evolution of Networks}},
author = {{EDEN Research Team}},
year = {{2025}},
note = {{Hugging Face: Shanmuk4622}},
url = {{https://huggingface.co/{USER}}}
}}
```
"""
def safe_upload(local_path, repo_id, repo_filename):
    try:
        upload_file(
            path_or_fileobj=local_path,
            path_in_repo=repo_filename,
            repo_id=repo_id,
            token=TOKEN,
            repo_type="model",
        )
        print(f" ✓ {repo_filename} → {repo_id}")
    except Exception as e:
        print(f" ✗ {repo_filename} → {repo_id} | {e}")
# ── STEP 1: Create missing repos + upload READMEs ─────────────────────────────
print("="*60)
print("STEP 1 β€” Creating missing CIFAR-10 repos & READMEs")
print("="*60)
readme_dir = os.path.join(BASE, "hf_readmes")
os.makedirs(readme_dir, exist_ok=True)
for arch, dataset, folder, _ in MISSING:
    repo_name = f"EDEN-{arch}-{dataset}"
    repo_id = f"{USER}/{repo_name}"
    # Write README locally
    readme_content = make_readme(arch, dataset, folder)
    readme_path = os.path.join(readme_dir, f"{repo_name}_README.md")
    with open(readme_path, "w", encoding="utf-8") as f:
        f.write(readme_content)
    # Create repo + upload README
    create_repo(repo_id, token=TOKEN, repo_type="model", exist_ok=True, private=False)
    safe_upload(readme_path, repo_id, "README.md")
# ── STEP 2: Re-upload the failed CSVs ─────────────────────────────────────────
print("\n" + "="*60)
print("STEP 2 β€” Re-uploading failed CIFAR-10 CSV logs")
print("="*60)
def parse_arch_ds(fn):
    fn = fn.lower().replace("\\", "/")
    ds, arch = "unknown", "unknown"
    if "cifar100" in fn: ds = "CIFAR-100"
    elif "cifar10" in fn: ds = "CIFAR-10"
    elif "imagenet" in fn: ds = "Custom-ImageNet300"
    if "efficientnet" in fn: arch = "EfficientNetV2"
    elif "convnext" in fn: arch = "ConvNeXtV2"
    elif "mobilevit" in fn: arch = "MobileViTv3"
    elif "resnet50" in fn: arch = "ResNet50"
    elif "resnet18" in fn: arch = "ResNet18"
    elif "vgg16" in fn: arch = "VGG16"
    elif "alexnet" in fn: arch = "AlexNet"
    elif "inception" in fn: arch = "InceptionV3"
    elif "densenet" in fn: arch = "DenseNet121"
    elif "unet" in fn: arch = "UNet"
    return arch, ds
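# Example (hypothetical filename): parse_arch_ds("test2/ResNet18_CIFAR10_log.csv")
# returns ("ResNet18", "CIFAR-10").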
# Only target CIFAR-10 CSVs for classic models
target_archs = {"AlexNet", "DenseNet121", "InceptionV3", "ResNet18", "ResNet50"}
for csv in sorted(glob.glob(os.path.join(BASE, "**", "*.csv"), recursive=True)):
    if any(skip in csv for skip in ["green_ai_", "experiment_summary", "repository"]):
        continue
    arch, ds = parse_arch_ds(csv)
    if arch not in target_archs or ds != "CIFAR-10":
        continue
    repo_id = f"{USER}/EDEN-{arch}-{ds}"
    safe_upload(csv, repo_id, os.path.basename(csv))
print("\n" + "="*60)
print("FIX COMPLETE β€” Check https://huggingface.co/Shanmuk4622")
print("="*60)