|
|
import os |
|
|
import torch |
|
|
from transformers import AutoModel, AutoProcessor |
|
|
from huggingface_hub import snapshot_download |
|
|
from pathlib import Path |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Redirect every Hugging Face / Torch cache into /tmp, which is writable in
# restricted runtimes (e.g. Spaces containers with a read-only home dir).
for _cache_var, _cache_path in (
    ("HF_HOME", "/tmp/.huggingface"),
    ("TRANSFORMERS_CACHE", "/tmp/.cache"),
    ("TORCH_HOME", "/tmp/.cache/torch"),
):
    os.environ[_cache_var] = _cache_path

# Optional Hugging Face access token for gated repos; None when WAN2_TOKEN is unset.
HF_TOKEN = os.environ.get("WAN2_TOKEN", None)

# Model repo to fetch and the local directory the snapshot is downloaded into.
MODEL_ID = "Wan-AI/Wan2.2-Animate-14B"
MODEL_DIR = Path("/tmp/models/wan14B")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _dir_size_gb(path):
    """Return the total size of all regular files under *path*, in GiB."""
    return sum(f.stat().st_size for f in path.rglob('*') if f.is_file()) / (1024**3)


def download_model():
    """Download MODEL_ID into MODEL_DIR, skipping if it is already present.

    Returns a human-readable status string (Persian, matching the app's UI).
    Never raises: any download failure is caught and reported in the string.
    """
    # Only treat a NON-EMPTY directory as a completed download. An empty
    # leftover directory (e.g. from an interrupted run) previously reported
    # "already downloaded (0.0 GB)" and skipped the download entirely.
    if MODEL_DIR.exists() and any(MODEL_DIR.iterdir()):
        return f"✅ مدل از قبل دانلود شده ({_dir_size_gb(MODEL_DIR):.1f} GB)"
    try:
        print(f"📥 دانلود مدل {MODEL_ID} در /tmp...")
        # `resume_download` and `local_dir_use_symlinks` were removed: both
        # are deprecated no-ops in current huggingface_hub (downloads always
        # resume, and `local_dir` always materializes real files).
        snapshot_download(
            repo_id=MODEL_ID,
            local_dir=str(MODEL_DIR),
            cache_dir="/tmp/.cache",
            token=HF_TOKEN,
        )
        return f"✅ دانلود کامل ({_dir_size_gb(MODEL_DIR):.1f} GB)"
    except Exception as e:
        # Boundary handler: surface the failure to the caller/UI as text.
        return f"❌ خطا در دانلود: {str(e)}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def load_model():
    """Load the processor and model from MODEL_DIR onto the CPU.

    Publishes the loaded objects through the module-level ``model`` and
    ``processor`` globals and returns a status string. Errors are caught
    and reported in the returned string rather than raised.
    """
    global model, processor
    try:
        print("🔄 بارگذاری مدل 14B روی CPU...")
        local_path = str(MODEL_DIR)
        # Everything is read from the already-downloaded snapshot; no hub access.
        processor = AutoProcessor.from_pretrained(local_path, local_files_only=True)
        loaded = AutoModel.from_pretrained(
            local_path,
            local_files_only=True,
            low_cpu_mem_usage=True,
            torch_dtype=torch.float32,
        )
        model = loaded.to("cpu")
        model.eval()  # inference mode: disables dropout etc.
        return "✅ مدل بارگذاری شد و آماده است"
    except Exception as e:
        # Boundary handler: surface the failure to the caller/UI as text.
        return f"❌ خطا در بارگذاری مدل: {str(e)}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Run the pipeline end to end: fetch the weights, then load them.
    for step in (download_model, load_model):
        print(step())
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|