File size: 5,205 Bytes
463afdd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
#!/usr/bin/env python3
"""
Download Depth-Anything V2 ONNX models from HuggingFace

This script downloads optimized ONNX versions of Depth-Anything V2 models
for fast inference without PyTorch dependency.
"""

import os
from pathlib import Path
from huggingface_hub import hf_hub_download
import sys


# Model configurations
#
# Maps a short CLI name ('small' / 'large') to the HuggingFace repo and ONNX
# filename for that Depth-Anything V2 variant, plus human-readable size and
# speed notes that are printed before downloading.
MODELS = {
    "small": {
        "repo_id": "depth-anything/Depth-Anything-V2-Small",
        "filename": "depth_anything_v2_vits.onnx",
        "size": "~100MB",
        "speed": "Fast (25M params)"
    },
    "large": {
        "repo_id": "depth-anything/Depth-Anything-V2-Large",
        "filename": "depth_anything_v2_vitl.onnx",
        # ViT-L has ~335M params (1.3B is the Giant variant); fp32 ONNX
        # weights for ViT-L come to roughly 1.3GB, not 5GB.
        "size": "~1.3GB",
        "speed": "Slower (335M params)"
    }
}


def download_model(model_type: str, cache_dir: str = "./models/cache"):
    """
    Fetch a Depth-Anything V2 ONNX model into the local cache.

    Currently prints conversion guidance instead of downloading, because
    official ONNX weights are not published; the hf_hub_download call is
    kept as a commented template for when they are.

    Args:
        model_type: Key into MODELS ('small' or 'large').
        cache_dir: Directory where downloaded models are cached.

    Returns:
        True when the request was handled, False for an unknown model type
        or a download failure.
    """
    info = MODELS.get(model_type)
    if info is None:
        print(f"❌ Error: Unknown model type '{model_type}'")
        print(f"Available models: {', '.join(MODELS.keys())}")
        return False

    target_dir = Path(cache_dir)
    target_dir.mkdir(parents=True, exist_ok=True)

    print(f"\n📥 Downloading {model_type} model...")
    for label, value in (
        ("Repo", info['repo_id']),
        ("File", info['filename']),
        ("Size", info['size']),
        ("Speed", info['speed']),
    ):
        print(f"   {label}: {value}")

    try:
        # No official ONNX weights exist on the Hub yet, so rather than
        # calling hf_hub_download we walk the user through producing the
        # file themselves (convert locally, use a community conversion,
        # or self-host converted models).
        notice = (
            "\n⚠️  IMPORTANT NOTE:",
            "Official ONNX models may not be available on HuggingFace yet.",
            "You'll need to convert PyTorch models to ONNX format.",
            "\nTo convert models yourself:",
            "1. Install: pip install torch transformers",
            "2. Download PyTorch model",
            "3. Export to ONNX using torch.onnx.export()",
            "\nAlternatively, check these resources:",
            "- https://github.com/LiheYoung/Depth-Anything",
            "- Community ONNX conversions on HuggingFace",
        )
        for line in notice:
            print(line)

        # Template for the real download, once weights are published:
        # model_path = hf_hub_download(
        #     repo_id=info['repo_id'],
        #     filename=info['filename'],
        #     cache_dir=str(target_dir)
        # )

        print(f"\n✓ Model would be saved to: {target_dir / info['filename']}")
        return True

    except Exception as e:
        print(f"\n❌ Error downloading model: {e}")
        return False


def create_conversion_script():
    """Write convert_to_onnx.py, a standalone PyTorch-to-ONNX export helper.

    The generated script is created in the current working directory and
    made executable; it requires torch and transformers to actually run.
    """

    # Full text of the helper, emitted verbatim.
    script_content = '''#!/usr/bin/env python3
"""
Convert Depth-Anything V2 PyTorch model to ONNX
"""

import torch
from transformers import AutoModel
import sys

def convert_to_onnx(model_name, output_path):
    """Convert model to ONNX format"""

    print(f"Loading PyTorch model: {model_name}")
    model = AutoModel.from_pretrained(model_name, trust_remote_code=True)
    model.eval()

    # Dummy input
    dummy_input = torch.randn(1, 3, 518, 518)

    print(f"Exporting to ONNX: {output_path}")
    torch.onnx.export(
        model,
        dummy_input,
        output_path,
        input_names=['input'],
        output_names=['output'],
        dynamic_axes={
            'input': {0: 'batch', 2: 'height', 3: 'width'},
            'output': {0: 'batch', 2: 'height', 3: 'width'}
        },
        opset_version=17
    )

    print(f"✓ Conversion complete: {output_path}")

if __name__ == "__main__":
    # Example usage
    convert_to_onnx(
        "LiheYoung/depth-anything-small-hf",
        "depth_anything_v2_vits.onnx"
    )
'''

    out_path = Path("convert_to_onnx.py")
    out_path.write_text(script_content)
    # rwxr-xr-x so it can be invoked directly on POSIX systems.
    out_path.chmod(0o755)

    print(f"\n✓ Created conversion script: {out_path}")
    print("  Run with: python convert_to_onnx.py")


def main():
    """Entry point: download the requested models and emit helper tooling.

    Model names are taken from the command line; with no arguments only
    the 'small' model is processed, and 'all' expands to every known model.
    """
    banner = "=" * 60

    print(banner)
    print("Depth-Anything V2 Model Downloader")
    print(banner)

    # Ensure the default cache location exists before any download attempt.
    Path("./models/cache").mkdir(parents=True, exist_ok=True)

    # CLI args select the models; an empty argv tail means just 'small'.
    requested = sys.argv[1:] or ['small']
    if 'all' in requested:
        requested = list(MODELS.keys())

    for model_name in requested:
        download_model(model_name)

    # Always emit the PyTorch-to-ONNX conversion helper alongside.
    print("\n" + banner)
    create_conversion_script()

    print("\n" + banner)
    print("Next Steps:")
    print(banner)
    print("1. Convert PyTorch models to ONNX (see convert_to_onnx.py)")
    print("2. Place ONNX models in ./models/cache/")
    print("3. Update .env with correct model paths")
    print("4. Start the server: uvicorn api.main:app --reload")
    print(banner)


# Run only when executed as a script, so importing this module has no side effects.
if __name__ == "__main__":
    main()