| import os | |
| import requests | |
| import argparse | |
def download_model(url, path, timeout=60):
    """Download a file from *url* and save it to *path*.

    Streams the response in 8 KiB chunks so large model files are never
    held fully in memory. Best-effort CLI helper: failures are reported
    on stdout rather than raised, and any partial file is removed.

    Args:
        url: HTTP(S) URL of the model file; redirects are followed.
        path: Destination file path; parent directories are created.
        timeout: Seconds to wait for connect/read before giving up
            (new optional parameter; previous behavior had no timeout
            and could hang indefinitely).
    """
    print(f"Downloading model from {url} to {path}...")
    try:
        # os.makedirs('') raises FileNotFoundError, so only create the
        # directory when the path actually has a directory component.
        directory = os.path.dirname(path)
        if directory:
            os.makedirs(directory, exist_ok=True)
        # Context manager ensures the streamed connection is released.
        with requests.get(url, allow_redirects=True, stream=True, timeout=timeout) as r:
            # Without this, an HTTP error page (404/500) would be saved
            # silently as the model file.
            r.raise_for_status()
            with open(path, "wb") as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
        print("Download complete.")
    except (requests.RequestException, OSError) as e:
        print(f"Failed to download model: {e}")
        # Don't leave a truncated/partial model on disk for a later run
        # to mistake for a complete download.
        if os.path.exists(path):
            os.remove(path)
if __name__ == "__main__":
    # CLI entry point: download a (default: InsightFace buffalo_s) ONNX
    # model to a local path.
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        "--url",
        default="https://huggingface.co/deepghs/insightface/resolve/main/buffalo_s/w600k_mbf.onnx",
        help="URL of the model",
    )
    arg_parser.add_argument(
        "--output",
        default="app/models/w600k_mbf.onnx",
        help="Output path",
    )
    cli_args = arg_parser.parse_args()
    download_model(cli_args.url, cli_args.output)