# Source: kimi-k2-thinking-dev / download_model.py
# Author: EGYADMIN
# Last commit: "Fix indentation issues in download_model.py" (406fa52, verified)
#!/usr/bin/env python3
"""
Model Downloader Script for Kimi-K2-Instruct
This script pre-downloads the model from Hugging Face.
"""""
import os
import sys
def download_model():
    """Download the Kimi-K2-Instruct model and tokenizer from the Hugging Face Hub.

    Reads the ``HF_TOKEN`` environment variable for gated-repo authentication.

    Returns:
        bool: True when both tokenizer and model weights downloaded
        successfully, False on any error (the traceback is printed to stdout).
    """
    try:
        # Heavy imports kept inside the function so the script can report a
        # clean failure (return False) when transformers/torch are missing.
        from transformers import AutoTokenizer, AutoModelForCausalLM
        import torch

        MODEL_NAME = "moonshotai/Kimi-K2-Instruct"

        print("=" * 60)
        print("Starting Model Download")
        print("=" * 60)
        print(f"CUDA Available: {torch.cuda.is_available()}")
        if torch.cuda.is_available():
            print(f"GPU Count: {torch.cuda.device_count()}")
        print(f"Downloading Model: {MODEL_NAME}")

        # Download tokenizer first: it is small and fails fast on auth errors.
        print("Downloading Tokenizer...")
        tokenizer = AutoTokenizer.from_pretrained(
            MODEL_NAME,
            trust_remote_code=True,
            token=os.environ.get("HF_TOKEN"),
        )
        print("Tokenizer downloaded successfully")

        # Download model weights; device_map="auto" spreads them across
        # available GPUs (or CPU) as they load.
        print("Downloading Model Weights...")
        model = AutoModelForCausalLM.from_pretrained(
            MODEL_NAME,
            torch_dtype=torch.bfloat16,
            device_map="auto",
            trust_remote_code=True,
            token=os.environ.get("HF_TOKEN"),
        )
        print("Model downloaded successfully")

        print("=" * 60)
        print("Model download completed!")
        print("=" * 60)
        return True
    except Exception as e:
        # Best-effort script: report the failure and signal it via the
        # return value rather than crashing the caller.
        print(f"Error: {str(e)}")
        import traceback
        traceback.print_exc()
        return False
if __name__ == "__main__":
success = download_model()
sys.exit(0 if success else 1)
)
)