|
|
| """
|
| CRANE AI - Model İndirme Script'i
|
| HuggingFace'den modelleri önceden indirir
|
| """
|
|
|
| import os
|
| import sys
|
| import logging
|
| from typing import Dict, Any
|
| import asyncio
|
| import torch
|
| from transformers import AutoTokenizer, AutoModelForCausalLM
|
| from huggingface_hub import login
|
| import gc
|
|
|
|
|
# Root logger configuration: timestamped INFO-level messages
# (format: "time - LEVEL - message").
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)

# HuggingFace access token read from the environment; empty string when
# the HF_TOKEN env var is unset (downstream code treats that as "no token").
HF_TOKEN = os.getenv("HF_TOKEN", "")
|
|
|
|
|
# Registry of models this script pre-downloads, keyed by a short local alias.
# Each entry holds:
#   "model_id"    - HuggingFace Hub repo id passed to from_pretrained()
#   "description" - human-readable label (Turkish) used in log messages
#   "task"        - role the model plays in the application
MODELS_TO_DOWNLOAD = {
    "deepseek-coder": {
        "model_id": "deepseek-ai/deepseek-coder-1.3b-instruct",
        "description": "Kod yazımı için DeepSeek Coder 1.3B",
        "task": "code_generation"
    },
    "qwen-chat": {
        "model_id": "Qwen/Qwen2.5-1.5B-Instruct",
        "description": "Sohbet için Qwen2.5 1.5B",
        "task": "chat"
    },
    "phi-reason": {
        "model_id": "microsoft/Phi-3-mini-4k-instruct",
        "description": "Mantık için Phi-3 Mini",
        "task": "reasoning"
    },
    "tinyllama-fast": {
        "model_id": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
        "description": "Hızlı yanıt için TinyLlama 1.1B",
        "task": "quick_response"
    }
}
|
|
|
def get_device():
    """Return the best available torch device string.

    Returns:
        "cuda" when an NVIDIA GPU is available, "mps" on Apple Silicon
        with the Metal backend, otherwise "cpu".
    """
    if torch.cuda.is_available():
        return "cuda"
    # getattr guard: torch builds without the MPS backend have no
    # torch.backends.mps attribute at all, so a direct access would raise
    # AttributeError instead of falling through to CPU.
    mps_backend = getattr(torch.backends, "mps", None)
    if mps_backend is not None and mps_backend.is_available():
        return "mps"
    return "cpu"
|
|
|
def setup_cache_dir():
    """Prepare the local model cache directory.

    Creates ./model_cache under the current working directory (no-op if it
    already exists), points the TRANSFORMERS_CACHE environment variable at
    it, and returns the directory path.
    """
    cache_path = os.path.join(os.getcwd(), "model_cache")
    # exist_ok: repeated runs reuse the same directory without raising.
    os.makedirs(cache_path, exist_ok=True)
    os.environ["TRANSFORMERS_CACHE"] = cache_path
    return cache_path
|
|
|
def login_huggingface():
    """Authenticate against the HuggingFace Hub.

    Returns:
        True on successful login, or when no token is configured (all
        models in MODELS_TO_DOWNLOAD are public, so anonymous access
        suffices); False when a login attempt with a token fails.
    """
    # Bug fix: previously an unset HF_TOKEN made login("") fail and the
    # whole script exit, even though no token is needed for public repos.
    if not HF_TOKEN:
        logger.warning("⚠️ HF_TOKEN tanımlı değil, anonim erişim kullanılacak")
        return True
    try:
        login(token=HF_TOKEN)
        logger.info("✅ HuggingFace login başarılı")
        return True
    except Exception as e:
        logger.error(f"❌ HuggingFace login hatası: {str(e)}")
        return False
|
|
|
def download_model(model_info: Dict[str, Any]) -> bool:
    """Download one model's tokenizer and weights into the local HF cache.

    Args:
        model_info: Mapping with "model_id" (HuggingFace repo id) and
            "description" (label used in log output).

    Returns:
        True when both tokenizer and weights downloaded; False on any error
        (logged, not raised, so remaining models can still be attempted).
    """
    model_id = model_info["model_id"]
    description = model_info["description"]
    # Bug fix: an empty-string token sends an invalid Authorization header
    # and can 401 even on public repos; None means cached/anonymous access.
    token = HF_TOKEN or None

    try:
        logger.info(f"📥 İndiriliyor: {description}")
        logger.info(f" Model ID: {model_id}")

        logger.info(" 🔤 Tokenizer indiriliyor...")
        tokenizer = AutoTokenizer.from_pretrained(
            model_id,
            trust_remote_code=True,
            token=token
        )
        logger.info(" ✅ Tokenizer indirildi")

        logger.info(" 🤖 Model indiriliyor...")
        # fp16 halves the in-memory footprint on CUDA/MPS; CPU keeps fp32
        # since half precision is poorly supported there.
        model = AutoModelForCausalLM.from_pretrained(
            model_id,
            trust_remote_code=True,
            torch_dtype=torch.float16 if get_device() != "cpu" else torch.float32,
            token=token
        )
        logger.info(" ✅ Model indirildi")

        # This script only warms the on-disk cache: drop the in-memory
        # copies immediately and reclaim GPU memory before the next model.
        del model
        del tokenizer
        gc.collect()
        if torch.cuda.is_available():
            torch.cuda.empty_cache()

        logger.info(f"✅ {description} başarıyla indirildi!\n")
        return True

    except Exception as e:
        logger.error(f"❌ {description} indirme hatası: {str(e)}\n")
        return False
|
|
|
def main():
    """Entry point: download every configured model and print a summary.

    Exits with status 1 when the HuggingFace login fails; otherwise
    attempts every model in MODELS_TO_DOWNLOAD and reports counts.
    """
    logger.info("🚀 CRANE AI Model İndirme Başlatılıyor...")
    logger.info(f"🔧 Cihaz: {get_device()}")

    cache_dir = setup_cache_dir()
    logger.info(f"📁 Cache dizini: {cache_dir}")

    # A failed login makes every download pointless — bail out early.
    if not login_huggingface():
        logger.error("❌ HuggingFace login başarısız. Çıkılıyor...")
        sys.exit(1)

    logger.info(f"📦 {len(MODELS_TO_DOWNLOAD)} model indirilecek...\n")

    # download_model returns a bool per model; sum() counts the successes.
    outcomes = [download_model(info) for info in MODELS_TO_DOWNLOAD.values()]
    successful_downloads = sum(outcomes)
    failed_downloads = len(outcomes) - successful_downloads

    divider = "=" * 50
    logger.info(divider)
    logger.info("📊 İNDİRME ÖZETİ")
    logger.info(divider)
    logger.info(f"✅ Başarılı: {successful_downloads}")
    logger.info(f"❌ Başarısız: {failed_downloads}")
    logger.info(f"📁 Cache dizini: {cache_dir}")

    if successful_downloads == len(MODELS_TO_DOWNLOAD):
        logger.info("🎉 Tüm modeller başarıyla indirildi!")
    else:
        logger.warning("⚠️ Bazı modeller indirilemedi.")

    logger.info(divider)

if __name__ == "__main__":
    main()