File size: 2,433 Bytes
d54e6a9 b834258 c2280e3 d234e06 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 |
"""
EXAONE-3.0-7.8B-Instruct ๋ชจ๋ธ์ Hugging Face์์ ๋ค์ด๋ก๋ํ๊ณ
Ollama์์ ์ฌ์ฉํ ์ ์๋๋ก ์ค๋นํ๋ ์คํฌ๋ฆฝํธ
"""
import os
from huggingface_hub import snapshot_download, login
HF_TOKEN = "YOUR_HUGGINGFACE_TOKEN_HERE"
MODEL_NAME = "LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct"
def download_model(token=None, model_name=None, local_dir=None):
    """Log in to Hugging Face and download the model snapshot.

    Args:
        token: Hugging Face access token. Falls back to the ``HF_TOKEN``
            environment variable, then the module-level ``HF_TOKEN`` constant.
        model_name: Repo id to download. Defaults to ``MODEL_NAME``.
        local_dir: Target directory for the snapshot. Defaults to
            ``./models/EXAONE-3.0-7.8B-Instruct``.

    Returns:
        bool: True on success, False if login or the download failed.
    """
    # Resolve defaults lazily so callers keep the original zero-arg behavior
    # while gaining an env-var escape hatch for the credential.
    token = token or os.environ.get("HF_TOKEN") or HF_TOKEN
    model_name = model_name or MODEL_NAME
    local_dir = local_dir or "./models/EXAONE-3.0-7.8B-Instruct"

    print("=" * 60)
    print("EXAONE-3.0-7.8B-Instruct ๋ชจ๋ธ ๋ค์ด๋ก๋")
    print("=" * 60)

    # Authenticate first; a bad token fails fast here instead of mid-download.
    try:
        login(token=token)
        print("[OK] Hugging Face ๋ก๊ทธ์ธ ์ฑ๊ณต")
    except Exception as e:
        print(f"[ERROR] Hugging Face ๋ก๊ทธ์ธ ์คํจ: {e}")
        return False

    # Download the full snapshot into a plain local directory.
    try:
        print(f"\n๋ชจ๋ธ ๋ค์ด๋ก๋ ์์: {model_name}")
        print("์ฃผ์: ๋ชจ๋ธ ํฌ๊ธฐ๊ฐ ์ฝ 15GB์ด๋ฏ๋ก ์๊ฐ์ด ๊ฑธ๋ฆด ์ ์์ต๋๋ค...")
        # NOTE: local_dir_use_symlinks was removed — it is deprecated in
        # huggingface_hub; local_dir now always receives real files.
        download_path = snapshot_download(
            repo_id=model_name,
            token=token,
            local_dir=local_dir,
        )
        print(f"\n[OK] ๋ชจ๋ธ ๋ค์ด๋ก๋ ์๋ฃ!")
        print(f"์ ์ฅ ์์น: {download_path}")
        return True
    except Exception as e:
        print(f"[ERROR] ๋ชจ๋ธ ๋ค์ด๋ก๋ ์คํจ: {e}")
        return False
if __name__ == "__main__":
import sys
print("\n์ด ์คํฌ๋ฆฝํธ๋ EXAONE ๋ชจ๋ธ์ Hugging Face์์ ๋ค์ด๋ก๋ํฉ๋๋ค.")
print("๋ค์ด๋ก๋๋ ๋ชจ๋ธ์ Ollama์์ ์ง์ ์ฌ์ฉํ ์ ์์ผ๋ฉฐ,")
print("GGUF ํ์์ผ๋ก ๋ณํํ๋ ์ถ๊ฐ ์์
์ด ํ์ํฉ๋๋ค.")
print("\n์ฐธ๊ณ : Ollama๋ ์ผ๋ฐ์ ์ผ๋ก GGUF ํ์์ ๋ชจ๋ธ๋ง ์ง์ํฉ๋๋ค.")
response = input("\n๊ณ์ํ์๊ฒ ์ต๋๊น? (y/n): ")
if response.lower() != 'y':
print("๋ค์ด๋ก๋๋ฅผ ์ทจ์ํ์ต๋๋ค.")
sys.exit(0)
success = download_model()
if success:
print("\n" + "=" * 60)
print("๋ค์ด๋ก๋ ์๋ฃ!")
print("=" * 60)
print("\n๋ค์ ๋จ๊ณ:")
print("1. llama.cpp๋ฅผ ์ฌ์ฉํ์ฌ GGUF ํ์์ผ๋ก ๋ณํ")
print("2. ๋ณํ๋ ๋ชจ๋ธ์ Ollama์ ์ถ๊ฐ")
print("\n์์ธํ ๋ด์ฉ์ EXAONE_์ค์น_๊ฐ์ด๋.md๋ฅผ ์ฐธ๊ณ ํ์ธ์.")
print("=" * 60)
else:
print("\n๋ค์ด๋ก๋์ ์คํจํ์ต๋๋ค.")
|