# soyailabs / install_exaone_direct.py
# SOY NV AI
# feat: save current work in progress
# commit c2280e3 (Hugging Face file-page header: raw / history / blame, 2.61 kB)
"""
EXAONE-3.0-7.8B-Instruct๋ฅผ Ollama์— ์ง์ ‘ ์ถ”๊ฐ€ํ•˜๋Š” ์Šคํฌ๋ฆฝํŠธ
ํ† ํฐ์„ ์‚ฌ์šฉํ•˜์—ฌ ๋ชจ๋ธ์„ ์ถ”๊ฐ€ํ•ฉ๋‹ˆ๋‹ค.
"""
import os
import subprocess
import json
HF_TOKEN = "YOUR_HUGGINGFACE_TOKEN_HERE"
def create_simple_modelfile():
    """Write a minimal Ollama Modelfile for EXAONE-3.0-7.8B-Instruct to the CWD.

    The file sets sampling parameters and a SYSTEM prompt only; note that it
    contains no FROM line, so Ollama may need extra setup to use it.
    """
    # Single-quoted triple string so the embedded SYSTEM """...""" delimiters
    # need no backslash escaping; the written bytes are unchanged.
    content = '''# EXAONE-3.0-7.8B-Instruct ๋ชจ๋ธ ์„ค์ •
# ์ฐธ๊ณ : Ollama๊ฐ€ Hugging Face ๋ชจ๋ธ์„ ์ง์ ‘ ์ง€์›ํ•˜์ง€ ์•Š๋Š” ๊ฒฝ์šฐ
# ๋‹ค๋ฅธ ๋ฐฉ๋ฒ•์ด ํ•„์š”ํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
PARAMETER temperature 0.7
PARAMETER top_p 0.9
PARAMETER top_k 40
PARAMETER num_ctx 4096
SYSTEM """You are EXAONE, a helpful AI assistant developed by LG AI Research.
You can communicate in both Korean and English."""
'''
    with open("EXAONE-3.0-7.8B-Instruct.modelfile", "w", encoding="utf-8") as fh:
        fh.write(content)
    print("[OK] Modelfile ์ƒ์„ฑ ์™„๋ฃŒ")
def check_model_availability():
    """Return True if an EXAONE model already appears in `ollama list`.

    Runs `ollama list` with a 5-second timeout and scans its stdout
    case-insensitively for "exaone". Any failure (binary missing, timeout,
    etc.) is printed as a warning and treated as "not installed" so the
    installer can continue — this probe must never crash the script.
    """
    try:
        result = subprocess.run(
            ['ollama', 'list'],
            capture_output=True, text=True, timeout=5,
        )
        # Case-insensitive match already covers 'EXAONE', so the original
        # separate uppercase check was redundant and is dropped.
        if 'exaone' in result.stdout.lower():
            print("[INFO] EXAONE ๋ชจ๋ธ์ด ์ด๋ฏธ ์„ค์น˜๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค.")
            return True
        return False
    except Exception as e:
        # Deliberately broad: this is a best-effort probe at a boundary;
        # log the problem and fall back to "not installed".
        print(f"[WARNING] ๋ชจ๋ธ ํ™•์ธ ์ค‘ ์˜ค๋ฅ˜: {e}")
        return False
def main():
    """Set the HF token, check for an existing EXAONE model, and print next steps.

    The script cannot actually pull a Hugging Face model into Ollama directly,
    so after the installed-model check it only prints manual instructions
    (update Ollama, inspect versions, look for a GGUF build of EXAONE).
    """
    print("\n" + "=" * 60)
    print("EXAONE-3.0-7.8B-Instruct Ollama ์ถ”๊ฐ€ ์‹œ๋„")
    print("=" * 60)

    # Expose the Hugging Face token to this process and any children.
    # NOTE(review): HF_TOKEN is a hard-coded placeholder constant — it should
    # come from the environment or a secrets store, not source code.
    os.environ['HUGGINGFACE_HUB_TOKEN'] = HF_TOKEN
    # Plain string (the original used an f-string with no placeholders).
    print("[OK] Hugging Face ํ† ํฐ ์„ค์ • ์™„๋ฃŒ")

    # Nothing to do if an EXAONE model is already registered with Ollama.
    if check_model_availability():
        print("\n๋ชจ๋ธ์ด ์ด๋ฏธ ์„ค์น˜๋˜์–ด ์žˆ์Šต๋‹ˆ๋‹ค.")
        return

    print("\nํ˜„์žฌ Ollama ๋ฒ„์ „์—์„œ๋Š” Hugging Face ๋ชจ๋ธ์„ ์ง์ ‘ ๊ฐ€์ ธ์˜ฌ ์ˆ˜ ์—†์Šต๋‹ˆ๋‹ค.")
    print("\n๋‹ค์Œ ๋ฐฉ๋ฒ•์„ ์‹œ๋„ํ•ด๋ณด์„ธ์š”:")
    print("\n1. Ollama๋ฅผ ์ตœ์‹  ๋ฒ„์ „์œผ๋กœ ์—…๋ฐ์ดํŠธ")
    print(" https://ollama.ai/download")
    print("\n2. ์ˆ˜๋™์œผ๋กœ ๋ชจ๋ธ ์ •๋ณด ํ™•์ธ")
    print(" ํ˜„์žฌ Ollama ๋ฒ„์ „ ํ™•์ธ: ollama --version")
    print(" ์‚ฌ์šฉ ๊ฐ€๋Šฅํ•œ ๋ชจ๋ธ ํ™•์ธ: ollama list")
    print("\n3. EXAONE ๋ชจ๋ธ์˜ GGUF ๋ฒ„์ „ ์ฐพ๊ธฐ")
    print(" Hugging Face์—์„œ GGUF ํ˜•์‹์˜ ๋ชจ๋ธ์„ ์ฐพ์•„๋ณด์„ธ์š”.")
    print("\n" + "=" * 60)
    print("์ฐธ๊ณ : ํ˜„์žฌ Ollama 0.13.0์€ Hugging Face ๋ชจ๋ธ ์ง์ ‘ ์ง€์›์ด ์ œํ•œ์ ์ž…๋‹ˆ๋‹ค.")
    print("=" * 60)
# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()