|
|
""" |
|
|
EXAONE-3.0-7.8B-Instruct ๋ชจ๋ธ์ Ollama์ ์ถ๊ฐํ๋ ์คํฌ๋ฆฝํธ (ํ ํฐ ํฌํจ) |
|
|
""" |
|
|
|
|
|
import os |
|
|
import subprocess |
|
|
from pathlib import Path |
|
|
import sys |
|
|
|
|
|
|
|
|
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__)))) |
|
|
|
|
|
def get_huggingface_token():
    """Resolve the Hugging Face access token.

    Lookup order (same pattern as the project's Google API key handling):
    1. ``HUGGINGFACE_HUB_TOKEN`` environment variable
    2. ``HF_TOKEN`` environment variable
    3. ``huggingface_token`` entry in the application's SystemConfig DB

    Returns:
        str: the stripped token, or ``''`` when no token could be found.
    """
    # 1-2) Environment variables take precedence over the DB.
    token = os.getenv('HUGGINGFACE_HUB_TOKEN', '').strip()
    if not token:
        token = os.getenv('HF_TOKEN', '').strip()

    if token:
        print(f"[Hugging Face] ํ๊ฒฝ ๋ณ์์์ ํ ํฐ ๊ฐ์ ธ์ด (๊ธธ์ด: {len(token)}์)")
        return token

    # 3) Fall back to the application database. Import lazily so this script
    # still runs (best-effort) when the app package is unavailable.
    try:
        from app.database import SystemConfig

        token = SystemConfig.get_config('huggingface_token', '').strip()
        if token:
            print(f"[Hugging Face] DB์์ ํ ํฐ ๊ฐ์ ธ์ด (๊ธธ์ด: {len(token)}์)")
            return token
    except Exception as e:
        # A missing/unreachable DB must not crash the script.
        print(f"[Hugging Face] DB์์ ํ ํฐ ์กฐํ ์คํจ (ํ๊ฒฝ ๋ณ์ ์ฌ์ฉ): {e}")

    # Bug fix: previously the function fell through and returned None when the
    # DB lookup succeeded but yielded an empty token; always return a str.
    return ''
|
|
|
|
|
|
|
|
# Resolve the token once at import time; fail fast when it is missing so the
# script never starts a download that is doomed to hit an auth error.
HF_TOKEN = get_huggingface_token()

if not HF_TOKEN:
    # Message (Korean, mojibake in this copy) asks the operator to set
    # HUGGINGFACE_HUB_TOKEN / HF_TOKEN or configure the token in the admin page.
    raise ValueError("HUGGINGFACE_HUB_TOKEN ๋๋ HF_TOKEN ํ๊ฒฝ ๋ณ์๋ฅผ ์ค์ ํ๊ฑฐ๋, ๊ด๋ฆฌ ํ์ด์ง์์ ํ ํฐ์ ์ค์ ํด์ฃผ์ธ์.")
|
|
|
|
|
def set_huggingface_token():
    """Export the resolved token via HUGGINGFACE_HUB_TOKEN for child processes."""
    os.environ['HUGGINGFACE_HUB_TOKEN'] = HF_TOKEN
    # Plain string: the original used an f-string with no placeholders.
    print("[OK] Hugging Face ํ ํฐ ์ค์ ์๋ฃ")
|
|
|
|
|
def create_ollama_modelfile():
    """Write the Ollama Modelfile for EXAONE-3.0-7.8B-Instruct.

    Returns:
        Path: path of the Modelfile written to the current working directory.
    """
    # Plain (non-f) string so Ollama's Go-template braces {{ ... }} can be
    # written literally instead of as error-prone quadruple braces. The
    # mojibake-corrupted Korean comments in the payload (one of which had been
    # broken onto its own non-comment line) are replaced with clean English.
    # NOTE(review): `FROM huggingface:` pulls straight from the Hugging Face
    # Hub — confirm the installed Ollama version supports this source syntax.
    modelfile_content = """FROM huggingface:LGAI-EXAONE/EXAONE-3.0-7.8B-Instruct

# Sampling / context parameters
PARAMETER temperature 0.7
PARAMETER top_p 0.9
PARAMETER top_k 40
PARAMETER num_ctx 4096
PARAMETER num_predict 512

# System prompt
SYSTEM \"\"\"You are EXAONE, a helpful AI assistant developed by LG AI Research.
You are designed to be helpful, harmless, and honest.
You can communicate in both Korean and English.\"\"\"

# Chat template (ChatML-style tags used by EXAONE)
TEMPLATE \"\"\"{{ if .System }}<|im_start|>system
{{ .System }}<|im_end|>
{{ end }}{{ if .Prompt }}<|im_start|>user
{{ .Prompt }}<|im_end|>
{{ end }}<|im_start|>assistant
{{ .Response }}<|im_end|>
\"\"\"
"""

    modelfile_path = Path("EXAONE-3.0-7.8B-Instruct.modelfile")
    modelfile_path.write_text(modelfile_content, encoding='utf-8')
    print(f"[OK] Modelfile ์์ฑ ์๋ฃ: {modelfile_path.absolute()}")
    return modelfile_path
|
|
|
|
|
def create_model_with_modelfile(modelfile_path):
    """Create the Ollama model from *modelfile_path* via ``ollama create``.

    Args:
        modelfile_path: path of the Modelfile to build from.

    Returns:
        bool: True when ``ollama create`` exits with status 0.
    """
    try:
        print("\n" + "=" * 60)
        print("Ollama ๋ชจ๋ธ ์์ฑ ์์...")
        print("=" * 60)

        # Pass the HF token down so ollama can fetch the gated weights.
        env = os.environ.copy()
        env['HUGGINGFACE_HUB_TOKEN'] = HF_TOKEN

        # Argument list with shell=False (the default): no shell-injection surface.
        cmd = ['ollama', 'create', 'EXAONE-3.0-7.8B-Instruct', '-f', str(modelfile_path)]
        # Bug fix: this message was a mojibake-corrupted literal containing a raw
        # newline (an unterminated string); restored to valid Korean text.
        print(f"실행 명령어: {' '.join(cmd)}")

        result = subprocess.run(
            cmd,
            env=env,
            capture_output=True,
            text=True,
            timeout=3600  # the model download can take a long time; cap at 1 hour
        )

        if result.returncode == 0:
            print("[OK] ๋ชจ๋ธ ์์ฑ ์ฑ๊ณต!")
            print(result.stdout)
            return True
        else:
            print("[ERROR] ๋ชจ๋ธ ์์ฑ ์คํจ")
            print("์ค๋ฅ ์ถ๋ ฅ:")
            print(result.stderr)
            return False

    except subprocess.TimeoutExpired:
        print("[ERROR] ๋ชจ๋ธ ์์ฑ ์๊ฐ ์ด๊ณผ (1์๊ฐ)")
        return False
    except Exception as e:
        # Top-level boundary of this helper: report and signal failure.
        print(f"[ERROR] ์ค๋ฅ ๋ฐ์: {e}")
        return False
|
|
|
|
|
def check_ollama_installation():
    """Return True when the `ollama` CLI is installed and answers --version."""
    probe = ['ollama', '--version']
    try:
        completed = subprocess.run(probe,
                                   capture_output=True, text=True, timeout=5)
    except FileNotFoundError:
        # Binary not on PATH at all.
        print("[ERROR] Ollama๊ฐ ์ค์น๋์ด ์์ง ์์ต๋๋ค.")
        print(" ์ค์น ๋ฐฉ๋ฒ: https://ollama.ai/download")
        return False
    except Exception as e:
        print(f"[ERROR] Ollama ํ์ธ ์ค ์ค๋ฅ: {e}")
        return False

    # Guard clause: a non-zero exit means the CLI is present but unusable.
    if completed.returncode != 0:
        print("[ERROR] Ollama๊ฐ ์ค์น๋์ด ์์ง ์์ต๋๋ค.")
        return False

    print(f"[OK] Ollama ์ค์น ํ์ธ: {completed.stdout.strip()}")
    return True
|
|
|
|
|
def verify_model():
    """Check `ollama list` output for the newly created EXAONE model."""
    try:
        listing = subprocess.run(['ollama', 'list'],
                                 capture_output=True, text=True, timeout=5)
        if listing.returncode != 0:
            # `ollama list` failed; nothing to report.
            return False

        found = 'EXAONE-3.0-7.8B-Instruct' in listing.stdout
        if found:
            print("\n[OK] ๋ชจ๋ธ์ด ์ฑ๊ณต์ ์ผ๋ก ์ถ๊ฐ๋์์ต๋๋ค!")
            print("\n์ค์น๋ ๋ชจ๋ธ ๋ชฉ๋ก:")
        else:
            print("\n[WARNING] ๋ชจ๋ธ์ด ๋ชฉ๋ก์ ๋ํ๋์ง ์์ต๋๋ค.")
            print("\nํ์ฌ ์ค์น๋ ๋ชจ๋ธ:")
        # Both branches echo the raw listing for the operator.
        print(listing.stdout)
        return found
    except Exception as e:
        print(f"[WARNING] ๋ชจ๋ธ ํ์ธ ์ค ์ค๋ฅ: {e}")
        return False
|
|
|
|
|
if __name__ == "__main__": |
|
|
print("\n" + "=" * 60) |
|
|
print("EXAONE-3.0-7.8B-Instruct Ollama ์ถ๊ฐ ์คํฌ๋ฆฝํธ") |
|
|
print("=" * 60 + "\n") |
|
|
|
|
|
|
|
|
if not check_ollama_installation(): |
|
|
print("\n[WARNING] Ollama๋ฅผ ๋จผ์ ์ค์นํด์ฃผ์ธ์.") |
|
|
exit(1) |
|
|
|
|
|
|
|
|
set_huggingface_token() |
|
|
|
|
|
|
|
|
modelfile_path = create_ollama_modelfile() |
|
|
|
|
|
|
|
|
print("\n[WARNING] ์ฃผ์์ฌํญ:") |
|
|
print("- ๋ชจ๋ธ ํฌ๊ธฐ๊ฐ ์ฝ 15GB์ด๋ฏ๋ก ๋ค์ด๋ก๋์ ์๊ฐ์ด ๊ฑธ๋ฆด ์ ์์ต๋๋ค.") |
|
|
print("- ์ถฉ๋ถํ ๋์คํฌ ๊ณต๊ฐ๊ณผ GPU ๋ฉ๋ชจ๋ฆฌ๊ฐ ํ์ํฉ๋๋ค.") |
|
|
print("- Hugging Face์์ ๋ชจ๋ธ ์ก์ธ์ค ๊ถํ์ด ํ์ํฉ๋๋ค.") |
|
|
|
|
|
|
|
|
print("\n๋ชจ๋ธ ์์ฑ์ ์์ํฉ๋๋ค...") |
|
|
|
|
|
|
|
|
success = create_model_with_modelfile(modelfile_path) |
|
|
|
|
|
if success: |
|
|
verify_model() |
|
|
print("\n" + "=" * 60) |
|
|
print("[OK] ๋ชจ๋ธ ์ถ๊ฐ ์๋ฃ!") |
|
|
print("=" * 60) |
|
|
print("\n์ด์ ์น ์ ํ๋ฆฌ์ผ์ด์
์์ ๋ชจ๋ธ์ ์ฌ์ฉํ ์ ์์ต๋๋ค.") |
|
|
print("๋ชจ๋ธ ํ
์คํธ:") |
|
|
print(" ollama run EXAONE-3.0-7.8B-Instruct \"์๋
ํ์ธ์\"") |
|
|
print("=" * 60) |
|
|
else: |
|
|
print("\n" + "=" * 60) |
|
|
print("[ERROR] ๋ชจ๋ธ ์์ฑ ์คํจ") |
|
|
print("=" * 60) |
|
|
print("\n์๋์ผ๋ก ๋ชจ๋ธ์ ์์ฑํ๋ ค๋ฉด:") |
|
|
print(f" 1. Hugging Face ํ ํฐ ์ค์ :") |
|
|
print(f" $env:HUGGINGFACE_HUB_TOKEN='YOUR_TOKEN_HERE'") |
|
|
print(f" 2. Modelfile๋ก ๋ชจ๋ธ ์์ฑ:") |
|
|
print(f" ollama create EXAONE-3.0-7.8B-Instruct -f {modelfile_path}") |
|
|
print("=" * 60) |
|
|
|
|
|
|