|
|
""" |
|
|
EXAONE-3.0-7.8B-Instruct๋ฅผ Ollama์ ์ง์ ์ถ๊ฐํ๋ ์คํฌ๋ฆฝํธ |
|
|
ํ ํฐ์ ์ฌ์ฉํ์ฌ ๋ชจ๋ธ์ ์ถ๊ฐํฉ๋๋ค. |
|
|
""" |
|
|
|
|
|
import os |
|
|
import subprocess |
|
|
import json |
|
|
|
|
|
HF_TOKEN = "YOUR_HUGGINGFACE_TOKEN_HERE" |
|
|
|
|
|
def create_simple_modelfile():
    """Write a minimal Ollama Modelfile for EXAONE-3.0-7.8B-Instruct.

    Creates ``EXAONE-3.0-7.8B-Instruct.modelfile`` in the current working
    directory.  The content sets sampling PARAMETERs and a SYSTEM prompt
    only; note it contains no ``FROM`` line, so as written it relies on a
    model source being supplied some other way (see the embedded comments).

    NOTE(review): the Korean comment lines inside the content below are
    mis-encoded (mojibake) in this source; they are runtime data written
    to disk and are left byte-for-byte unchanged here.
    """
    modelfile_content = """# EXAONE-3.0-7.8B-Instruct ๋ชจ๋ธ ์ค์ 
# ์ฐธ๊ณ : Ollama๊ฐ Hugging Face ๋ชจ๋ธ์ ์ง์  ์ง์ํ์ง ์๋ ๊ฒฝ์ฐ
# ๋ค๋ฅธ ๋ฐฉ๋ฒ์ด ํ์ํ  ์ ์์ต๋๋ค.

PARAMETER temperature 0.7
PARAMETER top_p 0.9
PARAMETER top_k 40
PARAMETER num_ctx 4096

SYSTEM \"\"\"You are EXAONE, a helpful AI assistant developed by LG AI Research.
You can communicate in both Korean and English.\"\"\"
"""

    # utf-8 is required: the content contains non-ASCII (mis-encoded Korean) text.
    with open("EXAONE-3.0-7.8B-Instruct.modelfile", "w", encoding="utf-8") as f:
        f.write(modelfile_content)

    print("[OK] Modelfile ์์ฑ ์๋ฃ")
|
|
|
|
|
def check_model_availability():
    """Return True if an EXAONE model is already registered with Ollama.

    Runs ``ollama list`` and scans its stdout for the model name,
    case-insensitively.

    Returns:
        bool: True when ``exaone`` appears in the ``ollama list`` output;
        False otherwise, including every error path (``ollama`` binary
        missing, timeout, etc. — this is a best-effort probe, so failures
        are logged as warnings rather than raised).
    """
    try:
        result = subprocess.run(
            ['ollama', 'list'],
            capture_output=True, text=True, timeout=5,
        )
        # One case-insensitive check replaces the original redundant pair
        # ('EXAONE' in stdout) or ('exaone' in stdout.lower()): the first
        # test was fully subsumed by the second.
        if 'exaone' in result.stdout.lower():
            print("[INFO] EXAONE ๋ชจ๋ธ์ด ์ด๋ฏธ ์ค์น๋์ด ์์ต๋๋ค.")
            return True
        return False
    except Exception as e:
        # Broad catch is deliberate: any failure just means "can't confirm",
        # and the caller treats that the same as "not installed".
        print(f"[WARNING] ๋ชจ๋ธ ํ์ธ ์ค ์ค๋ฅ: {e}")
        return False
|
|
|
|
|
def main():
    """Guide the user through adding EXAONE-3.0-7.8B-Instruct to Ollama.

    Exports the Hugging Face token into the environment, checks whether an
    EXAONE model is already registered, and otherwise prints step-by-step
    manual instructions (current Ollama versions cannot pull arbitrary
    Hugging Face repos directly, so this script can only point the user at
    workable alternatives).
    """
    # NOTE(review): the original messages in this function were mis-encoded
    # (mojibake), and two string literals were split across lines by raw
    # newlines — a SyntaxError that made the whole file unrunnable. The
    # text below restores the intended guidance in English.
    print("\n" + "=" * 60)
    print("EXAONE-3.0-7.8B-Instruct: attempting to add to Ollama")
    print("=" * 60)

    # Export the token so any child process (e.g. huggingface-cli) sees it.
    os.environ['HUGGINGFACE_HUB_TOKEN'] = HF_TOKEN
    print("[OK] Hugging Face token configured")

    # Nothing further to do if an EXAONE model is already registered.
    if check_model_availability():
        print("\nThe model is already installed.")
        return

    print("\nThe current Ollama version cannot pull this Hugging Face model directly.")
    print("\nTry the following:")
    print("\n1. Update Ollama to the latest version")
    print("   https://ollama.ai/download")

    print("\n2. Check your setup manually")
    print("   Current Ollama version: ollama --version")
    print("   Installed models:       ollama list")

    print("\n3. Look for a GGUF build of EXAONE")
    print("   Search Hugging Face for a GGUF-format version of the model.")

    print("\n" + "=" * 60)
    print("Note: Ollama 0.13.0 has only limited support for Hugging Face models.")
    print("=" * 60)
|
|
|
|
|
if __name__ == "__main__": |
|
|
main() |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|