Spaces:
Sleeping
Sleeping
#!/usr/bin/env python3
"""Pre-download a Hugging Face model at Docker build time.

Running this during the image build caches the tokenizer and model
weights inside the image so containers start without a first-run download.
"""
import logging
import os
from pathlib import Path

from transformers import AutoTokenizer, AutoModelForCausalLM

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def download_model(
    model_name: str = "gbrabbit/lily-math-model",
    cache_dir: str = "/app/cache/transformers",
) -> None:
    """Download and cache a model and tokenizer from the Hugging Face Hub.

    Intended to run once at Docker build time; the loaded objects are
    discarded immediately — only the on-disk cache matters.

    Args:
        model_name: Hub repo id to download (default: the project's math model).
        cache_dir: Directory used as the transformers cache.

    Raises:
        Exception: any failure from the Hub download is logged (with
            traceback) and re-raised so the build fails visibly.
    """
    logger.info("Starting model download: %s", model_name)
    try:
        # Make sure the cache directory exists before transformers writes to it.
        Path(cache_dir).mkdir(parents=True, exist_ok=True)

        logger.info("Downloading tokenizer...")
        tokenizer = AutoTokenizer.from_pretrained(
            model_name,
            trust_remote_code=True,
            cache_dir=cache_dir,
        )
        logger.info("Tokenizer download complete")

        # Download the model weights only; we never run inference here.
        logger.info("Downloading model weights...")
        model = AutoModelForCausalLM.from_pretrained(
            model_name,
            trust_remote_code=True,
            cache_dir=cache_dir,
            torch_dtype="auto",  # avoid forcing fp32 — keeps build memory low
            low_cpu_mem_usage=True,
        )
        logger.info("Model download complete")

        # Release memory right away; the files are already cached on disk.
        del model
        del tokenizer

        logger.info("Model download and caching complete")
    except Exception:
        # logger.exception records the full traceback, unlike logger.error(f"...").
        logger.exception("Model download failed")
        raise
# Script entry point: perform the one-shot model download.
if __name__ == "__main__":
    download_model()