Update app.py
Browse files
app.py
CHANGED
|
@@ -5,6 +5,7 @@ import torch
|
|
| 5 |
from huggingface_hub import login
|
| 6 |
import os
|
| 7 |
from datasets import load_dataset
|
|
|
|
| 8 |
|
| 9 |
# 환경 변수에서 토큰을 가져오기
|
| 10 |
hf_token = os.environ.get("HF_TOKEN", None)
|
|
@@ -18,7 +19,7 @@ else:
|
|
| 18 |
# model, tokenizer 셋팅
|
| 19 |
model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
|
| 20 |
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
|
| 21 |
-
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16, device_map="auto", token=hf_token)
|
| 22 |
|
| 23 |
# KMMLU 데이터셋 로드
|
| 24 |
dataset = load_dataset("HAERAE-HUB/KMMLU", "Accounting")
|
|
|
|
import os

import accelerate
from datasets import load_dataset
from huggingface_hub import login

# Read the Hugging Face access token from the environment (None when unset);
# it is forwarded to the gated meta-llama model/tokenizer downloads below.
hf_token = os.getenv("HF_TOKEN")
|
|
|
# Model and tokenizer setup.
model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
# NOTE(review): the previous revision passed low_cpu_mem_usage=False together
# with device_map="auto"; transformers rejects that combination with
# ValueError("Passing along a device_map requires low_cpu_mem_usage=True").
# device_map="auto" already implies low_cpu_mem_usage=True, so the explicit
# flag is dropped to make the load succeed.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,
    device_map="auto",
    token=hf_token,
)

# Load the KMMLU benchmark, "Accounting" subject, for evaluation.
dataset = load_dataset("HAERAE-HUB/KMMLU", "Accounting")