Update app.py
Browse files
app.py
CHANGED
|
@@ -14,12 +14,13 @@ hf_token = os.environ.get("HF_TOKEN", None)
|
|
| 14 |
# Authenticate with the Hugging Face Hub so the gated Llama 3.1 weights can be
# downloaded. `hf_token` is read from the HF_TOKEN env var earlier in the file.
if hf_token:
    login(token=hf_token, add_to_git_credential=True)
else:
    # No token available: gated-model downloads below will likely fail.
    print("HF_TOKEN 환경 변수가 설정 오류")

# Model / tokenizer setup.
model_name = "meta-llama/Meta-Llama-3.1-8B-Instruct"
tokenizer = AutoTokenizer.from_pretrained(model_name, token=hf_token)
# NOTE(review): device_map="auto" was deliberately removed in this revision
# (per the original comment); the model loads in fp16 on the default device.
# low_cpu_mem_usage=False keeps the eager, fully-materialized load path.
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,
    low_cpu_mem_usage=False,
    token=hf_token,
)

# Load the KMMLU benchmark, Accounting subset, for evaluation.
dataset = load_dataset("HAERAE-HUB/KMMLU", "Accounting")