emotion-chatbot / scripts /save_complete_model.py
kootaeng2
Chore: Improve and update code comments
c1b9543
raw
history blame contribute delete
936 Bytes
# save_complete_model.py
# Convert a trained checkpoint into a complete, deployable model folder.
import argparse

from transformers import AutoModelForSequenceClassification, AutoTokenizer


def main() -> None:
    """Load a model and tokenizer from a training checkpoint and re-save
    them into a single self-contained folder suitable for deployment.

    Command-line arguments (both optional; defaults reproduce the original
    hard-coded behavior exactly):
      --checkpoint  path to the training checkpoint directory
      --output      directory where the complete model is written
    """
    parser = argparse.ArgumentParser(
        description="Export a trained checkpoint as a complete model folder."
    )
    # Defaults match the previously hard-coded paths so that running the
    # script with no arguments behaves exactly as before.
    parser.add_argument(
        "--checkpoint",
        default="./results/checkpoint-9681",
        help="Directory containing the trained checkpoint.",
    )
    parser.add_argument(
        "--output",
        default="./korean-emotion-classifier-final",
        help="Directory to save the complete model and tokenizer into.",
    )
    args = parser.parse_args()

    print(f"'{args.checkpoint}'에서 모델과 토크나이저를 불러옵니다...")
    model = AutoModelForSequenceClassification.from_pretrained(args.checkpoint)
    tokenizer = AutoTokenizer.from_pretrained(args.checkpoint)
    print("불러오기 완료.")

    print(f"'{args.output}' 폴더에 완전한 모델과 토크나이저를 저장합니다...")
    # save_pretrained writes both weights/config (model) and vocab files
    # (tokenizer) so the output folder is loadable on its own.
    model.save_pretrained(args.output)
    tokenizer.save_pretrained(args.output)
    # Use the actual output path in the messages instead of a hard-coded
    # folder name, so they stay correct when --output is overridden.
    print(f"저장 완료! '{args.output}' 폴더를 확인하세요.")
    print("이 폴더 안의 파일들을 'my-local-model' 폴더로 옮겨주시면 됩니다.")


if __name__ == "__main__":
    # Guard the entry point so importing this module does not trigger a
    # model load as an import side effect.
    main()