|
|
import torch |
|
|
|
|
|
from KoELECTRA import * |
|
|
|
|
|
from transformers import ElectraModel, ElectraTokenizer |
|
|
from transformers import ( |
|
|
ElectraConfig, |
|
|
ElectraTokenizer, |
|
|
|
|
|
) |
|
|
# Path to the fine-tuned checkpoint produced by the training script.
save_ckpt_path = './sentiment-classification.pth'

# Prefer GPU when available, otherwise fall back to CPU.
ctx = "cuda" if torch.cuda.is_available() else "cpu"
device = torch.device(ctx)

# Rebuild the classifier and restore the trained weights.
# NOTE(review): koElectraForSequenceClassification comes from the
# star-import of the local KoELECTRA module — confirm whether it expects
# config / num_labels arguments at construction time.
model = koElectraForSequenceClassification()

# map_location lets a GPU-trained checkpoint load on a CPU-only machine.
checkpoint = torch.load(save_ckpt_path, map_location=device)
model.load_state_dict(checkpoint['model_state_dict'])
model.eval()  # inference mode: disables dropout / uses running BN stats

# BUG FIX: `ElectraTokenizer()` raises TypeError — a tokenizer cannot be
# constructed without a vocabulary. Load the pretrained KoELECTRA vocab
# via from_pretrained instead.
# TODO(review): confirm this checkpoint name matches the vocab the model
# was fine-tuned with.
tokenizer = ElectraTokenizer.from_pretrained(
    "monologg/koelectra-base-v3-discriminator"
)
|
|
|
|
|
|
|
|
import os

# Target repository name on the Hugging Face Hub.
MODEL_SAVE_REPO = 'Koelectra-five-sentiment-classification'

# SECURITY FIX: an auth token was previously hard-coded here, which leaks
# the credential to anyone with read access to this file. Read it from the
# environment instead — and revoke the old token in the Hub settings, since
# it must be considered compromised.
HUGGINGFACE_AUTH_TOKEN = os.environ.get('HUGGINGFACE_AUTH_TOKEN', '')

# Local staging path for hub uploads.
# NOTE(review): not referenced by the push_to_hub calls below — possibly
# used elsewhere or dead; verify before removing.
MODEL_SAVE_HUB_PATH = '.'
|
|
|
|
|
|
|
|
|
|
|
# Upload the fine-tuned model weights to the Hugging Face Hub.
# NOTE(review): the attribute is `kogpt2`, but this script builds an
# ELECTRA classifier (`koElectraForSequenceClassification`) — the name
# looks copy-pasted from a KoGPT2 script. Confirm the backbone submodule
# name (likely `electra`) against the class definition in KoELECTRA, or
# whether the wrapper itself supports `model.push_to_hub(...)`.
# NOTE(review): `use_auth_token` is deprecated in newer transformers
# releases in favour of `token` — verify against the pinned version.
model.kogpt2.push_to_hub(

    MODEL_SAVE_REPO,

    use_temp_dir=True,

    use_auth_token=HUGGINGFACE_AUTH_TOKEN

)

# Upload the tokenizer files to the same repo so the model is usable
# end-to-end via `from_pretrained(MODEL_SAVE_REPO)`.
tokenizer.push_to_hub(

    MODEL_SAVE_REPO,

    use_temp_dir=True,

    use_auth_token=HUGGINGFACE_AUTH_TOKEN

)