"""Publish a fine-tuned KoELECTRA sentiment classifier to the Hugging Face Hub.

Loads model weights from a local checkpoint, restores the tokenizer, and
pushes both to a Hub repository. Requires the ``HUGGINGFACE_AUTH_TOKEN``
environment variable (a ``hf_...`` access token from
https://huggingface.co/settings/token).
"""
import os

import torch
from transformers import (
    ElectraConfig,
    ElectraModel,
    ElectraTokenizer,
)

from KoELECTRA import *

# Local checkpoint produced by the training script.
save_ckpt_path = './sentiment-classification.pth'

ctx = "cuda" if torch.cuda.is_available() else "cpu"
device = torch.device(ctx)

# NOTE(review): project-local class from KoELECTRA — presumably builds its own
# ElectraConfig internally since no arguments are passed; confirm.
model = koElectraForSequenceClassification()
checkpoint = torch.load(save_ckpt_path, map_location=device)
model.load_state_dict(checkpoint['model_state_dict'])
model.eval()

# BUG FIX: `ElectraTokenizer()` cannot be constructed without a vocab file and
# raises at runtime. Load the pretrained KoELECTRA tokenizer instead.
# TODO(review): confirm this is the same tokenizer checkpoint used at training time.
tokenizer = ElectraTokenizer.from_pretrained('monologg/koelectra-base-v3-discriminator')

## repo
MODEL_SAVE_REPO = 'Koelectra-five-sentiment-classification'  # ex) 'my-bert-fine-tuned'
# SECURITY FIX: the original hard-coded a live `hf_...` token in source, which
# leaks the credential to anyone with repo access. Read it from the environment
# instead (raises KeyError with a clear message if unset).
HUGGINGFACE_AUTH_TOKEN = os.environ['HUGGINGFACE_AUTH_TOKEN']  # https://huggingface.co/settings/token
MODEL_SAVE_HUB_PATH = '.'

## Push to huggingface-hub
# NOTE(review): the `.kogpt2` attribute on an ELECTRA classifier looks like a
# leftover from a KoGPT-2 variant of this script — verify the submodule name
# on koElectraForSequenceClassification (likely `.electra` or the model itself).
model.kogpt2.push_to_hub(
    MODEL_SAVE_REPO,
    use_temp_dir=True,
    use_auth_token=HUGGINGFACE_AUTH_TOKEN,
)
tokenizer.push_to_hub(
    MODEL_SAVE_REPO,
    use_temp_dir=True,
    use_auth_token=HUGGINGFACE_AUTH_TOKEN,
)