{
  "model_type": "kobert-stance-classifier",
  "base_model": "monologg/kobert",
  "tokenizer": "monologg/kobert",
  "num_labels": 3,
  "label2id": {
    "support": 0,
    "neutral": 1,
    "oppose": 2
  },
  "id2label": {
    "0": "support",
    "1": "neutral",
    "2": "oppose"
  },
  "label_names_kr": [
    "옹호",
    "중립",
    "비판"
  ],
  "max_length": 512,
  "dropout": 0.3,
  "hidden_size": 768,
  "test_accuracy": 0.7393058918482648,
  "test_f1": 0.7394790179274192,
  "training_config": {
    "model_name": "monologg/kobert",
    "max_length": 512,
    "dropout": 0.3,
    "batch_size": 64,
    "epochs": 10,
    "learning_rate": 2e-05,
    "weight_decay": 0.01,
    "max_grad_norm": 1.0,
    "use_focal_loss": true,
    "focal_gamma": 2.0,
    "early_stop_patience": 3,
    "early_stop_min_delta": 0.001,
    "val_size": 0.1,
    "test_size": 0.1,
    "wandb_project": "kobert-stance-16k",
    "wandb_run_name": null,
    "save_checkpoint_every": 1,
    "checkpoint_dir": "/content/checkpoints"
  }
}