{
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "eos_token_id": 2,
  "gradient_checkpointing": false,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "\uac10\uc815",
    "1": "\ub0b4\uc6d0\uc774\uc720",
    "2": "\ubaa8\ud638\ud568",
    "3": "\ubc30\uacbd",
    "4": "\ubd80\uac00\uc124\uba85",
    "5": "\uc0c1\ud0dc",
    "6": "\uc6d0\uc778",
    "7": "\uc77c\ubc18\ub300\ud654",
    "8": "\uc790\uac00\uce58\ub8cc",
    "9": "\uc99d\uc0c1",
    "10": "\uce58\ub8cc\uc774\ub825",
    "11": "\ud604\uc7ac\uc0c1\ud0dc"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "\uac10\uc815": 0,
    "\ub0b4\uc6d0\uc774\uc720": 1,
    "\ubaa8\ud638\ud568": 2,
    "\ubc30\uacbd": 3,
    "\ubd80\uac00\uc124\uba85": 4,
    "\uc0c1\ud0dc": 5,
    "\uc6d0\uc778": 6,
    "\uc77c\ubc18\ub300\ud654": 7,
    "\uc790\uac00\uce58\ub8cc": 8,
    "\uc99d\uc0c1": 9,
    "\uce58\ub8cc\uc774\ub825": 10,
    "\ud604\uc7ac\uc0c1\ud0dc": 11
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "torch_dtype": "float32",
  "total_flos": 8.217624121867606e+19,
  "transformers_version": "4.12.5",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 16000
}