{
"_name_or_path": "./roberta_cleen_all_exist_new/model_xchxky/epoch_1",
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"crf_state_dict": {
"_constraint_mask": [
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
],
"end_transitions": [
0.21687424182891846,
1.5857642889022827,
0.758258581161499,
0.37032732367515564,
-1.329129695892334,
2.4964492321014404,
0.8586344718933105,
-1.4632924795150757,
1.2904568910598755,
0.8706234693527222,
-0.1880217045545578
],
"start_transitions": [
0.9229826927185059,
0.28560739755630493,
0.14332018792629242,
-1.4447805881500244,
1.4846503734588623,
-1.5904203653335571,
-0.7379185557365417,
-1.8030710220336914,
-1.0581018924713135,
0.6242042779922485,
0.5164673328399658
],
"transitions": [
[
0.3673975467681885,
0.18701963126659393,
-0.43712371587753296,
-0.1407420039176941,
-0.10534486174583435,
0.24474096298217773,
0.7354440093040466,
0.34748244285583496,
0.17631690204143524,
-0.23136325180530548,
-0.025033308193087578
],
[
-0.007745583541691303,
-0.09301131218671799,
-0.31596821546554565,
-0.26407039165496826,
-0.06394483149051666,
0.13408444821834564,
-0.4474257230758667,
-0.2721427083015442,
-0.12181078642606735,
-0.12198833376169205,
0.5588247179985046
],
[
-0.19631847739219666,
0.008313635364174843,
0.15113116800785065,
0.04224454239010811,
-0.236379936337471,
-0.22627614438533783,
-0.04470784589648247,
-0.06738904118537903,
-0.2742701470851898,
0.5238128900527954,
-0.19441142678260803
],
[
0.25509530305862427,
0.0956823006272316,
0.32445773482322693,
-0.3041941523551941,
-0.24632413685321808,
-0.06011788919568062,
0.05390192195773125,
-0.45170074701309204,
0.14786915481090546,
-0.31270110607147217,
0.12534676492214203
],
[
-0.33268553018569946,
-0.27061566710472107,
0.4702346622943878,
-0.374167263507843,
-0.0938689336180687,
-0.4018147885799408,
0.24715960025787354,
0.09559278190135956,
0.04130111634731293,
0.15933264791965485,
0.5198276042938232
],
[
-0.08424585312604904,
0.41698721051216125,
0.01722242869436741,
-0.317511647939682,
0.15484148263931274,
-0.02030128985643387,
0.1650458425283432,
0.2202066034078598,
0.24747343361377716,
0.2225068062543869,
-0.07041354477405548
],
[
0.17406269907951355,
-0.49445095658302307,
0.1372549682855606,
0.5307994484901428,
0.2616923451423645,
0.08446840941905975,
-0.03838552534580231,
-0.39147865772247314,
0.10781766474246979,
0.5030436515808105,
-0.2691557705402374
],
[
0.3631405830383301,
0.5819560885429382,
0.43440482020378113,
-0.08992395550012589,
0.12068815529346466,
-0.18815878033638,
0.31527331471443176,
0.03760548308491707,
0.6801357865333557,
0.29773885011672974,
-0.04291761294007301
],
[
0.17793186008930206,
0.21460671722888947,
0.17269837856292725,
0.4337525963783264,
-0.3202753961086273,
0.2475089281797409,
-0.24713143706321716,
-0.2365410327911377,
-0.2708987891674042,
-0.04050455614924431,
-0.12179888039827347
],
[
-0.09372731298208237,
-0.03050650842487812,
-0.35732266306877136,
-0.03869544342160225,
0.15174028277397156,
-0.2674034535884857,
-0.07940886914730072,
-0.4257971942424774,
0.2486799657344818,
-0.12294259667396545,
0.2491007000207901
],
[
0.1782885640859604,
-0.471216082572937,
-0.08392570912837982,
-0.47434109449386597,
0.6260464787483215,
0.23181001842021942,
-0.1574145406484604,
-0.047237854450941086,
-0.1693454533815384,
-0.4191673696041107,
0.21925123035907745
]
]
},
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "B-Dislikes",
"1": "B-Family",
"2": "B-Job",
"3": "B-Likes",
"4": "B-Pet",
"5": "I-Dislikes",
"6": "I-Family",
"7": "I-Job",
"8": "I-Likes",
"9": "I-Pet",
"10": "O"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"B-Dislikes": 0,
"B-Family": 1,
"B-Job": 2,
"B-Likes": 3,
"B-Pet": 4,
"I-Dislikes": 5,
"I-Family": 6,
"I-Job": 7,
"I-Likes": 8,
"I-Pet": 9,
"O": 10
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.20.1",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}