{
"_name_or_path": "./roberta_cleen_all_exist_new_0/model_thulje/epoch_1",
"architectures": [
"RobertaForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"bos_token_id": 0,
"classifier_dropout": null,
"crf_state_dict": {
"_constraint_mask": [
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
1.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
1.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
1.0
],
[
1.0,
1.0,
1.0,
1.0,
1.0,
0.0,
0.0,
0.0,
0.0,
0.0,
1.0,
0.0,
0.0
],
[
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0,
0.0
]
],
"end_transitions": [
2.240643262863159,
-0.2742827236652374,
0.6393970251083374,
0.005434449762105942,
-0.1513204723596573,
-1.2698227167129517,
0.19887013733386993,
0.28996115922927856,
0.17393897473812103,
-0.33823758363723755,
0.8555233478546143
],
"start_transitions": [
-0.4646048843860626,
-0.06933217495679855,
1.976535677909851,
-0.01753147505223751,
-0.7673749327659607,
-0.05074487626552582,
-2.2183399200439453,
0.82484370470047,
1.2874979972839355,
1.3923321962356567,
0.43642526865005493
],
"transitions": [
[
0.2501518726348877,
0.0009667582344263792,
0.30999264121055603,
0.39742279052734375,
0.31025633215904236,
0.09058089554309845,
-0.019848795607686043,
-0.22607830166816711,
-0.37826070189476013,
-0.3088703155517578,
-0.40294963121414185
],
[
0.1511581689119339,
0.06581507623195648,
0.04197699949145317,
0.13711129128932953,
-0.1339503526687622,
0.2818056643009186,
-0.5844879150390625,
0.2564857006072998,
-0.17954248189926147,
-0.022843224927783012,
0.3660793900489807
],
[
-0.22967128455638885,
0.24774327874183655,
0.2004176527261734,
0.1407482624053955,
0.5036953687667847,
-0.26214247941970825,
-0.06645846366882324,
-0.3897196352481842,
0.3363957405090332,
0.3760513961315155,
0.08766741305589676
],
[
0.08801333606243134,
0.559565544128418,
-0.1347968876361847,
-0.04922352358698845,
-0.27180802822113037,
0.12606672942638397,
0.3549225330352783,
-0.3206363320350647,
0.0662713572382927,
0.10210344195365906,
-0.4600335657596588
],
[
-0.25136229395866394,
0.11263541132211685,
0.30574309825897217,
-0.12007039040327072,
-0.14439710974693298,
0.02785634994506836,
-0.03327173739671707,
-0.4631956219673157,
0.16959922015666962,
-0.8500743508338928,
-0.3696420192718506
],
[
-0.14124469459056854,
0.2320529967546463,
0.3118607699871063,
0.3109912872314453,
0.0008499743416905403,
0.30968427658081055,
-0.05566711351275444,
0.4032119810581207,
-0.022859327495098114,
0.15043704211711884,
0.5076503157615662
],
[
0.12845280766487122,
0.28300824761390686,
0.13735704123973846,
0.007540121674537659,
-0.2670679986476898,
0.312574565410614,
-0.41549786925315857,
-0.5133036375045776,
0.1582707017660141,
0.19924618303775787,
0.030154455453157425
],
[
-0.3130203187465668,
0.25306785106658936,
-0.27456873655319214,
-0.43190330266952515,
0.2754882574081421,
0.08095683157444,
-0.1389872282743454,
-0.17365975677967072,
-0.07709870487451553,
-0.42576122283935547,
0.08613783866167068
],
[
0.3981761336326599,
0.14935140311717987,
0.23046627640724182,
-0.2853967547416687,
0.07103794068098068,
-0.11574462801218033,
-0.07706587016582489,
-0.1586235910654068,
0.4237719178199768,
-0.10810907930135727,
-0.6446168422698975
],
[
0.0846155658364296,
0.03686073422431946,
-0.5546830296516418,
-0.6503274440765381,
0.12535876035690308,
-0.3845370411872864,
0.021639790385961533,
-0.5573288202285767,
0.2939161956310272,
-0.37080487608909607,
-0.05652831494808197
],
[
0.11467353254556656,
0.20622697472572327,
-0.14056970179080963,
0.4235305190086365,
0.33264821767807007,
0.198853999376297,
0.09324410557746887,
0.15920676290988922,
-0.19516964256763458,
0.5432201027870178,
-0.014160040766000748
]
]
},
"eos_token_id": 2,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "B-Dislikes",
"1": "B-Family",
"2": "B-Job",
"3": "B-Likes",
"4": "B-Pet",
"5": "I-Dislikes",
"6": "I-Family",
"7": "I-Job",
"8": "I-Likes",
"9": "I-Pet",
"10": "O"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"B-Dislikes": 0,
"B-Family": 1,
"B-Job": 2,
"B-Likes": 3,
"B-Pet": 4,
"I-Dislikes": 5,
"I-Family": 6,
"I-Job": 7,
"I-Likes": 8,
"I-Pet": 9,
"O": 10
},
"layer_norm_eps": 1e-05,
"max_position_embeddings": 514,
"model_type": "roberta",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 1,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.20.1",
"type_vocab_size": 1,
"use_cache": true,
"vocab_size": 50265
}