Upload folder using huggingface_hub

- config.json +53 -1
- configuration_comparer.py +3 -15
- modeling_comparer.py +12 -3
- tokenizer_config.json +1 -1
config.json
CHANGED
@@ -6,7 +6,59 @@
     "AutoConfig": "configuration_comparer.ComparerConfig",
     "AutoModel": "modeling_comparer.ComparerModel"
   },
-  "base_model": "microsoft/deberta-v3-base",
+  "base_config_dict": {
+    "_name_or_path": "AI-Response-Comparer/deberta-v3-base",
+    "architectures": [
+      "DebertaV2Model"
+    ],
+    "attention_probs_dropout_prob": 0.1,
+    "bos_token_id": null,
+    "chunk_size_feed_forward": 0,
+    "dtype": "float16",
+    "eos_token_id": null,
+    "hidden_act": "gelu",
+    "hidden_dropout_prob": 0.1,
+    "hidden_size": 768,
+    "id2label": {
+      "0": "LABEL_0",
+      "1": "LABEL_1"
+    },
+    "initializer_range": 0.02,
+    "intermediate_size": 3072,
+    "is_encoder_decoder": false,
+    "label2id": {
+      "LABEL_0": 0,
+      "LABEL_1": 1
+    },
+    "layer_norm_eps": 1e-07,
+    "legacy": true,
+    "max_position_embeddings": 512,
+    "max_relative_positions": -1,
+    "model_type": "deberta-v2",
+    "norm_rel_ebd": "layer_norm",
+    "num_attention_heads": 12,
+    "num_hidden_layers": 12,
+    "output_attentions": false,
+    "output_hidden_states": false,
+    "pad_token_id": 0,
+    "pooler_dropout": 0,
+    "pooler_hidden_act": "gelu",
+    "pooler_hidden_size": 768,
+    "pos_att_type": [
+      "p2c",
+      "c2p"
+    ],
+    "position_biased_input": false,
+    "position_buckets": 256,
+    "problem_type": null,
+    "relative_attention": true,
+    "return_dict": true,
+    "share_att_key": true,
+    "tie_word_embeddings": true,
+    "transformers_version": "5.2.0",
+    "type_vocab_size": 0,
+    "vocab_size": 128100
+  },
   "dtype": "float16",
   "hidden_size": 768,
   "model_type": "comparer",
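The embedded base_config_dict is the full serialized DeBERTa-v2 base config rather than a Hub pointer, so loading no longer depends on resolving microsoft/deberta-v3-base. A minimal sketch of how such a dict can be produced (assumed workflow, not shown in this commit):

from transformers import AutoConfig

# Serialize the base model's config to a plain, JSON-friendly dict
# that can be embedded verbatim in config.json.
base_config_dict = AutoConfig.from_pretrained("microsoft/deberta-v3-base").to_dict()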
configuration_comparer.py
CHANGED
@@ -1,21 +1,9 @@
-from transformers import PretrainedConfig
+from transformers import PretrainedConfig
 
 class ComparerConfig(PretrainedConfig):
     model_type = "comparer"
 
-    def __init__(
-        self,
-        base_model="microsoft/deberta-v3-base",
-        hidden_size=768,
-        base_model_config=None,
-        **kwargs
-    ):
+    def __init__(self, hidden_size=768, base_config_dict=None, **kwargs):
         super().__init__(**kwargs)
-
-        self.base_model = base_model
         self.hidden_size = hidden_size
-
-        if base_model_config is None:
-            base_model_config = AutoConfig.from_pretrained(base_model)
-
-        self.base_model_config = base_model_config
+        self.base_config_dict = base_config_dict or {}
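With the AutoConfig.from_pretrained call removed, ComparerConfig needs no network access at construction time; the base settings travel inside the config itself. A round-trip sketch, assuming the workflow above and a hypothetical local path "./comparer":

from transformers import AutoConfig
from configuration_comparer import ComparerConfig

base_config_dict = AutoConfig.from_pretrained("microsoft/deberta-v3-base").to_dict()
cfg = ComparerConfig(hidden_size=768, base_config_dict=base_config_dict)
cfg.save_pretrained("./comparer")                   # embeds base_config_dict in config.json
cfg = ComparerConfig.from_pretrained("./comparer")  # dict survives the round trip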
modeling_comparer.py
CHANGED
@@ -1,7 +1,7 @@
 import torch
 import torch.nn as nn
-from transformers import PreTrainedModel, AutoModel
-from .configuration_comparer import ComparerConfig
+from transformers import PreTrainedModel, AutoModel, AutoConfig
+from configuration_comparer import ComparerConfig
 
 class ComparerModel(PreTrainedModel):
     config_class = ComparerConfig
@@ -9,7 +9,16 @@ class ComparerModel(PreTrainedModel):
     def __init__(self, config):
         super().__init__(config)
 
-        self.main_layer = AutoModel.from_config(config.base_model_config)
+        if not config.base_config_dict:
+            raise ValueError("base_config_dict is missing from the configuration! Cannot build base model.")
+
+        base_dict = config.base_config_dict.copy()
+
+        model_type = base_dict.pop("model_type", "deberta-v2")
+
+        base_config = AutoConfig.for_model(model_type, **base_dict)
+
+        self.main_layer = AutoModel.from_config(base_config)
 
         self.linear_tokens = nn.Linear(config.hidden_size, 100)
         self.linear_tokens2 = nn.Linear(200, 50)
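AutoConfig.for_model rebuilds the DeBERTa-v2 config purely from the stored dict, and AutoModel.from_config creates a randomly initialized backbone; the actual weights are filled in from this repo's state dict when the model is loaded with from_pretrained. The reconstruction can be exercised standalone (a sketch; "./comparer" is the hypothetical path from above):

from transformers import AutoConfig, AutoModel
from configuration_comparer import ComparerConfig

cfg = ComparerConfig.from_pretrained("./comparer")
base_dict = dict(cfg.base_config_dict)
model_type = base_dict.pop("model_type", "deberta-v2")
base_config = AutoConfig.for_model(model_type, **base_dict)  # offline, no Hub lookup
backbone = AutoModel.from_config(base_config)                # randomly initialized DebertaV2Model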
tokenizer_config.json
CHANGED
@@ -10,7 +10,7 @@
     "[CLS]",
     "[SEP]"
   ],
-  "is_local": false,
+  "is_local": true,
   "mask_token": "[MASK]",
   "model_max_length": 1000000000000000019884624838656,
   "pad_token": "[PAD]",