from transformers import PretrainedConfig


class HareConfig(PretrainedConfig):
    """Configuration for the Hare model, extending HF's PretrainedConfig."""

    model_type = "hare"

    def __init__(
        self,
        hidden_size=768,
        num_attention_heads=12,
        num_hidden_layers=22,
        intermediate_size=1152,
        hidden_activation="gelu",
        max_position_embeddings=8192,
        vocab_size=50368,
        pad_token_id=50283,
        bos_token_id=50281,
        eos_token_id=50282,
        cls_token_id=50281,
        sep_token_id=50282,
        global_attn_every_n_layers=3,
        local_attention=128,
        replaced_layers=None,
        surgery_variant="conservative",
        **kwargs,
    ):
        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            **kwargs,
        )
        self.hidden_size = hidden_size
        self.num_attention_heads = num_attention_heads
        self.num_hidden_layers = num_hidden_layers
        self.intermediate_size = intermediate_size
        self.hidden_activation = hidden_activation
        self.max_position_embeddings = max_position_embeddings
        self.vocab_size = vocab_size
        self.cls_token_id = cls_token_id
        self.sep_token_id = sep_token_id
        # Attention layout: every n-th layer uses global attention; the
        # remaining layers attend within a local window of
        # `local_attention` tokens.
        self.global_attn_every_n_layers = global_attn_every_n_layers
        self.local_attention = local_attention
        # Model-surgery settings: which layers were replaced, and which
        # surgery variant was applied (defaults to "conservative").
        self.replaced_layers = replaced_layers
        self.surgery_variant = surgery_variant
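

# --- Usage sketch (illustrative; not part of the original file) ---
# A minimal round-trip through save_pretrained/from_pretrained, assuming
# a hypothetical local output directory "hare-config".
if __name__ == "__main__":
    config = HareConfig()
    config.save_pretrained("hare-config")  # writes hare-config/config.json
    reloaded = HareConfig.from_pretrained("hare-config")
    assert reloaded.model_type == "hare"
    assert reloaded.global_attn_every_n_layers == 3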