from transformers import PretrainedConfig
class HistaugConfig(PretrainedConfig):
    model_type = "histaug"

    def __init__(
        self,
        input_dim: int = 512,
        depth: int = 6,
        num_heads: int = 8,
        mlp_ratio: float = 4.0,
        use_transform_pos_embeddings: bool = True,
        positional_encoding_type: str = "learnable",
        final_activation: str = "Identity",
        embedding_type: str = "linear",
        chunk_size: int = 16,
        transforms: dict = None,
        **kwargs,
    ):
        # your model hyperparameters
        self.input_dim = input_dim
        self.depth = depth
        self.num_heads = num_heads
        self.mlp_ratio = mlp_ratio
        self.use_transform_pos_embeddings = use_transform_pos_embeddings
        self.positional_encoding_type = positional_encoding_type
        self.final_activation = final_activation
        self.embedding_type = embedding_type
        self.chunk_size = chunk_size
        # fall back to an empty transform-parameter spec when none is given
        self.transforms = transforms or {"parameters": {}}
        # forward any remaining kwargs (e.g. id2label) to the PretrainedConfig base class
        super().__init__(**kwargs)
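

if __name__ == "__main__":
    # Minimal usage sketch of the standard transformers config round-trip;
    # "./histaug" is just an illustrative local directory, not a path from this repo.
    config = HistaugConfig(input_dim=512, depth=6)
    config.save_pretrained("./histaug")                     # writes config.json
    reloaded = HistaugConfig.from_pretrained("./histaug")   # restores the same hyperparameters
    print(reloaded)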