# rocky-embed / configuration_rocky.py
# Custom Hugging Face configuration for the Rocky embedding model.
from transformers import PretrainedConfig
class RockyConfig(PretrainedConfig):
    """Configuration for the Rocky embedding model.

    Holds the transformer hyperparameters (vocabulary size, hidden
    width, depth, attention heads, feed-forward width, projection
    width, and maximum sequence length) and registers the ``auto_map``
    entries so ``AutoConfig``/``AutoModel`` can resolve the custom
    classes when the model is loaded with ``trust_remote_code``.
    """

    model_type = "rocky"

    def __init__(
        self,
        vocab_size=30522,
        dim=768,
        depth=12,
        heads=12,
        ffn_dim=2048,
        proj_dim=1024,
        max_seq_len=1024,
        **kwargs
    ):
        # Record all model hyperparameters on the instance before
        # delegating the remaining keyword arguments to the base class,
        # as is conventional for PretrainedConfig subclasses.
        hyperparams = {
            "vocab_size": vocab_size,
            "dim": dim,
            "depth": depth,
            "heads": heads,
            "ffn_dim": ffn_dim,
            "proj_dim": proj_dim,
            "max_seq_len": max_seq_len,
        }
        for attr_name, attr_value in hyperparams.items():
            setattr(self, attr_name, attr_value)

        super().__init__(**kwargs)

        # Map the Auto* factories to the custom classes shipped in this
        # repository so remote-code loading finds them.
        self.auto_map = {
            "AutoConfig": "configuration_rocky.RockyConfig",
            "AutoModel": "modeling_rocky.RockyForEmbeddings",
        }