# unicosys-hypergraph/configuration_unicosys.py
"""Unicosys Hypergraph Knowledge Model — Configuration."""
from typing import Optional

from transformers import PretrainedConfig

class UnicosysConfig(PretrainedConfig):
"""HuggingFace-compatible config for the Unicosys knowledge model."""
model_type = "unicosys_hypergraph"
def __init__(
self,
# Graph structure
num_node_types: int = 8,
num_edge_types: int = 15,
num_subsystems: int = 6,
max_nodes: int = 250000,
# Embedding dimensions
node_embed_dim: int = 128,
text_embed_dim: int = 256,
hidden_dim: int = 256,
# Transformer text encoder
text_vocab_size: int = 32000,
text_max_length: int = 128,
text_num_heads: int = 4,
text_num_layers: int = 2,
# Graph attention
gat_num_heads: int = 4,
gat_num_layers: int = 2,
gat_dropout: float = 0.1,
# Training
negative_sample_ratio: int = 5,
margin: float = 1.0,
# Metadata
case_number: str = "2025-137857",
num_entities: int = 0,
num_evidence: int = 0,
num_cross_links: int = 0,
        node_type_vocab: Optional[dict] = None,
        edge_type_vocab: Optional[dict] = None,
        subsystem_vocab: Optional[dict] = None,
**kwargs,
):
super().__init__(**kwargs)
self.num_node_types = num_node_types
self.num_edge_types = num_edge_types
self.num_subsystems = num_subsystems
self.max_nodes = max_nodes
self.node_embed_dim = node_embed_dim
self.text_embed_dim = text_embed_dim
self.hidden_dim = hidden_dim
self.text_vocab_size = text_vocab_size
self.text_max_length = text_max_length
self.text_num_heads = text_num_heads
self.text_num_layers = text_num_layers
self.gat_num_heads = gat_num_heads
self.gat_num_layers = gat_num_layers
self.gat_dropout = gat_dropout
self.negative_sample_ratio = negative_sample_ratio
self.margin = margin
self.case_number = case_number
self.num_entities = num_entities
self.num_evidence = num_evidence
self.num_cross_links = num_cross_links
        # Vocab dicts map type/subsystem names to integer ids; default to empty
        # dicts rather than None so lookups and JSON round-trips stay predictable.
        self.node_type_vocab = node_type_vocab or {}
        self.edge_type_vocab = edge_type_vocab or {}
        self.subsystem_vocab = subsystem_vocab or {}
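
# A minimal usage sketch, not part of the checkpoint. Assumptions: this module
# is importable locally, "./unicosys-hypergraph" is a writable scratch path, and
# the vocab keys below are purely illustrative. Registering the config under its
# model_type lets AutoConfig resolve it without trust_remote_code when the code
# is available locally; loading from the Hub without local code instead relies
# on the auto_map entry that register_for_auto_class() writes into config.json.
if __name__ == "__main__":
    from transformers import AutoConfig

    AutoConfig.register("unicosys_hypergraph", UnicosysConfig)
    UnicosysConfig.register_for_auto_class()

    # Hypothetical vocab entries, shown only to demonstrate serialization.
    config = UnicosysConfig(node_type_vocab={"entity": 0, "evidence": 1})
    config.save_pretrained("./unicosys-hypergraph")

    # Round-trip: from_pretrained reads config.json and rebuilds this class.
    reloaded = AutoConfig.from_pretrained("./unicosys-hypergraph")
    assert isinstance(reloaded, UnicosysConfig)
    print(reloaded.model_type, reloaded.node_type_vocab)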