# xddd-processed / modeling_custom.py
# (Hugging Face repo header: jnjj — "Add modeling_custom.py", commit d6f8ec4 verified)
from transformers.models.llama.modeling_llama import LlamaForCausalLM, LlamaConfig
import torch
from transformers.utils import logging
# Module-level logger, per the transformers convention of logging.get_logger(__name__).
logger = logging.get_logger(__name__)
class CustomLlamaForCausalLM(LlamaForCausalLM):
    """Llama causal-LM subclass that logs which "conceptual feature" hints are
    present on the config at construction time.

    Model behavior is otherwise identical to ``LlamaForCausalLM``; the only
    addition is informational logging driven by an optional
    ``config.conceptual_features`` dict.
    """

    def __init__(self, config: LlamaConfig):
        super().__init__(config)
        logger.info("CustomLlamaForCausalLM initialized with conceptual features documented in config.")
        # Fetch the optional attribute once. Tolerate it being absent or set
        # to a non-dict value (e.g. None), which would previously have raised
        # AttributeError on .get().
        features = getattr(config, "conceptual_features", None)
        if not isinstance(features, dict):
            features = {}
        if features.get("grouping_logic"):
            # Lazy %-style args: formatting is skipped when INFO is disabled.
            logger.info(
                "Conceptual grouping logic enabled with size %s",
                features.get("group_size", "N/A"),
            )
        if features.get("long_context_optimization"):
            logger.info("Conceptual long context optimization hint detected.")