# -*- coding: utf-8 -*-
from typing import Optional
from transformers.configuration_utils import PretrainedConfig

class DynamicAlibiConfig(PretrainedConfig):
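    """
    Configuration for a Transformer language model with ALiBi positional
    biases, optional RoPE, and an optional dynamic-ALiBi mode controlled by
    the `alibi_*` parameters below. Serialization and special-token handling
    are inherited from `transformers.PretrainedConfig`.
    """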
    model_type = 'dynamic_alibi'
    keys_to_ignore_at_inference = ['past_key_values']
    
    def __init__(
        self,
        vocab_size: int = 32000,
        hidden_size: int = 2048,
        hidden_ratio: Optional[int] = 4,
        intermediate_size: Optional[int] = None,
        num_hidden_layers: int = 24,
        num_heads: int = 32,
        num_kv_heads: Optional[int] = None,
        hidden_act: str = "swish",
        window_size: Optional[int] = None,
        max_position_embeddings: int = 2048,
        initializer_range: float = 0.02,
        elementwise_affine: Optional[bool] = True,
        norm_eps: float = 1e-6,
        use_cache: bool = True,
        pad_token_id: Optional[int] = None,
        bos_token_id: int = 1,
        eos_token_id: int = 2,
        tie_word_embeddings: bool = False,
        attention_bias: bool = False,
        fuse_norm: bool = True,
        fuse_cross_entropy: bool = True,
        rope_base: float = 500000.0,
        use_rope: bool = False,
        use_alibi: bool = True,
        # 🆕 Dynamic ALiBi parameters
        use_dynamic_alibi: bool = False,
        alibi_num_epochs: int = 10,
        alibi_initial_slope: float = 1.0,
        alibi_decay_rate: float = 0.6,
        **kwargs,
    ):
        self.vocab_size = vocab_size
        self.hidden_size = hidden_size
        self.hidden_ratio = hidden_ratio
        self.intermediate_size = intermediate_size
        self.num_hidden_layers = num_hidden_layers
        self.num_heads = num_heads
        self.num_kv_heads = num_kv_heads
        self.hidden_act = hidden_act
        self.window_size = window_size
        self.max_position_embeddings = max_position_embeddings
        self.initializer_range = initializer_range
        self.elementwise_affine = elementwise_affine
        self.norm_eps = norm_eps
        self.use_cache = use_cache
        self.attention_bias = attention_bias
        self.fuse_cross_entropy = fuse_cross_entropy
        self.fuse_norm = fuse_norm
        self.rope_base = rope_base
        self.use_rope = use_rope
        self.use_alibi = use_alibi
        
        # 🆕 Dynamic ALiBi configuration
        self.use_dynamic_alibi = use_dynamic_alibi
        self.alibi_num_epochs = alibi_num_epochs
        self.alibi_initial_slope = alibi_initial_slope
        self.alibi_decay_rate = alibi_decay_rate
        
        super().__init__(
            pad_token_id=pad_token_id,
            bos_token_id=bos_token_id,
            eos_token_id=eos_token_id,
            tie_word_embeddings=tie_word_embeddings,
            **kwargs,
        )
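

# ---------------------------------------------------------------------------
# Minimal usage sketch. Instantiates the config with dynamic ALiBi enabled and
# illustrates a *hypothetical* per-epoch slope schedule inferred from the
# parameter names (initial slope scaled by `alibi_decay_rate` each epoch);
# the actual schedule is defined by the model code that consumes this config.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    config = DynamicAlibiConfig(
        use_dynamic_alibi=True,
        alibi_num_epochs=10,
        alibi_initial_slope=1.0,
        alibi_decay_rate=0.6,
    )
    print(config.model_type)                   # dynamic_alibi
    print(config.use_alibi, config.use_rope)   # True False

    # Hypothetical slope scale, assuming exponential decay over epochs:
    # scale(epoch) = alibi_initial_slope * alibi_decay_rate ** epoch
    for epoch in range(3):
        scale = config.alibi_initial_slope * config.alibi_decay_rate ** epoch
        print(f"epoch {epoch}: slope scale = {scale:.3f}")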