from transformers import PretrainedConfig

class EmCoderConfig(PretrainedConfig):
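    """Configuration for EmCoder models.

    Stores the encoder hyperparameters (vocab_size, max_seq_len, d_model,
    n_head, n_layers, d_ffn, dropout) together with the label mappings
    (id2label/label2id) handled by `PretrainedConfig`.
    """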
    model_type = "emcoder"

    def __init__(
        self,
        vocab_size=50265,
        max_seq_len=512,
        d_model=768,
        n_head=12,
        n_layers=6,
        d_ffn=3072,
        dropout=0.15,
        num_labels=28,
        base_encoder_path="",
        id2label=None,
        label2id=None,
        **kwargs
    ):
        # Convert id2label keys to int: JSON serialization stores dict keys
        # as strings, so a config reloaded from config.json would otherwise
        # carry {"0": ...} instead of {0: ...}.
        if id2label is not None:
            id2label = {int(k): v for k, v in id2label.items()}
            
        super().__init__(
            id2label=id2label,
            label2id=label2id,
            **kwargs
        )
        self.vocab_size = vocab_size                # tokenizer vocabulary size
        self.max_seq_len = max_seq_len              # maximum input length in tokens
        self.d_model = d_model                      # hidden/embedding dimension
        self.n_head = n_head                        # attention heads per layer
        self.n_layers = n_layers                    # number of encoder layers
        self.d_ffn = d_ffn                          # feed-forward inner dimension
        self.dropout = dropout                      # dropout probability
        self.base_encoder_path = base_encoder_path  # checkpoint of the base encoder, if any

        # Apply the explicit num_labels only when no id2label mapping was
        # given: PretrainedConfig derives num_labels from len(id2label), and
        # assigning a mismatched value here would silently replace the mapping
        # with generic LABEL_0..LABEL_N names (e.g. on reload from JSON, where
        # num_labels falls back to the signature default).
        if id2label is None:
            self.num_labels = num_labels
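
# --- Usage sketch (illustrative; not part of the original class) ---
# Demonstrates the save/load round trip that motivates the int-key
# conversion above: config.json stores dict keys as strings, and the
# reloaded config should still expose an integer-keyed id2label. The
# label names and the output directory are placeholders.
if __name__ == "__main__":
    labels = ["anger", "joy", "neutral"]  # hypothetical label subset
    config = EmCoderConfig(
        num_labels=len(labels),
        id2label=dict(enumerate(labels)),
        label2id={name: i for i, name in enumerate(labels)},
    )
    config.save_pretrained("emcoder-config")             # writes config.json
    reloaded = EmCoderConfig.from_pretrained("emcoder-config")
    assert reloaded.id2label[0] == "anger"               # int keys preserved
    assert reloaded.num_labels == len(labels)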