dev-das committed
Commit b61c20e · verified · 1 Parent(s): 589a4c0

Update configuration_my_gpt.py

Files changed (1): configuration_my_gpt.py (+21 −10)
configuration_my_gpt.py CHANGED
@@ -6,23 +6,34 @@ class MyGPTConfig(PretrainedConfig):
     def __init__(
         self,
         vocab_size=50257,
-        context_length=256,
-        emb_dim=256,
-        n_heads=4,
-        n_layers=12,
+        hidden_size=256,
+        num_attention_heads=4,
+        num_hidden_layers=12,
+        max_position_embeddings=256,
         drop_rate=0.1,
         qkv_bias=False,
+        bos_token_id=50256,
+        eos_token_id=50256,
+        pad_token_id=50256,
         **kwargs
     ):
-        super().__init__(**kwargs)
+        super().__init__(
+            bos_token_id=bos_token_id,
+            eos_token_id=eos_token_id,
+            pad_token_id=pad_token_id,
+            **kwargs
+        )
 
+        # 🔥 Standard HF field names
         self.vocab_size = vocab_size
-        self.context_length = context_length
-        self.emb_dim = emb_dim
-        self.n_heads = n_heads
-        self.n_layers = n_layers
+        self.hidden_size = hidden_size
+        self.num_attention_heads = num_attention_heads
+        self.num_hidden_layers = num_hidden_layers
+        self.max_position_embeddings = max_position_embeddings
+
+        # Custom fields
         self.drop_rate = drop_rate
         self.qkv_bias = qkv_bias
+
         self.is_decoder = True
         self.is_encoder_decoder = False
-
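
For context, a minimal sketch of how the updated config behaves, assuming configuration_my_gpt.py is importable as shown in the diff; the "my_gpt" save directory is a hypothetical local path, not part of this commit:

from configuration_my_gpt import MyGPTConfig

# Defaults come straight from the diff above.
config = MyGPTConfig()

# The standard HF field names are now first-class attributes, so
# generic transformers tooling can discover the model shape from them.
print(config.hidden_size)              # 256
print(config.num_attention_heads)      # 4
print(config.num_hidden_layers)        # 12
print(config.max_position_embeddings)  # 256

# The token ids are forwarded to PretrainedConfig.__init__, so
# generation utilities can read them without custom glue.
print(config.bos_token_id, config.eos_token_id, config.pad_token_id)
# 50256 50256 50256

# Round-trips through the usual serialization path
# ("my_gpt" is an illustrative directory name).
config.save_pretrained("my_gpt")
reloaded = MyGPTConfig.from_pretrained("my_gpt")
assert reloaded.drop_rate == 0.1 and reloaded.qkv_bias is False

Handling bos/eos/pad in super().__init__ rather than as plain instance attributes is what lets base-class helpers and generate() resolve them consistently with every other transformers config.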