Yuchan
committed on
Update AlphaS2S.py
Browse files- AlphaS2S.py +4 -4
AlphaS2S.py
CHANGED
|
@@ -201,10 +201,10 @@ class Transformer(tf.keras.Model):
|
|
| 201 |
super().__init__()
|
| 202 |
self.max_len = max_len
|
| 203 |
self.d_model = d_model
|
| 204 |
-
self.enc_embedding = layers.Embedding(input_vocab_size,
|
| 205 |
-
self.enc_pos_embedding = layers.Embedding(max_len,
|
| 206 |
-
self.dec_embedding = layers.Embedding(target_vocab_size,
|
| 207 |
-
self.dec_pos_embedding = layers.Embedding(max_len,
|
| 208 |
self.enc_layers = [EncoderBlock(d_model, num_heads, dff, dropout) for _ in range(num_layers)]
|
| 209 |
self.dec_layers = [DecoderBlock(d_model, num_heads, dff, dropout) for _ in range(num_layers)]
|
| 210 |
self.final_layer = layers.Dense(target_vocab_size, use_bias=False)
|
|
|
|
| 201 |
super().__init__()
|
| 202 |
self.max_len = max_len
|
| 203 |
self.d_model = d_model
|
| 204 |
+
self.enc_embedding = layers.Embedding(input_vocab_size, 256)
|
| 205 |
+
self.enc_pos_embedding = layers.Embedding(max_len, 256)
|
| 206 |
+
self.dec_embedding = layers.Embedding(target_vocab_size, 256)
|
| 207 |
+
self.dec_pos_embedding = layers.Embedding(max_len, 256)
|
| 208 |
self.enc_layers = [EncoderBlock(d_model, num_heads, dff, dropout) for _ in range(num_layers)]
|
| 209 |
self.dec_layers = [DecoderBlock(d_model, num_heads, dff, dropout) for _ in range(num_layers)]
|
| 210 |
self.final_layer = layers.Dense(target_vocab_size, use_bias=False)
|