{
  "vocab_size": 26,
  "embed_dim": 128,
  "hidden_dim": 256,
  "lstm_layers": 2,
  "bidirectional": true,
  "seq_embed_dim": 256,
  "combined_dim": 1024,
  "predictor_hidden": [512, 256, 128, 64],
  "dropout": 0.1,
  "max_length": 600,
  "learning_rate": 0.001,
  "batch_size": 64,
  "attention_pooling": true
}