{
  "batch_size": 64,
  "encode": "one-hot",
  "epochs": 100,
  "early_stop": 20,
  "lr": 0.001,
  "convolution_layers": {
    "n_layers": 4,
    "filters": [1024, 512, 256, 128],
    "kernel_sizes": [8, 16, 32, 64]
  },
  "transformer_layers": {
    "n_layers": 0,
    "attn_key_dim": [16, 16, 16],
    "attn_heads": [2048, 2048, 2048]
  },
  "n_dense_layer": 1,
  "dense_neurons1": 64,
  "dropout_conv": "yes",
  "dropout_prob": 0.4,
  "pad": "same"
}