import keras
from keras.layers import Dense, BatchNormalization
from keras import regularizers
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, EarlyStopping
import pandas as pd
import numpy as np
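# NOTE (assumption): x_train, y_train, x_val, y_val are expected to exist as
# NumPy arrays before the model is built. A minimal sketch of one way to
# prepare them, using a hypothetical CSV file with a 'label' column:
# df = pd.read_csv('dataset.csv')
# x = df.drop(columns=['label']).to_numpy(dtype='float32')
# y = df['label'].to_numpy(dtype='float32')
# split = int(0.8 * len(x))
# x_train, x_val = x[:split], x[split:]
# y_train, y_val = y[:split], y[split:]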
# Model parameters:
activation = 'relu'             # hidden-layer activation
final_activation = 'sigmoid'    # output activation for binary classification
loss = 'binary_crossentropy'
batchsize = 200
epochs = 100
lr = 0.00005                    # Adam learning rate
# Model architecture:
model = keras.Sequential()
model.add(Dense(units=300, input_dim=x_train.shape[1], activation=activation,
                kernel_regularizer=regularizers.L1(0.001)))
model.add(BatchNormalization())
model.add(Dense(units=102, activation=activation, kernel_regularizer=regularizers.L1(0.001)))
model.add(BatchNormalization())
model.add(Dense(units=12, activation=activation, kernel_regularizer=regularizers.L1(0.001)))
model.add(BatchNormalization())
model.add(Dense(units=6, activation=activation, kernel_regularizer=regularizers.L1(0.001)))
model.add(BatchNormalization())
model.add(Dense(units=1, activation=final_activation))
model.compile(optimizer=Adam(learning_rate=lr),
              loss=loss,
              metrics=['accuracy', 'AUC'])
model.summary()
# Model checkpoints:
saveModel = ModelCheckpoint('best_model.hdf5',
                            save_best_only=True,
                            monitor='val_loss',
                            mode='min')
# Model training:
history = model.fit(
    x_train,
    y_train,
    batch_size=batchsize,
    callbacks=[EarlyStopping(verbose=True, patience=10, monitor='val_loss'), saveModel],
    epochs=epochs,
    validation_data=(x_val, y_val))
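# After training, the best checkpointed weights can be restored for evaluation,
# e.g. on a held-out test set (sketch; x_test and y_test are assumed to exist):
# from keras.models import load_model
# best_model = load_model('best_model.hdf5')
# best_model.evaluate(x_test, y_test)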