import keras
from keras.layers import Dense, BatchNormalization
from keras import regularizers
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, EarlyStopping
import pandas as pd
import numpy as np

# ---------------------------------------------------------------------------
# Binary-classification MLP over 153 input features, trained with
# class weighting, early stopping, and best-model checkpointing.
#
# NOTE(review): x_train / y_train / x_val / y_val are referenced in fit()
# below but are never defined in this chunk — they must be created earlier
# in the file, otherwise this script raises NameError. Confirm against the
# full file.
# NOTE(review): BatchNormalization, regularizers, pandas and numpy are
# imported but unused here; left in place since another part of the file
# may rely on them.
# ---------------------------------------------------------------------------

# Model hyper-parameters:
activation = 'relu'
final_activation = 'sigmoid'        # single-unit sigmoid -> binary probability
loss = 'binary_crossentropy'
batchsize = 200
epochs = 100
lr = 0.000003                       # very small learning rate, paired with many epochs
class_weight = {0: 0.10, 1: 1.0}    # down-weights class 0 (presumably the majority class — verify)

# Model architecture: funnel MLP 153 -> 153 -> 64 -> 64 -> 32 -> 32 -> 16 -> 16 -> 1
model = keras.Sequential()
model.add(Dense(units=153, input_shape=(153,), activation=activation))
model.add(Dense(units=153, activation=activation))
model.add(Dense(units=64, activation=activation))
model.add(Dense(units=64, activation=activation))
model.add(Dense(units=32, activation=activation))
model.add(Dense(units=32, activation=activation))
model.add(Dense(units=16, activation=activation))
model.add(Dense(units=16, activation=activation))
model.add(Dense(units=1, activation=final_activation))
model.compile(optimizer=Adam(learning_rate=lr),
              loss=loss,
              metrics=['accuracy', 'AUC'])
model.summary()

# Model checkpoints: keep only the weights with the lowest validation loss.
# NOTE(review): Keras 3 requires a '.keras' or '.weights.h5' filepath for
# ModelCheckpoint; '.hdf5' only works with older tf.keras — confirm the
# installed Keras version.
saveModel = ModelCheckpoint('best_model.hdf5',
                            save_best_only=True,
                            monitor='val_loss',
                            mode='min')

# Model training: stop after 25 epochs without val_loss improvement and
# restore nothing automatically — the best model is recovered from the
# checkpoint file written above.
model.fit(x_train,
          y_train,
          batch_size=batchsize,
          callbacks=[EarlyStopping(verbose=True, patience=25, monitor='val_loss'),
                     saveModel],
          epochs=epochs,
          validation_data=(x_val, y_val),
          shuffle=True,
          class_weight=class_weight)