activation=tf.nn.relu,
kernel_initializer="uniform",
input_shape=(input_dim,)))
model.add(tf.keras.layers.Dense(75, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(50, activation=tf.nn.relu))
model.add(tf.keras.layers.Dense(25, activation=tf.nn.relu))
# The single output node with a sigmoid activation makes the final layer
# equivalent to a logistic regression on the learned features.