See also this cool Neural Network Evolution Playground with Backprop NEAT.

from functools import reduce
from operator import add
import random

from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Conv2D, Activation, MaxPooling2D, Flatten
from keras.utils.np_utils import to_categorical
from keras.optimizers import Adam
from keras.callbacks import EarlyStopping, ModelCheckpoint

n_classes = 10
batch_size = 128
n_epochs = 1000
input_shape = (28, 28, 1)

(X_train, y_train), (X_val, y_val) = mnist.load_data()
X_train = X_train.reshape(60000, 28, 28, 1)
X_val = X_val.reshape(10000, 28, 28, 1)
X_train = X_train.astype('float32')
X_val = X_val.astype('float32')
X_train /= 255.
X_val /= 255.

y_train = to_categorical(y_train, n_classes)
y_val = to_categorical(y_val, n_classes)

Downloading data from https://s3.amazonaws.com/img-datasets/mnist.npz
11493376/11490434 [==============================] - 6s 1us/step
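
As a quick sanity check (not part of the original recipe), the preprocessed arrays should now have the shapes below, with one-hot label vectors of length 10:

# Hypothetical sanity check on the preprocessed data
print(X_train.shape, y_train.shape)  # expect (60000, 28, 28, 1) (60000, 10)
print(X_val.shape, y_val.shape)      # expect (10000, 28, 28, 1) (10000, 10)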

def create_model(parameters, n_classes, input_shape):
    print(parameters)
    dropout = parameters['dropout']
    learning_rate = parameters['learning_rate']
    hidden_inputs = parameters['hidden_inputs']

    model = Sequential()
    # First convolutional block
    model.add(Conv2D(32, (3, 3), padding='same', input_shape=input_shape))
    model.add(Activation('relu'))
    model.add(Conv2D(32, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(dropout))

    # Second convolutional block
    model.add(Conv2D(64, (3, 3), padding='same'))
    model.add(Activation('relu'))
    model.add(Conv2D(64, (3, 3)))
    model.add(Activation('relu'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Dropout(dropout))

    # Fully connected classifier head
    model.add(Flatten())
    model.add(Dense(hidden_inputs))
    model.add(Activation('relu'))
    model.add(Dropout(dropout))
    model.add(Dense(n_classes))
    model.add(Activation('softmax'))
    opt = Adam(learning_rate)

    model.compile(loss='categorical_crossentropy',
                  optimizer=opt, metrics=['accuracy'])

    return model
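
Before handing create_model to the genetic algorithm, it can be useful to smoke-test it on a single hand-picked parameter combination; the values below are illustrative, not tuned:

# Illustrative smoke test: build one model and inspect its layers
sample_parameters = {'dropout': 0.5, 'hidden_inputs': 512, 'learning_rate': 0.001}
test_model = create_model(sample_parameters, n_classes, input_shape)
test_model.summary()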

class Network():
    def __init__(self, parameter_space=None):
        self.accuracy = 0.
        self.parameter_space = parameter_space
        self.network_parameters = {}

    def set_random_parameters(self):
        # Sample one value per hyperparameter from the search space
        for parameter in self.parameter_space:
            self.network_parameters[parameter] = random.choice(self.parameter_space[parameter])

    def create_network(self, network):
        self.network_parameters = network

    def train(self):
        # Stop training when validation accuracy hasn't improved for 5 epochs
        callbacks = [EarlyStopping(monitor='val_acc', patience=5)]
        model = create_model(self.network_parameters, n_classes, input_shape)
        history = model.fit(X_train, y_train,
                            batch_size=batch_size,
                            epochs=n_epochs,
                            verbose=1,
                            validation_data=(X_val, y_val),
                            callbacks=callbacks)
        self.accuracy = max(history.history['val_acc'])
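
A Network can be exercised on its own before any training; the following sketch (with a deliberately tiny, illustrative search space) just draws one random candidate:

# Illustrative example: sample one random candidate from a tiny space
example_space = {'dropout': [0.25, 0.5], 'learning_rate': [0.01, 0.001]}
candidate = Network(example_space)
candidate.set_random_parameters()
print(candidate.network_parameters)  # e.g. {'dropout': 0.5, 'learning_rate': 0.01}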

class Genetic_Algorithm():
    def __init__(self, parameter_space, retain=0.3, random_select=0.1, mutate_prob=0.25):
        self.mutate_prob = mutate_prob
        self.random_select = random_select
        self.retain = retain
        self.parameter_space = parameter_space

    def create_population(self, count):
        population = []
        for _ in range(0, count):
            network = Network(self.parameter_space)
            network.set_random_parameters()
            population.append(network)
        return population

    def get_fitness(self, network):
        return network.accuracy

    def get_grade(self, population):
        total = reduce(add, (self.get_fitness(network)
                             for network in population))
        return float(total) / len(population)

    def breed(self, mother, father):
        # Produce two children; each hyperparameter is inherited
        # at random from one of the two parents
        children = []
        for _ in range(2):
            child = {}
            for param in self.parameter_space:
                child[param] = random.choice(
                    [mother.network_parameters[param],
                     father.network_parameters[param]]
                )
            network = Network(self.parameter_space)
            network.create_network(child)
            if self.mutate_prob > random.random():
                network = self.mutate(network)
            children.append(network)
        return children

    def mutate(self, network):
        # Replace one randomly chosen hyperparameter with a fresh random value
        mutation = random.choice(list(self.parameter_space.keys()))
        network.network_parameters[mutation] = random.choice(self.parameter_space[mutation])
        return network

    def evolve(self, pop):
        # Rank the population by fitness (validation accuracy), best first
        graded = [(self.get_fitness(network), network) for network in pop]
        graded = [x[1] for x in sorted(graded, key=lambda x: x[0], reverse=True)]
        retain_length = int(len(graded) * self.retain)

        # Keep the top performers as parents
        parents = graded[:retain_length]

        # Randomly keep some weaker individuals to preserve diversity
        for individual in graded[retain_length:]:
            if self.random_select > random.random():
                parents.append(individual)

        parents_length = len(parents)
        desired_length = len(pop) - parents_length
        children = []

        # Breed children from random pairs of distinct parents
        while len(children) < desired_length:
            male = random.randint(0, parents_length - 1)
            female = random.randint(0, parents_length - 1)

            if male != female:
                male = parents[male]
                female = parents[female]

                children_new = self.breed(male, female)

                for child_new in children_new:
                    if len(children) < desired_length:
                        children.append(child_new)

        parents.extend(children)

        return parents
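
Because breed and mutate only manipulate parameter dictionaries, the genetic operators can be checked without training anything; this sketch (illustrative values, not part of the recipe) crosses two hand-built parents:

# Illustrative check of the crossover operator on two hand-built parents
ga = Genetic_Algorithm({'dropout': [0.25, 0.5], 'learning_rate': [0.01, 0.001]})
mother = Network(ga.parameter_space)
mother.create_network({'dropout': 0.25, 'learning_rate': 0.01})
father = Network(ga.parameter_space)
father.create_network({'dropout': 0.5, 'learning_rate': 0.001})
for child in ga.breed(mother, father):
    print(child.network_parameters)  # each value is inherited from one parent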

def get_population_accuracy(population):
    # Average the best validation accuracy across the population
    total_accuracy = 0
    for network in population:
        total_accuracy += network.accuracy

    return total_accuracy / len(population)

n_generations = 10
population_size = 20

parameter_space = {
    'dropout': [0.25, 0.5, 0.75],
    'hidden_inputs': [256, 512, 1024],
    'learning_rate': [0.1, 0.01, 0.001, 0.0001]
}
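
This search space contains 3 × 3 × 4 = 36 distinct configurations, so with a population of 20 evolved over 10 generations the algorithm can revisit combinations it has already tried; a quick (illustrative) count:

# Count the distinct hyperparameter combinations in the search space
n_combinations = reduce(lambda count, values: count * len(values),
                        parameter_space.values(), 1)
print(n_combinations)  # 36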

GA = Genetic_Algorithm(parameter_space)
population = GA.create_population(population_size)

for i in range(n_generations):
    print('Generation {}'.format(i))

    for network in population:
        network.train()

    average_accuracy = get_population_accuracy(population)
    print('Average accuracy: {:.2f}'.format(average_accuracy))

    # Evolve every generation except the last one
    if i < n_generations - 1:
        population = GA.evolve(population)

Generation 0
{'dropout': 0.5, 'hidden_inputs': 256, 'learning_rate': 0.1}
Train on 60000 samples, validate on 10000 samples
Epoch 1/1000
 8704/60000 [===>..........................] - ETA: 1:43 - loss: 14.5456 - acc: 0.0971