Neural Evolution
import random

import numpy as np
from keras.models import Sequential
from keras.layers import Dense
from sklearn.metrics import accuracy_score


# New type of neural network: a Keras Sequential model with a fitness score
class GeneticNeuralNetwork(Sequential):
    # Constructor
    def __init__(self, child_weights=None):
        # Initialize the Sequential model super class
        super().__init__()
        # If no weights are provided, generate them randomly
        if child_weights is None:
            # Layers are created with random weights
            layer1 = Dense(4, input_shape=(4,), activation='sigmoid')
            layer2 = Dense(2, activation='sigmoid')
            layer3 = Dense(1, activation='sigmoid')
            # Layers are added to the model
            self.add(layer1)
            self.add(layer2)
            self.add(layer3)
        # If weights are provided, set them within the layers
        else:
            # weights=[kernel, bias]; biases are initialized to zero
            self.add(
                Dense(
                    4,
                    input_shape=(4,),
                    activation='sigmoid',
                    weights=[child_weights[0], np.zeros(4)])
            )
            self.add(
                Dense(
                    2,
                    activation='sigmoid',
                    weights=[child_weights[1], np.zeros(2)])
            )
            self.add(
                Dense(
                    1,
                    activation='sigmoid',
                    weights=[child_weights[2], np.zeros(1)])
            )

    # Forward propagate the training data and record accuracy as the fitness score
    def forward_propagation(self, X_train, y_train):
        # Forward propagation
        y_hat = self.predict(X_train.values)
        # Compute fitness score
        self.fitness = accuracy_score(y_train, y_hat.round())

    # Standard backpropagation; the training data is passed in explicitly
    # instead of being read from module-level globals
    def compile_train(self, X_train, y_train, epochs):
        self.compile(
            optimizer='rmsprop',
            loss='binary_crossentropy',
            metrics=['accuracy']
        )
        self.fit(X_train.values, y_train.values, epochs=epochs)


# Crossover traits between two GeneticNeuralNetworks
def dynamic_crossover(nn1, nn2):
    # Lists for the respective weights
    nn1_weights = []
    nn2_weights = []
    child_weights = []
    # Get the kernel weights from every layer of the first network
    for layer in nn1.layers:
        nn1_weights.append(layer.get_weights()[0])
    # Get the kernel weights from every layer of the second network
    for layer in nn2.layers:
        nn2_weights.append(layer.get_weights()[0])
    # Iterate through the weights of every layer for crossover
    for i in range(len(nn1_weights)):
        # Pick a single split point based on the number of columns
        split = random.randint(0, np.shape(nn1_weights[i])[1] - 1)
        # From the split point onward, take the remaining columns from nn2
        for j in range(split, np.shape(nn1_weights[i])[1]):
            nn1_weights[i][:, j] = nn2_weights[i][:, j]
        # After crossover, add this layer's weights to the child
        child_weights.append(nn1_weights[i])
    # Add a chance for mutation (mutation() is not defined in this gist)
    mutation(child_weights)
    # Create and return the child network
    child = GeneticNeuralNetwork(child_weights)
    return child
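For context, here is a minimal sketch of how the class and dynamic_crossover might be wired into an evolutionary loop. Nothing below is from the gist: the synthetic dataset, the population size of 10, the five generations, and the keep-the-top-half selection are all illustrative assumptions, and the loop assumes a mutation() helper is in scope (see the sketch further down).

# Hypothetical driver loop (not part of the gist): evolve a small population
# by fitness-ranked selection and crossover.
import pandas as pd
from sklearn.datasets import make_classification

# Synthetic binary-classification data with 4 features to match input_shape=(4,)
X, y = make_classification(n_samples=300, n_features=4, random_state=0)
X_train, y_train = pd.DataFrame(X), pd.Series(y)

# Random initial population
networks = [GeneticNeuralNetwork() for _ in range(10)]

for generation in range(5):
    # Score every network: forward_propagation() stores accuracy in .fitness
    for nn in networks:
        nn.forward_propagation(X_train, y_train)
    # Keep the fittest half as parents
    networks.sort(key=lambda nn: nn.fitness, reverse=True)
    parents = networks[:5]
    # Breed children via dynamic_crossover (which may also mutate) to refill the population
    children = [dynamic_crossover(random.choice(parents), random.choice(parents))
                for _ in range(5)]
    networks = parents + children

Because fitness here is simply training-set accuracy, the loop favors networks whose randomly generated or crossed-over weights already classify the training data well.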
Where is the mutation method/function defined, or which package does it come from?
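It is not defined in this gist and does not come from a package; it belongs to the code that accompanies the gist. A minimal sketch of a compatible helper, assuming it sometimes scales one randomly chosen layer's weight matrix (the mutation rate and scaling factor below are assumptions, not the author's values):

# Hypothetical mutation() helper compatible with dynamic_crossover():
# occasionally scale one randomly chosen layer's weight matrix in place.
def mutation(child_weights):
    # Pick one layer's kernel matrix at random
    selection = random.randint(0, len(child_weights) - 1)
    # Assumed mutation chance of 50%
    if random.uniform(0, 1) >= 0.5:
        # Assumed mutation: multiply the whole matrix by a random integer factor
        child_weights[selection] *= random.randint(2, 5)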