diff --git a/src/evolutionary_keras/models.py b/src/evolutionary_keras/models.py
index 1754cf5..e6c3f3d 100644
--- a/src/evolutionary_keras/models.py
+++ b/src/evolutionary_keras/models.py
@@ -4,7 +4,7 @@
 from tensorflow.keras.callbacks import History
 from tensorflow.keras.models import Model
-from tensorflow.python.keras import callbacks as callbacks_module
+from tensorflow.keras import callbacks as callbacks_module
 
 import evolutionary_keras.optimizers as Evolutionary_Optimizers
diff --git a/src/evolutionary_keras/optimizers.py b/src/evolutionary_keras/optimizers.py
index c02a8d1..97e6c54 100644
--- a/src/evolutionary_keras/optimizers.py
+++ b/src/evolutionary_keras/optimizers.py
@@ -335,8 +335,7 @@ def flatten(self):
         # The first values of 'self.length_flat_layer' is set to 0 which is helpful in determining
         # the range of weights in the function 'undo_flatten'.
         flattened_weights = []
-        self.length_flat_layer = []
-        self.length_flat_layer.append(0)
+        self.length_flat_layer = [0]
         for weight in self.model.trainable_weights:
             a = np.reshape(compatibility_numpy(weight), [-1])
             flattened_weights.append(a)
@@ -353,10 +352,9 @@ def undo_flatten(self, flattened_weights):
         """
         new_weights = []
         for i, layer_shape in enumerate(self.shape):
-            flat_layer = flattened_weights[
-                self.length_flat_layer[i] : self.length_flat_layer[i]
-                + self.length_flat_layer[i + 1]
-            ]
+            start_index = sum(self.length_flat_layer[: i + 1])
+            end_index = start_index + self.length_flat_layer[i + 1]
+            flat_layer = flattened_weights[start_index:end_index]
             new_weights.append(np.reshape(flat_layer, layer_shape))
 
         ordered_names = [weight.name for layer in self.model.layers for weight in layer.weights]
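
For context, the new slicing in undo_flatten can be sanity-checked in isolation: self.length_flat_layer stores a leading 0 followed by each layer's flat length, so the correct start offset for layer i is the running sum of the entries up to index i. The old slice used length_flat_layer[i] directly as the start, which appears to coincide with that running sum only for the first two layers. Below is a minimal round-trip sketch of the patched logic using plain numpy arrays; the names toy_weights, shapes and lengths are illustrative stand-ins for the model's trainable weights and length_flat_layer, not part of the library.

# Minimal sketch (not part of the patch): round-trip check of the patched
# undo_flatten() indexing, using plain numpy arrays instead of model weights.
import numpy as np

# Three toy "layers" of different sizes, standing in for model.trainable_weights.
toy_weights = [
    np.arange(6).reshape(2, 3),
    np.arange(4),
    np.arange(8).reshape(2, 2, 2),
]
shapes = [w.shape for w in toy_weights]

# flatten(): record a leading 0 plus each layer's flat length, as the patched code does.
lengths = [0]
flat_parts = []
for w in toy_weights:
    a = np.reshape(w, [-1])
    flat_parts.append(a)
    lengths.append(len(a))
flattened = np.concatenate(flat_parts)

# undo_flatten(): the start of layer i is the cumulative sum of lengths up to and
# including index i (the leading 0 keeps the offsets aligned with the layer index).
recovered = []
for i, layer_shape in enumerate(shapes):
    start_index = sum(lengths[: i + 1])
    end_index = start_index + lengths[i + 1]
    recovered.append(np.reshape(flattened[start_index:end_index], layer_shape))

assert all(np.array_equal(a, b) for a, b in zip(toy_weights, recovered))

With the old slice, the third toy layer would have been read from offset lengths[2] = 4 instead of 6 + 4 = 10, so the recovered weights would be shifted whenever there are three or more layers of unequal size.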