
GitHub actions #244

Merged
5 commits merged on Nov 6, 2023
2 changes: 1 addition & 1 deletion pygad/pygad.py
@@ -880,7 +880,7 @@ def __init__(self,
self.select_parents = self.rank_selection
else:
self.valid_parameters = False
raise TypeError(f"Undefined parent selection type: {parent_selection_type}. \nThe assigned value to the 'parent_selection_type' parameter does not refer to one of the supported parent selection techniques which are: \n-sss (for steady state selection)\n-rws (for roulette wheel selection)\n-sus (for stochastic universal selection)\n-rank (for rank selection)\n-random (for random selection)\n-tournament (for tournament selection).\n")
raise TypeError(f"Undefined parent selection type: {parent_selection_type}. \nThe assigned value to the 'parent_selection_type' parameter does not refer to one of the supported parent selection techniques which are: \n-sss (steady state selection)\n-rws (roulette wheel selection)\n-sus (stochastic universal selection)\n-rank (rank selection)\n-random (random selection)\n-tournament (tournament selection)\n-tournament_nsga2: (Tournament selection for NSGA-II)\n-nsga2: (NSGA-II parent selection).\n")

# For tournament selection, validate the K value.
if (parent_selection_type == "tournament"):
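The updated message advertises two new selection types, tournament_nsga2 and nsga2. As a quick illustration of how a caller would pick one of them, here is a minimal sketch; the fitness function, gene counts, and generation settings are made up for the example, and the list-valued fitness return assumes the multi-objective support this change is working toward (PyGAD 3.x fitness functions take the GA instance as the first argument).

import pygad

# Hypothetical two-objective fitness function, for illustration only.
# Returning a list of values assumes multi-objective support.
def fitness_func(ga_instance, solution, solution_idx):
    objective_1 = -abs(sum(solution) - 10)
    objective_2 = -abs(solution[0] - solution[-1])
    return [objective_1, objective_2]

ga_instance = pygad.GA(num_generations=50,
                       num_parents_mating=4,
                       fitness_func=fitness_func,
                       sol_per_pop=10,
                       num_genes=5,
                       # Any value outside the supported list raises the TypeError above.
                       parent_selection_type="nsga2")
ga_instance.run()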
80 changes: 71 additions & 9 deletions pygad/utils/mutation.py
@@ -578,10 +578,31 @@ def adaptive_mutation_by_space(self, offspring):
# Adaptive mutation changes one or more genes in each offspring randomly.
# The number of genes to mutate depends on the solution's fitness value.
for offspring_idx in range(offspring.shape[0]):
if offspring_fitness[offspring_idx] < average_fitness:
adaptive_mutation_num_genes = self.mutation_num_genes[0]
## TODO Make edits to work with multi-objective optimization.
# Compare the fitness of each offspring to the average fitness of each objective function.
fitness_comparison = offspring_fitness[offspring_idx] < average_fitness

# Check if the problem is single or multi-objective optimization.
if type(fitness_comparison) in [bool, numpy.bool_]:
# Single-objective optimization problem.
if offspring_fitness[offspring_idx] < average_fitness:
adaptive_mutation_num_genes = self.mutation_num_genes[0]
else:
adaptive_mutation_num_genes = self.mutation_num_genes[1]
else:
adaptive_mutation_num_genes = self.mutation_num_genes[1]
# Multi-objective optimization problem.

# Sum the boolean comparison array (result of the comparison).
# True counts as 1 and False as 0.
fitness_comparison_sum = sum(fitness_comparison)
# Check whether at least 50% of the objectives have fitness below the average.
# If so, the solution is considered low quality and the first value of mutation_num_genes is used.
# Otherwise, the second value is used.
if fitness_comparison_sum >= len(fitness_comparison)/2:
adaptive_mutation_num_genes = self.mutation_num_genes[0]
else:
adaptive_mutation_num_genes = self.mutation_num_genes[1]

mutation_indices = numpy.array(random.sample(range(0, self.num_genes), adaptive_mutation_num_genes))
for gene_idx in mutation_indices:
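To make the new multi-objective branch concrete, the standalone sketch below replays the same decision rule with made-up numbers: when the comparison produces a boolean array (one entry per objective), the offspring gets the first mutation_num_genes value whenever at least half of its objectives fall below their per-objective average; otherwise it gets the second value.

import numpy

# Made-up values for illustration only (not taken from this PR).
offspring_fitness = numpy.array([2.0, 7.5, 1.0])   # one offspring, three objectives
average_fitness = numpy.array([3.0, 6.0, 4.0])     # per-objective population average
mutation_num_genes = (4, 1)                        # (low-quality, high-quality) settings

fitness_comparison = offspring_fitness < average_fitness   # array([ True, False,  True])
if type(fitness_comparison) in [bool, numpy.bool_]:
    # Single-objective: the comparison is a plain boolean.
    adaptive_mutation_num_genes = mutation_num_genes[0] if fitness_comparison else mutation_num_genes[1]
else:
    # Multi-objective: count how many objectives fall below their average.
    if sum(fitness_comparison) >= len(fitness_comparison) / 2:
        adaptive_mutation_num_genes = mutation_num_genes[0]   # 2 of 3 below average -> mutate more genes
    else:
        adaptive_mutation_num_genes = mutation_num_genes[1]

print(adaptive_mutation_num_genes)   # prints 4 for these sample numbers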

@@ -703,6 +724,7 @@ def adaptive_mutation_randomly(self, offspring):
## TODO Make edits to work with multi-objective optimization.
# Compare the fitness of each offspring to the average fitness of each objective function.
fitness_comparison = offspring_fitness[offspring_idx] < average_fitness

# Check if the problem is single or multi-objective optimization.
if type(fitness_comparison) in [bool, numpy.bool_]:
# Single-objective optimization problem.
@@ -791,10 +813,30 @@ def adaptive_mutation_probs_by_space(self, offspring):
# Adaptive random mutation changes one or more genes in each offspring randomly.
# The probability of mutating a gene depends on the solution's fitness value.
for offspring_idx in range(offspring.shape[0]):
if offspring_fitness[offspring_idx] < average_fitness:
adaptive_mutation_probability = self.mutation_probability[0]
## TODO Make edits to work with multi-objective optimization.
# Compare the fitness of each offspring to the average fitness of each objective function.
fitness_comparison = offspring_fitness[offspring_idx] < average_fitness

# Check if the problem is single or multi-objective optimization.
if type(fitness_comparison) in [bool, numpy.bool_]:
# Single-objective optimization problem.
if offspring_fitness[offspring_idx] < average_fitness:
adaptive_mutation_probability = self.mutation_probability[0]
else:
adaptive_mutation_probability = self.mutation_probability[1]
else:
adaptive_mutation_probability = self.mutation_probability[1]
# Multi-objective optimization problem.

# Sum the boolean comparison array (result of the comparison).
# True counts as 1 and False as 0.
fitness_comparison_sum = sum(fitness_comparison)
# Check whether at least 50% of the objectives have fitness below the average.
# If so, the solution is considered low quality and the first value of mutation_probability is used.
# Otherwise, the second value is used.
if fitness_comparison_sum >= len(fitness_comparison)/2:
adaptive_mutation_probability = self.mutation_probability[0]
else:
adaptive_mutation_probability = self.mutation_probability[1]

probs = numpy.random.random(size=offspring.shape[1])
for gene_idx in range(offspring.shape[1]):
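In the probability-based variants, the selected adaptive_mutation_probability is then applied per gene: each gene draws a uniform random number and mutates only when its draw is small enough relative to that probability. A minimal sketch with made-up numbers follows; the uniform noise is just a stand-in for PyGAD's actual gene-space-aware replacement.

import numpy

# Illustrative values only; the names mirror the PR but the numbers are made up.
adaptive_mutation_probability = 0.25   # picked from mutation_probability[0] or [1] as above
offspring_row = numpy.array([3.1, -0.4, 2.2, 0.8, -1.5, 0.0])

probs = numpy.random.random(size=offspring_row.shape[0])
for gene_idx in range(offspring_row.shape[0]):
    if probs[gene_idx] <= adaptive_mutation_probability:
        # Gene selected for mutation; add uniform noise as a stand-in for
        # PyGAD's gene replacement logic.
        offspring_row[gene_idx] += numpy.random.uniform(-1.0, 1.0)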
@@ -914,10 +956,30 @@ def adaptive_mutation_probs_randomly(self, offspring):
# Adaptive random mutation changes one or more genes in each offspring randomly.
# The probability of mutating a gene depends on the solution's fitness value.
for offspring_idx in range(offspring.shape[0]):
if offspring_fitness[offspring_idx] < average_fitness:
adaptive_mutation_probability = self.mutation_probability[0]
## TODO Make edits to work with multi-objective optimization.
# Compare the fitness of each offspring to the average fitness of each objective function.
fitness_comparison = offspring_fitness[offspring_idx] < average_fitness

# Check if the problem is single or multi-objective optimization.
if type(fitness_comparison) in [bool, numpy.bool_]:
# Single-objective optimization problem.
if offspring_fitness[offspring_idx] < average_fitness:
adaptive_mutation_probability = self.mutation_probability[0]
else:
adaptive_mutation_probability = self.mutation_probability[1]
else:
adaptive_mutation_probability = self.mutation_probability[1]
# Multi-objective optimization problem.

# Sum the boolean comparison array (result of the comparison).
# True counts as 1 and False as 0.
fitness_comparison_sum = sum(fitness_comparison)
# Check whether at least 50% of the objectives have fitness below the average.
# If so, the solution is considered low quality and the first value of mutation_probability is used.
# Otherwise, the second value is used.
if fitness_comparison_sum >= len(fitness_comparison)/2:
adaptive_mutation_probability = self.mutation_probability[0]
else:
adaptive_mutation_probability = self.mutation_probability[1]

probs = numpy.random.random(size=offspring.shape[1])
for gene_idx in range(offspring.shape[1]):