population initializer refactor
PasaOpasen committed Apr 13, 2024
1 parent c4a9bb3 commit a601fe1
Showing 8 changed files with 111 additions and 102 deletions.
6 changes: 3 additions & 3 deletions README.md
@@ -341,7 +341,7 @@ from geneticalgorithm2 import Generation, AlgorithmParams # classes for comfort

from geneticalgorithm2 import Crossover, Mutations, Selection # classes for specific mutation and crossover behavior

- from geneticalgorithm2 import Population_initializer # for creating better start population
+ from geneticalgorithm2 import get_population_initializer # for creating better start population

from geneticalgorithm2 import np_lru_cache # for cache function (if u want)

@@ -1125,7 +1125,7 @@ import matplotlib.pyplot as plt
from DiscreteHillClimbing import Hill_Climbing_descent

from geneticalgorithm2 import GeneticAlgorithm2 as ga
- from geneticalgorithm2 import Population_initializer
+ from geneticalgorithm2 import get_population_initializer


def f(arr):
@@ -1152,7 +1152,7 @@ model = ga(function=f, dimension=varbound.shape[0],

for time in ('before_select', 'after_select', 'never'):
model.run(no_plot=True,
-               population_initializer=Population_initializer(
+               population_initializer=get_population_initializer(
select_best_of=3,
local_optimization_step=time,
local_optimizer=my_local_optimizer
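The my_local_optimizer passed above is defined in a part of the README not shown in this diff; any callable matching the documented (candidate array, its score) -> (modified array, its score) contract works. A minimal placeholder sketch, not the README's actual optimizer:

def my_local_optimizer(candidate, score):
    # placeholder: return the candidate unchanged; a real optimizer would
    # return an improved candidate together with its recomputed score
    return candidate, score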
5 changes: 4 additions & 1 deletion geneticalgorithm2/__init__.py
@@ -21,7 +21,10 @@
from .crossovers import Crossover
from .selections import Selection

- from .initializer import Population_initializer
+ from .population_initializer import get_population_initializer
+
+ # to keep backward compatibility
+ Population_initializer: TypeAlias = get_population_initializer

from .callbacks import Callbacks, Actions, ActionConditions, MiddleCallbacks

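Since Population_initializer is now just an alias bound to get_population_initializer, existing user code keeps working. A quick sanity-check sketch, assuming a package build containing this commit:

from geneticalgorithm2 import Population_initializer, get_population_initializer

# both names refer to the same callable after this refactor
assert Population_initializer is get_population_initializer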
4 changes: 2 additions & 2 deletions geneticalgorithm2/geneticalgorithm2.py
@@ -20,7 +20,7 @@

from .classes import AlgorithmParams, Generation, MiddleCallbackData, GAResult, GenerationConvertible

- from .initializer import Population_initializer
+ from .population_initializer import get_population_initializer
from .utils.plotting import plot_pop_scores, plot_several_lines

from .utils.funcs import can_be_prob, is_numpy, is_current_gen_number, fast_min, random_indexes_pair
@@ -437,7 +437,7 @@ def run(

population_initializer: Tuple[
int, Callable[[array2D, array1D], Tuple[array2D, array1D]]
-     ] = Population_initializer(select_best_of=1, local_optimization_step='never', local_optimizer=None),
+     ] = get_population_initializer(select_best_of=1, local_optimization_step='never', local_optimizer=None),

stop_when_reached: Optional[float] = None,
callbacks: Optional[Sequence[CallbackFunc]] = None,
90 changes: 0 additions & 90 deletions geneticalgorithm2/initializer.py

This file was deleted.

96 changes: 96 additions & 0 deletions geneticalgorithm2/population_initializer.py
@@ -0,0 +1,96 @@

from typing import Callable, Optional, Tuple, Literal

import numpy as np

from .utils.aliases import TypeAlias, array1D, array2D


LOCAL_OPTIMIZATION_STEP_CASE: TypeAlias = Literal['before_select', 'after_select', 'never']
"""
When the local optimization (candidate enhancement) must be performed:
* 'never' -- do no local optimization
* 'before_select' -- before selecting the best N objects
    (e.g. run local optimization on all select_best_of*N candidates, then keep the N best results)
* 'after_select' -- run local optimization only on the N best selected objects
"""


def get_population_initializer(
    select_best_of: int = 4,
    local_optimization_step: LOCAL_OPTIMIZATION_STEP_CASE = 'never',
    local_optimizer: Optional[
        Callable[
            [array1D, float],
            Tuple[array1D, float]
        ]
    ] = None
) -> Tuple[int, Callable[[array2D, array1D], Tuple[array2D, array1D]]]:
    """
    Args:
        select_best_of: determines how much larger the generated start population is; only its best 1/select_best_of part is kept.
            For example, for select_best_of = 4 and population_size = N, the N best objects are selected
            from the 4N generated objects (when start_generation is None).
            If a start_generation is provided, the best len(start_generation)/select_best_of objects are selected from it.
        local_optimization_step: when to perform the local optimization
        local_optimizer: the local optimization function (candidate array, its score) -> (modified array, its score)
    Returns:
        a tuple (select_best_of, processor), where processor performs the selection and the optional local optimization
    """

    assert select_best_of > 0 and isinstance(select_best_of, int), (select_best_of, type(select_best_of))

    assert local_optimization_step in LOCAL_OPTIMIZATION_STEP_CASE.__args__, (
        local_optimization_step, LOCAL_OPTIMIZATION_STEP_CASE.__args__
    )

    if local_optimizer is None and local_optimization_step in LOCAL_OPTIMIZATION_STEP_CASE.__args__[:2]:
        raise Exception(
            f"local_optimizer function must not be None "
            f"when local_optimization_step is one of {LOCAL_OPTIMIZATION_STEP_CASE.__args__[:2]}"
        )

    def select_best(population: array2D, scores: array1D) -> Tuple[array2D, array1D]:
        # keep the best (lowest-score) 1/select_best_of part of the population
        args = np.argsort(scores)
        args = args[:round(args.size / select_best_of)]
        return population[args], scores[args]

    def local_opt(population: array2D, scores: array1D):
        # apply the local optimizer to each candidate and collect the results
        _pop, _score = zip(
            *[
                local_optimizer(population[i], scores[i]) for i in range(scores.size)
            ]
        )
        return np.array(_pop), np.array(_score)

    # def Create_population(func, start_generation, expected_size, variable_boundaries):
    #
    #     if not (start_generation['variables'] is None):
    #         pop = start_generation['variables']
    #         scores = start_generation['scores']
    #         if scores is None:
    #             scores = np.array([func(pop[i, :]) for i in range(pop.shape[0])])
    #         return pop, scores

    def process_population(population: array2D, scores: array1D):
        if local_optimization_step == 'before_select':
            pop, s = local_opt(population, scores)
            return select_best(pop, s)

        if local_optimization_step == 'after_select':
            pop, s = select_best(population, scores)
            return local_opt(pop, s)

        # if local_optimization_step == 'never':
        return select_best(population, scores)

    return select_best_of, process_population








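As a rough illustration of what the returned pair does (a sketch, assuming a package build containing this commit; the toy data below is made up): process keeps the best 1/select_best_of share of the candidates, lowest scores first, since the GA minimizes.

import numpy as np
from geneticalgorithm2 import get_population_initializer

# toy data: 12 random candidates of dimension 5 with random scores
pop = np.random.uniform(0, 10, size=(12, 5))
scores = np.random.uniform(size=12)

best_of, process = get_population_initializer(select_best_of=4)  # no local optimization
new_pop, new_scores = process(pop, scores)

print(best_of)        # 4
print(new_pop.shape)  # (3, 5) -- only the best quarter (lowest scores) is kept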
4 changes: 2 additions & 2 deletions tests/best_of_N.py
@@ -13,7 +13,7 @@
import matplotlib.pyplot as plt

from geneticalgorithm2 import GeneticAlgorithm2 as ga
- from geneticalgorithm2 import Population_initializer
+ from geneticalgorithm2 import get_population_initializer


def f(X):
@@ -39,7 +39,7 @@ def f(X):

for _ in range(40):
model.run(no_plot = True,
-               population_initializer=Population_initializer(select_best_of = best_of)
+               population_initializer=get_population_initializer(select_best_of = best_of)
)
average_report += np.array(model.report)

4 changes: 2 additions & 2 deletions tests/best_of_N_with_opp.py
@@ -16,7 +16,7 @@
from OptimizationTestFunctions import Ackley

from geneticalgorithm2 import GeneticAlgorithm2 as ga
- from geneticalgorithm2 import Population_initializer
+ from geneticalgorithm2 import get_population_initializer


dim = 15
@@ -62,7 +62,7 @@

for _ in range(40):
model.run(no_plot = True,
-               population_initializer=Population_initializer(select_best_of = 3),
+               population_initializer=get_population_initializer(select_best_of = 3),
init_oppositors=opp
)
average_report += np.array(model.report)
4 changes: 2 additions & 2 deletions tests/init_local_opt.py
@@ -15,7 +15,7 @@
from DiscreteHillClimbing import Hill_Climbing_descent

from geneticalgorithm2 import GeneticAlgorithm2 as ga
- from geneticalgorithm2 import Population_initializer
+ from geneticalgorithm2 import get_population_initializer


def f(arr):
@@ -45,7 +45,7 @@ def f(arr):


model.run(no_plot = True,
-           population_initializer = Population_initializer(
+           population_initializer = get_population_initializer(
select_best_of = 3,
local_optimization_step = time,
local_optimizer = my_local_optimizer
