Skip to content

Commit 0af205f

Browse files
committed
Create constants for frequently used labels
1 parent 1c7ef13 commit 0af205f

File tree

3 files changed

+18
-13
lines changed

3 files changed

+18
-13
lines changed

golem/core/optimisers/opt_history_objects/opt_history.py

Lines changed: 8 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,10 @@
1818
if TYPE_CHECKING:
1919
from golem.core.optimisers.opt_history_objects.individual import Individual
2020

21+
INITIAL_ASSUMPTIONS_LABEL = 'initial_assumptions'
22+
EVOLUTION_RESULTS_LABEL = 'evolution_results'
23+
TUNING_START_LABEL = 'tuning_start'
24+
TUNING_RESULT_LABEL = 'tuning_result'
2125

2226
class OptHistory:
2327
"""
@@ -212,7 +216,7 @@ def initial_assumptions(self) -> Optional[Generation]:
212216
if not self.generations:
213217
return None
214218
for gen in self.generations:
215-
if gen.label == 'initial_assumptions':
219+
if gen.label == INITIAL_ASSUMPTIONS_LABEL:
216220
return gen
217221

218222
@property
@@ -224,23 +228,23 @@ def evolution_results(self) -> Optional[Generation]:
224228
if not self.generations:
225229
return None
226230
for gen in reversed(self.generations):
227-
if gen.label == 'evolution_results':
231+
if gen.label == EVOLUTION_RESULTS_LABEL:
228232
return gen
229233

230234
@property
231235
def tuning_start(self) -> Optional[Generation]:
232236
if not self.generations:
233237
return None
234238
for gen in reversed(self.generations):
235-
if gen.label == 'tuning_start':
239+
if gen.label == TUNING_START_LABEL:
236240
return gen
237241

238242
@property
239243
def tuning_result(self) -> Optional[Generation]:
240244
if not self.generations:
241245
return None
242246
for gen in reversed(self.generations):
243-
if gen.label == 'tuning_result':
247+
if gen.label == TUNING_RESULT_LABEL:
244248
return gen
245249

246250
@property

golem/core/optimisers/random/random_search.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -9,8 +9,9 @@
99
from golem.core.optimisers.graph import OptGraph
1010
from golem.core.optimisers.objective import Objective, ObjectiveFunction
1111
from golem.core.optimisers.opt_history_objects.individual import Individual
12+
from golem.core.optimisers.opt_history_objects.opt_history import EVOLUTION_RESULTS_LABEL, INITIAL_ASSUMPTIONS_LABEL
1213
from golem.core.optimisers.optimization_parameters import GraphRequirements
13-
from golem.core.optimisers.optimizer import GraphOptimizer, GraphGenerationParams
14+
from golem.core.optimisers.optimizer import GraphGenerationParams, GraphOptimizer
1415
from golem.core.optimisers.timer import OptimisationTimer
1516
from golem.core.utilities.grouped_condition import GroupedCondition
1617

@@ -34,7 +35,7 @@ def __init__(self,
3435
'Optimisation stopped: Time limit is reached'
3536
).add_condition(
3637
lambda: requirements.num_of_generations is not None and
37-
self.current_iteration_num >= requirements.num_of_generations,
38+
self.current_iteration_num >= requirements.num_of_generations,
3839
'Optimisation stopped: Max number of iterations reached')
3940

4041
def optimise(self, objective: ObjectiveFunction) -> Sequence[OptGraph]:
@@ -46,14 +47,14 @@ def optimise(self, objective: ObjectiveFunction) -> Sequence[OptGraph]:
4647

4748
with self.timer, self._progressbar as pbar:
4849
self.best_individual = self._eval_initial_individual(evaluator)
49-
self._update_best_individual(self.best_individual, 'initial_assumptions')
50+
self._update_best_individual(self.best_individual, INITIAL_ASSUMPTIONS_LABEL)
5051
while not self.stop_optimization():
5152
new_individual = self._generate_new_individual()
5253
evaluator([new_individual])
5354
self.current_iteration_num += 1
5455
self._update_best_individual(new_individual)
5556
pbar.update()
56-
self._update_best_individual(self.best_individual, 'evolution_results')
57+
self._update_best_individual(self.best_individual, EVOLUTION_RESULTS_LABEL)
5758
pbar.close()
5859
return [self.best_individual.graph]
5960

golem/core/tuning/tuner_interface.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
from golem.core.optimisers.graph import OptGraph
1515
from golem.core.optimisers.objective import ObjectiveEvaluate, ObjectiveFunction
1616
from golem.core.optimisers.opt_history_objects.individual import Individual
17-
from golem.core.optimisers.opt_history_objects.opt_history import OptHistory
17+
from golem.core.optimisers.opt_history_objects.opt_history import OptHistory, TUNING_RESULT_LABEL, TUNING_START_LABEL
1818
from golem.core.optimisers.opt_history_objects.parent_operator import ParentOperator
1919
from golem.core.tuning.search_space import SearchSpace, convert_parameters
2020
from golem.core.utilities.data_structures import ensure_wrapped_in_sequence
@@ -96,7 +96,7 @@ def init_check(self, graph: OptGraph) -> None:
9696
graph = deepcopy(graph)
9797
fitness = self.objective_evaluate(graph)
9898
self.init_individual = self._create_individual(graph, fitness)
99-
self._add_to_history([self.init_individual], label='tuning_start')
99+
self._add_to_history([self.init_individual], label=TUNING_START_LABEL)
100100

101101
init_metric = self._fitness_to_metric_value(fitness)
102102
self.log.message(f'Initial graph: {graph_structure(graph)} \n'
@@ -154,7 +154,7 @@ def _single_obj_final_check(self, tuned_graph: OptGraph):
154154
self.log.message('Final metric is None')
155155

156156
self.obtained_individual = final_individual
157-
self._add_to_history([self.obtained_individual], label='tuning_result')
157+
self._add_to_history([self.obtained_individual], label=TUNING_RESULT_LABEL)
158158

159159
return self.obtained_individual.graph
160160

@@ -179,7 +179,7 @@ def _multi_obj_final_check(self, tuned_graphs: Sequence[OptGraph]) -> Sequence[O
179179
self.obtained_individual = [self.init_individual]
180180
final_graphs = [self.init_individual.graph]
181181

182-
self._add_to_history(self.obtained_individual, label='tuning_result')
182+
self._add_to_history(self.obtained_individual, label=TUNING_RESULT_LABEL)
183183

184184
return final_graphs
185185

@@ -284,7 +284,7 @@ def _add_to_history(self, individuals: Sequence[Individual], label: Optional[str
284284

285285
if label is None:
286286
label = f'tuning_iteration_{self.evaluations_count}'
287-
if label not in ('tuning_start', 'tuning_result'):
287+
if label not in (TUNING_START_LABEL, TUNING_RESULT_LABEL):
288288
individuals = list(individuals)
289289
individuals.append(self.init_individual) # add initial individual to maintain consistency of inheritance
290290
history.add_to_history(individuals=individuals,

0 commit comments

Comments (0)