Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Adding relative improvement as stopping criterion #1479

Open
wants to merge 7 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
17 changes: 15 additions & 2 deletions nevergrad/optimization/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -114,6 +114,7 @@ def __init__(
x: utils.MultiValue(self.parametrization, np.inf, reference=self.parametrization)
for x in ["optimistic", "pessimistic", "average", "minimum"]
}
self.previous_best_loss: float = float("Inf")
# pruning function, called at each "tell"
# this can be deactivated or modified by each implementation
self.pruning: tp.Optional[_PruningCallable] = utils.Pruning.sensible_default(
Expand Down Expand Up @@ -260,7 +261,11 @@ def register_callback(self, name: str, callback: _OptimCallBack) -> None:
callback: callable
a callable taking the same parameters as the method it is registered upon (including self)
"""
assert name in ["ask", "tell"], f'Only "ask" and "tell" methods can have callbacks (not {name})'
assert name in [
"ask",
"tell",
"minimize",
], f'Only "ask", "tell" and "minimize" methods can have callbacks (not {name})'
self._callbacks.setdefault(name, []).append(callback)

def remove_all_callbacks(self) -> None:
Expand Down Expand Up @@ -420,6 +425,9 @@ def _preprocess_multiobjective(self, candidate: p.Parameter) -> tp.FloatLoss:
)
return self._hypervolume_pareto.add(candidate)

def _save_loss_history(self) -> None:
    """Snapshot the current best "minimum" loss into ``previous_best_loss``.

    Called just before the best records are updated, so that stopping
    criteria can compare the new best against the one it replaced.
    """
    best_record = self.current_bests["minimum"]
    self.previous_best_loss = best_record.get_estimation("minimum")

def _update_archive_and_bests(self, candidate: p.Parameter, loss: tp.FloatLoss) -> None:
x = candidate.get_standardized_data(reference=self.parametrization)
if not isinstance(
Expand All @@ -441,7 +449,10 @@ def _update_archive_and_bests(self, candidate: p.Parameter, loss: tp.FloatLoss)
mvalue.parameter = candidate # keep best candidate
# update current best records
# this may have to be improved if we want to keep more kinds of best losses

if self.archive[x].get_estimation("minimum") < self.current_bests["minimum"].get_estimation(
"minimum"
):
self._save_loss_history()
for name in self.current_bests:
if mvalue is self.current_bests[name]: # reboot
best = min(self.archive.values(), key=lambda mv, n=name: mv.get_estimation(n)) # type: ignore
Expand Down Expand Up @@ -662,6 +673,8 @@ def minimize(
# this is the first thing to do when resuming an existing optimization run
# process finished
if self._finished_jobs:
for callback in self._callbacks.get("minimize", []):
callback(self)
if (remaining_budget or sleeper._start is not None) and not first_iteration:
# ignore stop if no more suggestion is sent
# this is an ugly hack to avoid warnings at the end of steady mode
Expand Down
15 changes: 15 additions & 0 deletions nevergrad/optimization/callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -347,6 +347,11 @@ def timer(cls, max_duration: float) -> "EarlyStopping":
"""Early stop when max_duration seconds has been reached (from the first ask)"""
return cls(_DurationCriterion(max_duration))

@classmethod
def relative_improvement(cls, min_improvement: float) -> "EarlyStopping":
    """Early stop when the loss does not decrease enough relative to the
    previously recorded best (i.e. since the last best-loss update).
    """
    criterion = _RelImprovementCriterion(min_improvement)
    return cls(criterion)


class _DurationCriterion:
def __init__(self, max_duration: float) -> None:
Expand All @@ -357,3 +362,13 @@ def __call__(self, optimizer: base.Optimizer) -> bool:
if np.isinf(self._start):
self._start = time.time()
return time.time() > self._start + self._max_duration


class _RelImprovementCriterion:
    """Stopping criterion triggering when the relative improvement of the best
    loss (with respect to the previously recorded best, see
    ``Optimizer.previous_best_loss``) falls below ``min_improvement``.

    Parameters
    ----------
    min_improvement: float
        minimal relative decrease of the best loss required to keep optimizing.
    """

    def __init__(self, min_improvement: float) -> None:
        self._min_improvement = min_improvement

    def __call__(self, optimizer: "base.Optimizer") -> bool:
        last_best = optimizer.previous_best_loss
        current_best = optimizer.current_bests["minimum"].get_estimation("minimum")
        if not np.isfinite(last_best):
            # no finite previous best recorded yet (e.g. fresh optimizer):
            # never stop early (the original nan comparison relied on this implicitly)
        	return False
        decrease = last_best - current_best
        # abs() keeps the criterion meaningful for negative losses (dividing by a
        # negative previous best would flip the inequality and stop on large
        # improvements) and lets us guard against a ZeroDivisionError below
        denominator = abs(last_best)
        if denominator == 0.0:
            # relative improvement is undefined at 0: fall back to absolute decrease
            return decrease < self._min_improvement
        return decrease / denominator < self._min_improvement
10 changes: 10 additions & 0 deletions nevergrad/optimization/test_callbacks.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,6 +128,16 @@ def test_early_stopping() -> None:
assert optimizer.recommend().loss < 12 # type: ignore


def test_improvement_criterion() -> None:
    """Checks that the criterion triggers exactly when the relative decrease of
    the best loss between two best-updates is smaller than ``min_improvement``.

    (The previous version was copy-pasted from the duration test: sleeping
    cannot make a loss-improvement criterion trigger.)
    """
    optim = optimizerlib.OnePlusOne(2, budget=100)
    crit = ng.callbacks._RelImprovementCriterion(min_improvement=0.01)
    assert not crit(optim)  # no finite previous best yet
    optim.tell(optim.ask(), 100.0)
    assert not crit(optim)  # previous best is still infinite
    optim.tell(optim.ask(), 99.9)  # only 0.1% improvement: below the 1% threshold
    assert crit(optim)
    optim.tell(optim.ask(), 50.0)  # ~50% improvement: above the threshold
    assert not crit(optim)


def test_duration_criterion() -> None:
optim = optimizerlib.OnePlusOne(2, budget=100)
crit = ng.callbacks._DurationCriterion(0.01)
Expand Down