22 changes: 17 additions & 5 deletions flaml/automl/automl.py
@@ -3569,12 +3569,24 @@ def _search_sequential(self):
                     state.best_config,
                     self.data_size_full,
                 )
-
-                if (
-                    self._trained_estimator.params[self._trained_estimator.ITER_HP]
-                    != self.best_config[self._trained_estimator.ITER_HP]
+                if getattr(self._trained_estimator, "params", {}) and getattr(
+                    self._trained_estimator, "ITER_HP", None
                 ):
-                    logger.warning("early stopping happened")
+                    _hp_trained_iter = self._trained_estimator.params.get(
+                        self._trained_estimator.ITER_HP
+                    )
+                    _best_config_iter = self.best_config.get(
+                        self._trained_estimator.ITER_HP
+                    )
+                    if _hp_trained_iter != _best_config_iter:
+                        logger.warning(
+                            "Early stopping happened when retraining a model with the best configuration."
+                            f" The best config's ITER_HP is {_best_config_iter}"
+                            f" and the actual ITER_HP used for retraining the model is {_hp_trained_iter}."
+                            " This early stopping happens because flaml needs to do its best effort to"
+                            " retrain without violating the time budget when retrain_full is set to 'budget'."
+                            " If this mismatch is not desired, please set retrain_full to True."
+                        )

                 logger.info(
                     "retrain {} for {:.1f}s".format(self._best_estimator, retrain_time)
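For context, here is a minimal sketch of how a user would reach this code path (the dataset, time budget, and values are illustrative, not part of this PR): retrain_full="budget" asks flaml to retrain the best model on the full data within whatever time remains, so the retrained model may use fewer iterations than the best config specifies, which is exactly the mismatch the new warning reports.

    from flaml import AutoML
    from sklearn.datasets import load_iris

    X, y = load_iris(return_X_y=True)

    automl = AutoML()
    # With retrain_full="budget", the final retraining is best-effort: flaml may
    # stop it early to respect time_budget, triggering the warning added above.
    automl.fit(
        X, y,
        task="classification",
        time_budget=10,  # seconds; a tight budget makes early stopping likely
        retrain_full="budget",
    )

    # If the mismatch is not desired, retrain_full=True retrains with the best
    # config's full iteration count (possibly exceeding time_budget).
    automl.fit(X, y, task="classification", time_budget=10, retrain_full=True)

The guard with getattr also means the warning is only attempted for estimators that expose params and an ITER_HP hyperparameter (i.e., iterative learners), instead of raising on estimators without them.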