Mirror of https://github.com/microsoft/autogen.git, synced 2025-07-28 19:30:10 +00:00

If save_best_model_per_estimator is False and retrain_final is True, unfit the model after evaluation in HPO; retrain if using ray. Update ITER_HP in config after a trial is finished. Change prophet logging level. Example and notebook update. Allow settings to be passed to the AutoML constructor instead of requiring a derived class (see "Are you planning to add multi-output-regression capability to FLAML" #192 and "Is multi-tasking allowed?" #277). Remove model_history. Checkpoint bug fix.

* model_history meaning save_best_model_per_estimator
* ITER_HP
* example update
* prophet logging level
* comment update in forecast notebook
* print format improvement
* allow settings to be passed to AutoML constructor
* checkpoint bug fix
* time limit for autohf regression test
* skip slow test on macos
* cleanup before del
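A minimal sketch of the constructor change described above: settings that previously had to go through fit() (or a derived class) can now be passed to the AutoML constructor directly. The toy dataset and the particular setting keys are illustrative, not prescribed by this commit.

from flaml import AutoML
from sklearn.datasets import load_iris

X, y = load_iris(return_X_y=True)

# Settings passed to the constructor instead of requiring a derived class;
# time_budget is in seconds.
settings = {"time_budget": 10, "metric": "accuracy", "task": "classification"}
automl = AutoML(**settings)
automl.fit(X, y)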
32 lines · 1.2 KiB · Python
import os

try:
    from transformers import Trainer as TFTrainer
except ImportError:
    # Fall back to a plain base class when transformers is not installed.
    TFTrainer = object


class TrainerForAuto(TFTrainer):
    def evaluate(self, eval_dataset=None, ignore_keys=None, metric_key_prefix="eval"):
        """Overriding transformers.Trainer.evaluate by saving metrics and checkpoint path."""
        from transformers.trainer_utils import PREFIX_CHECKPOINT_DIR

        ckpt_dir = os.path.join(
            self.args.output_dir, f"{PREFIX_CHECKPOINT_DIR}-{self.state.global_step}"
        )
        eval_dataset = eval_dataset if eval_dataset is not None else self.eval_dataset
        # Short-circuits to a falsy value when there is no evaluation dataset.
        metrics = eval_dataset and super().evaluate(
            eval_dataset, ignore_keys, metric_key_prefix
        )
        if metrics:
            # Strip the "eval_" prefix so metric names are uniform downstream.
            for key in list(metrics.keys()):
                if key.startswith("eval_"):
                    metrics[key[5:]] = metrics.pop(key)
        if hasattr(self, "ckpt_to_global_step"):
            self.ckpt_to_global_step[ckpt_dir] = self.state.global_step
            if metrics:
                self.ckpt_to_metric[ckpt_dir] = metrics
        else:
            # First evaluation call: initialize the checkpoint-to-step
            # and checkpoint-to-metric maps.
            self.ckpt_to_global_step = {ckpt_dir: self.state.global_step}
            self.ckpt_to_metric = {ckpt_dir: metrics} if metrics else {}
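A sketch of how the maps this trainer accumulates might be consumed after a run. The helper name best_checkpoint is hypothetical, and the "loss" key is assumed to be present in the recorded metrics (transformers reports eval_loss, which the override above renames to loss):

def best_checkpoint(trainer):
    """Return the checkpoint directory with the lowest recorded loss.

    Hypothetical helper: assumes trainer is a TrainerForAuto whose evaluate
    has run at least once, so ckpt_to_metric maps checkpoint dirs to metric
    dicts containing a "loss" entry.
    """
    return min(
        trainer.ckpt_to_metric,
        key=lambda ckpt: trainer.ckpt_to_metric[ckpt]["loss"],
    )

ckpt_to_global_step can then map the chosen directory back to the global step at which that checkpoint was written.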