import os
import unittest
from tempfile import TemporaryDirectory

from sklearn.datasets import fetch_california_housing

from flaml import AutoML
from flaml.training_log import training_log_reader


class TestTrainingLog(unittest.TestCase):
    def test_training_log(self, path="test_training_log.log"):
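        """Run a short AutoML job with training-log output enabled, then check
        that the log file exists and that its records can be read back."""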
        with TemporaryDirectory() as d:
            filename = os.path.join(d, path)

            # Run a simple job.
            automl = AutoML()
            automl_settings = {
                "time_budget": 1,
                "metric": "mse",
                "task": "regression",
                "log_file_name": filename,
                "log_training_metric": True,
                "mem_thres": 1024 * 1024,
                "n_jobs": 1,
                "model_history": True,
                "train_time_limit": 0.1,
                "verbose": 3,
                "ensemble": True,
                # keep the search state so that automl._state can be reused below
                "keep_search_state": True,
            }
            X_train, y_train = fetch_california_housing(return_X_y=True)
            automl.fit(X_train=X_train, y_train=y_train, **automl_settings)

            # Check if the training log file is populated.
            self.assertTrue(os.path.exists(filename))
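            # If a model was found within the small time budget, verify that
            # retraining its config reproduces an equivalent estimator.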
            if automl.best_estimator:
                estimator, config = automl.best_estimator, automl.best_config
                model0 = automl.best_model_for_estimator(estimator)
                print(model0.params["n_estimators"], model0.estimator)

                automl.time_budget = None
                model, _ = automl._state._train_with_config(estimator, config)
                print(model.estimator)
                # model0 and model are equivalent unless model0's n_estimators
                # is outside the search space range
                assert (
                    str(model0.estimator) == str(model.estimator)
                    or model0.params["n_estimators"] < 4
                )

                # assuming estimator & config are saved and loaded as follows
                automl = AutoML()
                automl.fit(
                    X_train=X_train,
                    y_train=y_train,
                    max_iter=0,
                    task="regression",
                    estimator_list=[estimator],
                    n_jobs=1,
                    starting_points={estimator: config},
                )
                # then the fitted model should be equivalent to model
                # print(str(model.estimator), str(automl.model.estimator))
                assert str(model.estimator) == str(automl.model.estimator)

            with training_log_reader(filename) as reader:
                count = 0
                for record in reader.records():
                    print(record)
                    count += 1
                self.assertGreater(count, 0)
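
            # Run the same job again without writing a log file, then fit a
            # fresh AutoML instance with max_iter=0.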
            automl_settings["log_file_name"] = None
            automl.fit(X_train=X_train, y_train=y_train, **automl_settings)
            automl._selected.update(None, 0)
            automl = AutoML()
            automl.fit(X_train=X_train, y_train=y_train, max_iter=0, task="regression")

    def test_illfilename(self):
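        """Passing a directory as the log file name should raise
        IsADirectoryError on Linux or PermissionError on Windows."""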
        try:
            self.test_training_log("/")
        except IsADirectoryError:
            print("IsADirectoryError is raised as expected on Linux.")
        except PermissionError:
            print("PermissionError is raised as expected on Windows.")
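

# Allow running this test module directly, e.g. `python test_training_log.py`.
if __name__ == "__main__":
    unittest.main()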