pred_time_limit clarification and logging (#319)
* pred_time_limit clarification
* log prediction time
* handle ChunkedEncodingError in test

This commit is contained in:
parent fb59bb9928
commit 18230ed22f
@@ -495,6 +495,7 @@ class AutoML(BaseEstimator):
                 metric for each model.
             mem_thres: A float of the memory size constraint in bytes.
             pred_time_limit: A float of the prediction latency constraint in seconds.
+                It refers to the average prediction time per row in validation data.
             train_time_limit: A float of the training time constraint in seconds.
             verbose: int, default=3 | Controls the verbosity, higher means more
                 messages.
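For illustration, a minimal usage sketch of the clarified constraint, assuming pred_time_limit is passed to AutoML.fit alongside the other constraints listed above; the iris data, the 10-second budget, and the 1e-5 threshold are illustrative only:

from flaml import AutoML
from sklearn.datasets import load_iris

X_train, y_train = load_iris(return_X_y=True)
automl = AutoML()
automl.fit(
    X_train,
    y_train,
    task="classification",
    time_budget=10,
    # Constraint on the average prediction time per row of validation data,
    # in seconds, as the clarified docstring above explains.
    pred_time_limit=1e-5,
)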
@@ -1751,6 +1752,7 @@ class AutoML(BaseEstimator):
                 metric for each model.
             mem_thres: A float of the memory size constraint in bytes.
             pred_time_limit: A float of the prediction latency constraint in seconds.
+                It refers to the average prediction time per row in validation data.
             train_time_limit: A float of the training time constraint in seconds.
             X_val: None or a numpy array or a pandas dataframe of validation data.
             y_val: None or a numpy array or a pandas series of validation labels.
@@ -206,7 +206,7 @@ def _eval_estimator(
        val_loss = sklearn_metric_loss_score(
            eval_metric, val_pred_y, y_val, labels, weight_val, groups_val
        )
-        metric_for_logging = {}
+        metric_for_logging = {"pred_time": pred_time}
        if log_training_metric:
            train_pred_y = get_y_pred(estimator, X_train, eval_metric, obj)
            metric_for_logging["train_loss"] = sklearn_metric_loss_score(
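A hedged sketch of what the newly logged "pred_time" value represents: an average per-row prediction latency measured around estimator.predict. The helper below is illustrative only, not the library's actual implementation:

import time

def measure_pred_time(estimator, X_val):
    # Illustrative only: average prediction latency per row, in seconds,
    # which is what the "pred_time" entry added above is meant to record.
    start = time.time()
    estimator.predict(X_val)
    return (time.time() - start) / len(X_val)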
@@ -1,4 +1,5 @@
 from openml.exceptions import OpenMLServerException
+from requests.exceptions import ChunkedEncodingError


 def test_automl(budget=5, dataset_format="dataframe", hpo_method=None):
@@ -8,8 +9,8 @@ def test_automl(budget=5, dataset_format="dataframe", hpo_method=None):
        X_train, X_test, y_train, y_test = load_openml_dataset(
            dataset_id=1169, data_dir="test/", dataset_format=dataset_format
        )
-    except OpenMLServerException:
-        print("OpenMLServerException raised")
+    except (OpenMLServerException, ChunkedEncodingError) as e:
+        print(e)
        return
    """ import AutoML class from flaml package """
    from flaml import AutoML
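The same guard pattern in isolation, as a self-contained sketch: a flaky OpenML download (server error or a connection cut mid-stream) makes the test skip instead of fail. The import path flaml.data, the helper name try_load, and the dataset id are taken from or assumed for this illustration and may differ across FLAML versions:

from openml.exceptions import OpenMLServerException
from requests.exceptions import ChunkedEncodingError
from flaml.data import load_openml_dataset

def try_load(dataset_id=1169, data_dir="test/"):
    # Return the data if the download succeeds; otherwise report the error
    # and return None so the caller can skip gracefully.
    try:
        return load_openml_dataset(dataset_id=dataset_id, data_dir=data_dir)
    except (OpenMLServerException, ChunkedEncodingError) as e:
        print(e)
        return None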
@@ -84,8 +85,8 @@ def test_mlflow():
        X_train, X_test, y_train, y_test = load_openml_task(
            task_id=7592, data_dir="test/"
        )
-    except OpenMLServerException:
-        print("OpenMLServerException raised")
+    except (OpenMLServerException, ChunkedEncodingError) as e:
+        print(e)
        return
    """ import AutoML class from flaml package """
    from flaml import AutoML