* increase search space

* None check
Chi Wang 2021-03-28 17:54:25 -07:00 committed by GitHub
parent 8bcdb2a0c2
commit f28d093522
4 changed files with 28 additions and 28 deletions

View File

@@ -72,11 +72,11 @@ class BaseEstimator:
     def _fit(self, X_train, y_train, **kwargs):
-        curent_time = time.time()
+        current_time = time.time()
         X_train = self._preprocess(X_train)
         model = self.estimator_class(**self.params)
         model.fit(X_train, y_train, **kwargs)
-        train_time = time.time() - curent_time
+        train_time = time.time() - current_time
         self._model = model
         return train_time
@@ -187,16 +187,16 @@ class LGBMEstimator(BaseEstimator):
                 'domain': tune.qloguniform(lower=4, upper=upper, q=1),
                 'init_value': 4,
             },
-            'min_child_weight': {
-                'domain': tune.loguniform(lower=0.001, upper=20.0),
-                'init_value': 20.0,
+            'min_data_in_leaf': {
+                'domain': tune.qloguniform(lower=2, upper=2**7, q=1),
+                'init_value': 20,
             },
             'learning_rate': {
-                'domain': tune.loguniform(lower=0.01, upper=1.0),
+                'domain': tune.loguniform(lower=1/1024, upper=1.0),
                 'init_value': 0.1,
             },
             'subsample': {
-                'domain': tune.uniform(lower=0.6, upper=1.0),
+                'domain': tune.uniform(lower=0.1, upper=1.0),
                 'init_value': 1.0,
             },
             'log_max_bin': {
@@ -204,15 +204,15 @@ class LGBMEstimator(BaseEstimator):
                 'init_value': 8,
             },
             'colsample_bytree': {
-                'domain': tune.uniform(lower=0.7, upper=1.0),
+                'domain': tune.uniform(lower=0.01, upper=1.0),
                 'init_value': 1.0,
             },
             'reg_alpha': {
-                'domain': tune.loguniform(lower=1e-10, upper=1.0),
-                'init_value': 1e-10,
+                'domain': tune.loguniform(lower=1/1024, upper=1024),
+                'init_value': 1/1024,
             },
             'reg_lambda': {
-                'domain': tune.loguniform(lower=1e-10, upper=1.0),
+                'domain': tune.loguniform(lower=1/1024, upper=1024),
                 'init_value': 1.0,
             },
         }
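
Taken together, these hunks replace min_child_weight with a quantized min_data_in_leaf domain and widen every remaining LightGBM range by one or more orders of magnitude. A minimal sketch of the enlarged space, assuming flaml.tune mirrors ray.tune's domain API (including Domain.sample(), which is not shown in this diff):

# Sketch: drawing one configuration from the enlarged LGBM search space.
# Assumption: flaml.tune exposes the same domain constructors the diff uses.
from flaml import tune

space = {
    'min_data_in_leaf': tune.qloguniform(lower=2, upper=2**7, q=1),
    'learning_rate': tune.loguniform(lower=1 / 1024, upper=1.0),
    'subsample': tune.uniform(lower=0.1, upper=1.0),
    'colsample_bytree': tune.uniform(lower=0.01, upper=1.0),
    'reg_alpha': tune.loguniform(lower=1 / 1024, upper=1024),
    'reg_lambda': tune.loguniform(lower=1 / 1024, upper=1024),
}
# Each domain samples independently; log-scaled domains spread draws
# evenly across orders of magnitude rather than linearly.
config = {name: domain.sample() for name, domain in space.items()}
print(config)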
@@ -224,8 +224,8 @@ class LGBMEstimator(BaseEstimator):
         return (max_leaves*3 + (max_leaves-1)*4 + 1.0)*n_estimators*8

     def __init__(self, task='binary:logistic', n_jobs=1,
-                 n_estimators=2, max_leaves=2, min_child_weight=1e-3, learning_rate=0.1,
-                 subsample=1.0, reg_lambda=1.0, reg_alpha=0.0, colsample_bylevel=1.0,
+                 n_estimators=2, max_leaves=2, min_data_in_leaf=20, learning_rate=0.1,
+                 subsample=1.0, reg_lambda=1.0, reg_alpha=0.0,
                  colsample_bytree=1.0, log_max_bin=8, **params):
         super().__init__(task, **params)
         # Default: regression for LGBMRegressor,
@@ -239,13 +239,13 @@ class LGBMEstimator(BaseEstimator):
         else: objective = 'regression'
         self.params = {
             "n_estimators": int(round(n_estimators)),
-            "num_leaves": params.get('num_leaves', int(round(max_leaves))),
+            "max_leaves": int(round(max_leaves)),
             'objective': params.get("objective", objective),
             'n_jobs': n_jobs,
             'learning_rate': float(learning_rate),
             'reg_alpha': float(reg_alpha),
             'reg_lambda': float(reg_lambda),
-            'min_child_weight': float(min_child_weight),
+            'min_data_in_leaf': int(round(min_data_in_leaf)),
             'colsample_bytree':float(colsample_bytree),
             'subsample': float(subsample),
         }
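
With the rename, the constructor coerces the leaf-size constraint to an integer before handing it to LightGBM, which accepts max_leaves as an alias for num_leaves. A quick sketch against this version of the class, assuming it is importable from flaml.model:

# Sketch: the renamed argument flows straight into self.params.
# Assumption: this diff's file is flaml/model.py, so the import below works.
from flaml.model import LGBMEstimator

est = LGBMEstimator(n_estimators=100, max_leaves=32, min_data_in_leaf=20)
assert est.params['max_leaves'] == 32        # LightGBM alias of num_leaves
assert est.params['min_data_in_leaf'] == 20  # was min_child_weight (a float)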
@@ -310,31 +310,31 @@ class XGBoostEstimator(SKLearnEstimator):
                 'init_value': 4,
             },
             'min_child_weight': {
-                'domain': tune.loguniform(lower=0.001, upper=20.0),
-                'init_value': 20.0,
+                'domain': tune.loguniform(lower=0.001, upper=128),
+                'init_value': 1,
             },
             'learning_rate': {
-                'domain': tune.loguniform(lower=0.01, upper=1.0),
+                'domain': tune.loguniform(lower=1/1024, upper=1.0),
                 'init_value': 0.1,
             },
             'subsample': {
-                'domain': tune.uniform(lower=0.6, upper=1.0),
+                'domain': tune.uniform(lower=0.1, upper=1.0),
                 'init_value': 1.0,
             },
             'colsample_bylevel': {
-                'domain': tune.uniform(lower=0.6, upper=1.0),
+                'domain': tune.uniform(lower=0.01, upper=1.0),
                 'init_value': 1.0,
             },
             'colsample_bytree': {
-                'domain': tune.uniform(lower=0.7, upper=1.0),
+                'domain': tune.uniform(lower=0.01, upper=1.0),
                 'init_value': 1.0,
             },
             'reg_alpha': {
-                'domain': tune.loguniform(lower=1e-10, upper=1.0),
-                'init_value': 1e-10,
+                'domain': tune.loguniform(lower=1/1024, upper=1024),
+                'init_value': 1/1024,
             },
             'reg_lambda': {
-                'domain': tune.loguniform(lower=1e-10, upper=1.0),
+                'domain': tune.loguniform(lower=1/1024, upper=1024),
                 'init_value': 1.0,
             },
         }
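
The regularization ranges now span six orders of magnitude ([1/1024, 1024] instead of [1e-10, 1.0]), which is why they stay on a log scale: a loguniform draw is uniform in log space, so every order of magnitude gets equal probability. A self-contained numpy sketch of that behavior (an illustration, not FLAML's implementation):

# Sketch: loguniform sampling in plain numpy, to show why it suits
# wide ranges like reg_alpha's new [1/1024, 1024].
import numpy as np

rng = np.random.default_rng(0)

def loguniform(lower, upper, size=None):
    # Uniform in log space: equal mass per order of magnitude.
    return np.exp(rng.uniform(np.log(lower), np.log(upper), size))

draws = loguniform(1 / 1024, 1024, size=10_000)
print(np.mean(draws < 1.0))  # ~0.5: half the draws fall below 1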

View File

@@ -200,6 +200,7 @@ class FLOW2(Searcher):
     def step_lower_bound(self) -> float:
         step_lb = self._step_lb
         for key in self._tunable_keys:
+            if key not in self.best_config: continue
             domain = self.space[key]
             sampler = domain.get_sampler()
             if isinstance(sampler, sample.Quantized):
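
The added guard is the commit's "None check": it skips tunable keys that have no entry in best_config, so the best_config[key] lookups later in this loop cannot raise KeyError. A toy illustration of the failure mode; the dicts below are stand-ins, not FLOW2's actual state:

# Toy sketch: indexing a partial best_config without the guard would
# raise KeyError for 'subsample'.
best_config = {'learning_rate': 0.1}          # partial: no 'subsample' yet
tunable_keys = ['learning_rate', 'subsample']

for key in tunable_keys:
    if key not in best_config:
        continue                              # the added check
    print(key, best_config[key])              # safe: key is present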
@@ -499,7 +500,7 @@ class FLOW2(Searcher):
     def rand_vector_unit_sphere(self, dim) -> np.ndarray:
         vec = self._random.normal(0, 1, dim)
-        mag = np.linalg.norm(vec)
+        mag = np.linalg.norm(vec)
         return vec/mag

     def suggest(self, trial_id: str) -> Optional[Dict]:
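
The -/+ pair in the hunk above renders identically, so it is apparently a whitespace-only change; the method's behavior is unchanged. Drawing each component from a standard normal and normalizing yields a direction uniform on the unit sphere, because the multivariate normal is rotationally invariant. A standalone sketch:

# Sketch: normal-then-normalize gives a uniformly random direction.
# The standard multivariate normal has no preferred orientation, so
# vec/|vec| is uniform on the unit sphere.
import numpy as np

rng = np.random.default_rng(0)
vec = rng.normal(0, 1, 3)
unit = vec / np.linalg.norm(vec)
print(unit, np.linalg.norm(unit))  # norm is 1.0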
@@ -518,7 +519,6 @@
                     self._resource * self.resource_multiple_factor)
                 config = self.best_config.copy()
                 config[self.prune_attr] = self._resource
-                # self.incumbent[self.prune_attr] = self._resource
                 self._direction_tried = None
                 self._configs[trial_id] = config
                 return config

View File

@@ -1 +1 @@
-__version__ = "0.2.9"
+__version__ = "0.2.10"

View File

@@ -4,7 +4,7 @@ ws = Workspace.from_config()
 compute_target = ws.compute_targets['V100-4']
 # compute_target = ws.compute_targets['K80']
 command = [
-    "pip install torch transformers datasets flaml[blendsearch,ray] ax-platform sqlalchemy && ",
+    "pip install torch transformers datasets flaml[blendsearch,ray] && ",
     "python test_electra.py"]
 config = ScriptRunConfig(
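
This hunk trims ax-platform and sqlalchemy from the install command, leaving only what the electra test needs. For context, a hedged sketch of how such a submission script typically completes; the experiment name and source directory below are assumptions, not part of the diff (ScriptRunConfig's command/compute_target parameters and Experiment.submit are standard azureml-core APIs):

# Sketch: a typical completion of this AzureML submission script.
# 'electra' and source_directory='.' are assumptions, not from the diff.
from azureml.core import Experiment, ScriptRunConfig, Workspace

ws = Workspace.from_config()
compute_target = ws.compute_targets['V100-4']
command = [
    "pip install torch transformers datasets flaml[blendsearch,ray] && ",
    "python test_electra.py"]
config = ScriptRunConfig(
    source_directory='.',            # assumption
    command=command,
    compute_target=compute_target)
run = Experiment(ws, 'electra').submit(config)  # experiment name assumed
print(run.get_portal_url())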