Mirror of https://github.com/microsoft/autogen.git
	Simplify lgbm example (#358)
* simplify lgbm examples
* provide link to lgbm example script.
* simplify lgbm example in the example script.

Co-authored-by: Chi Wang <wang.chi@microsoft.com>
parent dcfd218108
commit c6c0c29769
@@ -13,9 +13,8 @@ train_x, test_x, train_y, test_y = train_test_split(data, target, test_size=0.25
 def train_breast_cancer(config):
     params = LGBMEstimator(**config).params
-    num_boost_round = params.pop("n_estimators")
     train_set = lgb.Dataset(train_x, label=train_y)
-    gbm = lgb.train(params, train_set, num_boost_round)
+    gbm = lgb.train(params, train_set)
     preds = gbm.predict(test_x)
     pred_labels = np.rint(preds)
     tune.report(
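For context on why the dropped line is safe: LightGBM documents `n_estimators` as an alias of its `num_iterations` parameter, so leaving it inside the `params` dict passed to `lgb.train` has the same effect as popping it and passing `num_boost_round` explicitly. Below is a minimal standalone sketch of the new pattern; the concrete parameter values are illustrative only (in the real example, `params` comes from `LGBMEstimator(**config).params` under FLAML's tuner):

```python
import lightgbm as lgb
import numpy as np
from sklearn.datasets import load_breast_cancer
from sklearn.model_selection import train_test_split

data, target = load_breast_cancer(return_X_y=True)
train_x, test_x, train_y, test_y = train_test_split(data, target, test_size=0.25)

# illustrative params; "n_estimators" stays in the dict because LightGBM
# treats it as an alias of num_iterations
params = {"objective": "binary", "n_estimators": 100, "learning_rate": 0.1}
train_set = lgb.Dataset(train_x, label=train_y)
gbm = lgb.train(params, train_set)
preds = gbm.predict(test_x)   # predicted probabilities for the positive class
pred_labels = np.rint(preds)  # round to 0/1 class labels
```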
@@ -14,10 +14,9 @@ X_train, X_test, y_train, y_test = train_test_split(
 def train_lgbm(config: dict) -> dict:
     # convert config dict to lgbm params
     params = LGBMEstimator(**config).params
-    num_boost_round = params.pop("n_estimators")
     # train the model
     train_set = lightgbm.Dataset(X_train, y_train)
-    model = lightgbm.train(params, train_set, num_boost_round)
+    model = lightgbm.train(params, train_set)
     # evaluate the model
     pred = model.predict(X_test)
     mse = mean_squared_error(y_test, pred)
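The hunk above cuts off before the function returns. Here is a runnable sketch of the whole simplified `train_lgbm`, assuming the example's dataset is `fetch_california_housing` and that the function reports its metric by returning `{"mse": mse}`; both are assumptions, since the diff shows neither:

```python
import lightgbm
from flaml.model import LGBMEstimator
from sklearn.datasets import fetch_california_housing
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import train_test_split

# assumed dataset; the diff context only shows the train_test_split call
X_train, X_test, y_train, y_test = train_test_split(
    *fetch_california_housing(return_X_y=True), test_size=0.25
)

def train_lgbm(config: dict) -> dict:
    # convert config dict to lgbm params
    params = LGBMEstimator(**config).params
    # train the model; n_estimators stays in params (alias of num_iterations)
    train_set = lightgbm.Dataset(X_train, y_train)
    model = lightgbm.train(params, train_set)
    # evaluate the model
    pred = model.predict(X_test)
    mse = mean_squared_error(y_test, pred)
    # return the metric for the tuner to minimize (assumed key name)
    return {"mse": mse}
```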
@@ -49,10 +49,9 @@ from flaml.model import LGBMEstimator
 def train_lgbm(config: dict) -> dict:
     # convert config dict to lgbm params
     params = LGBMEstimator(**config).params
-    num_boost_round = params.pop("n_estimators")
     # train the model
     train_set = lightgbm.Dataset(X_train, y_train)
-    model = lightgbm.train(params, train_set, num_boost_round)
+    model = lightgbm.train(params, train_set)
     # evaluate the model
     pred = model.predict(X_test)
     mse = mean_squared_error(y_test, pred)
@@ -75,6 +74,7 @@ analysis = tune.run(
     low_cost_partial_config=low_cost_partial_config, time_budget_s=3, num_samples=-1,
 )
 ```
+Please see this [script](https://github.com/microsoft/FLAML/blob/main/test/tune.py) for the complete version of the above example.
 
 ### Where to Go Next?
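To make the tail of that `tune.run` call concrete, here is a hedged end-to-end sketch of how `config_search_space` and `low_cost_partial_config` might be built from FLAML's estimator metadata. The `LGBMEstimator.search_space(X_train.shape)` call, the `metric="mse"` / `mode="min"` arguments, and `analysis.best_config` are assumptions based on the surrounding context and the linked script, not shown in this diff:

```python
from flaml import tune
from flaml.model import LGBMEstimator

# assumed: FLAML exposes a per-estimator search space keyed by hyperparameter
flaml_lgbm_search_space = LGBMEstimator.search_space(X_train.shape)
config_search_space = {
    hp: space["domain"] for hp, space in flaml_lgbm_search_space.items()
}
# hint to the tuner which hyperparameter values give a cheap first trial
low_cost_partial_config = {
    hp: space["low_cost_init_value"]
    for hp, space in flaml_lgbm_search_space.items()
    if "low_cost_init_value" in space
}

analysis = tune.run(
    train_lgbm,
    config=config_search_space,
    metric="mse",      # assumed: matches the key returned by train_lgbm
    mode="min",
    low_cost_partial_config=low_cost_partial_config,
    time_budget_s=3,   # stop after 3 seconds of wall-clock tuning
    num_samples=-1,    # no cap on the number of trials
)
print(analysis.best_config)
```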