mirror of
https://github.com/microsoft/autogen.git
synced 2025-09-19 21:24:10 +00:00

* Added spark support for parallel training. * Added tests and fixed a bug * Added more tests and updated docs * Updated setup.py and docs * Added customize_learner and tests * Update spark tests and setup.py * Update docs and verbose * Update logging, fix issue in cloud notebook * Update github workflow for spark tests * Update github workflow * Remove hack of handling _choice_ * Allow for failures * Fix tests, update docs * Update setup.py * Update Dockerfile for Spark * Update tests, remove some warnings * Add test for notebooks, update utils * Add performance test for Spark * Fix lru_cache maxsize * Fix test failures on some platforms * Fix coverage report failure * resolve PR comments * resolve PR comments 2nd round * resolve PR comments 3rd round * fix lint and rename test class * resolve PR comments 4th round * refactor customize_learner to broadcast_code
42 lines
1.2 KiB
Python
import nbformat
|
|
from nbconvert.preprocessors import ExecutePreprocessor
|
|
from nbconvert.preprocessors import CellExecutionError
|
|
from flaml.tune.spark.utils import check_spark
|
|
import os
|
|
import pytest
|
|
|
|
spark_available, _ = check_spark()
|
|
skip_spark = not spark_available
|
|
|
|
pytestmark = pytest.mark.skipif(
|
|
skip_spark, reason="Spark is not installed. Skip all spark tests."
|
|
)
|
|
|
|
here = os.path.abspath(os.path.dirname(__file__))
|
|
os.environ["FLAML_MAX_CONCURRENT"] = "2"
|
|
|
|
|
|
def run_notebook(input_nb, output_nb="executed_notebook.ipynb", save=False):
|
|
try:
|
|
file_path = os.path.join(here, os.pardir, os.pardir, "notebook", input_nb)
|
|
with open(file_path) as f:
|
|
nb = nbformat.read(f, as_version=4)
|
|
ep = ExecutePreprocessor(timeout=600, kernel_name="python3")
|
|
ep.preprocess(nb, {"metadata": {"path": here}})
|
|
except CellExecutionError:
|
|
raise
|
|
except Exception as e:
|
|
print("\nIgnoring below error:\n", e, "\n\n")
|
|
finally:
|
|
if save:
|
|
with open(os.path.join(here, output_nb), "w", encoding="utf-8") as f:
|
|
nbformat.write(nb, f)
|
|
|
|
|
|
def test_automl_lightgbm_test():
|
|
run_notebook("integrate_spark.ipynb")
|
|
|
|
|
|
if __name__ == "__main__":
|
|
test_automl_lightgbm_test()
|