From 2c43ebba6f57e6d78e822d2875d3abdbfd0f1021 Mon Sep 17 00:00:00 2001
From: Nahuel
Date: Fri, 23 Dec 2022 17:33:30 +0100
Subject: [PATCH] Fix #9448: Add ES volumes (#9506)

* Add ES volumes

* Fix run_local_docker script

* Fix error in run_local_docker script

* Update ES volumes in docker-compose files
---
 .../docker-compose-postgres.yml              |  3 +++
 docker/local-metadata/docker-compose.yml     |  3 +++
 docker/metadata/docker-compose-postgres.yml  |  3 +++
 docker/metadata/docker-compose.yml           |  3 +++
 docker/run_local_docker.sh                   | 27 +++++++++++++------
 ingestion/setup.py                           |  2 +-
 ingestion/src/metadata/cli/docker.py         |  8 +-----
 7 files changed, 33 insertions(+), 16 deletions(-)

diff --git a/docker/local-metadata/docker-compose-postgres.yml b/docker/local-metadata/docker-compose-postgres.yml
index b48664c5d46..d6387c29f8a 100644
--- a/docker/local-metadata/docker-compose-postgres.yml
+++ b/docker/local-metadata/docker-compose-postgres.yml
@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   postgresql:
     build:
@@ -54,6 +55,8 @@ services:
     ports:
       - "9200:9200"
       - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
 
   openmetadata-server:
     build:
diff --git a/docker/local-metadata/docker-compose.yml b/docker/local-metadata/docker-compose.yml
index ad87107e673..bd3c88077d0 100644
--- a/docker/local-metadata/docker-compose.yml
+++ b/docker/local-metadata/docker-compose.yml
@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   mysql:
     build:
@@ -53,6 +54,8 @@ services:
     ports:
       - "9200:9200"
       - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
 
   openmetadata-server:
     build:
diff --git a/docker/metadata/docker-compose-postgres.yml b/docker/metadata/docker-compose-postgres.yml
index a89663c0525..d53dc23433b 100644
--- a/docker/metadata/docker-compose-postgres.yml
+++ b/docker/metadata/docker-compose-postgres.yml
@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   postgresql:
     container_name: openmetadata_postgresql
@@ -48,6 +49,8 @@ services:
     ports:
       - "9200:9200"
       - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
 
   openmetadata-server:
     container_name: openmetadata_server
diff --git a/docker/metadata/docker-compose.yml b/docker/metadata/docker-compose.yml
index b34e410c20c..792b857ba0a 100644
--- a/docker/metadata/docker-compose.yml
+++ b/docker/metadata/docker-compose.yml
@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   mysql:
     container_name: openmetadata_mysql
@@ -46,6 +47,8 @@ services:
     ports:
       - "9200:9200"
       - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
 
   openmetadata-server:
     container_name: openmetadata_server
diff --git a/docker/run_local_docker.sh b/docker/run_local_docker.sh
index 6aba46e17c5..61a607f29fa 100755
--- a/docker/run_local_docker.sh
+++ b/docker/run_local_docker.sh
@@ -60,7 +60,11 @@ else
   echo "Skipping Maven Build"
 fi
 
-#cd docker/local-metadata || exit
+RESULT=$?
+if [ $RESULT -ne 0 ]; then
+  echo "Failed to run Maven build!"
+  exit 1
+fi
 
 if [[ $debugOM == "true" ]]; then
   export OPENMETADATA_DEBUG=true
@@ -74,16 +78,16 @@ then
   fi
 fi
 
-if [[ $VIRTUAL_ENV == "" ]]; 
-then 
-  echo "Please Use Virtual Environment and make sure to generate Pydantic Models"; 
+if [[ $VIRTUAL_ENV == "" ]];
+then
+  echo "Please Use Virtual Environment and make sure to generate Pydantic Models";
 else
-  echo "Generating Pydantic Models"; 
+  echo "Generating Pydantic Models";
   make install_dev generate
 fi
 
 echo "Stopping any previous Local Docker Containers"
-docker compose -f docker/local-metadata/docker-compose-postgres.yml down 
+docker compose -f docker/local-metadata/docker-compose-postgres.yml down
 docker compose -f docker/local-metadata/docker-compose.yml down
 
 echo "Starting Local Docker Containers"
@@ -95,14 +99,22 @@
 else
   docker compose -f docker/local-metadata/docker-compose.yml build --build-arg INGESTION_DEPENDENCY="${INGESTION_DEPENDENCY:-all}" && docker compose -f docker/local-metadata/docker-compose.yml up --build -d
 fi
+RESULT=$?
+if [ $RESULT -ne 0 ]; then
+  echo "Failed to start Docker instances!"
+  exit 1
+fi
+
 until curl -s -f "http://localhost:9200/_cat/indices/team_search_index"; do
   printf 'Checking if Elastic Search instance is up...\n'
   sleep 5
 done
+
 until curl -s -f --header 'Authorization: Basic YWRtaW46YWRtaW4=' "http://localhost:8080/api/v1/dags/sample_data"; do
   printf 'Checking if Sample Data DAG is reachable...\n'
   sleep 5
 done
+
 curl --location --request PATCH 'localhost:8080/api/v1/dags/sample_data' \
 --header 'Authorization: Basic YWRtaW46YWRtaW4=' \
 --header 'Content-Type: application/json' \
@@ -110,10 +122,9 @@ curl --location --request PATCH 'localhost:8080/api/v1/dags/sample_data' \
     "is_paused": false
 }'
 
-cd ../
 printf 'Validate sample data DAG...'
 sleep 5
-python validate_compose.py
+python docker/validate_compose.py
 
 until curl -s -f --header "Authorization: Bearer $authorizationToken" "http://localhost:8585/api/v1/tables/name/sample_data.ecommerce_db.shopify.fact_sale"; do
   printf 'Waiting on Sample Data Ingestion to complete...\n'
diff --git a/ingestion/setup.py b/ingestion/setup.py
index 05ab60146af..ad509209476 100644
--- a/ingestion/setup.py
+++ b/ingestion/setup.py
@@ -79,7 +79,7 @@ plugins: Dict[str, Set[str]] = {
         "google-cloud-datacatalog==3.6.2",
     },
     "bigquery-usage": {"google-cloud-logging", "cachetools"},
-    "docker": {"python_on_whales==0.34.0"},
+    "docker": {"python_on_whales==0.55.0"},
     "backup": {"boto3~=1.19.12", "azure-identity", "azure-storage-blob"},
     "dagster": {"pymysql>=1.0.2", "psycopg2-binary", "GeoAlchemy2", "dagster_graphql"},
     "datalake-s3": {
diff --git a/ingestion/src/metadata/cli/docker.py b/ingestion/src/metadata/cli/docker.py
index cc2237d7121..e160da30ec9 100644
--- a/ingestion/src/metadata/cli/docker.py
+++ b/ingestion/src/metadata/cli/docker.py
@@ -72,13 +72,6 @@ def docker_volume():
     # create a main directory
     if not os.path.exists(MAIN_DIR):
         os.mkdir(MAIN_DIR)
-    path_to_join = ["db-data", "es-data"]
-    final_path = []
-    for path in path_to_join:
-        temp_path = os.path.join(MAIN_DIR, path)
-        final_path.append(temp_path)
-    for path in final_path:
-        os.makedirs(path, exist_ok=True)
 
 
 def start_docker(docker, start_time, file_path, ingest_sample_data: bool):
@@ -246,6 +239,7 @@ def run_docker(  # pylint: disable=too-many-branches too-many-statements
             compose_project_name="openmetadata",
             compose_files=[docker_compose_file_path],
             compose_env_file=env_file,
+            compose_project_directory=pathlib.Path(),
         )
 
         if docker_obj_instance.start: