Fix #9448: Add ES volumes (#9506)

* Add ES volumes

* Fix run_local_docker script

* Fix error in run_local_docker script

* Update ES volumes in docker-compose files
Nahuel 2022-12-23 17:33:30 +01:00 committed by GitHub
parent c931ee3447
commit 2c43ebba6f
7 changed files with 33 additions and 16 deletions


@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   postgresql:
     build:
@@ -54,6 +55,8 @@ services:
     ports:
       - "9200:9200"
      - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
   openmetadata-server:
     build:

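Without a named volume, Elasticsearch keeps its indices in the container's writable layer, so they vanish whenever the container is recreated; mounting es-data at /usr/share/elasticsearch/data puts them on a Docker-managed volume that survives docker compose down. A minimal sketch of how one could verify the persistence with python_on_whales (the library this PR pins below); the compose file path and the derived volume name openmetadata_es-data (Compose prefixes volumes with the project name) are assumptions here:

    from python_on_whales import DockerClient

    # Assumed project name and compose file path, for illustration only.
    client = DockerClient(
        compose_project_name="openmetadata",
        compose_files=["docker/local-metadata/docker-compose.yml"],
    )

    client.compose.down()  # removes containers but keeps named volumes
    names = [volume.name for volume in client.volume.list()]
    assert "openmetadata_es-data" in names  # the index data is still there

    client.compose.up(detach=True)  # Elasticsearch reopens the same indices

The same three added lines appear in all four compose files below, so one check covers every variant.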

@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   mysql:
     build:
@@ -53,6 +54,8 @@ services:
     ports:
       - "9200:9200"
      - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
   openmetadata-server:
     build:


@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   postgresql:
     container_name: openmetadata_postgresql
@@ -48,6 +49,8 @@ services:
     ports:
       - "9200:9200"
      - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
   openmetadata-server:
     container_name: openmetadata_server


@@ -14,6 +14,7 @@ volumes:
   ingestion-volume-dag-airflow:
   ingestion-volume-dags:
   ingestion-volume-tmp:
+  es-data:
 services:
   mysql:
     container_name: openmetadata_mysql
@@ -46,6 +47,8 @@ services:
     ports:
       - "9200:9200"
      - "9300:9300"
+    volumes:
+      - es-data:/usr/share/elasticsearch/data
   openmetadata-server:
     container_name: openmetadata_server


@@ -60,7 +60,11 @@ else
echo "Skipping Maven Build"
fi
#cd docker/local-metadata || exit
RESULT=$?
if [ $RESULT -ne 0 ]; then
echo "Failed to run Maven build!"
exit 1
fi
if [[ $debugOM == "true" ]]; then
export OPENMETADATA_DEBUG=true
@@ -74,16 +78,16 @@
fi
fi
if [[ $VIRTUAL_ENV == "" ]];
then
    echo "Please Use Virtual Environment and make sure to generate Pydantic Models";
else
    echo "Generating Pydantic Models";
    make install_dev generate
fi
echo "Stopping any previous Local Docker Containers"
docker compose -f docker/local-metadata/docker-compose-postgres.yml down
docker compose -f docker/local-metadata/docker-compose.yml down
echo "Starting Local Docker Containers"
@@ -95,14 +99,22 @@ else
docker compose -f docker/local-metadata/docker-compose.yml build --build-arg INGESTION_DEPENDENCY="${INGESTION_DEPENDENCY:-all}" && docker compose -f docker/local-metadata/docker-compose.yml up --build -d
fi
RESULT=$?
if [ $RESULT -ne 0 ]; then
echo "Failed to start Docker instances!"
exit 1
fi
until curl -s -f "http://localhost:9200/_cat/indices/team_search_index"; do
printf 'Checking if Elastic Search instance is up...\n'
sleep 5
done
until curl -s -f --header 'Authorization: Basic YWRtaW46YWRtaW4=' "http://localhost:8080/api/v1/dags/sample_data"; do
printf 'Checking if Sample Data DAG is reachable...\n'
sleep 5
done
curl --location --request PATCH 'localhost:8080/api/v1/dags/sample_data' \
--header 'Authorization: Basic YWRtaW46YWRtaW4=' \
--header 'Content-Type: application/json' \
@@ -110,10 +122,9 @@ curl --location --request PATCH 'localhost:8080/api/v1/dags/sample_data' \
"is_paused": false
}'
cd ../
printf 'Validate sample data DAG...'
sleep 5
-python validate_compose.py
+python docker/validate_compose.py
until curl -s -f --header "Authorization: Bearer $authorizationToken" "http://localhost:8585/api/v1/tables/name/sample_data.ecommerce_db.shopify.fact_sale"; do
printf 'Waiting on Sample Data Ingestion to complete...\n'

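The Authorization: Basic YWRtaW46YWRtaW4= header hard-coded into these readiness checks is simply the base64 encoding of Airflow's default admin:admin credentials, which a two-line check confirms:

    import base64

    # Decode the token the script sends to the local Airflow API.
    assert base64.b64decode("YWRtaW46YWRtaW4=") == b"admin:admin"

    # Rebuilding the same header, e.g. for different credentials:
    token = base64.b64encode(b"admin:admin").decode()
    headers = {"Authorization": f"Basic {token}"}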

@@ -79,7 +79,7 @@ plugins: Dict[str, Set[str]] = {
         "google-cloud-datacatalog==3.6.2",
     },
     "bigquery-usage": {"google-cloud-logging", "cachetools"},
-    "docker": {"python_on_whales==0.34.0"},
+    "docker": {"python_on_whales==0.55.0"},
     "backup": {"boto3~=1.19.12", "azure-identity", "azure-storage-blob"},
     "dagster": {"pymysql>=1.0.2", "psycopg2-binary", "GeoAlchemy2", "dagster_graphql"},
     "datalake-s3": {

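The pin jumps from 0.34.0 to 0.55.0, presumably so that the compose_project_directory argument used in the CLI change below is available (the PR does not state the reason, so treat that as an assumption). A quick sanity check that the installed distribution matches the new pin:

    from importlib.metadata import version

    # "python-on-whales" is the distribution name on PyPI.
    assert version("python-on-whales") == "0.55.0"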

@@ -72,13 +72,6 @@ def docker_volume():
     # create a main directory
     if not os.path.exists(MAIN_DIR):
         os.mkdir(MAIN_DIR)
-    path_to_join = ["db-data", "es-data"]
-    final_path = []
-    for path in path_to_join:
-        temp_path = os.path.join(MAIN_DIR, path)
-        final_path.append(temp_path)
-    for path in final_path:
-        os.makedirs(path, exist_ok=True)

 def start_docker(docker, start_time, file_path, ingest_sample_data: bool):
@@ -246,6 +239,7 @@ def run_docker( # pylint: disable=too-many-branches too-many-statements
         compose_project_name="openmetadata",
         compose_files=[docker_compose_file_path],
         compose_env_file=env_file,
+        compose_project_directory=pathlib.Path(),
     )
     if docker_obj_instance.start:
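With the named es-data volume now covering persistence, docker_volume() no longer has to pre-create db-data and es-data directories on the host; the only client-side addition is compose_project_directory, which anchors relative paths inside the compose file at the current working directory. A minimal sketch of the resulting client construction; the compose file and env file paths are illustrative, not the CLI's actual values:

    import pathlib

    from python_on_whales import DockerClient

    docker_client = DockerClient(
        compose_project_name="openmetadata",
        compose_files=[pathlib.Path("docker/docker-compose.yml")],  # illustrative
        compose_env_file="docker/env",  # illustrative
        compose_project_directory=pathlib.Path(),  # resolve relative paths from cwd
    )
    docker_client.compose.up(detach=True)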