Added default volumes (#7741)

* Added default volumes

* Ran make py_format for formatting

* Added sample data in mount directories

* Updated as per the comments

* Done changes for --ingest_sample_data flag

Co-authored-by: Vijay <vijay.l@deuexsolutions.com>
This commit is contained in:
Vj-L 2022-09-29 14:53:55 +05:30 committed by GitHub
parent 13e507b97f
commit 6cfd16925a
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
6 changed files with 124 additions and 41 deletions

View File

@ -10,11 +10,6 @@
# limitations under the License.
version: "3.9"
volumes:
ingestion-volume-dag-airflow:
ingestion-volume-dags:
ingestion-volume-tmp:
services:
postgresql:
build:
@ -29,6 +24,8 @@ services:
POSTGRES_PASSWORD: password
expose:
- 5432
volumes:
- $PWD/docker-volume/db-data:/var/lib/postgresql
ports:
- "5432:5432"
networks:
@ -128,6 +125,8 @@ services:
expose:
- 8585
- 8586
volumes:
- $PWD/docker-volume/om-server:/tmp
ports:
- "8585:8585"
- "8586:8586"
@ -179,9 +178,9 @@ services:
networks:
- local_app_net
volumes:
- ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- ingestion-volume-dags:/opt/airflow/dags
- ingestion-volume-tmp:/tmp
- $PWD/docker-volume/ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- $PWD/docker-volume/ingestion-volume-dags:/opt/airflow/dags
- $PWD/docker-volume/ingestion-volume-tmp:/tmp
networks:
local_app_net:

View File

@ -10,11 +10,6 @@
# limitations under the License.
version: "3.9"
volumes:
ingestion-volume-dag-airflow:
ingestion-volume-dags:
ingestion-volume-tmp:
services:
mysql:
build:
@ -28,6 +23,8 @@ services:
MYSQL_ROOT_PASSWORD: password
expose:
- 3306
volumes:
- $PWD/docker-volume/db-data:/var/lib/mysql
ports:
- "3306:3306"
networks:
@ -126,6 +123,8 @@ services:
expose:
- 8585
- 8586
volumes:
- $PWD/docker-volume/om-server:/tmp
ports:
- "8585:8585"
- "8586:8586"
@ -177,9 +176,9 @@ services:
networks:
- local_app_net
volumes:
- ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- ingestion-volume-dags:/opt/airflow/dags
- ingestion-volume-tmp:/tmp
- $PWD/docker-volume/ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- $PWD/docker-volume/ingestion-volume-dags:/opt/airflow/dags
- $PWD/docker-volume/ingestion-volume-tmp:/tmp
networks:
local_app_net:

View File

@ -10,11 +10,6 @@
# limitations under the License.
version: "3.9"
volumes:
ingestion-volume-dag-airflow:
ingestion-volume-dags:
ingestion-volume-tmp:
services:
postgresql:
container_name: openmetadata_postgresql
@ -25,6 +20,8 @@ services:
POSTGRES_PASSWORD: password
expose:
- 5432
volumes:
- $PWD/docker-volume/db-data:/var/lib/postgresql
ports:
- "5432:5432"
networks:
@ -118,6 +115,8 @@ services:
expose:
- 8585
- 8586
volumes:
- $PWD/docker-volume/om-server:/tmp
ports:
- "8585:8585"
- "8586:8586"
@ -165,9 +164,9 @@ services:
networks:
- app_net
volumes:
- ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- ingestion-volume-dags:/opt/airflow/dags
- ingestion-volume-tmp:/tmp
- $PWD/docker-volume/ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- $PWD/docker-volume/ingestion-volume-dags:/opt/airflow/dags
- $PWD/docker-volume/ingestion-volume-tmp:/tmp
networks:
app_net:

View File

@ -10,11 +10,6 @@
# limitations under the License.
version: "3.9"
volumes:
ingestion-volume-dag-airflow:
ingestion-volume-dags:
ingestion-volume-tmp:
services:
mysql:
container_name: openmetadata_mysql
@ -24,6 +19,8 @@ services:
MYSQL_ROOT_PASSWORD: password
expose:
- 3306
volumes:
- $PWD/docker-volume/db-data:/var/lib/mysql
networks:
- app_net
healthcheck:
@ -118,6 +115,8 @@ services:
- 9200
- 9300
- 3306
volumes:
- $PWD/docker-volume/om-server:/tmp
ports:
- "8585:8585"
- "8586:8586"
@ -165,9 +164,10 @@ services:
networks:
- app_net
volumes:
- ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- ingestion-volume-dags:/opt/airflow/dags
- ingestion-volume-tmp:/tmp
- $PWD/docker-volume/ingestion-volume-dag-airflow:/opt/airflow/dag_generated_configs
- $PWD/docker-volume/ingestion-volume-dags:/opt/airflow/dags
- $PWD/docker-volume/ingestion-volume-tmp:/tmp
networks:
app_net:

View File

@ -55,20 +55,30 @@ else
echo "Skipping Maven Build"
fi
cd docker/local-metadata || exit
#cd docker/local-metadata || exit
echo "Stopping any previous Local Docker Containers"
docker compose -f docker-compose-postgres.yml down
docker compose down
docker compose -f docker/local-metadata/docker-compose-postgres.yml down
docker compose -f docker/local-metadata/docker-compose.yml down
if [ -d "/docker-volume" ]
then
echo "Do you want to delete the mounted directories from host?"
echo "Please enter [y/N]"
read input
if [[ $input == "Y" || $input == "y" ]]; then
rm -rf $PWD/docker-volume
fi
fi
echo "Starting Local Docker Containers"
mkdir docker-volume && mkdir docker-volume/db-data && mkdir docker-volume/ingestion-volume-dag-airflow && mkdir docker-volume/ingestion-volume-dags && mkdir docker-volume/ingestion-volume-tmp && mkdir docker-volume/om-server
cp -r ingestion/examples/airflow/dags/ docker-volume/ingestion-volume-dags/
echo "Using ingestion dependency: ${INGESTION_DEPENDENCY:-all}"
if [[ $database == "postgresql" ]]; then
docker compose -f docker-compose-postgres.yml build --build-arg INGESTION_DEPENDENCY="${INGESTION_DEPENDENCY:-all}" && docker compose -f docker-compose-postgres.yml up -d
docker compose -f docker/local-metadata/docker-compose-postgres.yml build --build-arg INGESTION_DEPENDENCY="${INGESTION_DEPENDENCY:-all}" && docker compose -f docker/local-metadata/docker-compose-postgres.yml up -d
else
docker compose build --build-arg INGESTION_DEPENDENCY="${INGESTION_DEPENDENCY:-all}" && docker compose up --build -d
docker compose -f docker/local-metadata/docker-compose.yml build --build-arg INGESTION_DEPENDENCY="${INGESTION_DEPENDENCY:-all}" && docker compose -f docker/local-metadata/docker-compose.yml up --build -d
fi
until curl -s -f "http://localhost:9200/_cat/indices/team_search_index"; do

View File

@ -12,7 +12,9 @@
Docker functions for CLI
"""
import json
import os
import pathlib
import shutil
import sys
import tempfile
import time
@ -27,6 +29,9 @@ from metadata.generated.schema.entity.data.table import Table
from metadata.generated.schema.entity.services.connections.metadata.openMetadataConnection import (
OpenMetadataConnection,
)
from metadata.generated.schema.security.client.openMetadataJWTClientConfig import (
OpenMetadataJWTClientConfig,
)
from metadata.ingestion.ometa.client import REST, ClientConfig
from metadata.ingestion.ometa.ometa_api import OpenMetadata
from metadata.utils.client_version import get_client_version
@ -35,19 +40,81 @@ from metadata.utils.logger import cli_logger, ometa_logger
logger = cli_logger()
calc_gb = 1024 * 1024 * 1024
min_memory_limit = 6 * calc_gb
MAIN_DIR = "docker-volume"
AIRFLOW_DAGS_FILE = "ingestion/examples/airflow/dags"
DAG_VOLUME_BASE_PATH = "docker-volume/ingestion-volume-dags/"
MAIN_DIR = "docker-volume"
RELEASE_BRANCH_VERSION = get_client_version()
DOCKER_URL_ROOT = f"https://raw.githubusercontent.com/open-metadata/OpenMetadata/{RELEASE_BRANCH_VERSION}/docker/metadata/"
BASE_PATH = f"https://raw.githubusercontent.com/open-metadata/OpenMetadata/{RELEASE_BRANCH_VERSION}"
DOCKER_URL_ROOT = f"{BASE_PATH}/docker/metadata/"
DEFAULT_COMPOSE_FILE = "docker-compose.yml"
BACKEND_DATABASES = {
"mysql": DEFAULT_COMPOSE_FILE,
"postgres": "docker-compose-postgres.yml",
}
DEFUALT_JWT_TOKEN = "eyJraWQiOiJHYjM4OWEtOWY3Ni1nZGpzLWE5MmotMDI0MmJrOTQzNTYiLCJ0eXAiOiJKV1QiLCJhbGciOiJSUzI1NiJ9.eyJzdWIiOiJhZG1pbiIsImlzQm90IjpmYWxzZSwiaXNzIjoib3Blbi1tZXRhZGF0YS5vcmciLCJpYXQiOjE2NjM5Mzg0NjIsImVtYWlsIjoiYWRtaW5Ab3Blbm1ldGFkYXRhLm9yZyJ9.tS8um_5DKu7HgzGBzS1VTA5uUjKWOCU0B_j08WXBiEC0mr0zNREkqVfwFDD-d24HlNEbrqioLsBuFRiwIWKc1m_ZlVQbG7P36RUxhuv2vbSp80FKyNM-Tj93FDzq91jsyNmsQhyNv_fNr3TXfzzSPjHt8Go0FMMP66weoKMgW2PbXlhVKwEuXUHyakLLzewm9UMeQaEiRzhiTMU3UkLXcKbYEJJvfNFcLwSl9W8JCO_l0Yj3ud-qt_nQYEZwqW6u5nfdQllN133iikV4fM5QZsMCnm8Rq1mvLR0y9bmJiD7fwM1tmJ791TUWqmKaTnP49U493VanKpUAfzIiOiIbhg"
def docker_volume():
    """Create the host bind-mount directory tree used by docker compose.

    Ensures ``docker-volume/`` exists with one sub-directory per mounted
    volume: database data, airflow generated-dag configs, airflow dags,
    airflow tmp, and the OpenMetadata server's /tmp. Idempotent — safe to
    call on every ``metadata docker`` start.
    """
    # os.makedirs creates intermediate directories, so MAIN_DIR does not
    # need a separate (race-prone) exists/mkdir guard, and exist_ok keeps
    # repeated runs from raising FileExistsError.
    sub_dirs = [
        "db-data",
        "ingestion-volume-dag-airflow",
        "ingestion-volume-dags",
        "ingestion-volume-tmp",
        "om-server",
    ]
    for sub_dir in sub_dirs:
        os.makedirs(os.path.join(MAIN_DIR, sub_dir), exist_ok=True)
def download_and_save_dag(filename: str):
    """Download one sample airflow DAG from the release branch on GitHub
    and write it into the mounted dags volume.

    :param filename: name of the DAG file to fetch and save
    """
    # BUG FIX: the requested filename must be interpolated into the URL;
    # previously the parameter was never used in the request, so every
    # call fetched the same broken path.
    file = requests.get(f"{BASE_PATH}/{AIRFLOW_DAGS_FILE}/{filename}", stream=True)
    with open(os.path.join(DAG_VOLUME_BASE_PATH, filename), "wb") as f:
        # Stream in chunks so large DAG files are not held in memory whole.
        for chunk in file.iter_content(chunk_size=8192):
            f.write(chunk)
def copy_dag_to_volumes():
    """Fetch every bundled example DAG into the mounted dags directory."""
    for dag_file in (
        "airflow_lineage_example.py",
        "airflow_metadata_extraction.py",
        "airflow_metadata_to_es.py",
        "airflow_sample_data.py",
        "airflow_sample_usage.py",
    ):
        download_and_save_dag(dag_file)
# NOTE(review): this re-definition is byte-for-byte identical to the
# docker_volume() defined earlier in this module; Python keeps only the
# last binding, so one of the two copies should be removed.
def docker_volume():
    """Create the docker-volume/ directory and its bind-mount sub-directories."""
    # Guard so os.mkdir does not raise FileExistsError on repeated runs.
    if not os.path.exists(MAIN_DIR):
        os.mkdir(MAIN_DIR)
    db = "db-data"
    dag_airflow = "ingestion-volume-dag-airflow"
    dags = "ingestion-volume-dags"
    tmp = "ingestion-volume-tmp"
    om_server = "om-server"
    path_to_join = [db, dag_airflow, dags, tmp, om_server]
    final_path = []
    # Build the full host paths under MAIN_DIR, then create each one
    # (exist_ok makes this idempotent).
    for path in path_to_join:
        temp_path = os.path.join(MAIN_DIR, path)
        final_path.append(temp_path)
    for path in final_path:
        os.makedirs(path, exist_ok=True)
def start_docker(docker, start_time, file_path, ingest_sample_data: bool):
logger.info("Creating the docker volumes ..")
docker_volume()
logger.info("Running docker compose for OpenMetadata..")
click.secho("It may take some time on the first run", fg="bright_yellow")
if file_path:
@ -58,10 +125,15 @@ def start_docker(docker, start_time, file_path, ingest_sample_data: bool):
logger.info("Ran docker compose for OpenMetadata successfully.")
if ingest_sample_data:
logger.info("Waiting for ingestion to complete..")
wait_for_containers(docker)
# shutil.copytree("ingestion/examples/airflow/dags", "docker-volume/ingestion-volume-dags/", dirs_exist_ok=True)
copy_dag_to_volumes()
run_sample_data()
metadata_config = OpenMetadataConnection(
hostPort="http://localhost:8585/api", authProvider="no-auth"
hostPort="http://localhost:8585/api",
authProvider="openmetadata",
securityConfig=OpenMetadataJWTClientConfig(jwtToken=DEFUALT_JWT_TOKEN),
)
ometa_logger().disabled = True
ometa_client = OpenMetadata(metadata_config)
@ -209,6 +281,10 @@ def run_docker(
logger.info(
"Stopping docker compose for OpenMetadata and removing images, networks, volumes..."
)
logger.info("Do you want to Delete the docker mounted volumes from host")
user_response = click.prompt("Please enter [y/N]", type=str)
if user_response == "y":
shutil.rmtree(MAIN_DIR)
docker.compose.down(remove_orphans=True, remove_images="all", volumes=True)
logger.info(
"Stopped docker compose for OpenMetadata and removing images, networks, volumes."