mirror of https://github.com/datahub-project/datahub.git
synced 2025-09-16 20:53:18 +00:00

fix(cli): remove duplicate labels from quickstart files (#7886)

parent 29e5cfd643
commit 8f8c11081d
@@ -48,9 +48,7 @@ services:
       - DATAHUB_SECRET=YouKnowNothing
       - DATAHUB_APP_VERSION=1.0
       - DATAHUB_PLAY_MEM_BUFFER_SIZE=10MB
-      - JAVA_OPTS=-Xms512m -Xmx512m -Dhttp.port=9002 -Dconfig.file=datahub-frontend/conf/application.conf
-        -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf -Dlogback.configurationFile=datahub-frontend/conf/logback.xml
-        -Dlogback.debug=false -Dpidfile.path=/dev/null
+      - JAVA_OPTS=-Xms512m -Xmx512m -Dhttp.port=9002 -Dconfig.file=datahub-frontend/conf/application.conf -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf -Dlogback.configurationFile=datahub-frontend/conf/logback.xml -Dlogback.debug=false -Dpidfile.path=/dev/null
       - KAFKA_BOOTSTRAP_SERVER=broker:29092
       - DATAHUB_TRACKING_TOPIC=DataHubUsageEvent_v1
       - ELASTIC_CLIENT_HOST=elasticsearch
@@ -140,8 +138,7 @@ services:
       start_period: 2m
       test:
         - CMD-SHELL
-        - curl -sS --fail 'http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=0s'
-          || exit 1
+        - curl -sS --fail 'http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=0s' || exit 1
       hostname: elasticsearch
       image: elasticsearch:7.10.1
       mem_limit: 1g
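A note on the two changes above: both long values collapse onto a single line because the regenerated files are now dumped with `width=1000` (added to the `yaml.dump` call later in this commit). With PyYAML's default width (about 80 columns), long scalars are folded across lines, which is what produced the wrapped `JAVA_OPTS` and healthcheck entries. A minimal illustration with plain PyYAML, not DataHub code:

import yaml

# A long env-var entry, similar in shape to the JAVA_OPTS value above.
long_val = ("JAVA_OPTS=-Xms512m -Xmx512m " + "-Dexample.flag=value " * 10).strip()

# Default width (~80 cols): PyYAML folds the scalar across several lines.
print(yaml.dump({"environment": [long_val]}))

# width=1000: each env var stays on one line, matching the new output.
print(yaml.dump({"environment": [long_val]}, width=1000))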
|
@@ -46,9 +46,7 @@ services:
       - DATAHUB_SECRET=YouKnowNothing
       - DATAHUB_APP_VERSION=1.0
       - DATAHUB_PLAY_MEM_BUFFER_SIZE=10MB
-      - JAVA_OPTS=-Xms512m -Xmx512m -Dhttp.port=9002 -Dconfig.file=datahub-frontend/conf/application.conf
-        -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf -Dlogback.configurationFile=datahub-frontend/conf/logback.xml
-        -Dlogback.debug=false -Dpidfile.path=/dev/null
+      - JAVA_OPTS=-Xms512m -Xmx512m -Dhttp.port=9002 -Dconfig.file=datahub-frontend/conf/application.conf -Djava.security.auth.login.config=datahub-frontend/conf/jaas.conf -Dlogback.configurationFile=datahub-frontend/conf/logback.xml -Dlogback.debug=false -Dpidfile.path=/dev/null
       - KAFKA_BOOTSTRAP_SERVER=broker:29092
       - DATAHUB_TRACKING_TOPIC=DataHubUsageEvent_v1
       - ELASTIC_CLIENT_HOST=elasticsearch
@@ -95,8 +93,6 @@ services:
     volumes:
       - ${HOME}/.datahub/plugins:/etc/datahub/plugins
   datahub-upgrade:
-    labels:
-      datahub_setup_job: true
     command:
       - -u
       - SystemUpdate
@@ -133,8 +129,7 @@ services:
       start_period: 2m
       test:
        - CMD-SHELL
-        - curl -sS --fail 'http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=0s'
-          || exit 1
+        - curl -sS --fail 'http://localhost:9200/_cluster/health?wait_for_status=yellow&timeout=0s' || exit 1
       hostname: elasticsearch
       image: elasticsearch:7.10.1
       mem_limit: 1g
@@ -143,8 +138,6 @@ services:
     volumes:
       - esdata:/usr/share/elasticsearch/data
   elasticsearch-setup:
-    labels:
-      datahub_setup_job: true
     container_name: elasticsearch-setup
     depends_on:
       - elasticsearch
@@ -157,8 +150,6 @@ services:
     labels:
       datahub_setup_job: true
   kafka-setup:
-    labels:
-      datahub_setup_job: true
     container_name: kafka-setup
     depends_on:
       - broker
@@ -187,8 +178,6 @@ services:
       - ../mysql/init.sql:/docker-entrypoint-initdb.d/init.sql
      - mysqldata:/var/lib/mysql
   mysql-setup:
-    labels:
-      datahub_setup_job: true
     container_name: mysql-setup
     depends_on:
       - mysql
|
@@ -178,8 +178,6 @@ services:
       - ../mysql/init.sql:/docker-entrypoint-initdb.d/init.sql
       - mysqldata:/var/lib/mysql
   mysql-setup:
-    labels:
-      datahub_setup_job: true
     container_name: mysql-setup
     depends_on:
       - mysql
@@ -191,6 +189,8 @@ services:
       - DATAHUB_DB_NAME=datahub
     hostname: mysql-setup
     image: ${DATAHUB_MYSQL_SETUP_IMAGE:-acryldata/datahub-mysql-setup}:${DATAHUB_VERSION:-head}
+    labels:
+      datahub_setup_job: true
   schema-registry:
     container_name: schema-registry
     depends_on:
@@ -214,7 +214,7 @@ services:
       - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181
     volumes:
       - zkdata:/var/lib/zookeeper
-version: "2.3"
+version: '2.3'
 volumes:
   esdata: null
   mysqldata: null
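Two things worth noting in the file above. First, the substance of the commit: the setup services previously carried a `labels` block twice, once stray (right after the service name) and once at the key-sorted position; the hunks drop the stray copy, and for `mysql-setup` re-add it at the sorted spot after `image`. Second, the quote churn (`"2.3"` to `'2.3'` here, `"4318"` to `'4318'` in the monitoring file below) is a side effect of regenerating through PyYAML rather than a deliberate edit: values that would otherwise parse as numbers must be quoted, and PyYAML's emitter prefers single quotes. A quick check with plain PyYAML:

import yaml

# Both values would parse as numbers if left unquoted, so the emitter
# quotes them, single-quoted by default (keys come out sorted, too).
print(yaml.dump({"version": "2.3", "ports": ["4318"]}))
# ports:
# - '4318'
# version: '2.3'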
|
@@ -8,7 +8,7 @@ services:
       - OTEL_METRICS_EXPORTER=none
       - OTEL_SERVICE_NAME=datahub-gms
     ports:
-      - "4318"
+      - '4318'
   datahub-gms:
     environment:
       - ENABLE_PROMETHEUS=true
@@ -18,7 +18,7 @@ services:
       - OTEL_METRICS_EXPORTER=none
       - OTEL_SERVICE_NAME=datahub-gms
     ports:
-      - "4318"
+      - '4318'
   grafana:
     depends_on:
       - prometheus
@@ -33,8 +33,8 @@ services:
     image: jaegertracing/all-in-one:latest
     ports:
       - 16686:16686
-      - "14268"
-      - "14250"
+      - '14268'
+      - '14250'
   prometheus:
     container_name: prometheus
     image: prom/prometheus:latest
@@ -42,6 +42,6 @@ services:
       - 9089:9090
     volumes:
       - ../monitoring/prometheus.yaml:/etc/prometheus/prometheus.yml
-version: "2.3"
+version: '2.3'
 volumes:
   grafana-storage: null
|
@@ -236,7 +236,7 @@ services:
       - ${DATAHUB_MAPPED_ZK_PORT:-2181}:2181
     volumes:
       - zkdata:/var/lib/zookeeper
-version: "2.3"
+version: '2.3'
 volumes:
   broker: null
   esdata: null
|
@@ -1,8 +1,5 @@
 #!/bin/bash
 
-# this scripts checks if docker-compose$flavour.quickstart.yml is up to date for these 'flavours':
-FLAVOURS=("" "-without-neo4j" ".monitoring")
-
 DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
 cd "$DIR"
 
@@ -12,21 +9,4 @@ python3 -m venv venv
 source venv/bin/activate
 
 pip install -r requirements.txt
-python generate_docker_quickstart.py ../docker-compose.yml ../docker-compose.override.yml temp.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose-without-neo4j.yml ../docker-compose-without-neo4j.override.yml temp-without-neo4j.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose.yml ../docker-compose.override.yml ../docker-compose.m1.yml temp-m1.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose-without-neo4j.yml ../docker-compose-without-neo4j.override.yml ../docker-compose-without-neo4j.m1.yml temp-without-neo4j-m1.quickstart.yml
-python generate_docker_quickstart.py ../monitoring/docker-compose.monitoring.yml temp.monitoring.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose.consumers.yml temp.consumers.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose.consumers-without-neo4j.yml temp.consumers-without-neo4j.quickstart.yml
-
-for flavour in "${FLAVOURS[@]}"
-do
-
-    if cmp <(yq -i -P 'sort_keys(..)' docker-compose$flavour.quickstart.yml) <(yq -i -P 'sort_keys(..)' temp$flavour.quickstart.yml); then
-        echo "docker-compose$flavour.quickstart.yml is up to date."
-    else
-        echo "docker-compose$flavour.quickstart.yml is out of date."
-        exit 1
-    fi
-done
+python generate_docker_quickstart.py check-all
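The deleted `yq`/`cmp` loop had to normalize key order before comparing. The new `check-all` subcommand instead re-merges the source files in memory and compares the result byte-for-byte against the committed quickstart file, which works because PyYAML's output is deterministic: mapping keys are sorted by default. A one-line check of that assumption:

import yaml

# sort_keys defaults to True, so regeneration is reproducible and a
# plain string comparison of committed vs. fresh output suffices.
print(yaml.dump({"b": 1, "a": 2}))  # prints "a: 2" then "b: 1"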
|
@@ -1,4 +1,8 @@
+from io import StringIO
+from typing import List
 import os
+import sys
+import pathlib
 from collections.abc import Mapping
 
 import click
@@ -6,8 +10,38 @@ import yaml
 from dotenv import dotenv_values
 from yaml import Loader
 
-# Generates a merged docker-compose file with env variables inlined.
-# Usage: python3 docker_compose_cli_gen.py ../docker-compose.yml ../docker-compose.override.yml ../docker-compose-gen.yml
+COMPOSE_SPECS = {
+    "docker-compose.quickstart.yml": [
+        "../docker-compose.yml",
+        "../docker-compose.override.yml",
+    ],
+    "docker-compose-m1.quickstart.yml": [
+        "../docker-compose.yml",
+        "../docker-compose.override.yml",
+        "../docker-compose.m1.yml",
+    ],
+    "docker-compose-without-neo4j.quickstart.yml": [
+        "../docker-compose-without-neo4j.yml",
+        "../docker-compose-without-neo4j.override.yml",
+    ],
+    "docker-compose-without-neo4j-m1.quickstart.yml": [
+        "../docker-compose-without-neo4j.yml",
+        "../docker-compose-without-neo4j.override.yml",
+        "../docker-compose-without-neo4j.m1.yml",
+    ],
+    "docker-compose.monitoring.quickstart.yml": [
+        "../monitoring/docker-compose.monitoring.yml",
+    ],
+    "docker-compose.consumers.quickstart.yml": [
+        "../docker-compose.consumers.yml",
+    ],
+    "docker-compose.consumers-without-neo4j.quickstart.yml": [
+        "../docker-compose.consumers-without-neo4j.yml",
+    ],
+    "docker-compose.kafka-setup.quickstart.yml": [
+        "../docker-compose.kafka-setup.yml",
+    ],
+}
 
 omitted_services = [
     "kafka-rest-proxy",
@@ -34,6 +68,7 @@ def dict_merge(dct, merge_dct):
         else:
             dct[k] = merge_dct[k]
 
+
 def modify_docker_config(base_path, docker_yaml_config):
     if not docker_yaml_config["services"]:
         docker_yaml_config["services"] = {}
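Only the tail of `dict_merge` is visible in this hunk. For context, a minimal sketch of a recursive merge with the shape those visible lines imply (the in-repo function may differ in details):

from collections.abc import Mapping

def dict_merge(dct, merge_dct):
    # Merge merge_dct into dct in place: nested mappings are merged
    # key by key; any other value (lists, scalars) replaces the old one.
    for k in merge_dct:
        if k in dct and isinstance(dct[k], dict) and isinstance(merge_dct[k], Mapping):
            dict_merge(dct[k], merge_dct[k])
        else:
            dct[k] = merge_dct[k]

Under this sketch, list values such as `environment` from a later compose file replace, rather than append to, the earlier definition.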
@@ -91,17 +126,35 @@ def modify_docker_config(base_path, docker_yaml_config):
     docker_yaml_config["version"] = "2.3"
 
 
-@click.command()
-@click.argument(
-    "compose-files",
-    nargs=-1,
-    type=click.Path(
-        exists=True,
-        dir_okay=False,
-    ),
-)
-@click.argument("output-file", type=click.Path())
-def generate(compose_files, output_file) -> None:
-
+def dedup_env_vars(merged_docker_config):
+    for service in merged_docker_config["services"]:
+        if "environment" in merged_docker_config["services"][service]:
+            lst = merged_docker_config["services"][service]["environment"]
+            if lst is not None:
+                # use a set to cache duplicates
+                caches = set()
+                results = {}
+                for item in lst:
+                    partitions = item.rpartition("=")
+                    prefix = partitions[0]
+                    suffix = partitions[1]
+                    # check whether prefix already exists
+                    if prefix not in caches and suffix != "":
+                        results[prefix] = item
+                        caches.add(prefix)
+                if set(lst) != set([v for k, v in results.items()]):
+                    sorted_vars = sorted([k for k in results])
+                    merged_docker_config["services"][service]["environment"] = [
+                        results[var] for var in sorted_vars
+                    ]
+
+
+def merge_files(compose_files: List[str]) -> str:
+    """
+    Generates a merged docker-compose file with env variables inlined.
+
+    Example Usage: python3 generate_docker_quickstart.py generate-one ../docker-compose.yml ../docker-compose.override.yml ../docker-compose-gen.yml
+    """
+
     # Resolve .env files to inlined vars
     modified_files = []
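`dedup_env_vars`, moved earlier in the file here (with quotes normalized but logic unchanged), splits each entry on its last `=` via `rpartition` and uses the left-hand part as the dedup key: it keeps the first entry seen per key, drops entries without an `=`, and rewrites the list sorted by key whenever anything was removed. A small worked example (service name and values are illustrative only):

config = {
    "services": {
        "datahub-frontend-react": {
            "environment": [
                "DATAHUB_SECRET=YouKnowNothing",
                "DATAHUB_APP_VERSION=1.0",
                "DATAHUB_SECRET=SomethingElse",  # duplicate key: first entry wins
            ]
        }
    }
}

dedup_env_vars(config)
print(config["services"]["datahub-frontend-react"]["environment"])
# ['DATAHUB_APP_VERSION=1.0', 'DATAHUB_SECRET=YouKnowNothing']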
@@ -121,40 +174,77 @@ def generate(compose_files, output_file) -> None:
     # Dedup env vars, last wins
     dedup_env_vars(merged_docker_config)
 
-    # Write output file
-    output_dir = os.path.dirname(output_file)
-    if len(output_dir) and not os.path.exists(output_dir):
-        os.makedirs(output_dir)
-    with open(output_file, "w") as new_conf_file:
-        yaml.dump(
-            merged_docker_config,
-            new_conf_file,
-            default_flow_style=False,
-        )
+    # Generate yaml to string.
+    out = StringIO()
+    yaml.dump(
+        merged_docker_config,
+        out,
+        default_flow_style=False,
+        width=1000,
+    )
+    return out.getvalue()
+
+
+@click.group()
+def main_cmd() -> None:
+    pass
+
+
+@main_cmd.command()
+@click.argument(
+    "compose-files",
+    nargs=-1,
+    type=click.Path(
+        exists=True,
+        dir_okay=False,
+    ),
+)
+@click.argument("output-file", type=click.Path())
+def generate_one(compose_files, output_file) -> None:
+    """
+    Generates a merged docker-compose file with env variables inlined.
+
+    Example Usage: python3 generate_docker_quickstart.py generate-one ../docker-compose.yml ../docker-compose.override.yml ../docker-compose-gen.yml
+    """
+
+    merged_contents = merge_files(compose_files)
+
+    # Write output file
+    pathlib.Path(output_file).parent.mkdir(parents=True, exist_ok=True)
+    pathlib.Path(output_file).write_text(merged_contents)
 
     print(f"Successfully generated {output_file}.")
 
 
-def dedup_env_vars(merged_docker_config):
-    for service in merged_docker_config['services']:
-        if 'environment' in merged_docker_config['services'][service]:
-            lst = merged_docker_config['services'][service]['environment']
-            if lst is not None:
-                # use a set to cache duplicates
-                caches = set()
-                results = {}
-                for item in lst:
-                    partitions = item.rpartition('=')
-                    prefix = partitions[0]
-                    suffix = partitions[1]
-                    # check whether prefix already exists
-                    if prefix not in caches and suffix != "":
-                        results[prefix] = item
-                        caches.add(prefix)
-                if set(lst) != set([v for k,v in results.items()]):
-                    sorted_vars = sorted([k for k in results])
-                    merged_docker_config['services'][service]['environment'] = [results[var] for var in sorted_vars]
+@main_cmd.command()
+@click.pass_context
+def generate_all(ctx: click.Context) -> None:
+    """
+    Generates all merged docker-compose files with env variables inlined.
+    """
+
+    for output_compose_file, inputs in COMPOSE_SPECS.items():
+        ctx.invoke(generate_one, compose_files=inputs, output_file=output_compose_file)
+
+
+@main_cmd.command()
+def check_all() -> None:
+    """
+    Checks that the generated docker-compose files are up to date.
+    """
+
+    for output_compose_file, inputs in COMPOSE_SPECS.items():
+        expected = merge_files(inputs)
+
+        # Check that the files match.
+        current = pathlib.Path(output_compose_file).read_text()
+
+        if expected != current:
+            print(
+                f"File {output_compose_file} is out of date. Please run `python3 generate_docker_quickstart.py generate-all`."
+            )
+            sys.exit(1)
 
 
 if __name__ == "__main__":
-    generate()
+    main_cmd()
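The single `generate` command becomes a click group with `generate-one`, `generate-all`, and `check-all` subcommands (click derives the dashed names from the function names). A hedged sketch of driving them from Python with click's standard `CliRunner`; no such test exists in this PR, and the import assumes the script is importable as the module `generate_docker_quickstart`:

from click.testing import CliRunner

from generate_docker_quickstart import main_cmd

runner = CliRunner()

# check-all exits 0 when every quickstart file matches its sources,
# and exits 1 (via sys.exit) when any file is stale.
result = runner.invoke(main_cmd, ["check-all"])
print(result.exit_code, result.output)

# generate-one preserves the old single-file workflow.
result = runner.invoke(
    main_cmd,
    ["generate-one", "../docker-compose.yml", "../docker-compose.override.yml", "out.yml"],
)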
|
@@ -9,11 +9,4 @@ python3 -m venv venv
 source venv/bin/activate
 
 pip install -r requirements.txt
-python generate_docker_quickstart.py ../docker-compose.yml ../docker-compose.override.yml docker-compose.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose-without-neo4j.yml ../docker-compose-without-neo4j.override.yml docker-compose-without-neo4j.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose.yml ../docker-compose.override.yml ../docker-compose.m1.yml docker-compose-m1.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose-without-neo4j.yml ../docker-compose-without-neo4j.override.yml ../docker-compose-without-neo4j.m1.yml docker-compose-without-neo4j-m1.quickstart.yml
-python generate_docker_quickstart.py ../monitoring/docker-compose.monitoring.yml docker-compose.monitoring.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose.consumers.yml docker-compose.consumers.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose.consumers-without-neo4j.yml docker-compose.consumers-without-neo4j.quickstart.yml
-python generate_docker_quickstart.py ../docker-compose.kafka-setup.yml docker-compose.kafka-setup.quickstart.yml
+python generate_docker_quickstart.py generate-all
|