FQN changed from bigquery to bigquery_gcp

This commit is contained in:
Ayush Shah 2021-11-29 16:57:39 +05:30
parent 38b02a4bfb
commit 47a97e7be0
7 changed files with 8962 additions and 6312 deletions

View File

@@ -21,7 +21,7 @@ cd ../ && mvn -DskipTests clean package
cd docker/local-metadata
echo "Starting Local Docker Containers"
docker-compose down && docker-compose up --build -d
until curl -s -f -o /dev/null "http://localhost:8585/api/v1/tables/name/bigquery.shopify.fact_sale"; do
until curl -s -f -o /dev/null "http://localhost:8585/api/v1/tables/name/bigquery_gcp.shopify.fact_sale"; do
printf '.'
sleep 2
done

View File

@@ -43,7 +43,7 @@ config = """
"type": "sample-usage",
"config": {
"database": "warehouse",
"service_name": "gcp_bigquery",
"service_name": "bigquery_gcp",
"sample_data_folder": "./examples/sample_data"
}
},

View File

@@ -34,16 +34,19 @@ def openmetadata_airflow_lineage_example():
@task(
inlets={
"tables": [
Table(fullyQualifiedName="bigquery.shopify.raw_order"),
Table(fullyQualifiedName="bigquery.shopify.raw_customer")
Table(fullyQualifiedName="bigquery_gcp.shopify.raw_order"),
Table(fullyQualifiedName="bigquery_gcp.shopify.raw_customer"),
],
},
outlets={"tables": [Table(fullyQualifiedName="bigquery.shopify.fact_order")]},
outlets={
"tables": [Table(fullyQualifiedName="bigquery_gcp.shopify.fact_order")]
},
)
def generate_data():
""" write your query to generate ETL"""
"""write your query to generate ETL"""
pass
generate_data()
openmetadata_airflow_lineage_example_dag = openmetadata_airflow_lineage_example()

File diff suppressed because it is too large Load Diff

View File

@@ -26,6 +26,6 @@ airflow users create \
--password admin
airflow db upgrade
(while ! wget -O /dev/null -o /dev/null http://ingestion:8080; do sleep 5; done; sleep 5; curl -u admin:admin --data '{"dag_run_id":"sample_data_1"}' -H "Content-type: application/json" -X POST http://ingestion:8080/api/v1/dags/sample_data/dagRuns) &
(while ! wget -O /dev/null -o /dev/null http://localhost:8585/api/v1/tables/name/bigquery.shopify.fact_sale; do sleep 5; done; sleep 6; curl -u admin:admin --data '{"dag_run_id":"sample_usage_1"}' -H "Content-type: application/json" -X POST http://ingestion:8080/api/v1/dags/sample_usage/dagRuns) &
(while ! wget -O /dev/null -o /dev/null http://localhost:8585/api/v1/tables/name/bigquery.shopify.fact_sale; do sleep 5; done; sleep 7; curl -u admin:admin --data '{"dag_run_id":"index_metadata_1"}' -H "Content-type: application/json" -X POST http://ingestion:8080/api/v1/dags/index_metadata/dagRuns) &
(while ! wget -O /dev/null -o /dev/null http://openmetadata-server:8585/api/v1/tables/name/bigquery_gcp.shopify.fact_sale; do sleep 5; done; sleep 6; curl -u admin:admin --data '{"dag_run_id":"sample_usage_1"}' -H "Content-type: application/json" -X POST http://ingestion:8080/api/v1/dags/sample_usage/dagRuns) &
(while ! wget -O /dev/null -o /dev/null http://openmetadata-server:8585/api/v1/tables/name/bigquery_gcp.shopify.fact_sale; do sleep 5; done; sleep 7; curl -u admin:admin --data '{"dag_run_id":"index_metadata_1"}' -H "Content-type: application/json" -X POST http://ingestion:8080/api/v1/dags/index_metadata/dagRuns) &
airflow standalone

View File

@@ -3,7 +3,7 @@
"type": "sample-usage",
"config": {
"database": "warehouse",
"service_name": "gcp_bigquery",
"service_name": "bigquery_gcp",
"sample_data_folder": "./examples/sample_data"
}
},

View File

@@ -223,7 +223,7 @@ def docker(start, stop, clean, type, path) -> None:
ometa_client = OpenMetadata(metadata_config).client
while True:
try:
ometa_client.get(f"/tables/name/bigquery.shopify.dim_customer")
ometa_client.get(f"/tables/name/bigquery_gcp.shopify.dim_customer")
break
except Exception as err:
sys.stdout.write(".")