# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pathlib
from datetime import timedelta

from airflow import DAG

try:
    # Airflow 2.x module path
    from airflow.operators.python import PythonOperator
except ModuleNotFoundError:
    # Fallback for Airflow 1.10.x, where the operator lives in a different module
    from airflow.operators.python_operator import PythonOperator

from metadata.config.common import load_config_file
from metadata.ingestion.api.workflow import Workflow
from airflow.utils.dates import days_ago

default_args = {
    "owner": "user_name",
    "email": ["username@org.com"],
    "email_on_failure": False,
    "retries": 3,
    "retry_delay": timedelta(minutes=5),
    "execution_timeout": timedelta(minutes=60),
}


def metadata_ingestion_workflow():
    # Load the ingestion recipe and run the OpenMetadata workflow end to end.
    config_file = pathlib.Path("/tmp/sample_data.json")
    workflow_config = load_config_file(config_file)
    workflow = Workflow.create(workflow_config)
    workflow.execute()
    workflow.raise_from_status()
    workflow.print_status()
    workflow.stop()


with DAG(
    "sample_metadata_ingestion_workflow",
    default_args=default_args,
    description="An example DAG which runs an OpenMetadata ingestion workflow",
    schedule_interval=timedelta(days=1),
    start_date=days_ago(1),
    catchup=False,
) as dag:
    ingest_task = PythonOperator(
        task_id="ingest_using_recipe",
        python_callable=metadata_ingestion_workflow,
    )
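

# A minimal convenience hook, not part of the original sample: running this
# module directly executes the ingestion callable once, outside the Airflow
# scheduler, which can help smoke-test the recipe at /tmp/sample_data.json
# locally. It assumes the `metadata` (OpenMetadata ingestion) package is
# installed in the current environment.
if __name__ == "__main__":
    metadata_ingestion_workflow()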