# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Example Airflow DAG that runs an OpenMetadata ingestion workflow to index
tables, topics, and dashboards into Elasticsearch.
"""
import json
from datetime import timedelta

from airflow import DAG

try:
    from airflow.operators.python import PythonOperator
except ModuleNotFoundError:
    from airflow.operators.python_operator import PythonOperator

from airflow.utils.dates import days_ago

from metadata.ingestion.api.workflow import Workflow

default_args = {
    "owner": "user_name",
    "retries": 3,
    "retry_delay": timedelta(minutes=2),
    "execution_timeout": timedelta(minutes=60),
}

# Ingestion recipe: read entities from the OpenMetadata server and index
# them into a local Elasticsearch instance.
config = """
{
  "source": {
    "type": "metadata",
    "config": {
      "include_tables": "true",
      "include_topics": "true",
      "include_dashboards": "true",
      "limit_records": 10
    }
  },
  "sink": {
    "type": "elasticsearch",
    "config": {
      "index_tables": "true",
      "index_topics": "true",
      "index_dashboards": "true",
      "es_host": "localhost",
      "es_port": 9200
    }
  },
  "metadata_server": {
    "type": "metadata-server",
    "config": {
      "api_endpoint": "http://localhost:8585/api",
      "auth_provider_type": "no-auth"
    }
  }
}
"""


def metadata_ingestion_workflow():
    """Parse the JSON recipe and run the OpenMetadata ingestion workflow."""
    workflow_config = json.loads(config)
    workflow = Workflow.create(workflow_config)
    workflow.execute()
    workflow.raise_from_status()
    workflow.print_status()
    workflow.stop()


with DAG(
    "index_metadata",
    default_args=default_args,
    description="An example DAG which runs an OpenMetadata ingestion workflow",
    start_date=days_ago(1),
    is_paused_upon_creation=False,
    # schedule_interval belongs on the DAG, not in default_args,
    # otherwise Airflow ignores it and falls back to the default schedule.
    schedule_interval="*/5 * * * *",
    catchup=False,
) as dag:
    ingest_task = PythonOperator(
        task_id="ingest_using_recipe",
        python_callable=metadata_ingestion_workflow,
    )