Rasnar 38e240e916
feat(ingest/airflow): platform_instance support in Airflow plugin (#12751)
Co-authored-by: rasnar <11248833+Rasnar@users.noreply.github.com>
Co-authored-by: Sergio Gómez Villamor <sgomezvillamor@gmail.com>
2025-04-04 09:26:58 +02:00

37 lines
988 B
Python

from datetime import datetime
from airflow import DAG
from airflow.operators.bash import BashOperator
from datahub_airflow_plugin.entities import Dataset, Urn
# Test DAG that the DataHub Airflow plugin is expected to skip.
# The first task declares DataHub lineage via inlets/outlets; the
# second has no lineage and simply runs downstream of the first.
with DAG(
    "dag_to_skip",
    start_date=datetime(2023, 1, 1),
    schedule_interval=None,
    catchup=False,
) as dag:
    # Upstream lineage: one platform/name dataset plus two pre-built URNs
    # (a dataset URN and a dataJob URN from another DAG).
    upstream_entities = [
        Dataset(platform="snowflake", name="mydb.schema.tableA"),
        Urn(
            "urn:li:dataset:(urn:li:dataPlatform:snowflake,mydb.schema.tableC,PROD)"
        ),
        Urn(
            "urn:li:dataJob:(urn:li:dataFlow:(airflow,myairflow.test_dag,PROD),test_task)"
        ),
    ]
    # Downstream lineage: a single dataset target.
    downstream_entities = [Dataset("snowflake", "mydb.schema.tableD")]

    first_task = BashOperator(
        task_id="dag_to_skip_task_1",
        dag=dag,
        bash_command="echo 'dag_to_skip_task_1'",
        inlets=upstream_entities,
        outlets=downstream_entities,
    )

    second_task = BashOperator(
        task_id="dag_to_skip_task_2",
        dag=dag,
        bash_command="echo 'dag_to_skip_task_2'",
    )

    # Ordering: task 1 must complete before task 2 starts.
    first_task >> second_task