diff --git a/dev/workflow/TK_Cust/partner_one_site_file/partner数据导入/wf_dag_partner_one_site_file.py b/dev/workflow/TK_Cust/partner_one_site_file/partner数据导入/wf_dag_partner_one_site_file.py
new file mode 100644
index 0000000..b15b91b
--- /dev/null
+++ b/dev/workflow/TK_Cust/partner_one_site_file/partner数据导入/wf_dag_partner_one_site_file.py
@@ -0,0 +1,62 @@
+#!/usr/bin/python
+# -*- encoding=utf-8 -*-
+from airflow import DAG
+from datetime import datetime, timedelta
+from airflow.contrib.hooks.ssh_hook import SSHHook
+from airflow.contrib.operators.ssh_operator import SSHOperator
+from airflow.sensors.external_task_sensor import ExternalTaskSensor
+import json
+
+from airflow.operators.trigger_dagrun import TriggerDagRunOperator
+from airflow.operators.email_operator import EmailOperator
+from airflow.utils.trigger_rule import TriggerRule
+
+
+sshHook = SSHHook(ssh_conn_id ='ssh_air')
+default_args = {
+'owner': 'tek_newsletter@163.com',
+'email_on_failure': True,
+'email_on_retry':True,
+'start_date': datetime(2024, 1, 1),
+'depends_on_past': False,
+'retries': 6,
+'retry_delay': timedelta(minutes=10),
+}
+
+dag = DAG('wf_dag_partner_one_site_file', default_args=default_args,
+schedule=None,
+catchup=False,
+dagrun_timeout=timedelta(minutes=160),
+max_active_runs=3)
+
+task_failed = EmailOperator (
+    dag=dag,
+    trigger_rule=TriggerRule.ONE_FAILED,
+    task_id="task_failed",
+    to=["tek_newsletter@163.com"],
+    cc=[""],
+    subject="partner_one_site_file_failed",
+    html_content='
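
The diff is truncated after html_content= and the rest of the 62-line file is not shown. As a hedged sketch only, not the committed code, a DAG laid out like this usually finishes by running the ingest script over SSH and wiring the failure e-mail as a downstream task; the task ids, shell command, and triggered DAG id below are hypothetical. Note also that the file mixes Airflow 1.x-style airflow.contrib import paths with Airflow 2-style ones such as airflow.operators.trigger_dagrun and the schedule= argument.

# Sketch of a typical continuation; all names below are assumed, not taken from the diff.
load_partner_one_site_file = SSHOperator(
    dag=dag,
    ssh_hook=sshHook,
    task_id='load_partner_one_site_file',
    # Hypothetical script path; the real command sits in the truncated part of the diff.
    command='sh /opt/etl/partner_one_site_file/load_partner_one_site_file.sh ',
)

trigger_downstream = TriggerDagRunOperator(
    dag=dag,
    task_id='trigger_downstream',
    trigger_dag_id='wf_dag_partner_downstream',  # hypothetical downstream DAG id
)

# task_failed uses TriggerRule.ONE_FAILED, so it fires only when an upstream task fails.
load_partner_one_site_file >> [trigger_downstream, task_failed]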