add workflow for data file decompression, dev

root 2024-05-30 14:08:19 +08:00
parent 9d0bebda96
commit a67d0c4c88
2 changed files with 56 additions and 0 deletions


@@ -0,0 +1,50 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta

from airflow import DAG
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.operators.email_operator import EmailOperator
from airflow.utils.trigger_rule import TriggerRule

# reusable SSH connection; 'ssh_air' must exist in the Airflow connections store
sshHook = SSHHook(ssh_conn_id='ssh_air')
default_args = {
    'owner': 'info@idgvalue.com',
    'email': ['info@idgvalue.com'],  # required for email_on_failure/email_on_retry to send
    'email_on_failure': True,
    'email_on_retry': True,
    'start_date': datetime(2024, 1, 1),
    'depends_on_past': False,
    'retries': 6,
    'retry_delay': timedelta(minutes=10),
}

dag = DAG('wf_dag_dag_unzip_datafile',
          default_args=default_args,
          schedule_interval='*/10 9-18 * * *',  # every 10 minutes between 09:00 and 18:59
          catchup=False,
          dagrun_timeout=timedelta(minutes=160),
          max_active_runs=3)
# alert task: fires only when an upstream task has failed
task_failed = EmailOperator(
    dag=dag,
    trigger_rule=TriggerRule.ONE_FAILED,
    task_id='task_failed',
    to=['info@idgvalue.com'],
    cc=[''],
    subject='dag_unzip_datafile_failed',
    html_content='<h3>Hello, the dag_unzip_datafile job has failed. Please handle it promptly.</h3>')
# run the unzip/upload script on the remote host, logging per execution date
uds_uzip_upload_datafile = SSHOperator(
    ssh_hook=sshHook,
    task_id='uds_uzip_upload_datafile',
    command='/data/airflow/bin/uzip_file /data/airflow/bin/uzip_upload_datafile.sh {{ ds_nodash }} '
            '>> /data/airflow/logs/run_uzip_upload_datafile.log 2>&1',
    params={'my_param': 'uds_uzip_upload_datafile'},
    depends_on_past=False,
    retries=3,
    dag=dag)

uds_uzip_upload_datafile >> task_failed
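
A quick way to validate the new DAG before deployment is a DagBag import check. This is a minimal sketch, assuming Airflow 1.10-era APIs (matching the airflow.contrib import paths above) and that the file is already on the configured dags folder path:

# minimal import/wiring check for the DAG defined above
from airflow.models import DagBag

dagbag = DagBag()  # parses every file in the configured dags folder
assert not dagbag.import_errors, dagbag.import_errors

dag = dagbag.get_dag('wf_dag_dag_unzip_datafile')
assert dag is not None, 'DAG did not load'
assert {'uds_uzip_upload_datafile', 'task_failed'} <= set(dag.task_ids)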


@@ -0,0 +1,6 @@
[PATH]
DATA_FILE = /data/datafile/
UPLOAD_DIR = /data/datafile/upload
UZIP_TEMP_DIR = /data/datafile/upload/temp
UPLOAD_BK_DIR = /data/datafile/upload_bk
DATA_BK_DIR = /data/datafile/datafile_bk
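
The unzip script itself is not shown in this commit, but the [PATH] section above indicates which directories it works with. Below is a hypothetical sketch of a consumer using the standard-library configparser; the config file location, the *.zip pattern, and the move-to-backup step are assumptions, only the [PATH] keys come from the file above:

# hypothetical consumer of the [PATH] config above
import configparser
import glob
import os
import zipfile

cfg = configparser.ConfigParser()
cfg.read('/data/airflow/bin/uzip_datafile.cfg')  # assumed config path

upload_dir = cfg.get('PATH', 'UPLOAD_DIR')     # incoming archives
temp_dir = cfg.get('PATH', 'UZIP_TEMP_DIR')    # extraction target
backup_dir = cfg.get('PATH', 'UPLOAD_BK_DIR')  # processed archives

os.makedirs(temp_dir, exist_ok=True)
os.makedirs(backup_dir, exist_ok=True)
for archive in glob.glob(os.path.join(upload_dir, '*.zip')):
    with zipfile.ZipFile(archive) as zf:
        zf.extractall(temp_dir)  # unpack into the temp area
    # move the processed archive out of the upload directory
    os.replace(archive, os.path.join(backup_dir, os.path.basename(archive)))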