add workflow: server maintenance, dev

root 2024-06-13 14:06:49 +08:00
parent 5197bfd961
commit ab42074393
2 changed files with 60 additions and 0 deletions


@@ -0,0 +1,9 @@
#!/usr/bin/env python3
from move_data_file import move_and_compress_files

if __name__ == '__main__':
    # Back up data files: move and compress everything under /data/datafile/
    move_and_compress_files(
        "/data/datafile/"
    )
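
The move_data_file module is not part of this commit, so the helper it exports is not shown here. Below is a minimal sketch of what move_and_compress_files might do, assuming it gzip-compresses each regular file in the given directory and moves the archives into a date-stamped backup directory; backup_root and the layout are assumptions, not the actual module:

#!/usr/bin/env python3
# Hypothetical stand-in for move_data_file.move_and_compress_files;
# the real module is not included in this commit.
import gzip
import os
import shutil
from datetime import datetime

def move_and_compress_files(src_dir, backup_root="/data/backup"):  # backup_root is assumed
    """Gzip every regular file under src_dir, then move the archives
    into a date-stamped directory under backup_root."""
    dest_dir = os.path.join(backup_root, datetime.now().strftime("%Y%m%d"))
    os.makedirs(dest_dir, exist_ok=True)
    for name in os.listdir(src_dir):
        src_path = os.path.join(src_dir, name)
        if not os.path.isfile(src_path):
            continue  # skip subdirectories and special files
        gz_path = os.path.join(dest_dir, name + ".gz")
        with open(src_path, "rb") as f_in, gzip.open(gz_path, "wb") as f_out:
            shutil.copyfileobj(f_in, f_out)
        os.remove(src_path)  # drop the original only after the archive is written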


@@ -0,0 +1,51 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
from datetime import datetime, timedelta

from airflow import DAG
# NOTE: the contrib paths below are the Airflow 1.10-era locations; on Airflow 2.x
# they move to airflow.providers.ssh.hooks.ssh and airflow.providers.ssh.operators.ssh
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.operators.email_operator import EmailOperator
from airflow.utils.trigger_rule import TriggerRule

# "ssh_air" must exist as a connection in the Airflow connections store
sshHook = SSHHook(ssh_conn_id='ssh_air')
default_args = {
    'owner': 'info@idgvalue.com',
    # 'email' is required for email_on_failure / email_on_retry to actually send
    'email': ['info@idgvalue.com'],
    'email_on_failure': True,
    'email_on_retry': True,
    'start_date': datetime(2024, 1, 1),
    'depends_on_past': False,
    'retries': 6,
    'retry_delay': timedelta(minutes=10),
}
dag = DAG('wf_dag_ser_main', default_args=default_args,
          schedule_interval="0 0 * * *",  # daily at midnight
          catchup=False,
          dagrun_timeout=timedelta(minutes=160),
          max_active_runs=3)
task_failed = EmailOperator(
    dag=dag,
    trigger_rule=TriggerRule.ONE_FAILED,  # run only when an upstream task has failed
    task_id="task_failed",
    to=["info@idgvalue.com"],
    subject="ser_main_failed",
    html_content='<h3>Hello, the ser_main job failed. Please handle it promptly.</h3>')
uds_maintain_data_file = SSHOperator(
    ssh_hook=sshHook,
    task_id='uds_maintain_data_file',
    command='python /data/airflow/etl/CTL/maintain_data_file.py /data/datafile',
    params={'my_param': "uds_maintain_data_file"},
    depends_on_past=False,
    retries=3,  # overrides the default of 6 for this task
    dag=dag)
uds_maintain_data_file >> task_failed  # with ONE_FAILED, the alert email fires only if maintenance fails
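
A quick way to check that the file parses and the failure-alert wiring is correct is a DagBag smoke test; this is not part of the commit, and the DAGs folder path below is an assumption:

# Hypothetical smoke test (not in this commit): parse the DAG and check wiring.
from airflow.models import DagBag

dagbag = DagBag(dag_folder='/data/airflow/dags')  # assumed DAGs folder
assert not dagbag.import_errors, dagbag.import_errors
dag = dagbag.get_dag('wf_dag_ser_main')
assert 'task_failed' in dag.get_task('uds_maintain_data_file').downstream_task_ids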