add workflow 招聘信息导入,dev

This commit is contained in:
root 2026-02-26 14:35:04 +08:00
parent 9239fa108d
commit 8ae983d191
2 changed files with 414 additions and 0 deletions

View File

@ -0,0 +1,361 @@
import requests
import json
import psycopg2
from psycopg2.extras import RealDictCursor
import time
from typing import List, Dict, Any
import logging
# Configure root logging: INFO level, timestamped "time - level - message" format.
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(levelname)s - %(message)s'
)
# Module-level logger shared by APIClient, DatabaseManager and main().
logger = logging.getLogger(__name__)
class APIClient:
    """Thin JSON client for the Dify workflow/agent HTTP endpoint.

    All four public ``call_api_*`` methods POST to the same URL configured at
    construction time; they differ only in payload shape and timeout, so the
    shared request/response handling lives in the private ``_post`` helper
    (the original file had four near-identical copies of that logic).
    """

    def __init__(self, api_url: str, auth_token: str):
        """Store the endpoint URL and build the request headers.

        Args:
            api_url: Full URL of the workflow/agent run endpoint.
            auth_token: Literal Authorization header value (e.g. "Bearer ...").
        """
        self.api_url = api_url
        self.headers = {
            'Authorization': auth_token,
            'Content-Type': 'application/json'
        }

    def _post(self, payload: Dict[str, Any], timeout: int, start_msg: str) -> Dict[str, Any]:
        """POST *payload* as JSON and normalize the outcome into a dict.

        Args:
            payload: JSON-serializable request body.
            timeout: Request timeout in seconds.
            start_msg: Log line emitted before the request is sent.

        Returns:
            On success: ``{"success": True, "status_code": int, "data": <parsed JSON>}``.
            On any requests failure (network error or non-2xx status):
            ``{"success": False, "error": str}``.
        """
        try:
            logger.info(start_msg)
            response = requests.post(
                self.api_url,
                headers=self.headers,
                data=json.dumps(payload),
                timeout=timeout
            )
            # Turn HTTP error statuses into RequestException for uniform handling.
            response.raise_for_status()
            logger.info(f"API调用成功状态码: {response.status_code}")
            return {
                "success": True,
                "status_code": response.status_code,
                "data": response.json()
            }
        except requests.exceptions.RequestException as e:
            logger.error(f"API调用失败: {e}")
            return {
                "success": False,
                "error": str(e)
            }

    def call_api_with_inputs(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
        """Run the workflow with an explicit ``inputs`` dict (blocking mode).

        Args:
            inputs: Workflow input variables.

        Returns:
            Normalized response dict (see ``_post``).
        """
        payload = {
            "inputs": inputs,
            "response_mode": "blocking",
            "user": "admin"
        }
        return self._post(payload, timeout=1200, start_msg="调用带inputs参数的API")

    def call_api_without_inputs(self, other_params: Dict[str, Any] = None) -> Dict[str, Any]:
        """Run the workflow with empty inputs (blocking mode).

        Args:
            other_params: Optional extra payload entries; merged over the
                defaults, so it may override ``response_mode``/``user``.

        Returns:
            Normalized response dict (see ``_post``).
        """
        payload = {
            "inputs": {},
            "response_mode": "blocking",
            "user": "admin"
        }
        if other_params:
            payload.update(other_params)
        return self._post(payload, timeout=1200, start_msg="调用不带inputs参数的API")

    def call_api_with_query(self, query: str = None) -> Dict[str, Any]:
        """Run the agent-style endpoint with a chat *query*.

        NOTE(review): the payload requests "streaming" response_mode but the
        response is parsed with ``response.json()``; a true SSE stream would
        not parse as one JSON document — confirm the server returns plain JSON
        here before relying on this path.

        Args:
            query: Query string to send.

        Returns:
            Normalized response dict (see ``_post``).
        """
        payload = {
            "inputs": {},
            "query": query,
            "response_mode": "streaming",
            "user": "admin"
        }
        return self._post(payload, timeout=2400, start_msg="调用带inputs参数的API")

    def call_api_without_query(self, other_params: Dict[str, Any] = None) -> Dict[str, Any]:
        """Run the agent-style endpoint with an empty query.

        Args:
            other_params: Optional extra payload entries; merged over the
                defaults, so it may override any default key.

        Returns:
            Normalized response dict (see ``_post``).
        """
        payload = {
            "inputs": {},
            "query": "",
            "response_mode": "streaming",
            "user": "admin"
        }
        if other_params:
            payload.update(other_params)
        return self._post(payload, timeout=2400, start_msg="调用不带inputs参数的API")
class DatabaseManager:
    """Wraps a single psycopg2 connection plus simple single-column queries."""

    def __init__(self, db_config: Dict[str, str]):
        """Remember the config; the connection itself is opened lazily."""
        self.db_config = db_config
        self.connection = None

    def connect(self):
        """Open the PostgreSQL connection described by ``db_config``.

        Missing keys fall back to the usual local-postgres defaults; rows are
        returned as dicts via RealDictCursor. Re-raises on failure.
        """
        cfg = self.db_config
        try:
            self.connection = psycopg2.connect(
                host=cfg.get('host', 'localhost'),
                port=cfg.get('port', '5432'),
                database=cfg.get('database', 'postgres'),
                user=cfg.get('user', 'postgres'),
                password=cfg.get('password', ''),
                cursor_factory=RealDictCursor
            )
        except Exception as e:
            logger.error(f"数据库连接失败: {e}")
            raise
        logger.info("数据库连接成功")

    def disconnect(self):
        """Close the connection if one was ever opened; otherwise a no-op."""
        if self.connection:
            self.connection.close()
            logger.info("数据库连接已关闭")

    def get_urls_from_database(self, query: str = None) -> List[str]:
        """Execute *query* and return the first column of every row.

        Connects on first use. Falls back to a trivial "SELECT 1" when no
        query is supplied; returns an empty list (after logging) on failure.
        """
        if not self.connection:
            self.connect()
        sql = "SELECT 1" if query is None else query
        try:
            with self.connection.cursor() as cursor:
                cursor.execute(sql)
                # Rows are dicts (RealDictCursor); keep each row's first value.
                return [next(iter(row.values())) for row in cursor.fetchall()]
        except Exception as e:
            logger.error(f"查询数据库失败: {e}")
            return []
#DATABASE_URL=postgresql://dbuser_dba:EmBRxnmmjnE3@124.221.232.219:5432/daas_mpp
#DATABASE_SCHEMA=p70_ai_intelligence
def main():
    """Entry point: pull recent document texts from the Dify database and
    feed each one through the recruitment-summary workflow API."""
    # Database configuration.
    # SECURITY NOTE(review): credentials are hard-coded in source; move them
    # to environment variables or a secrets store.
    db_config = {
        'host': '124.221.232.219',
        'port': '5432',
        'database': 'dify',
        'user': 'dbuser_dba',
        'password': 'EmBRxnmmjnE3',
        'schema': 'p70_ai_intelligence'
    }
    # API configuration.
    api_config = {
        'url': 'http://tk-agent.idgvalue.com/v1/workflows/run',
        'auth_token': 'Bearer app-siNKLRExkzO9IM81P8hHuBx2'
    }
    api_client = APIClient(api_config['url'], api_config['auth_token'])
    # Call mode: 'workflow' posts inputs-style payloads, anything else posts
    # agent/query-style payloads.  (The original assigned 'agent' and then
    # immediately overwrote it with 'workflow', shadowing the builtin `type`.)
    call_mode = 'workflow'
    try:
        use_database_texts = True
        if use_database_texts:
            db_manager = DatabaseManager(db_config)
            # Concatenate all enabled segments of each document created in the
            # last 24 hours for this dataset, newest document first.
            custom_query = """ SELECT
                ds.document_id,
                MIN(ds.created_at) AS doc_time,
                string_agg(ds.content, '' ORDER BY ds.position) AS full_text
                FROM public.document_segments ds
                WHERE ds.dataset_id = '33d1b946-acd2-4538-b6cc-e0cd77824aba'
                AND ds.enabled
                AND ds.created_at >= NOW() - INTERVAL '24 hours'
                GROUP BY ds.document_id
                ORDER BY doc_time DESC """
            try:
                # Renamed from `list` (shadowed the builtin).
                texts = db_manager.get_urls_from_database(custom_query)
                if not texts:
                    logger.warning("未获取到URL")
                    return
                total = len(texts)
                for i, text in enumerate(texts, 1):
                    logger.info(f"处理第 {i}/{total} 个数据: {text}")
                    if call_mode == 'workflow':  # was `type is 'workflow'`: fragile identity check on a str literal
                        inputs_data = {
                            "content": f"{text}",
                            "type": "招聘信息汇总"
                        }
                        result = api_client.call_api_with_inputs(inputs_data)
                    else:
                        # agent-style call
                        result = api_client.call_api_with_query(text)
                    if result['success']:
                        logger.info(f"URL {text} 处理成功")
                    else:
                        logger.error(f"URL {text} 处理失败: {result.get('error')}")
                    # Throttle between requests.  BUG FIX: the original tested
                    # `i < len(text)` — the length of the current string — so
                    # the sleep depended on text size, not on remaining items.
                    if i < total:
                        time.sleep(1)
            except Exception as e:
                logger.error(f"程序执行失败: {e}")
            finally:
                db_manager.disconnect()
        else:
            logger.info("调用不带inputs参数的API示例")
            if call_mode == 'workflow':
                result2 = api_client.call_api_without_inputs()
            else:
                # agent-style call
                result2 = api_client.call_api_without_query()
            if result2['success']:
                logger.info("不带inputs参数的API调用成功")
            else:
                logger.error(f"不带inputs参数的API调用失败: {result2.get('error')}")
    except Exception as e:
        logger.error(f"初始化失败: {e}")


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,53 @@
#!/usr/bin/python
# -*- encoding=utf-8 -*-
from airflow import DAG
from datetime import datetime, timedelta
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.sensors.external_task_sensor import ExternalTaskSensor
import json
from airflow.operators.trigger_dagrun import TriggerDagRunOperator
from airflow.operators.email_operator import EmailOperator
from airflow.utils.trigger_rule import TriggerRule
# SSH connection used by the remote shell task below.
ssh_hook = SSHHook(ssh_conn_id='ssh_air')

# Defaults applied to every task: owner/alerting plus a generous retry policy.
default_args = {
    'owner': 'tek_newsletter@163.com',
    'email_on_failure': True,
    'email_on_retry': True,
    'start_date': datetime(2024, 1, 1),
    'depends_on_past': False,
    'retries': 6,
    'retry_delay': timedelta(minutes=10),
}

# Weekly schedule: 01:00 every Friday; at most three concurrent runs.
dag = DAG(
    'wf_dag_agent_recruitment',
    default_args=default_args,
    schedule_interval="0 1 * * 5",
    catchup=False,
    dagrun_timeout=timedelta(minutes=600),
    max_active_runs=3,
    tags=['64cbd1bbced14209b5e3a879f89e8ab1', 'TK_Cust', '招聘信息导入'],
)

# Failure notification: fires as soon as any upstream task fails.
task_failed = EmailOperator(
    dag=dag,
    trigger_rule=TriggerRule.ONE_FAILED,
    task_id="task_failed",
    to=["tek_newsletter@163.com"],
    cc=[""],
    subject="agent_recruitment_failed",
    html_content='<h3>您好agent_recruitment作业失败请及时处理" </h3>',
)

# Run the batch import script on the remote host.  NOTE(review): retries=1
# here deliberately overrides the DAG-level default of 6 — confirm intended.
batch_gen_recruiment = SSHOperator(
    ssh_hook=ssh_hook,
    task_id='batch_gen_recruiment',
    command='cd /data/airflow/etl/agents && python batch_gen_recruiment.py',
    params={'my_param': "batch_gen_recruiment"},
    depends_on_past=False,
    retries=1,
    dag=dag,
)

batch_gen_recruiment >> task_failed