add workflow partner1site,dev

root 2025-09-28 18:36:52 +08:00
parent da801f06a1
commit 833e2d61e7
3 changed files with 272 additions and 0 deletions

View File

@@ -122,7 +122,27 @@ depends_on_past=False,
    retries=3,
    dag=dag)

part_summary_custome_feign = SSHOperator(
    ssh_hook=sshHook,
    task_id='part_summary_custome_feign',
    command='python3 /data/airflow/etl/API/part_summary_custome_feign.py',
    depends_on_past=False,
    retries=3,
    dag=dag)

part_summary_custome_load = SSHOperator(
    ssh_hook=sshHook,
    task_id='part_summary_custome_load',
    command='/data/airflow/etl/API/run_psql.sh {{ ds_nodash }} {{ params.my_param }}',
    params={'my_param': "part_summary_custome_load"},
    depends_on_past=False,
    retries=3,
    dag=dag)

part_summary_custome_feign >> part_summary_custome_load

part_summary_visit_load >> partner_summary_visit_9060
part_summary_report_load >> partner_summary_report_6257
part_summary_pos_load >> partner_summary_pos_4937

partner_summary_pos_4937 >> task_failed
part_summary_custome_load >> task_failed
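
For reference, a minimal sketch (not part of the commit) of how the templated command on the new part_summary_custome_load task renders: Airflow substitutes {{ ds_nodash }} and {{ params.my_param }} at run time. Plain jinja2 and the date value 20250928 are illustrative stand-ins for Airflow's own template engine and execution date.

# Minimal rendering sketch; plain jinja2 stands in for Airflow's template engine,
# and the execution date "20250928" is an assumed example value.
from jinja2 import Template

command = "/data/airflow/etl/API/run_psql.sh {{ ds_nodash }} {{ params.my_param }}"
context = {"ds_nodash": "20250928", "params": {"my_param": "part_summary_custome_load"}}

print(Template(command).render(**context))
# -> /data/airflow/etl/API/run_psql.sh 20250928 part_summary_custome_load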

View File

@@ -0,0 +1,177 @@
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Partner1site 全接口抓取脚本
分页结束条件hasNext == False
"""
import random
import hmac
import hashlib
import base64
import requests
import json
import uuid
from datetime import datetime, timezone, timedelta
from typing import Dict, Any
import psycopg2
# ======= Configuration =======
ACCESS_KEY = "75c4ab4d-6a67-4aed-8b1d-5bb64fd36afc"
SECRET_KEY = "117347a7dd066a50a4d2973c5f3d5ba9101094c5"

# URLs are placeholders (replaced externally)
BASE_URLS = {
    "visits": "http://onesite.tek.cn/api/summary/customer_and_contact_datas",                      # customer visit data
    "reports": "http://onesite.tek.cn/api/summary/customer_and_contact_datas",                     # report (filing) data
    "pos_datas": "http://onesite.tek.cn/api/summary/customer_and_contact_datas",                   # POS data
    "customer_and_contact_datas": "http://onesite.tek.cn/api/summary/customer_and_contact_datas"   # customer and contact data
}

PG_DSN = dict(
    database="dataops_db",
    user="dbuser_dba",
    password="EmBRxnmmjnE3",
    host="124.221.232.219",
    port="5432"
)

API_ID = "e35b97d7-3b93-4b15-9bba-ff91ebd4"  # api_id passed in externally (placeholder)
# ==============================
class Partner1SiteClient:
    """Partner1site API client."""

    def __init__(self, access_key: str, secret_key: str):
        self.ak = access_key
        self.sk = secret_key

    @staticmethod
    def urlsafe_b64encode(data: bytes) -> str:
        return base64.urlsafe_b64encode(data).decode()

    def gen_token(self, expire_sec: int = 600) -> str:
        """Generate an API token."""
        random_num = str(random.randint(100000, 999999))
        deadline = int(datetime.now(timezone.utc).timestamp()) + expire_sec
        parm_str = f"{random_num}:{deadline}"
        enc_parm = self.urlsafe_b64encode(parm_str.encode())
        sign = hmac.new(self.sk.encode(), enc_parm.encode(), hashlib.sha1).digest()
        enc_sign = self.urlsafe_b64encode(sign.hex().encode())
        return f"{self.ak}:{enc_sign}:{enc_parm}"

    def fetch_all_pages(self, api_name: str, params: Dict[str, Any]):
        """Generic paged request; stops when hasNext == False."""
        if api_name not in BASE_URLS:
            raise ValueError(f"Unknown API data source: {api_name}")
        base_url = BASE_URLS[api_name]
        all_data = []
        page_num = 0
        page_size = 50  # fixed page size
        while True:
            token = self.gen_token()
            params_with_paging = dict(params)
            params_with_paging.update({
                "token": token,
                "size": page_size,
                "page": page_num
            })
            resp = requests.get(base_url, params=params_with_paging, timeout=30)
            resp.raise_for_status()
            data_json = resp.json()
            if data_json.get("code") != 100 or not data_json.get("success", False):
                raise RuntimeError(f"{api_name} API error: {data_json.get('message')}")
            content = data_json.get("data", {}).get("content", [])
            all_data.extend(content)
            total_elements = data_json.get("data", {}).get("totalElements")
            has_next = data_json.get("data", {}).get("hasNext", False)
            print(f"[{api_name}] page {page_num} -> {len(content)} rows this page, {len(all_data)} cumulative / {total_elements} total")
            if not has_next:
                break
            page_num += 1
        return all_data
def save_json_to_pg(data: list, api_id: str) -> None:
    """Write to PostgreSQL: soft-delete previous rows, then insert the new data."""
    print(f"[save_to_pg] API={api_id} writing to PG, records={len(data)}")
    sql = """
        UPDATE data_api.api_data
        SET is_loaded = '1'
        WHERE api_id = %s;
        INSERT INTO data_api.api_data
            (id, api_id, data, total_num, is_loaded, status,
             request_tm, execute_tm, remark)
        VALUES (%s, %s, %s, %s, '0', '0',
                current_timestamp(0), current_timestamp(0), '');
    """
    try:
        with psycopg2.connect(**PG_DSN) as conn:
            with conn.cursor() as cur:
                cur.execute(sql,
                            (api_id,
                             str(uuid.uuid4()),
                             api_id,
                             json.dumps(data, ensure_ascii=False),
                             len(data)))
            conn.commit()
        print(f"[save_to_pg] API={api_id} write complete")
    except Exception as e:
        raise RuntimeError(f"PG write error: {e}")


def get_previous_date(days: int = 0) -> str:
    return (datetime.now() - timedelta(days=days)).strftime("%Y-%m-%d")
def main():
    client = Partner1SiteClient(ACCESS_KEY, SECRET_KEY)

    # ✅ Customer visit data (active)
    visits_data = client.fetch_all_pages(
        api_name="visits",
        params={}  # full pull; if a default time window applies, change to {"startInsertDate": "2000-01-01", "endInsertDate": get_previous_date(0)}
    )
    save_json_to_pg(visits_data, API_ID)

    # ❌ Report data (commented out for now)
    """
    reports_data = client.fetch_all_pages(
        api_name="reports",
        params={}
    )
    save_json_to_pg(reports_data, API_ID)
    """

    # ❌ POS data (commented out for now)
    """
    pos_data = client.fetch_all_pages(
        api_name="pos_datas",
        params={}
    )
    save_json_to_pg(pos_data, API_ID)
    """

    # ❌ Customer and contact data (commented out for now)
    """
    cust_contact_data = client.fetch_all_pages(
        api_name="customer_and_contact_datas",
        params={}
    )
    save_json_to_pg(cust_contact_data, API_ID)
    """


if __name__ == "__main__":
    main()
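
As a cross-check on the token format, a minimal sketch (not part of the commit) of how a receiver could validate a token produced by gen_token(): split it into ak:enc_sign:enc_parm, recompute the HMAC-SHA1 over enc_parm with the shared secret, and compare signatures. That the Partner1site server verifies tokens this way, and that it enforces the embedded deadline, are assumptions.

# Hypothetical verifier mirroring gen_token() above; server-side behavior is assumed.
import base64
import hashlib
import hmac
from datetime import datetime, timezone

def verify_token(token: str, secret_key: str) -> bool:
    _ak, enc_sign, enc_parm = token.split(":")
    # Recompute the signature exactly as gen_token() builds it:
    # HMAC-SHA1 digest -> hex string -> urlsafe base64.
    digest = hmac.new(secret_key.encode(), enc_parm.encode(), hashlib.sha1).digest()
    expected_sign = base64.urlsafe_b64encode(digest.hex().encode()).decode()
    if not hmac.compare_digest(enc_sign, expected_sign):
        return False
    # enc_parm decodes to "random:deadline"; reject tokens past their deadline.
    _, deadline = base64.urlsafe_b64decode(enc_parm).decode().split(":")
    return int(deadline) >= int(datetime.now(timezone.utc).timestamp())

# e.g. verify_token(Partner1SiteClient(ACCESS_KEY, SECRET_KEY).gen_token(), SECRET_KEY) -> True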

View File

@@ -0,0 +1,75 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
DELETE FROM data_api.partner_summary_customer;
insert into data_api.partner_summary_customer (
address
, city
, contact_address
, contact_city
, contact_name
, contact_province
, customer_contact_id
, customer_id
, customer_name
, department
, email
, industry
, phone_number
, province
, sub_industry
, website_desc
, website_url
,etl_tx_dt
)
select
case when trim(both from address)='' then null else address::text end address
, case when trim(both from city)='' then null else city::text end city
, case when trim(both from contact_address)='' then null else contact_address::text end contact_address
, case when trim(both from contact_city)='' then null else contact_city::text end contact_city
, case when trim(both from contact_name)='' then null else contact_name::text end contact_name
, case when trim(both from contact_province)='' then null else contact_province::text end contact_province
, case when trim(both from customer_contact_id)='' then null else customer_contact_id::text end customer_contact_id
, case when trim(both from customer_id)='' then null else customer_id::text end customer_id
, case when trim(both from customer_name)='' then null else customer_name::text end customer_name
, case when trim(both from department)='' then null else department::text end department
, case when trim(both from email)='' then null else email::text end email
, case when trim(both from industry)='' then null else industry::text end industry
, case when trim(both from phone_number)='' then null else phone_number::text end phone_number
, case when trim(both from province)='' then null else province::text end province
, case when trim(both from sub_industry)='' then null else sub_industry::text end sub_industry
, case when trim(both from website_desc)='' then null else website_desc::text end website_desc
, case when trim(both from website_url)='' then null else website_url::text end website_url
,etl_tx_dt
from (
select
(json_array_elements(data::json)::json->>'address') address
, (json_array_elements(data::json)::json->>'city') city
, (json_array_elements(data::json)::json->>'contactAddress') contact_address
, (json_array_elements(data::json)::json->>'contactCity') contact_city
, (json_array_elements(data::json)::json->>'contactName') contact_name
, (json_array_elements(data::json)::json->>'contactProvince') contact_province
, (json_array_elements(data::json)::json->>'customerContactId') customer_contact_id
, (json_array_elements(data::json)::json->>'customerId') customer_id
, (json_array_elements(data::json)::json->>'customerName') customer_name
, (json_array_elements(data::json)::json->>'department') department
, (json_array_elements(data::json)::json->>'email') email
, (json_array_elements(data::json)::json->>'industry') industry
, (json_array_elements(data::json)::json->>'phoneNumber') phone_number
, (json_array_elements(data::json)::json->>'province') province
, (json_array_elements(data::json)::json->>'subIndustry') sub_industry
, (json_array_elements(data::json)::json->>'websiteDesc') website_desc
, (json_array_elements(data::json)::json->>'websiteUrl') website_url
,CURRENT_TIMESTAMP(0) etl_tx_dt
from (select * from data_api.api_data
WHERE api_id='e35b97d7-3b93-4b15-9bba-ff91ebd4' and is_loaded = '0' order by request_tm desc limit 1) p )p;
update data_api.api_data
set is_loaded = '1' ,
status = '1',
request_tm = current_timestamp(0)
where api_id='e35b97d7-3b93-4b15-9bba-ff91ebd4';
\q
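
Finally, a minimal sketch (not part of the commit) of a post-load spot check, reusing PG_DSN and API_ID from the fetch script above: it compares the row count landed in data_api.partner_summary_customer with the total_num recorded on the most recent data_api.api_data row for this api_id.

# Hypothetical post-load check; connection settings are copied from the fetch script.
import psycopg2

PG_DSN = dict(database="dataops_db", user="dbuser_dba", password="EmBRxnmmjnE3",
              host="124.221.232.219", port="5432")
API_ID = "e35b97d7-3b93-4b15-9bba-ff91ebd4"

with psycopg2.connect(**PG_DSN) as conn, conn.cursor() as cur:
    cur.execute("SELECT count(*) FROM data_api.partner_summary_customer")
    loaded_rows = cur.fetchone()[0]
    cur.execute(
        "SELECT total_num FROM data_api.api_data "
        "WHERE api_id = %s ORDER BY request_tm DESC LIMIT 1",
        (API_ID,))
    row = cur.fetchone()
    total_num = row[0] if row else None
    print(f"partner_summary_customer rows: {loaded_rows}, api_data total_num: {total_num}")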