add workflow: Jiandaoyun sales visit data sync, dev

root 2025-09-22 19:01:10 +08:00
parent 77ee32efc1
commit f90e05a55b
26 changed files with 3373 additions and 0 deletions

View File

@@ -0,0 +1,154 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
delete from p10_sa.S98_S_jdy_account
;
insert into p10_sa.S98_S_jdy_account
( oracle_number
, import_id
, account_id
, account_nick_name
, account_name
, search_result
, city
, code
, legal_name
, depart
, channel
, status
, address
, post_address
, sale_id
, parent_company
, industry
, sub_industry
, custom_class
, custom_type
, create_by
, create_tm
, addition_tm
, wx_open_id
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
oracle_number
, import_id
, account_id
, account_nick_name
, account_name
, search_result
, city
, code
, legal_name
, depart
, channel
, status
, address
, post_address
, sale_id
, parent_company
, industry
, sub_industry
, custom_class
, custom_type
, create_by
, create_tm
, addition_tm
, wx_open_id
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p00_tal.S98_S_jdy_account
;
delete from p12_sfull.S98_S_jdy_account
;
insert into p12_sfull.S98_S_jdy_account
( oracle_number
, import_id
, account_id
, account_nick_name
, account_name
, search_result
, city
, code
, legal_name
, depart
, channel
, status
, address
, post_address
, sale_id
, parent_company
, industry
, sub_industry
, custom_class
, custom_type
, create_by
, create_tm
, addition_tm
, wx_open_id
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
oracle_number
, import_id
, account_id
, account_nick_name
, account_name
, search_result
, city
, code
, legal_name
, depart
, channel
, status
, address
, post_address
, sale_id
, parent_company
, industry
, sub_industry
, custom_class
, custom_type
, create_by
, create_tm
, addition_tm
, wx_open_id
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p10_sa.S98_S_jdy_account
;
\q
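Note: the script above runs with AUTOCOMMIT on, so the DELETE and the INSERT commit independently; if the INSERT fails, the target table stays empty until the next run. A minimal sketch of the same refresh done atomically (not part of this commit; it assumes the source and target column order match, which the DDL files below confirm):

BEGIN;
TRUNCATE p10_sa.S98_S_jdy_account;
INSERT INTO p10_sa.S98_S_jdy_account
SELECT * FROM p00_tal.S98_S_jdy_account;
COMMIT;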

View File

@@ -0,0 +1,45 @@
CREATE FOREIGN TABLE if not exists p00_tal.S98_S_jdy_account (
oracle_number TEXT
, import_id TEXT
, account_id TEXT
, account_nick_name TEXT
, account_name TEXT
, search_result TEXT
, city TEXT
, code TEXT
, legal_name TEXT
, depart TEXT
, channel TEXT
, status TEXT
, address TEXT
, post_address TEXT
, sale_id TEXT
, parent_company TEXT
, industry TEXT
, sub_industry TEXT
, custom_class TEXT
, custom_type TEXT
, create_by TEXT
, create_tm TEXT
, addition_tm TEXT
, wx_open_id TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
)
SERVER pgsql_server_S98_S OPTIONS(schema_name 'data_api', table_name 'jdy_account' );
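The foreign table above references SERVER pgsql_server_S98_S, which must already exist. A minimal postgres_fdw sketch of the server and user mapping it presumes (host, dbname, and credentials are placeholders, not values from this commit):

CREATE EXTENSION IF NOT EXISTS postgres_fdw;
CREATE SERVER pgsql_server_S98_S
  FOREIGN DATA WRAPPER postgres_fdw
  OPTIONS (host 'source-host', port '5432', dbname 'source_db');   -- placeholders
CREATE USER MAPPING FOR CURRENT_USER
  SERVER pgsql_server_S98_S
  OPTIONS (user 'source_user', password 'source_password');        -- placeholders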

View File

@@ -0,0 +1,151 @@
create table if not exists p10_sa.S98_S_jdy_account (
oracle_number TEXT
, import_id TEXT
, account_id TEXT
, account_nick_name TEXT
, account_name TEXT
, search_result TEXT
, city TEXT
, code TEXT
, legal_name TEXT
, depart TEXT
, channel TEXT
, status TEXT
, address TEXT
, post_address TEXT
, sale_id TEXT
, parent_company TEXT
, industry TEXT
, sub_industry TEXT
, custom_class TEXT
, custom_type TEXT
, create_by TEXT
, create_tm TEXT
, addition_tm TEXT
, wx_open_id TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.oracle_number IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.import_id IS 'Import ID';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.account_id IS 'Company ID';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.account_nick_name IS 'Company short name';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.account_name IS 'Company name';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.search_result IS 'Fuzzy search result';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.city IS 'City';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.code IS 'Postal code';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.legal_name IS 'Company English (legal) name';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.depart IS 'Department / college';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.channel IS 'Channel source';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.status IS 'Status (hidden when adding; editable by admins only)';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.address IS 'Company address';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.post_address IS 'Detailed shipping address';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.sale_id IS 'Owning sales rep';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.parent_company IS 'Parent company';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.industry IS 'Industry';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.sub_industry IS 'Sub-industry';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.custom_class IS 'Customer class';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.custom_type IS 'Customer type';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.create_by IS 'Created by';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.create_tm IS 'Creation time';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.addition_tm IS 'F-auxiliary time';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.wx_open_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.creator IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.create_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.updater IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.update_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.delete_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.deleter IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.app_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.entry_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_account.etl_tx_dt IS '';
COMMENT ON TABLE p10_sa.S98_S_jdy_account IS '';
create table if not exists p12_sfull.S98_S_jdy_account (
oracle_number TEXT
, import_id TEXT
, account_id TEXT
, account_nick_name TEXT
, account_name TEXT
, search_result TEXT
, city TEXT
, code TEXT
, legal_name TEXT
, depart TEXT
, channel TEXT
, status TEXT
, address TEXT
, post_address TEXT
, sale_id TEXT
, parent_company TEXT
, industry TEXT
, sub_industry TEXT
, custom_class TEXT
, custom_type TEXT
, create_by TEXT
, create_tm TEXT
, addition_tm TEXT
, wx_open_id TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.oracle_number IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.import_id IS 'Import ID';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.account_id IS 'Company ID';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.account_nick_name IS 'Company short name';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.account_name IS 'Company name';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.search_result IS 'Fuzzy search result';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.city IS 'City';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.code IS 'Postal code';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.legal_name IS 'Company English (legal) name';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.depart IS 'Department / college';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.channel IS 'Channel source';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.status IS 'Status (hidden when adding; editable by admins only)';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.address IS 'Company address';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.post_address IS 'Detailed shipping address';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.sale_id IS 'Owning sales rep';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.parent_company IS 'Parent company';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.industry IS 'Industry';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.sub_industry IS 'Sub-industry';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.custom_class IS 'Customer class';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.custom_type IS 'Customer type';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.create_by IS 'Created by';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.create_tm IS 'Creation time';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.addition_tm IS 'F-auxiliary time';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.wx_open_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.creator IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.create_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.updater IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.update_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.delete_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.deleter IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.app_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.entry_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_account.etl_tx_dt IS '';
COMMENT ON TABLE p12_sfull.S98_S_jdy_account IS '';

View File

@@ -0,0 +1,70 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
delete from p10_sa.S98_S_jdy_ae_plan
;
insert into p10_sa.S98_S_jdy_ae_plan
( ae
, task_flowup_time
, status
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
ae
, task_flowup_time
, status
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p00_tal.S98_S_jdy_ae_plan
;
delete from p12_sfull.S98_S_jdy_ae_plan
;
insert into p12_sfull.S98_S_jdy_ae_plan
( ae
, task_flowup_time
, status
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
ae
, task_flowup_time
, status
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p10_sa.S98_S_jdy_ae_plan
;
\q

View File

@@ -0,0 +1,24 @@
CREATE FOREIGN TABLE if not exists p00_tal.S98_S_jdy_ae_plan (
ae TEXT
, task_flowup_time TEXT
, status TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
)
SERVER pgsql_server_S98_S OPTIONS(schema_name 'data_api', table_name 'jdy_ae_plan' );

View File

@@ -0,0 +1,67 @@
create table if not exists p10_sa.S98_S_jdy_ae_plan (
ae TEXT
, task_flowup_time TEXT
, status TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.ae IS 'AE';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.task_flowup_time IS 'Task Followup Date/Time';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.status IS 'Scheduling status';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.creator IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.create_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.updater IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.update_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.delete_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.deleter IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.app_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.entry_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_ae_plan.etl_tx_dt IS '';
COMMENT ON TABLE p10_sa.S98_S_jdy_ae_plan IS '';
create table if not exists p12_sfull.S98_S_jdy_ae_plan (
ae TEXT
, task_flowup_time TEXT
, status TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.ae IS 'AE';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.task_flowup_time IS 'Task Followup Date/Time';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.status IS 'Scheduling status';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.creator IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.create_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.updater IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.update_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.delete_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.deleter IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.app_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.entry_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_ae_plan.etl_tx_dt IS '';
COMMENT ON TABLE p12_sfull.S98_S_jdy_ae_plan IS '';

View File

@@ -0,0 +1,86 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
delete from p10_sa.S98_S_jdy_calendar
;
insert into p10_sa.S98_S_jdy_calendar
( tektronix_ap
, standard_year_calendar
, tek_year
, tektronix_ap_week
, tek_month
, tek_q
, calendar_week
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
tektronix_ap
, standard_year_calendar
, tek_year
, tektronix_ap_week
, tek_month
, tek_q
, calendar_week
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p00_tal.S98_S_jdy_calendar
;
delete from p12_sfull.S98_S_jdy_calendar
;
insert into p12_sfull.S98_S_jdy_calendar
( tektronix_ap
, standard_year_calendar
, tek_year
, tektronix_ap_week
, tek_month
, tek_q
, calendar_week
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
tektronix_ap
, standard_year_calendar
, tek_year
, tektronix_ap_week
, tek_month
, tek_q
, calendar_week
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p10_sa.S98_S_jdy_calendar
;
\q

View File

@@ -0,0 +1,28 @@
CREATE FOREIGN TABLE if not exists p00_tal.S98_S_jdy_calendar (
tektronix_ap TEXT
, standard_year_calendar TEXT
, tek_year TEXT
, tektronix_ap_week TEXT
, tek_month TEXT
, tek_q TEXT
, calendar_week TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
)
SERVER pgsql_server_S98_S OPTIONS(schema_name 'data_api', table_name 'jdy_calendar' );

View File

@@ -0,0 +1,83 @@
create table if not exists p10_sa.S98_S_jdy_calendar (
tektronix_ap TEXT
, standard_year_calendar TEXT
, tek_year TEXT
, tektronix_ap_week TEXT
, tek_month TEXT
, tek_q TEXT
, calendar_week TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.tektronix_ap IS 'Tektronix AP';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.standard_year_calendar IS 'Standard Year Calendar';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.tek_year IS 'Tek Year';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.tektronix_ap_week IS 'Tektronix AP-WEEK';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.tek_month IS 'Tek Month';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.tek_q IS 'Tek Q';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.calendar_week IS 'Calendar Week';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.creator IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.create_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.updater IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.update_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.delete_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.deleter IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.app_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.entry_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_calendar.etl_tx_dt IS '';
COMMENT ON TABLE p10_sa.S98_S_jdy_calendar IS '';
create table if not exists p12_sfull.S98_S_jdy_calendar (
tektronix_ap TEXT
, standard_year_calendar TEXT
, tek_year TEXT
, tektronix_ap_week TEXT
, tek_month TEXT
, tek_q TEXT
, calendar_week TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.tektronix_ap IS 'Tektronix AP';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.standard_year_calendar IS 'Standard Year Calendar';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.tek_year IS 'Tek Year';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.tektronix_ap_week IS 'Tektronix AP-WEEK';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.tek_month IS 'Tek Month';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.tek_q IS 'Tek Q';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.calendar_week IS 'Calendar Week';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.creator IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.create_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.updater IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.update_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.delete_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.deleter IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.app_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.entry_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_calendar.etl_tx_dt IS '';
COMMENT ON TABLE p12_sfull.S98_S_jdy_calendar IS '';

View File

@@ -0,0 +1,110 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
delete from p10_sa.S98_S_jdy_contact
;
insert into p10_sa.S98_S_jdy_contact
( id
, account_id
, account_name
, legal_name
, abbreviation
, city_name
, contact_id
, contact_name
, phone
, wechat_id
, email
, salers
, title
, phone2
, app_id
, create_time
, creator
, delete_time
, deleter
, entry_id
, update_time
, updater
, etl_tx_dt )
select
id
, account_id
, account_name
, legal_name
, abbreviation
, city_name
, contact_id
, contact_name
, phone
, wechat_id
, email
, salers
, title
, phone2
, app_id
, create_time
, creator
, delete_time
, deleter
, entry_id
, update_time
, updater
, etl_tx_dt
from p00_tal.S98_S_jdy_contact
;
delete from p12_sfull.S98_S_jdy_contact
;
insert into p12_sfull.S98_S_jdy_contact
( id
, account_id
, account_name
, legal_name
, abbreviation
, city_name
, contact_id
, contact_name
, phone
, wechat_id
, email
, salers
, title
, phone2
, app_id
, create_time
, creator
, delete_time
, deleter
, entry_id
, update_time
, updater
, etl_tx_dt )
select
id
, account_id
, account_name
, legal_name
, abbreviation
, city_name
, contact_id
, contact_name
, phone
, wechat_id
, email
, salers
, title
, phone2
, app_id
, create_time
, creator
, delete_time
, deleter
, entry_id
, update_time
, updater
, etl_tx_dt
from p10_sa.S98_S_jdy_contact
;
\q

View File

@@ -0,0 +1,34 @@
CREATE FOREIGN TABLE if not exists p00_tal.S98_S_jdy_contact (
id TEXT
, account_id TEXT
, account_name TEXT
, legal_name TEXT
, abbreviation TEXT
, city_name TEXT
, contact_id TEXT
, contact_name TEXT
, phone TEXT
, wechat_id TEXT
, email TEXT
, salers TEXT
, title TEXT
, phone2 TEXT
, app_id TEXT
, create_time TEXT
, creator TEXT
, delete_time TEXT
, deleter TEXT
, entry_id TEXT
, update_time TEXT
, updater TEXT
, etl_tx_dt TIMESTAMP
)
SERVER pgsql_server_S98_S OPTIONS(schema_name 'data_api', table_name 'jdy_contact' );

View File

@@ -0,0 +1,107 @@
create table if not exists p10_sa.S98_S_jdy_contact (
id TEXT
, account_id TEXT
, account_name TEXT
, legal_name TEXT
, abbreviation TEXT
, city_name TEXT
, contact_id TEXT
, contact_name TEXT
, phone TEXT
, wechat_id TEXT
, email TEXT
, salers TEXT
, title TEXT
, phone2 TEXT
, app_id TEXT
, create_time TEXT
, creator TEXT
, delete_time TEXT
, deleter TEXT
, entry_id TEXT
, update_time TEXT
, updater TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.account_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.account_name IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.legal_name IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.abbreviation IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.city_name IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.contact_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.contact_name IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.phone IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.wechat_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.email IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.salers IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.title IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.phone2 IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.app_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.create_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.creator IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.delete_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.deleter IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.entry_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.update_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.updater IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_contact.etl_tx_dt IS '';
COMMENT ON TABLE p10_sa.S98_S_jdy_contact IS '';
create table if not exists p12_sfull.S98_S_jdy_contact (
id TEXT
, account_id TEXT
, account_name TEXT
, legal_name TEXT
, abbreviation TEXT
, city_name TEXT
, contact_id TEXT
, contact_name TEXT
, phone TEXT
, wechat_id TEXT
, email TEXT
, salers TEXT
, title TEXT
, phone2 TEXT
, app_id TEXT
, create_time TEXT
, creator TEXT
, delete_time TEXT
, deleter TEXT
, entry_id TEXT
, update_time TEXT
, updater TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.account_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.account_name IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.legal_name IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.abbreviation IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.city_name IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.contact_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.contact_name IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.phone IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.wechat_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.email IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.salers IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.title IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.phone2 IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.app_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.create_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.creator IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.delete_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.deleter IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.entry_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.update_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.updater IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_contact.etl_tx_dt IS '';
COMMENT ON TABLE p12_sfull.S98_S_jdy_contact IS '';

View File

@@ -0,0 +1,234 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
delete from p10_sa.S98_S_jdy_daily_visit
;
insert into p10_sa.S98_S_jdy_daily_visit
( account
, assign_to
, task_name
, visit_time
, task_follow_up_time
, irtnc_state
, segments
, application
, intelligence_source
, funnels_created
, status
, order_ae_flag
, ae_confict_flag
, account2
, city
, company_name_en
, nick_name
, company_id
, salar
, contact
, phone
, wechat_id
, email
, new_flag
, account_type
, products
, product_family1
, product_family2
, product_family3
, attachments
, remarks
, contact_phone_flag
, repeat_flag
, am
, visit_tm
, replan_flag
, status2
, ap_week
, year_text
, year_no
, quater_no
, assigned_to_text
, time_no
, creator
, create_time
, updater
, update_time
, flow_state
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
account
, assign_to
, task_name
, visit_time
, task_follow_up_time
, irtnc_state
, segments
, application
, intelligence_source
, funnels_created
, status
, order_ae_flag
, ae_confict_flag
, account2
, city
, company_name_en
, nick_name
, company_id
, salar
, contact
, phone
, wechat_id
, email
, new_flag
, account_type
, products
, product_family1
, product_family2
, product_family3
, attachments
, remarks
, contact_phone_flag
, repeat_flag
, am
, visit_tm
, replan_flag
, status2
, ap_week
, year_text
, year_no
, quater_no
, assigned_to_text
, time_no
, creator
, create_time
, updater
, update_time
, flow_state
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p00_tal.S98_S_jdy_daily_visit
;
delete from p12_sfull.S98_S_jdy_daily_visit
;
insert into p12_sfull.S98_S_jdy_daily_visit
( account
, assign_to
, task_name
, visit_time
, task_follow_up_time
, irtnc_state
, segments
, application
, intelligence_source
, funnels_created
, status
, order_ae_flag
, ae_confict_flag
, account2
, city
, company_name_en
, nick_name
, company_id
, salar
, contact
, phone
, wechat_id
, email
, new_flag
, account_type
, products
, product_family1
, product_family2
, product_family3
, attachments
, remarks
, contact_phone_flag
, repeat_flag
, am
, visit_tm
, replan_flag
, status2
, ap_week
, year_text
, year_no
, quater_no
, assigned_to_text
, time_no
, creator
, create_time
, updater
, update_time
, flow_state
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt )
select
account
, assign_to
, task_name
, visit_time
, task_follow_up_time
, irtnc_state
, segments
, application
, intelligence_source
, funnels_created
, status
, order_ae_flag
, ae_confict_flag
, account2
, city
, company_name_en
, nick_name
, company_id
, salar
, contact
, phone
, wechat_id
, email
, new_flag
, account_type
, products
, product_family1
, product_family2
, product_family3
, attachments
, remarks
, contact_phone_flag
, repeat_flag
, am
, visit_tm
, replan_flag
, status2
, ap_week
, year_text
, year_no
, quater_no
, assigned_to_text
, time_no
, creator
, create_time
, updater
, update_time
, flow_state
, delete_time
, deleter
, id
, app_id
, entry_id
, etl_tx_dt
from p10_sa.S98_S_jdy_daily_visit
;
\q

View File

@@ -0,0 +1,65 @@
CREATE FOREIGN TABLE if not exists p00_tal.S98_S_jdy_daily_visit (
account TEXT
, assign_to TEXT
, task_name TEXT
, visit_time TEXT
, task_follow_up_time TEXT
, irtnc_state TEXT
, segments TEXT
, application TEXT
, intelligence_source TEXT
, funnels_created TEXT
, status TEXT
, order_ae_flag TEXT
, ae_confict_flag TEXT
, account2 TEXT
, city TEXT
, company_name_en TEXT
, nick_name TEXT
, company_id TEXT
, salar TEXT
, contact TEXT
, phone TEXT
, wechat_id TEXT
, email TEXT
, new_flag TEXT
, account_type TEXT
, products TEXT
, product_family1 TEXT
, product_family2 TEXT
, product_family3 TEXT
, attachments TEXT
, remarks TEXT
, contact_phone_flag TEXT
, repeat_flag TEXT
, am TEXT
, visit_tm TEXT
, replan_flag TEXT
, status2 TEXT
, ap_week TEXT
, year_text TEXT
, year_no TEXT
, quater_no TEXT
, assigned_to_text TEXT
, time_no TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, flow_state TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
)
SERVER pgsql_server_S98_S OPTIONS(schema_name 'data_api', table_name 'jdy_daily_visit' );

View File

@@ -0,0 +1,231 @@
create table if not exists p10_sa.S98_S_jdy_daily_visit (
account TEXT
, assign_to TEXT
, task_name TEXT
, visit_time TEXT
, task_follow_up_time TEXT
, irtnc_state TEXT
, segments TEXT
, application TEXT
, intelligence_source TEXT
, funnels_created TEXT
, status TEXT
, order_ae_flag TEXT
, ae_confict_flag TEXT
, account2 TEXT
, city TEXT
, company_name_en TEXT
, nick_name TEXT
, company_id TEXT
, salar TEXT
, contact TEXT
, phone TEXT
, wechat_id TEXT
, email TEXT
, new_flag TEXT
, account_type TEXT
, products TEXT
, product_family1 TEXT
, product_family2 TEXT
, product_family3 TEXT
, attachments TEXT
, remarks TEXT
, contact_phone_flag TEXT
, repeat_flag TEXT
, am TEXT
, visit_tm TEXT
, replan_flag TEXT
, status2 TEXT
, ap_week TEXT
, year_text TEXT
, year_no TEXT
, quater_no TEXT
, assigned_to_text TEXT
, time_no TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, flow_state TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.account IS 'Account';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.assign_to IS 'Assign to';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.task_name IS 'Task Name';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.visit_time IS 'Visit time';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.task_follow_up_time IS 'Task Followup Date/Time';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.irtnc_state IS 'IRTNC State';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.segments IS 'Segments';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.application IS 'Application';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.intelligence_source IS 'Intelligence Source';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.funnels_created IS 'Funnels Created(K/USD)';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.status IS 'Status';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.order_ae_flag IS 'Whether an AE is booked';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.ae_confict_flag IS 'Whether the AE time conflicts';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.account2 IS 'Account';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.city IS 'City';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.company_name_en IS 'Company English name';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.nick_name IS 'Company short name';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.company_id IS 'Company ID';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.salar IS 'Owning sales rep';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.contact IS 'Contact';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.phone IS 'Phone';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.wechat_id IS 'WeChat';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.email IS 'Email';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.new_flag IS 'New/Old';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.account_type IS 'Account Type';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.products IS 'Products';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.product_family1 IS 'Product Family1';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.product_family2 IS 'Product Family2';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.product_family3 IS 'Product Family3';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.attachments IS 'Attachments';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.remarks IS 'Remarks';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.contact_phone_flag IS 'Whether the contact phone number can be returned';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.repeat_flag IS 'Whether this is duplicate data';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.am IS 'AM';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.visit_tm IS 'Selected visit time';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.replan_flag IS 'Auxiliary: whether the schedule is duplicated (2)';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.status2 IS 'Status2';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.ap_week IS 'AP-WEEK';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.year_text IS 'Year (text)';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.year_no IS 'Year';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.quater_no IS 'Quarter';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.assigned_to_text IS 'Assigned to (text)';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.time_no IS 'Time (numeric)';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.creator IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.create_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.updater IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.update_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.flow_state IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.delete_time IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.deleter IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.app_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.entry_id IS '';
COMMENT ON COLUMN p10_sa.S98_S_jdy_daily_visit.etl_tx_dt IS '';
COMMENT ON TABLE p10_sa.S98_S_jdy_daily_visit IS '';
create table if not exists p12_sfull.S98_S_jdy_daily_visit (
account TEXT
, assign_to TEXT
, task_name TEXT
, visit_time TEXT
, task_follow_up_time TEXT
, irtnc_state TEXT
, segments TEXT
, application TEXT
, intelligence_source TEXT
, funnels_created TEXT
, status TEXT
, order_ae_flag TEXT
, ae_confict_flag TEXT
, account2 TEXT
, city TEXT
, company_name_en TEXT
, nick_name TEXT
, company_id TEXT
, salar TEXT
, contact TEXT
, phone TEXT
, wechat_id TEXT
, email TEXT
, new_flag TEXT
, account_type TEXT
, products TEXT
, product_family1 TEXT
, product_family2 TEXT
, product_family3 TEXT
, attachments TEXT
, remarks TEXT
, contact_phone_flag TEXT
, repeat_flag TEXT
, am TEXT
, visit_tm TEXT
, replan_flag TEXT
, status2 TEXT
, ap_week TEXT
, year_text TEXT
, year_no TEXT
, quater_no TEXT
, assigned_to_text TEXT
, time_no TEXT
, creator TEXT
, create_time TEXT
, updater TEXT
, update_time TEXT
, flow_state TEXT
, delete_time TEXT
, deleter TEXT
, id TEXT
, app_id TEXT
, entry_id TEXT
, etl_tx_dt TIMESTAMP
) ;
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.account IS 'Account';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.assign_to IS 'Assign to';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.task_name IS 'Task Name';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.visit_time IS 'Visit time';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.task_follow_up_time IS 'Task Followup Date/Time';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.irtnc_state IS 'IRTNC State';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.segments IS 'Segments';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.application IS 'Application';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.intelligence_source IS 'Intelligence Source';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.funnels_created IS 'Funnels Created(K/USD)';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.status IS 'Status';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.order_ae_flag IS 'Whether an AE is booked';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.ae_confict_flag IS 'Whether the AE time conflicts';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.account2 IS 'Account';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.city IS 'City';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.company_name_en IS 'Company English name';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.nick_name IS 'Company short name';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.company_id IS 'Company ID';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.salar IS 'Owning sales rep';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.contact IS 'Contact';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.phone IS 'Phone';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.wechat_id IS 'WeChat';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.email IS 'Email';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.new_flag IS 'New/Old';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.account_type IS 'Account Type';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.products IS 'Products';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.product_family1 IS 'Product Family1';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.product_family2 IS 'Product Family2';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.product_family3 IS 'Product Family3';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.attachments IS 'Attachments';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.remarks IS 'Remarks';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.contact_phone_flag IS 'Whether the contact phone number can be returned';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.repeat_flag IS 'Whether this is duplicate data';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.am IS 'AM';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.visit_tm IS 'Selected visit time';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.replan_flag IS 'Auxiliary: whether the schedule is duplicated (2)';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.status2 IS 'Status2';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.ap_week IS 'AP-WEEK';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.year_text IS 'Year (text)';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.year_no IS 'Year';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.quater_no IS 'Quarter';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.assigned_to_text IS 'Assigned to (text)';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.time_no IS 'Time (numeric)';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.creator IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.create_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.updater IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.update_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.flow_state IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.delete_time IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.deleter IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.app_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.entry_id IS '';
COMMENT ON COLUMN p12_sfull.S98_S_jdy_daily_visit.etl_tx_dt IS '';
COMMENT ON TABLE p12_sfull.S98_S_jdy_daily_visit IS '';

View File

@@ -0,0 +1,234 @@
import requests
import json
from typing import List, Dict, Any, Optional
import datetime as dt
import psycopg2
import uuid


class JiandaoyunClient:
    """
    Jiandaoyun API client with pagination support.
    """

    def __init__(self, base_url: str, app_id: str, auth_token: str):
        """
        Initialize the Jiandaoyun client.

        Args:
            base_url: base API URL
            app_id: application ID
            auth_token: authorization token
        """
        self.base_url = base_url
        self.app_id = app_id
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {auth_token}'
        }

    def fetch_all_data(self, entry_id: str, limit: int = 100,
                       filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch all records, handling pagination automatically.

        Args:
            entry_id: form ID
            limit: records per page (maximum 100)
            filter_condition: optional filter condition

        Returns:
            A list of all data records.
        """
        all_data = []
        last_data_id = None
        while True:
            # Fetch one page of data
            page_data = self._fetch_page(entry_id, limit, last_data_id, filter_condition)
            # Append it to the result set
            all_data.extend(page_data)
            # Fewer records than the limit means the end was reached
            if len(page_data) < limit:
                break
            # Set data_id as the cursor for the next iteration
            if page_data:
                last_data_id = page_data[-1].get('_id')
            else:
                break
        return all_data

    def _fetch_page(self, entry_id: str, limit: int,
                    last_data_id: Optional[str] = None,
                    filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch a single page of data.

        Args:
            entry_id: form ID
            limit: number of records to fetch
            last_data_id: ID of the last record on the previous page
            filter_condition: optional filter condition

        Returns:
            A list of records for this page.
        """
        url = self.base_url
        payload = {
            "app_id": self.app_id,
            "entry_id": entry_id,
            "limit": limit
        }
        # Add data_id when a pagination cursor was provided
        if last_data_id:
            payload["data_id"] = last_data_id
        # Add filter when a filter condition was provided
        if filter_condition:
            payload["filter"] = filter_condition
        response = requests.post(
            url,
            headers=self.headers,
            data=json.dumps(payload)
        )
        # Raise an exception on error status codes
        response.raise_for_status()
        result = response.json()
        return result.get("data", [])


PG_DSN = dict(database="dataops_db",
              user="dbuser_dba",
              password="EmBRxnmmjnE3",
              host="124.221.232.219",
              port="5432")


def save_json_to_pg(data: list, api_id: str) -> None:
    """Persist the list: soft-delete history first, then insert the new batch."""
    print('[save_to_pg] writing to PG...')
    sql = """
        UPDATE data_api.api_data
           SET is_loaded = '1'
         WHERE api_id = %s;
        INSERT INTO data_api.api_data
            (id, api_id, data, total_num, is_loaded, status,
             request_tm, execute_tm, remark)
        VALUES (%s, %s, %s, %s, '0', '0',
                current_timestamp(0), current_timestamp(0), '');
    """
    try:
        with psycopg2.connect(**PG_DSN) as conn:
            with conn.cursor() as cur:
                cur.execute(sql,
                            (api_id,
                             str(uuid.uuid4()),
                             api_id,
                             json.dumps(data, ensure_ascii=False),
                             len(data)))
            conn.commit()
        print('[save_to_pg] write complete')
    except psycopg2.Error as e:
        print(f'[save_to_pg] database error: {e}')
        raise
    except Exception as e:
        print(f'[save_to_pg] unexpected error: {e}')
        raise
    finally:
        # psycopg2's connection context manager commits/rolls back but does
        # not close the connection, so close it explicitly.
        if 'conn' in locals():
            conn.close()


def formatted2_previous_hour(h):
    """
    Return the start of the hour h hours ago.

    :param h: number of hours back; 0 returns the current time
    :return: a "%Y-%m-%d %H:%M:%S" timestamp string
    """
    if h == 0:
        return dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    start_of_current_hour = dt.datetime.now().replace(minute=0, second=0, microsecond=0)
    # Subtract h hours to get the start of the earlier hour
    start_of_previous_hour = start_of_current_hour - dt.timedelta(hours=h)
    return start_of_previous_hour.strftime("%Y-%m-%d %H:%M:%S")


def get_previous_date(days: int = 0) -> str:
    """
    Return the date a given number of days ago.

    Args:
        days: 0 for today; positive values mean earlier dates, negative later ones

    Returns:
        A date string in YYYY-MM-DD format.
    """
    target_date = dt.datetime.now() - dt.timedelta(days=days)
    return target_date.strftime("%Y-%m-%d")


# Usage example
def main():
    # Configuration
    BASE_URL = "https://api.jiandaoyun.com/api/v5/app/entry/data/list"
    AUTH_TOKEN = "tPh9Fm9qumixcfDDvk42TUsc3Y0OoarD"
    API_ID = "b27090a7db5d49fea811d68bdef4cbf0"
    params = {'entry_id': '674d123f70d2a0f1b1387f47', 'app_id': '673dc4d1d771318d06974add'}
    APP_ID = params['app_id']
    ENTRY_ID = params['entry_id']
    start_dt = get_previous_date(1)
    end_dt = get_previous_date(0)
    # Create the client
    client = JiandaoyunClient(BASE_URL, APP_ID, AUTH_TOKEN)
    # Define the filter condition
    filter_condition = {
        "rel": "and",
        "cond": [
            {
                "field": "updateTime",
                "type": "datetime",
                "method": "range",
                "value": [
                    start_dt,
                    end_dt
                ]
            }
        ]
    }
    try:
        # Fetch all data (pagination handled automatically)
        all_data = client.fetch_all_data(
            entry_id=ENTRY_ID,
            limit=100,  # adjust as needed; maximum 100
            filter_condition=filter_condition
        )
        print(f"total records fetched: {len(all_data)}")
        save_json_to_pg(all_data, API_ID)
        # # Process the data here if needed
        # for record in all_data:
        #     print(f"record ID: {record.get('_id')}")
    except requests.exceptions.RequestException as e:
        print(f"error while fetching data: {e}")


if __name__ == "__main__":
    main()
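save_json_to_pg above writes into data_api.api_data, whose DDL is not included in this commit. A compatible definition inferred from the INSERT statement (an assumption; the real types and constraints may differ):

CREATE TABLE IF NOT EXISTS data_api.api_data (
    id         TEXT PRIMARY KEY,  -- uuid generated by the fetcher
    api_id     TEXT,              -- identifies the source API/form
    data       TEXT,              -- JSON batch stored as text, cast with ::json downstream
    total_num  INTEGER,           -- record count of the batch
    is_loaded  TEXT,              -- '0' = pending, '1' = consumed or superseded
    status     TEXT,              -- '0' = new, '1' = loaded by the SQL job
    request_tm TIMESTAMP,
    execute_tm TIMESTAMP,
    remark     TEXT
);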

View File

@@ -0,0 +1,123 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
DELETE FROM data_api.jdy_account;
insert into data_api.jdy_account (
oracle_number
, import_id
, account_id
, account_nick_name
, account_name
, search_result
, city
, code
, legal_name
, depart
, channel
, status
, address
, post_address
, sale_id
, parent_company
, industry
, sub_industry
, custom_class
, custom_type
, create_by
, create_tm
, addition_tm
, wx_open_id
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
,etl_tx_dt
)
select
case when trim(both from oracle_number)='' then null else oracle_number::text end oracle_number
, case when trim(both from import_id)='' then null else import_id::text end import_id
, case when trim(both from account_id)='' then null else account_id::text end account_id
, case when trim(both from account_nick_name)='' then null else account_nick_name::text end account_nick_name
, case when trim(both from account_name)='' then null else account_name::text end account_name
, case when trim(both from search_result)='' then null else search_result::text end search_result
, case when trim(both from city)='' then null else city::text end city
, case when trim(both from code)='' then null else code::text end code
, case when trim(both from legal_name)='' then null else legal_name::text end legal_name
, case when trim(both from depart)='' then null else depart::text end depart
, case when trim(both from channel)='' then null else channel::text end channel
, case when trim(both from status)='' then null else status::text end status
, case when trim(both from address)='' then null else address::text end address
, case when trim(both from post_address)='' then null else post_address::text end post_address
, case when trim(both from sale_id)='' then null else sale_id::text end sale_id
, case when trim(both from parent_company)='' then null else parent_company::text end parent_company
, case when trim(both from industry)='' then null else industry::text end industry
, case when trim(both from sub_industry)='' then null else sub_industry::text end sub_industry
, case when trim(both from custom_class)='' then null else custom_class::text end custom_class
, case when trim(both from custom_type)='' then null else custom_type::text end custom_type
, case when trim(both from create_by)='' then null else create_by::text end create_by
, case when trim(both from create_tm)='' then null else create_tm::text end create_tm
, case when trim(both from addition_tm)='' then null else addition_tm::text end addition_tm
, case when trim(both from wx_open_id)='' then null else wx_open_id::text end wx_open_id
, case when trim(both from creator)='' then null else creator::text end creator
, case when trim(both from create_time)='' then null else create_time::text end create_time
, case when trim(both from updater)='' then null else updater::text end updater
, case when trim(both from update_time)='' then null else update_time::text end update_time
, case when trim(both from delete_time)='' then null else delete_time::text end delete_time
, case when trim(both from deleter)='' then null else deleter::text end deleter
, case when trim(both from id)='' then null else id::text end id
, case when trim(both from app_id)='' then null else app_id::text end app_id
, case when trim(both from entry_id)='' then null else entry_id::text end entry_id
,etl_tx_dt
from (
select
(json_array_elements(data::json)::json->>'_widget_1735105058034') oracle_number
, (json_array_elements(data::json)::json->>'_widget_1734159435310') import_id
, (json_array_elements(data::json)::json->>'_widget_1733470698210') account_id
, (json_array_elements(data::json)::json->>'_widget_1733107466966') account_nick_name
, (json_array_elements(data::json)::json->>'_widget_1733106413573') account_name
, (json_array_elements(data::json)::json->>'_widget_1735033447237') search_result
, (json_array_elements(data::json)::json->>'_widget_1733106413579') city
, (json_array_elements(data::json)::json->>'_widget_1733106413582') code
, (json_array_elements(data::json)::json->>'_widget_1733106413576') legal_name
, (json_array_elements(data::json)::json->>'_widget_1733107466967') depart
, (json_array_elements(data::json)::json->>'_widget_1733470698211') channel
, (json_array_elements(data::json)::json->>'_widget_1733106413580') status
, (json_array_elements(data::json)::json->>'_widget_1735033312674') address
, (json_array_elements(data::json)::json->>'_widget_1733106413578') post_address
, (json_array_elements(data::json)::json->>'_widget_1733106413586') sale_id
, (json_array_elements(data::json)::json->>'_widget_1733474922088') parent_company
, (json_array_elements(data::json)::json->>'_widget_1733106413584') industry
, (json_array_elements(data::json)::json->>'_widget_1733106413585') sub_industry
, (json_array_elements(data::json)::json->>'_widget_1733106413591') custom_class
, (json_array_elements(data::json)::json->>'_widget_1733106413583') custom_type
, (json_array_elements(data::json)::json->>'_widget_1733106413589') create_by
, (json_array_elements(data::json)::json->>'_widget_1733106413590') create_tm
, (json_array_elements(data::json)::json->>'_widget_1743145837335') addition_tm
, (json_array_elements(data::json)::json->>'wx_open_id') wx_open_id
, (json_array_elements(data::json)::json->>'creator') creator
, (json_array_elements(data::json)::json->>'createTime') create_time
, (json_array_elements(data::json)::json->>'updater') updater
, (json_array_elements(data::json)::json->>'updateTime') update_time
, (json_array_elements(data::json)::json->>'deleteTime') delete_time
, (json_array_elements(data::json)::json->>'deleter') deleter
, (json_array_elements(data::json)::json->>'_id') id
, (json_array_elements(data::json)::json->>'appId') app_id
, (json_array_elements(data::json)::json->>'entryId') entry_id
,CURRENT_TIMESTAMP(0) etl_tx_dt
from (select * from data_api.api_data
WHERE api_id='b27090a7db5d49fea811d68bdef4cbf0' and is_loaded = '0' order by request_tm desc limit 1) p )p;
update data_api.api_data
set is_loaded = '1' ,
status = '1',
request_tm = current_timestamp(0)
where api_id='b27090a7db5d49fea811d68bdef4cbf0';
\q
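The script above flattens each stored JSON batch with json_array_elements, one row per array element, then extracts fields as text with the ->> operator. A self-contained toy query showing the same pattern:

-- Each array element becomes one row; ->> pulls a field out as text.
SELECT elem->>'_id'        AS id,
       elem->>'updateTime' AS update_time
FROM json_array_elements(
  '[{"_id":"a1","updateTime":"2025-09-21"},
    {"_id":"a2","updateTime":"2025-09-22"}]'::json) AS elem;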

View File

@@ -0,0 +1,234 @@
import requests
import json
from typing import List, Dict, Any, Optional
import datetime as dt
import psycopg2
import uuid


class JiandaoyunClient:
    """
    Jiandaoyun API client with pagination support.
    """

    def __init__(self, base_url: str, app_id: str, auth_token: str):
        """
        Initialize the Jiandaoyun client.

        Args:
            base_url: base API URL
            app_id: application ID
            auth_token: authorization token
        """
        self.base_url = base_url
        self.app_id = app_id
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {auth_token}'
        }

    def fetch_all_data(self, entry_id: str, limit: int = 100,
                       filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch all records, handling pagination automatically.

        Args:
            entry_id: form ID
            limit: records per page (maximum 100)
            filter_condition: optional filter condition

        Returns:
            A list of all data records.
        """
        all_data = []
        last_data_id = None
        while True:
            # Fetch one page of data
            page_data = self._fetch_page(entry_id, limit, last_data_id, filter_condition)
            # Append it to the result set
            all_data.extend(page_data)
            # Fewer records than the limit means the end was reached
            if len(page_data) < limit:
                break
            # Set data_id as the cursor for the next iteration
            if page_data:
                last_data_id = page_data[-1].get('_id')
            else:
                break
        return all_data

    def _fetch_page(self, entry_id: str, limit: int,
                    last_data_id: Optional[str] = None,
                    filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch a single page of data.

        Args:
            entry_id: form ID
            limit: number of records to fetch
            last_data_id: ID of the last record on the previous page
            filter_condition: optional filter condition

        Returns:
            A list of records for this page.
        """
        url = self.base_url
        payload = {
            "app_id": self.app_id,
            "entry_id": entry_id,
            "limit": limit
        }
        # Add data_id when a pagination cursor was provided
        if last_data_id:
            payload["data_id"] = last_data_id
        # Add filter when a filter condition was provided
        if filter_condition:
            payload["filter"] = filter_condition
        response = requests.post(
            url,
            headers=self.headers,
            data=json.dumps(payload)
        )
        # Raise an exception on error status codes
        response.raise_for_status()
        result = response.json()
        return result.get("data", [])


PG_DSN = dict(database="dataops_db",
              user="dbuser_dba",
              password="EmBRxnmmjnE3",
              host="124.221.232.219",
              port="5432")


def save_json_to_pg(data: list, api_id: str) -> None:
    """Persist the list: soft-delete history first, then insert the new batch."""
    print('[save_to_pg] writing to PG...')
    sql = """
        UPDATE data_api.api_data
           SET is_loaded = '1'
         WHERE api_id = %s;
        INSERT INTO data_api.api_data
            (id, api_id, data, total_num, is_loaded, status,
             request_tm, execute_tm, remark)
        VALUES (%s, %s, %s, %s, '0', '0',
                current_timestamp(0), current_timestamp(0), '');
    """
    try:
        with psycopg2.connect(**PG_DSN) as conn:
            with conn.cursor() as cur:
                cur.execute(sql,
                            (api_id,
                             str(uuid.uuid4()),
                             api_id,
                             json.dumps(data, ensure_ascii=False),
                             len(data)))
            conn.commit()
        print('[save_to_pg] write complete')
    except psycopg2.Error as e:
        print(f'[save_to_pg] database error: {e}')
        raise
    except Exception as e:
        print(f'[save_to_pg] unexpected error: {e}')
        raise
    finally:
        # psycopg2's connection context manager commits/rolls back but does
        # not close the connection, so close it explicitly.
        if 'conn' in locals():
            conn.close()


def formatted2_previous_hour(h):
    """
    Return the start of the hour h hours ago.

    :param h: number of hours back; 0 returns the current time
    :return: a "%Y-%m-%d %H:%M:%S" timestamp string
    """
    if h == 0:
        return dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    start_of_current_hour = dt.datetime.now().replace(minute=0, second=0, microsecond=0)
    # Subtract h hours to get the start of the earlier hour
    start_of_previous_hour = start_of_current_hour - dt.timedelta(hours=h)
    return start_of_previous_hour.strftime("%Y-%m-%d %H:%M:%S")


def get_previous_date(days: int = 0) -> str:
    """
    Return the date a given number of days ago.

    Args:
        days: 0 for today; positive values mean earlier dates, negative later ones

    Returns:
        A date string in YYYY-MM-DD format.
    """
    target_date = dt.datetime.now() - dt.timedelta(days=days)
    return target_date.strftime("%Y-%m-%d")


# Usage example
def main():
    # Configuration
    BASE_URL = "https://api.jiandaoyun.com/api/v5/app/entry/data/list"
    AUTH_TOKEN = "tPh9Fm9qumixcfDDvk42TUsc3Y0OoarD"
    API_ID = "c72487e8-b309-47a8-aa6a-1a15e794"
    params = {'app_id': '673dc4d1d771318d06974add', 'entry_id': '675689842895c3f137c99a21'}
    APP_ID = params['app_id']
    ENTRY_ID = params['entry_id']
    start_dt = get_previous_date(1)
    end_dt = get_previous_date(0)
    # Create the client
    client = JiandaoyunClient(BASE_URL, APP_ID, AUTH_TOKEN)
    # Define the filter condition
    filter_condition = {
        "rel": "and",
        "cond": [
            {
                "field": "updateTime",
                "type": "datetime",
                "method": "range",
                "value": [
                    start_dt,
                    end_dt
                ]
            }
        ]
    }
    try:
        # Fetch all data (pagination handled automatically)
        all_data = client.fetch_all_data(
            entry_id=ENTRY_ID,
            limit=100,  # adjust as needed; maximum 100
            filter_condition=filter_condition
        )
        print(f"total records fetched: {len(all_data)}")
        save_json_to_pg(all_data, API_ID)
        # # Process the data here if needed
        # for record in all_data:
        #     print(f"record ID: {record.get('_id')}")
    except requests.exceptions.RequestException as e:
        print(f"error while fetching data: {e}")


if __name__ == "__main__":
    main()

View File

@ -0,0 +1,90 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
DELETE FROM data_api.jdy_contact;
insert into data_api.jdy_contact (
id
, account_id
, account_name
, legal_name
, abbreviation
, city_name
, contact_id
, contact_name
, phone
, wechat_id
, email
, salers
, title
, phone2
, app_id
, create_time
, creator
, delete_time
, deleter
, entry_id
, update_time
, updater
,etl_tx_dt
)
select
case when trim(both from id)='' then null else id::text end id
, case when trim(both from account_id)='' then null else account_id::text end account_id
, case when trim(both from account_name)='' then null else account_name::text end account_name
, case when trim(both from legal_name)='' then null else legal_name::text end legal_name
, case when trim(both from abbreviation)='' then null else abbreviation::text end abbreviation
, case when trim(both from city_name)='' then null else city_name::text end city_name
, case when trim(both from contact_id)='' then null else contact_id::text end contact_id
, case when trim(both from contact_name)='' then null else contact_name::text end contact_name
, case when trim(both from phone)='' then null else phone::text end phone
, case when trim(both from wechat_id)='' then null else wechat_id::text end wechat_id
, case when trim(both from email)='' then null else email::text end email
, case when trim(both from salers)='' then null else salers::text end salers
, case when trim(both from title)='' then null else title::text end title
, case when trim(both from phone2)='' then null else phone2::text end phone2
, case when trim(both from app_id)='' then null else app_id::text end app_id
, case when trim(both from create_time)='' then null else create_time::text end create_time
, case when trim(both from creator)='' then null else creator::text end creator
, case when trim(both from delete_time)='' then null else delete_time::text end delete_time
, case when trim(both from deleter)='' then null else deleter::text end deleter
, case when trim(both from entry_id)='' then null else entry_id::text end entry_id
, case when trim(both from update_time)='' then null else update_time::text end update_time
, case when trim(both from updater)='' then null else updater::text end updater
,etl_tx_dt
from (
select
(json_array_elements(data::json)::json->>'_id') id
, (json_array_elements(data::json)::json->>'_widget_1733473057517') account_id
, (json_array_elements(data::json)::json->>'_widget_1733473057518') account_name
, (json_array_elements(data::json)::json->>'_widget_1733473057519') legal_name
, (json_array_elements(data::json)::json->>'_widget_1733473057520') abbreviation
, (json_array_elements(data::json)::json->>'_widget_1733473057521') city_name
, (json_array_elements(data::json)::json->>'_widget_1733724552032') contact_id
, (json_array_elements(data::json)::json->>'_widget_1733724552033') contact_name
, (json_array_elements(data::json)::json->>'_widget_1733724552034') phone
, (json_array_elements(data::json)::json->>'_widget_1733724552035') wechat_id
, (json_array_elements(data::json)::json->>'_widget_1733724552036') email
, (json_array_elements(data::json)::json->>'_widget_1733902521036') salers
, (json_array_elements(data::json)::json->>'_widget_1735969558260') title
, (json_array_elements(data::json)::json->>'_widget_1736845577224') phone2
, (json_array_elements(data::json)::json->>'appId') app_id
, (json_array_elements(data::json)::json->>'createTime') create_time
, (json_array_elements(data::json)::json->>'creator') creator
, (json_array_elements(data::json)::json->>'deleteTime') delete_time
, (json_array_elements(data::json)::json->>'deleter') deleter
, (json_array_elements(data::json)::json->>'entryId') entry_id
, (json_array_elements(data::json)::json->>'updateTime') update_time
, (json_array_elements(data::json)::json->>'updater') updater
,CURRENT_TIMESTAMP(0) etl_tx_dt
from (select * from data_api.api_data
WHERE api_id='c72487e8-b309-47a8-aa6a-1a15e794' and is_loaded = '0' order by request_tm desc limit 1) p )p;
update data_api.api_data
set is_loaded = '1' ,
status = '1',
request_tm = current_timestamp(0)
where api_id='c72487e8-b309-47a8-aa6a-1a15e794';
\q
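Taken together, the fetchers and loaders use is_loaded/status as a small handshake: the Python side inserts each batch with both flags set to '0', and the SQL loader consumes the newest such batch and flips both to '1'. A monitoring query along these lines (a sketch, not shipped in this commit) would surface batches that were fetched but never loaded:

SELECT api_id, total_num, is_loaded, status, request_tm
FROM data_api.api_data
WHERE is_loaded = '0'
ORDER BY request_tm DESC;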

View File

@ -0,0 +1,234 @@
import requests
import json
from typing import List, Dict, Any, Optional
import datetime as dt
import psycopg2
import uuid
class JiandaoyunClient:
    """
    Jiandaoyun API client with cursor-based pagination support.
    """
    def __init__(self, base_url: str, app_id: str, auth_token: str):
        """
        Initialize the Jiandaoyun client.

        Args:
            base_url: base API URL
            app_id: application ID
            auth_token: authorization token
        """
        self.base_url = base_url
        self.app_id = app_id
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {auth_token}'
        }

    def fetch_all_data(self, entry_id: str, limit: int = 100,
                       filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch all records, handling pagination automatically.

        Args:
            entry_id: form ID
            limit: records per page (the API maximum is 100)
            filter_condition: optional filter condition

        Returns:
            A list of all data records.
        """
        all_data = []
        last_data_id = None
        while True:
            # Fetch one page of records
            page_data = self._fetch_page(entry_id, limit, last_data_id, filter_condition)
            # Append the page to the result set
            all_data.extend(page_data)
            # Fewer records than the limit means the end has been reached
            if len(page_data) < limit:
                break
            # Set the data_id cursor for the next iteration
            if page_data:
                last_data_id = page_data[-1].get('_id')
            else:
                break
        return all_data

    def _fetch_page(self, entry_id: str, limit: int,
                    last_data_id: Optional[str] = None,
                    filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch a single page of records.

        Args:
            entry_id: form ID
            limit: number of records to fetch
            last_data_id: ID of the last record on the previous page
            filter_condition: optional filter condition

        Returns:
            A list of the records on this page.
        """
        url = self.base_url
        payload = {
            "app_id": self.app_id,
            "entry_id": entry_id,
            "limit": limit
        }
        # Add data_id when a pagination cursor is provided
        if last_data_id:
            payload["data_id"] = last_data_id
        # Add filter when a filter condition is provided
        if filter_condition:
            payload["filter"] = filter_condition
        response = requests.post(
            url,
            headers=self.headers,
            data=json.dumps(payload)
        )
        # Raise an exception on error status codes
        response.raise_for_status()
        result = response.json()
        return result.get("data", [])
PG_DSN = dict(database="dataops_db",
user ="dbuser_dba",
password="EmBRxnmmjnE3",
host ="124.221.232.219",
port ="5432")
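# The credentials above are hardcoded as committed; a safer variant (a sketch,
# assuming PG_DATABASE/PG_USER/PG_PASSWORD/PG_HOST/PG_PORT are exported) would be:
#   import os
#   PG_DSN = dict(database=os.environ["PG_DATABASE"], user=os.environ["PG_USER"],
#                 password=os.environ["PG_PASSWORD"], host=os.environ["PG_HOST"],
#                 port=os.environ.get("PG_PORT", "5432"))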
def save_json_to_pg(data: list, api_id: str) -> None:
    """Persist the list: soft-delete the previous batch, then insert the new one."""
    print('[save_to_pg] writing to PG...')
    sql = """
        UPDATE data_api.api_data
        SET is_loaded = '1'
        WHERE api_id = %s;
        INSERT INTO data_api.api_data
            (id, api_id, data, total_num, is_loaded, status,
             request_tm, execute_tm, remark)
        VALUES (%s, %s, %s, %s, '0', '0',
                current_timestamp(0), current_timestamp(0), '');
    """
    try:
        with psycopg2.connect(**PG_DSN) as conn:
            with conn.cursor() as cur:
                # Both statements run in one transaction, so the soft delete
                # and the insert commit or roll back together.
                cur.execute(sql,
                            (api_id,
                             str(uuid.uuid4()),
                             api_id,
                             json.dumps(data, ensure_ascii=False),
                             len(data)))
            conn.commit()
        print('[save_to_pg] write complete')
    except psycopg2.Error as e:
        print(f'[save_to_pg] database error: {e}')
        raise
    except Exception as e:
        print(f'[save_to_pg] unexpected error: {e}')
        raise
    finally:
        # The connection context manager commits/rolls back but does not
        # close the connection, so close it explicitly.
        if 'conn' in locals():
            conn.close()
"""
获取指定时间段前的时间
:param h: 时间段
:return: 时间
"""
def formatted2_previous_hour(h):
if h==0:
return dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
start_of_current_hour = dt.datetime.now().replace(minute=0, second=0, microsecond=0)
# 减去一个小时,得到前一个小时的开始时间
start_of_previous_hour = start_of_current_hour - dt.timedelta(hours=h)
return start_of_previous_hour.strftime("%Y-%m-%d %H:%M:%S")
"""
获取指定天数前的日期
:param days: 天数0表示今天1表示昨天以此类推
:return: YYYY-MM-DD格式的日期字符串
"""
def get_previous_date(days: int = 0) -> str:
"""
获取指定天数前的日期
参数:
days: 天数0表示今天正数表示之前的日期负数表示之后的日期
返回:
YYYY-MM-DD格式的日期字符串
"""
target_date = dt.datetime.now() - dt.timedelta(days=days)
return target_date.strftime("%Y-%m-%d")
# Usage example
def main():
    # Configuration
    BASE_URL = "https://api.jiandaoyun.com/api/v5/app/entry/data/list"
    AUTH_TOKEN = "tPh9Fm9qumixcfDDvk42TUsc3Y0OoarD"
    API_ID = "1dbb92ea95d541508fb91b806a640fab"
    params = {'entry_id': '6752adf26c804e7925bf53a9', 'app_id': '671062779c3695dfa9c0f892'}
    APP_ID = params['app_id']
    ENTRY_ID = params['entry_id']
    start_dt = get_previous_date(1)
    end_dt = get_previous_date(0)
    # Create the client
    client = JiandaoyunClient(BASE_URL, APP_ID, AUTH_TOKEN)
    # Define the filter condition: records updated between yesterday and today
    filter_condition = {
        "rel": "and",
        "cond": [
            {
                "field": "updateTime",
                "type": "datetime",
                "method": "range",
                "value": [
                    start_dt,
                    end_dt
                ]
            }
        ]
    }
    try:
        # Fetch all records (pagination handled automatically)
        all_data = client.fetch_all_data(
            entry_id=ENTRY_ID,
            limit=100,  # adjustable as needed; the API maximum is 100
            filter_condition=filter_condition
        )
        print(f"Total records fetched: {len(all_data)}")
        save_json_to_pg(all_data, API_ID)
        # # Process your data here
        # for record in all_data:
        #     print(f"Record ID: {record.get('_id')}")
    except requests.exceptions.RequestException as e:
        print(f"Error fetching data: {e}")

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,60 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
DELETE FROM data_api.jdy_ae_plan;
insert into data_api.jdy_ae_plan (
ae
, task_flowup_time
, status
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
,etl_tx_dt
)
select
case when trim(both from ae)='' then null else ae::text end ae
, case when trim(both from task_flowup_time)='' then null else task_flowup_time::text end task_flowup_time
, case when trim(both from status)='' then null else status::text end status
, case when trim(both from creator)='' then null else creator::text end creator
, case when trim(both from create_time)='' then null else create_time::text end create_time
, case when trim(both from updater)='' then null else updater::text end updater
, case when trim(both from update_time)='' then null else update_time::text end update_time
, case when trim(both from delete_time)='' then null else delete_time::text end delete_time
, case when trim(both from deleter)='' then null else deleter::text end deleter
, case when trim(both from id)='' then null else id::text end id
, case when trim(both from app_id)='' then null else app_id::text end app_id
, case when trim(both from entry_id)='' then null else entry_id::text end entry_id
,etl_tx_dt
from (
select
(json_array_elements(data::json)::json->>'_widget_1733111644417') ae
, (json_array_elements(data::json)::json->>'_widget_1733111644418') task_flowup_time
, (json_array_elements(data::json)::json->>'_widget_1733111644421') status
, (json_array_elements(data::json)::json->>'creator') creator
, (json_array_elements(data::json)::json->>'createTime') create_time
, (json_array_elements(data::json)::json->>'updater') updater
, (json_array_elements(data::json)::json->>'updateTime') update_time
, (json_array_elements(data::json)::json->>'deleteTime') delete_time
, (json_array_elements(data::json)::json->>'deleter') deleter
, (json_array_elements(data::json)::json->>'_id') id
, (json_array_elements(data::json)::json->>'appId') app_id
, (json_array_elements(data::json)::json->>'entryId') entry_id
,CURRENT_TIMESTAMP(0) etl_tx_dt
from (select * from data_api.api_data
WHERE api_id='1dbb92ea95d541508fb91b806a640fab' and is_loaded = '0' order by request_tm desc limit 1) p )p;
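-- The trim/case pattern in the outer select normalizes empty strings to NULL.
-- A more compact form (a sketch, not the original) would be
-- nullif(trim(both from ae), ''), though note that it would also strip
-- surrounding whitespace from kept values, which the case form does not.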
update data_api.api_data
set is_loaded = '1' ,
status = '1',
request_tm = current_timestamp(0)
where api_id='1dbb92ea95d541508fb91b806a640fab';
\q

View File

@ -0,0 +1,234 @@
import requests
import json
from typing import List, Dict, Any, Optional
import datetime as dt
import psycopg2
import uuid
class JiandaoyunClient:
    """
    Jiandaoyun API client with cursor-based pagination support.
    """
    def __init__(self, base_url: str, app_id: str, auth_token: str):
        """
        Initialize the Jiandaoyun client.

        Args:
            base_url: base API URL
            app_id: application ID
            auth_token: authorization token
        """
        self.base_url = base_url
        self.app_id = app_id
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {auth_token}'
        }

    def fetch_all_data(self, entry_id: str, limit: int = 100,
                       filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch all records, handling pagination automatically.

        Args:
            entry_id: form ID
            limit: records per page (the API maximum is 100)
            filter_condition: optional filter condition

        Returns:
            A list of all data records.
        """
        all_data = []
        last_data_id = None
        while True:
            # Fetch one page of records
            page_data = self._fetch_page(entry_id, limit, last_data_id, filter_condition)
            # Append the page to the result set
            all_data.extend(page_data)
            # Fewer records than the limit means the end has been reached
            if len(page_data) < limit:
                break
            # Set the data_id cursor for the next iteration
            if page_data:
                last_data_id = page_data[-1].get('_id')
            else:
                break
        return all_data

    def _fetch_page(self, entry_id: str, limit: int,
                    last_data_id: Optional[str] = None,
                    filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch a single page of records.

        Args:
            entry_id: form ID
            limit: number of records to fetch
            last_data_id: ID of the last record on the previous page
            filter_condition: optional filter condition

        Returns:
            A list of the records on this page.
        """
        url = self.base_url
        payload = {
            "app_id": self.app_id,
            "entry_id": entry_id,
            "limit": limit
        }
        # Add data_id when a pagination cursor is provided
        if last_data_id:
            payload["data_id"] = last_data_id
        # Add filter when a filter condition is provided
        if filter_condition:
            payload["filter"] = filter_condition
        response = requests.post(
            url,
            headers=self.headers,
            data=json.dumps(payload)
        )
        # Raise an exception on error status codes
        response.raise_for_status()
        result = response.json()
        return result.get("data", [])
PG_DSN = dict(database="dataops_db",
user ="dbuser_dba",
password="EmBRxnmmjnE3",
host ="124.221.232.219",
port ="5432")
def save_json_to_pg(data: list, api_id: str) -> None:
    """Persist the list: soft-delete the previous batch, then insert the new one."""
    print('[save_to_pg] writing to PG...')
    sql = """
        UPDATE data_api.api_data
        SET is_loaded = '1'
        WHERE api_id = %s;
        INSERT INTO data_api.api_data
            (id, api_id, data, total_num, is_loaded, status,
             request_tm, execute_tm, remark)
        VALUES (%s, %s, %s, %s, '0', '0',
                current_timestamp(0), current_timestamp(0), '');
    """
    try:
        with psycopg2.connect(**PG_DSN) as conn:
            with conn.cursor() as cur:
                # Both statements run in one transaction, so the soft delete
                # and the insert commit or roll back together.
                cur.execute(sql,
                            (api_id,
                             str(uuid.uuid4()),
                             api_id,
                             json.dumps(data, ensure_ascii=False),
                             len(data)))
            conn.commit()
        print('[save_to_pg] write complete')
    except psycopg2.Error as e:
        print(f'[save_to_pg] database error: {e}')
        raise
    except Exception as e:
        print(f'[save_to_pg] unexpected error: {e}')
        raise
    finally:
        # The connection context manager commits/rolls back but does not
        # close the connection, so close it explicitly.
        if 'conn' in locals():
            conn.close()
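# Note: each run stores the whole fetched batch as a single JSON array in
# data_api.api_data.data; the downstream psql load explodes it row by row
# with json_array_elements and then flips is_loaded.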
"""
获取指定时间段前的时间
:param h: 时间段
:return: 时间
"""
def formatted2_previous_hour(h):
if h==0:
return dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
start_of_current_hour = dt.datetime.now().replace(minute=0, second=0, microsecond=0)
# 减去一个小时,得到前一个小时的开始时间
start_of_previous_hour = start_of_current_hour - dt.timedelta(hours=h)
return start_of_previous_hour.strftime("%Y-%m-%d %H:%M:%S")
"""
获取指定天数前的日期
:param days: 天数0表示今天1表示昨天以此类推
:return: YYYY-MM-DD格式的日期字符串
"""
def get_previous_date(days: int = 0) -> str:
"""
获取指定天数前的日期
参数:
days: 天数0表示今天正数表示之前的日期负数表示之后的日期
返回:
YYYY-MM-DD格式的日期字符串
"""
target_date = dt.datetime.now() - dt.timedelta(days=days)
return target_date.strftime("%Y-%m-%d")
# Usage example
def main():
    # Configuration
    BASE_URL = "https://api.jiandaoyun.com/api/v5/app/entry/data/list"
    AUTH_TOKEN = "tPh9Fm9qumixcfDDvk42TUsc3Y0OoarD"
    API_ID = "251f0714de2d4240a14404bac29f7b31"
    params = {'entry_id': '6752adebf850c56051660641', 'app_id': '671062779c3695dfa9c0f892'}
    APP_ID = params['app_id']
    ENTRY_ID = params['entry_id']
    start_dt = get_previous_date(1)
    end_dt = get_previous_date(0)
    # Create the client
    client = JiandaoyunClient(BASE_URL, APP_ID, AUTH_TOKEN)
    # Define the filter condition: records updated between yesterday and today
    filter_condition = {
        "rel": "and",
        "cond": [
            {
                "field": "updateTime",
                "type": "datetime",
                "method": "range",
                "value": [
                    start_dt,
                    end_dt
                ]
            }
        ]
    }
    try:
        # Fetch all records (pagination handled automatically)
        all_data = client.fetch_all_data(
            entry_id=ENTRY_ID,
            limit=100,  # adjustable as needed; the API maximum is 100
            filter_condition=filter_condition
        )
        print(f"Total records fetched: {len(all_data)}")
        save_json_to_pg(all_data, API_ID)
        # # Process your data here
        # for record in all_data:
        #     print(f"Record ID: {record.get('_id')}")
    except requests.exceptions.RequestException as e:
        print(f"Error fetching data: {e}")

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,183 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
DELETE FROM data_api.jdy_daily_visit;
insert into data_api.jdy_daily_visit (
account
, assign_to
, task_name
, visit_time
, task_follow_up_time
, irtnc_state
, segments
, application
, intelligence_source
, funnels_created
, status
, order_ae_flag
, ae_confict_flag
, account2
, city
, company_name_en
, nick_name
, company_id
, salar
, contact
, phone
, wechat_id
, email
, new_flag
, account_type
, products
, product_family1
, product_family2
, product_family3
, attachments
, remarks
, contact_phone_flag
, repeat_flag
, am
, visit_tm
, replan_flag
, status2
, ap_week
, year_text
, year_no
, quater_no
, assigned_to_text
, time_no
, creator
, create_time
, updater
, update_time
, flow_state
, delete_time
, deleter
, id
, app_id
, entry_id
,etl_tx_dt
)
select
case when trim(both from account)='' then null else account::text end account
, case when trim(both from assign_to)='' then null else assign_to::text end assign_to
, case when trim(both from task_name)='' then null else task_name::text end task_name
, case when trim(both from visit_time)='' then null else visit_time::text end visit_time
, case when trim(both from task_follow_up_time)='' then null else task_follow_up_time::text end task_follow_up_time
, case when trim(both from irtnc_state)='' then null else irtnc_state::text end irtnc_state
, case when trim(both from segments)='' then null else segments::text end segments
, case when trim(both from application)='' then null else application::text end application
, case when trim(both from intelligence_source)='' then null else intelligence_source::text end intelligence_source
, case when trim(both from funnels_created)='' then null else funnels_created::text end funnels_created
, case when trim(both from status)='' then null else status::text end status
, case when trim(both from order_ae_flag)='' then null else order_ae_flag::text end order_ae_flag
, case when trim(both from ae_confict_flag)='' then null else ae_confict_flag::text end ae_confict_flag
, case when trim(both from account2)='' then null else account2::text end account2
, case when trim(both from city)='' then null else city::text end city
, case when trim(both from company_name_en)='' then null else company_name_en::text end company_name_en
, case when trim(both from nick_name)='' then null else nick_name::text end nick_name
, case when trim(both from company_id)='' then null else company_id::text end company_id
, case when trim(both from salar)='' then null else salar::text end salar
, case when trim(both from contact)='' then null else contact::text end contact
, case when trim(both from phone)='' then null else phone::text end phone
, case when trim(both from wechat_id)='' then null else wechat_id::text end wechat_id
, case when trim(both from email)='' then null else email::text end email
, case when trim(both from new_flag)='' then null else new_flag::text end new_flag
, case when trim(both from account_type)='' then null else account_type::text end account_type
, case when trim(both from products)='' then null else products::text end products
, case when trim(both from product_family1)='' then null else product_family1::text end product_family1
, case when trim(both from product_family2)='' then null else product_family2::text end product_family2
, case when trim(both from product_family3)='' then null else product_family3::text end product_family3
, case when trim(both from attachments)='' then null else attachments::text end attachments
, case when trim(both from remarks)='' then null else remarks::text end remarks
, case when trim(both from contact_phone_flag)='' then null else contact_phone_flag::text end contact_phone_flag
, case when trim(both from repeat_flag)='' then null else repeat_flag::text end repeat_flag
, case when trim(both from am)='' then null else am::text end am
, case when trim(both from visit_tm)='' then null else visit_tm::text end visit_tm
, case when trim(both from replan_flag)='' then null else replan_flag::text end replan_flag
, case when trim(both from status2)='' then null else status2::text end status2
, case when trim(both from ap_week)='' then null else ap_week::text end ap_week
, case when trim(both from year_text)='' then null else year_text::text end year_text
, case when trim(both from year_no)='' then null else year_no::text end year_no
, case when trim(both from quater_no)='' then null else quater_no::text end quater_no
, case when trim(both from assigned_to_text)='' then null else assigned_to_text::text end assigned_to_text
, case when trim(both from time_no)='' then null else time_no::text end time_no
, case when trim(both from creator)='' then null else creator::text end creator
, case when trim(both from create_time)='' then null else create_time::text end create_time
, case when trim(both from updater)='' then null else updater::text end updater
, case when trim(both from update_time)='' then null else update_time::text end update_time
, case when trim(both from flow_state)='' then null else flow_state::text end flow_state
, case when trim(both from delete_time)='' then null else delete_time::text end delete_time
, case when trim(both from deleter)='' then null else deleter::text end deleter
, case when trim(both from id)='' then null else id::text end id
, case when trim(both from app_id)='' then null else app_id::text end app_id
, case when trim(both from entry_id)='' then null else entry_id::text end entry_id
,etl_tx_dt
from (
select
(json_array_elements(data::json)::json->>'_widget_1736992438123') account
, (json_array_elements(data::json)::json->>'_widget_1733121178723') assign_to
, (json_array_elements(data::json)::json->>'_widget_1729230462865') task_name
, (json_array_elements(data::json)::json->>'_widget_1733121178724') visit_time
, (json_array_elements(data::json)::json->>'_widget_1733475752906') task_follow_up_time
, (json_array_elements(data::json)::json->>'_widget_1729230462878') irtnc_state
, (json_array_elements(data::json)::json->>'_widget_1733468584207') segments
, (json_array_elements(data::json)::json->>'_widget_1733468584209') application
, (json_array_elements(data::json)::json->>'_widget_1733471779238') intelligence_source
, (json_array_elements(data::json)::json->>'_widget_1729230462876') funnels_created
, (json_array_elements(data::json)::json->>'_widget_1729230462879') status
, (json_array_elements(data::json)::json->>'_widget_1733724443628') order_ae_flag
, (json_array_elements(data::json)::json->>'_widget_1733121604682') ae_confict_flag
, (json_array_elements(data::json)::json->>'_widget_1729230462867') account2
, (json_array_elements(data::json)::json->>'_widget_1733471779230') city
, (json_array_elements(data::json)::json->>'_widget_1736996031737') company_name_en
, (json_array_elements(data::json)::json->>'_widget_1736996031738') nick_name
, (json_array_elements(data::json)::json->>'_widget_1736996031739') company_id
, (json_array_elements(data::json)::json->>'_widget_1736996031740') salar
, (json_array_elements(data::json)::json->>'_widget_1733711581534') contact
, (json_array_elements(data::json)::json->>'_widget_1736996217003') phone
, (json_array_elements(data::json)::json->>'_widget_1733711581537') wechat_id
, (json_array_elements(data::json)::json->>'_widget_1733711581536') email
, (json_array_elements(data::json)::json->>'_widget_1729230462887') new_flag
, (json_array_elements(data::json)::json->>'_widget_1734934349841') account_type
, (json_array_elements(data::json)::json->>'_widget_1729230462873') products
, (json_array_elements(data::json)::json->>'_widget_1733467937072') product_family1
, (json_array_elements(data::json)::json->>'_widget_1733467937075') product_family2
, (json_array_elements(data::json)::json->>'_widget_1733467937077') product_family3
, (json_array_elements(data::json)::json->>'_widget_1733475752894') attachments
, (json_array_elements(data::json)::json->>'_widget_1733475022193') remarks
, (json_array_elements(data::json)::json->>'_widget_1736995320796') contact_phone_flag
, (json_array_elements(data::json)::json->>'_widget_1736995320797') repeat_flag
, (json_array_elements(data::json)::json->>'_widget_1733111407897') am
, (json_array_elements(data::json)::json->>'_widget_1733121604679') visit_tm
, (json_array_elements(data::json)::json->>'_widget_1733121604680') replan_flag
, (json_array_elements(data::json)::json->>'_widget_1733724920730') status2
, (json_array_elements(data::json)::json->>'_widget_1733813397274') ap_week
, (json_array_elements(data::json)::json->>'_widget_1740975177108') year_text
, (json_array_elements(data::json)::json->>'_widget_1740972648538') year_no
, (json_array_elements(data::json)::json->>'_widget_1733813589842') quater_no
, (json_array_elements(data::json)::json->>'_widget_1739432471884') assigned_to_text
, (json_array_elements(data::json)::json->>'_widget_1739964269165') time_no
, (json_array_elements(data::json)::json->>'creator') creator
, (json_array_elements(data::json)::json->>'createTime') create_time
, (json_array_elements(data::json)::json->>'updater') updater
, (json_array_elements(data::json)::json->>'updateTime') update_time
, (json_array_elements(data::json)::json->>'flowState') flow_state
, (json_array_elements(data::json)::json->>'deleteTime') delete_time
, (json_array_elements(data::json)::json->>'deleter') deleter
, (json_array_elements(data::json)::json->>'_id') id
, (json_array_elements(data::json)::json->>'appId') app_id
, (json_array_elements(data::json)::json->>'entryId') entry_id
,CURRENT_TIMESTAMP(0) etl_tx_dt
from (select * from data_api.api_data
WHERE api_id='251f0714de2d4240a14404bac29f7b31' and is_loaded = '0' order by request_tm desc limit 1) p )p;
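-- Only the newest unloaded batch is consumed above (order by request_tm desc
-- limit 1), but the update below flags every row for this api_id as loaded,
-- so any older unloaded batches are skipped rather than replayed.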
update data_api.api_data
set is_loaded = '1' ,
status = '1',
request_tm = current_timestamp(0)
where api_id='251f0714de2d4240a14404bac29f7b31';
\q

View File

@ -0,0 +1,234 @@
import requests
import json
from typing import List, Dict, Any, Optional
import datetime as dt
import psycopg2
import uuid
class JiandaoyunClient:
    """
    Jiandaoyun API client with cursor-based pagination support.
    """
    def __init__(self, base_url: str, app_id: str, auth_token: str):
        """
        Initialize the Jiandaoyun client.

        Args:
            base_url: base API URL
            app_id: application ID
            auth_token: authorization token
        """
        self.base_url = base_url
        self.app_id = app_id
        self.headers = {
            'Content-Type': 'application/json',
            'Authorization': f'Bearer {auth_token}'
        }

    def fetch_all_data(self, entry_id: str, limit: int = 100,
                       filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch all records, handling pagination automatically.

        Args:
            entry_id: form ID
            limit: records per page (the API maximum is 100)
            filter_condition: optional filter condition

        Returns:
            A list of all data records.
        """
        all_data = []
        last_data_id = None
        while True:
            # Fetch one page of records
            page_data = self._fetch_page(entry_id, limit, last_data_id, filter_condition)
            # Append the page to the result set
            all_data.extend(page_data)
            # Fewer records than the limit means the end has been reached
            if len(page_data) < limit:
                break
            # Set the data_id cursor for the next iteration
            if page_data:
                last_data_id = page_data[-1].get('_id')
            else:
                break
        return all_data

    def _fetch_page(self, entry_id: str, limit: int,
                    last_data_id: Optional[str] = None,
                    filter_condition: Optional[Dict] = None) -> List[Dict[str, Any]]:
        """
        Fetch a single page of records.

        Args:
            entry_id: form ID
            limit: number of records to fetch
            last_data_id: ID of the last record on the previous page
            filter_condition: optional filter condition

        Returns:
            A list of the records on this page.
        """
        url = self.base_url
        payload = {
            "app_id": self.app_id,
            "entry_id": entry_id,
            "limit": limit
        }
        # Add data_id when a pagination cursor is provided
        if last_data_id:
            payload["data_id"] = last_data_id
        # Add filter when a filter condition is provided
        if filter_condition:
            payload["filter"] = filter_condition
        response = requests.post(
            url,
            headers=self.headers,
            data=json.dumps(payload)
        )
        # Raise an exception on error status codes
        response.raise_for_status()
        result = response.json()
        return result.get("data", [])
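# Note: requests.post above is called without a timeout, so a stalled request
# can hang the sync; passing e.g. timeout=30 (an assumed value, not in the
# original) to requests.post would bound each page fetch.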
PG_DSN = dict(database="dataops_db",
user ="dbuser_dba",
password="EmBRxnmmjnE3",
host ="124.221.232.219",
port ="5432")
def save_json_to_pg(data: list, api_id: str) -> None:
    """Persist the list: soft-delete the previous batch, then insert the new one."""
    print('[save_to_pg] writing to PG...')
    sql = """
        UPDATE data_api.api_data
        SET is_loaded = '1'
        WHERE api_id = %s;
        INSERT INTO data_api.api_data
            (id, api_id, data, total_num, is_loaded, status,
             request_tm, execute_tm, remark)
        VALUES (%s, %s, %s, %s, '0', '0',
                current_timestamp(0), current_timestamp(0), '');
    """
    try:
        with psycopg2.connect(**PG_DSN) as conn:
            with conn.cursor() as cur:
                # Both statements run in one transaction, so the soft delete
                # and the insert commit or roll back together.
                cur.execute(sql,
                            (api_id,
                             str(uuid.uuid4()),
                             api_id,
                             json.dumps(data, ensure_ascii=False),
                             len(data)))
            conn.commit()
        print('[save_to_pg] write complete')
    except psycopg2.Error as e:
        print(f'[save_to_pg] database error: {e}')
        raise
    except Exception as e:
        print(f'[save_to_pg] unexpected error: {e}')
        raise
    finally:
        # The connection context manager commits/rolls back but does not
        # close the connection, so close it explicitly.
        if 'conn' in locals():
            conn.close()
"""
获取指定时间段前的时间
:param h: 时间段
:return: 时间
"""
def formatted2_previous_hour(h):
if h==0:
return dt.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
start_of_current_hour = dt.datetime.now().replace(minute=0, second=0, microsecond=0)
# 减去一个小时,得到前一个小时的开始时间
start_of_previous_hour = start_of_current_hour - dt.timedelta(hours=h)
return start_of_previous_hour.strftime("%Y-%m-%d %H:%M:%S")
"""
获取指定天数前的日期
:param days: 天数0表示今天1表示昨天以此类推
:return: YYYY-MM-DD格式的日期字符串
"""
def get_previous_date(days: int = 0) -> str:
"""
获取指定天数前的日期
参数:
days: 天数0表示今天正数表示之前的日期负数表示之后的日期
返回:
YYYY-MM-DD格式的日期字符串
"""
target_date = dt.datetime.now() - dt.timedelta(days=days)
return target_date.strftime("%Y-%m-%d")
# Usage example
def main():
    # Configuration
    BASE_URL = "https://api.jiandaoyun.com/api/v5/app/entry/data/list"
    AUTH_TOKEN = "tPh9Fm9qumixcfDDvk42TUsc3Y0OoarD"
    API_ID = "5ecaf6b1143e4ac082f7f6793e8aea8d"
    params = {'app_id': '671062779c3695dfa9c0f892', 'entry_id': '6757e3d41d422256a76cb023'}
    APP_ID = params['app_id']
    ENTRY_ID = params['entry_id']
    start_dt = get_previous_date(1)
    end_dt = get_previous_date(0)
    # Create the client
    client = JiandaoyunClient(BASE_URL, APP_ID, AUTH_TOKEN)
    # Define the filter condition: records updated between yesterday and today
    filter_condition = {
        "rel": "and",
        "cond": [
            {
                "field": "updateTime",
                "type": "datetime",
                "method": "range",
                "value": [
                    start_dt,
                    end_dt
                ]
            }
        ]
    }
    try:
        # Fetch all records (pagination handled automatically)
        all_data = client.fetch_all_data(
            entry_id=ENTRY_ID,
            limit=100,  # adjustable as needed; the API maximum is 100
            filter_condition=filter_condition
        )
        print(f"Total records fetched: {len(all_data)}")
        save_json_to_pg(all_data, API_ID)
        # # Process your data here
        # for record in all_data:
        #     print(f"Record ID: {record.get('_id')}")
    except requests.exceptions.RequestException as e:
        print(f"Error fetching data: {e}")

if __name__ == "__main__":
    main()

View File

@ -0,0 +1,72 @@
/*******Main Section**************************************************************************/
\set ON_ERROR_STOP on
\set AUTOCOMMIT on
\timing on
DELETE FROM data_api.jdy_calendar;
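-- With \set AUTOCOMMIT on, this DELETE commits on its own before the INSERT
-- below runs, so a failure mid-load leaves jdy_calendar empty until the next
-- successful run; the same full-refresh pattern applies to the other loads.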
insert into data_api.jdy_calendar (
tektronix_ap
, standard_year_calendar
, tek_year
, tektronix_ap_week
, tek_month
, tek_q
, calendar_week
, creator
, create_time
, updater
, update_time
, delete_time
, deleter
, id
, app_id
, entry_id
,etl_tx_dt
)
select
case when trim(both from tektronix_ap)='' then null else tektronix_ap::text end tektronix_ap
, case when trim(both from standard_year_calendar)='' then null else standard_year_calendar::text end standard_year_calendar
, case when trim(both from tek_year)='' then null else tek_year::text end tek_year
, case when trim(both from tektronix_ap_week)='' then null else tektronix_ap_week::text end tektronix_ap_week
, case when trim(both from tek_month)='' then null else tek_month::text end tek_month
, case when trim(both from tek_q)='' then null else tek_q::text end tek_q
, case when trim(both from calendar_week)='' then null else calendar_week::text end calendar_week
, case when trim(both from creator)='' then null else creator::text end creator
, case when trim(both from create_time)='' then null else create_time::text end create_time
, case when trim(both from updater)='' then null else updater::text end updater
, case when trim(both from update_time)='' then null else update_time::text end update_time
, case when trim(both from delete_time)='' then null else delete_time::text end delete_time
, case when trim(both from deleter)='' then null else deleter::text end deleter
, case when trim(both from id)='' then null else id::text end id
, case when trim(both from app_id)='' then null else app_id::text end app_id
, case when trim(both from entry_id)='' then null else entry_id::text end entry_id
,etl_tx_dt
from (
select
(json_array_elements(data::json)::json->>'_widget_1733813203127') tektronix_ap
, (json_array_elements(data::json)::json->>'_widget_1733813203128') standard_year_calendar
, (json_array_elements(data::json)::json->>'_widget_1740972659618') tek_year
, (json_array_elements(data::json)::json->>'_widget_1733813203130') tektronix_ap_week
, (json_array_elements(data::json)::json->>'_widget_1733813203131') tek_month
, (json_array_elements(data::json)::json->>'_widget_1733813203132') tek_q
, (json_array_elements(data::json)::json->>'_widget_1733813203133') calendar_week
, (json_array_elements(data::json)::json->>'creator') creator
, (json_array_elements(data::json)::json->>'createTime') create_time
, (json_array_elements(data::json)::json->>'updater') updater
, (json_array_elements(data::json)::json->>'updateTime') update_time
, (json_array_elements(data::json)::json->>'deleteTime') delete_time
, (json_array_elements(data::json)::json->>'deleter') deleter
, (json_array_elements(data::json)::json->>'_id') id
, (json_array_elements(data::json)::json->>'appId') app_id
, (json_array_elements(data::json)::json->>'entryId') entry_id
,CURRENT_TIMESTAMP(0) etl_tx_dt
from (select * from data_api.api_data
WHERE api_id='5ecaf6b1143e4ac082f7f6793e8aea8d' and is_loaded = '0' order by request_tm desc limit 1) p )p;
update data_api.api_data
set is_loaded = '1' ,
status = '1',
request_tm = current_timestamp(0)
where api_id='5ecaf6b1143e4ac082f7f6793e8aea8d';
\q

View File

@ -0,0 +1,186 @@
#!/usr/bin/python
# -*- encoding=utf-8 -*-
from airflow import DAG
from datetime import datetime, timedelta
from airflow.contrib.hooks.ssh_hook import SSHHook
from airflow.contrib.operators.ssh_operator import SSHOperator
from airflow.operators.email_operator import EmailOperator
from airflow.utils.trigger_rule import TriggerRule
sshHook = SSHHook(ssh_conn_id ='ssh_air')
default_args = {
'owner': 'tek_newsletter@163.com',
'email_on_failure': True,
'email_on_retry':True,
'start_date': datetime(2024, 1, 1),
'depends_on_past': False,
'retries': 6,
'retry_delay': timedelta(minutes=10),
}
dag = DAG('wf_dag_jdy_account_vist_syncs', default_args=default_args,
schedule_interval="0 1 * * *",
catchup=False,
dagrun_timeout=timedelta(minutes=160),
max_active_runs=3)
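# Pipeline shape: for each Jiandaoyun form, a *_feign task pulls yesterday's
# records from the API into data_api.api_data, the matching *_load task runs
# the psql load script, and a downstream SA task syncs the staging table.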
task_failed = EmailOperator(
    dag=dag,
    trigger_rule=TriggerRule.ONE_FAILED,
    task_id="task_failed",
    to=["tek_newsletter@163.com"],
    cc=[],
    subject="jdy_account_vist_syncs_failed",
    html_content='<h3>Hello, the jdy_account_vist_syncs job failed; please handle it promptly.</h3>')
Jdy_ae_01_feign = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_ae_01_feign',
command='python3 /data/airflow/etl/API/Jdy_ae_01_feign.py',
depends_on_past=False,
retries=3,
dag=dag)
Jdy_ae_01_load = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_ae_01_load',
command='/data/airflow/etl/API/run_psql.sh {{ ds_nodash }} {{params.my_param }}',
params={'my_param':"Jdy_ae_01_load"},
depends_on_past=False,
retries=3,
dag=dag)
Jdy_ae_01_feign >> Jdy_ae_01_load
Jdy_contact_01_feign = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_contact_01_feign',
command='python3 /data/airflow/etl/API/Jdy_contact_01_feign.py',
depends_on_past=False,
retries=3,
dag=dag)
Jdy_contact_01_load = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_contact_01_load',
command='/data/airflow/etl/API/run_psql.sh {{ ds_nodash }} {{params.my_param }}',
params={'my_param':"Jdy_contact_01_load"},
depends_on_past=False,
retries=3,
dag=dag)
Jdy_contact_01_feign >> Jdy_contact_01_load
Jdy_daily_visit_feign = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_daily_visit_feign',
command='python3 /data/airflow/etl/API/Jdy_daily_visit_feign.py',
depends_on_past=False,
retries=3,
dag=dag)
Jdy_daily_visit_load = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_daily_visit_load',
command='/data/airflow/etl/API/run_psql.sh {{ ds_nodash }} {{params.my_param }}',
params={'my_param':"Jdy_daily_visit_load"},
depends_on_past=False,
retries=3,
dag=dag)
Jdy_daily_visit_feign >> Jdy_daily_visit_load
Jdy_account_01_feign = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_account_01_feign',
command='python3 /data/airflow/etl/API/Jdy_account_01_feign.py',
depends_on_past=False,
retries=3,
dag=dag)
Jdy_account_01_load = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_account_01_load',
command='/data/airflow/etl/API/run_psql.sh {{ ds_nodash }} {{params.my_param }}',
params={'my_param':"Jdy_account_01_load"},
depends_on_past=False,
retries=3,
dag=dag)
Jdy_account_01_feign >> Jdy_account_01_load
Jdy_calendar_01_feign = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_calendar_01_feign',
command='python3 /data/airflow/etl/API/Jdy_calendar_01_feign.py',
depends_on_past=False,
retries=3,
dag=dag)
Jdy_calendar_01_load = SSHOperator(
ssh_hook=sshHook,
task_id='Jdy_calendar_01_load',
command='/data/airflow/etl/API/run_psql.sh {{ ds_nodash }} {{params.my_param }}',
params={'my_param':"Jdy_calendar_01_load"},
depends_on_past=False,
retries=3,
dag=dag)
Jdy_calendar_01_feign >> Jdy_calendar_01_load
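# SA-layer sync tasks: run_sa.sh reloads each S98_S_* staging table once the
# corresponding API load has finished (dependencies wired below).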
jdy_calendar_8832 = SSHOperator(
ssh_hook=sshHook,
task_id='jdy_calendar_8832',
command='/data/airflow/etl/SA/run_sa.sh {{ ds_nodash }} {{ params.my_param }} >>/data/airflow/logs/run_psql_{{ds_nodash}}.log 2>&1 ',
params={'my_param':"S98_S_jdy_calendar"},
depends_on_past=False,
retries=3,
dag=dag)
jdy_account_4319 = SSHOperator(
ssh_hook=sshHook,
task_id='jdy_account_4319',
command='/data/airflow/etl/SA/run_sa.sh {{ ds_nodash }} {{ params.my_param }} >>/data/airflow/logs/run_psql_{{ds_nodash}}.log 2>&1 ',
params={'my_param':"S98_S_jdy_account"},
depends_on_past=False,
retries=3,
dag=dag)
jdy_contact_6445 = SSHOperator(
ssh_hook=sshHook,
task_id='jdy_contact_6445',
command='/data/airflow/etl/SA/run_sa.sh {{ ds_nodash }} {{ params.my_param }} >>/data/airflow/logs/run_psql_{{ds_nodash}}.log 2>&1 ',
params={'my_param':"S98_S_jdy_contact"},
depends_on_past=False,
retries=3,
dag=dag)
jdy_daily_visit_9783 = SSHOperator(
ssh_hook=sshHook,
task_id='jdy_daily_visit_9783',
command='/data/airflow/etl/SA/run_sa.sh {{ ds_nodash }} {{ params.my_param }} >>/data/airflow/logs/run_psql_{{ds_nodash}}.log 2>&1 ',
params={'my_param':"S98_S_jdy_daily_visit"},
depends_on_past=False,
retries=3,
dag=dag)
jdy_ae_plan_2600 = SSHOperator(
ssh_hook=sshHook,
task_id='jdy_ae_plan_2600',
command='/data/airflow/etl/SA/run_sa.sh {{ ds_nodash }} {{ params.my_param }} >>/data/airflow/logs/run_psql_{{ds_nodash}}.log 2>&1 ',
params={'my_param':"S98_S_jdy_ae_plan"},
depends_on_past=False,
retries=3,
dag=dag)
Jdy_contact_01_load >> jdy_contact_6445
Jdy_ae_01_load >> jdy_ae_plan_2600
Jdy_daily_visit_load >> jdy_daily_visit_9783
Jdy_account_01_load >> jdy_account_4319
Jdy_calendar_01_load >> jdy_calendar_8832
# Fan every terminal SA task into the alert so a failure on any branch,
# not just the calendar one, triggers the failure email
[jdy_contact_6445, jdy_ae_plan_2600, jdy_daily_visit_9783, jdy_account_4319, jdy_calendar_8832] >> task_failed