Skip to content
Projects
Groups
Snippets
Help
This project
Loading...
Sign in / Register
Toggle navigation
A
Amazon-Selection-Data
Overview
Overview
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
abel_cjy
Amazon-Selection-Data
Commits
ba46b2f2
Commit
ba46b2f2
authored
Mar 17, 2026
by
chenyuanjie
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
ABA周增长
parent
af0b00c2
Expand all
Hide whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
142 additions
and
0 deletions
+142
-0
dim_st_detail_week.py
Pyspark_job/dim/dim_st_detail_week.py
+0
-0
dim_st_detail_week.py
Pyspark_job/sqoop_export/dim_st_detail_week.py
+142
-0
No files found.
Pyspark_job/dim/dim_st_detail_week.py
0 → 100644
View file @
ba46b2f2
This diff is collapsed.
Click to expand it.
Pyspark_job/sqoop_export/dim_st_detail_week.py
0 → 100644
View file @
ba46b2f2
import os
import sys

# Make the project root (the parent of this script's directory) importable,
# so the `utils` package resolves when this file is run as a standalone script.
sys.path.append(os.path.dirname(sys.path[0]))

from utils.ssh_util import SSHUtil
from utils.common_util import CommonUtil
from utils.db_util import DBUtil, DbTypes
if __name__ == '__main__':
    # CLI arguments: <site_name> <date_type> <date_info>; date_info is "<year>-<week>".
    site_name = CommonUtil.get_sys_arg(1, None)
    date_type = CommonUtil.get_sys_arg(2, None)
    date_info = CommonUtil.get_sys_arg(3, None)

    # Gate the export to permitted working hours for this process/owner.
    CommonUtil.judge_is_work_hours(
        site_name=site_name,
        date_type=date_type,
        date_info=date_info,
        principal='chenyuanjie',
        priority=1,
        export_tools_type=1,
        belonging_to_process='ABA周增长',
    )

    db_type = DbTypes.postgresql_cluster.name
    print("导出到PG集群中")

    # Derive table names: one yearly master table partitioned by week.
    year, week = map(int, date_info.split('-'))
    export_master_tb = f"{site_name}_aba_report_week_{year}"
    export_tb = f"{export_master_tb}_{week}"
    next_week = week + 1

    engine = DBUtil.get_db_engine(db_type, site_name)

    # Recreate the weekly child table from the yearly master's definition.
    with engine.connect() as conn:
        ddl_sql = f"""
        drop table if exists {export_tb};
        create table if not exists {export_tb}
        (
            like {export_master_tb} including defaults including comments
        );
        """
        print("================================执行sql================================")
        print(ddl_sql)
        conn.execute(ddl_sql)

    # Columns to export, in target-table order.
    export_cols = [
        'st_key', 'search_term', 'is_search_text', 'is_ascending_text',
        'is_first_text', 'is_high_return_text', 'rank', 'search_volume',
        'orders',
        'rank_last_1_week', 'rank_last_4_week', 'rank_last_12_week',
        'rank_change_last_1_week', 'rank_change_last_4_week', 'rank_change_last_12_week',
        'rank_rate_last_1_week', 'rank_rate_last_4_week', 'rank_rate_last_12_week',
        'st_word_num',
        'asin1', 'asin2', 'asin3',
        'product_title1', 'product_title2', 'product_title3',
        'click_share1', 'click_share2', 'click_share3', 'click_share_total',
        'conversion_share1', 'conversion_share2', 'conversion_share3', 'conversion_share_total',
        'brand1', 'brand2', 'brand3',
        'category1', 'category2', 'category3',
        'quantity_being_sold', 'category_id', 'category_current_id',
        'market_cycle_type', 'week',
        'rank_change_1_week_ago', 'rank_change_2_week_ago', 'rank_change_3_week_ago',
        'rank_rate_1_week_ago', 'rank_rate_2_week_ago', 'rank_rate_3_week_ago',
    ]

    # Build the export shell command and run it on the remote export host.
    export_script = CommonUtil.build_export_sh(
        site_name=site_name,
        db_type=db_type,
        hive_tb="dim_st_detail_week",
        export_tb=export_tb,
        col=export_cols,
        partition_dict={
            "site_name": site_name,
            "date_type": date_type,
            "date_info": date_info,
        },
    )
    ssh_client = SSHUtil.get_ssh_client()
    SSHUtil.exec_command_async(ssh_client, export_script, ignore_err=False)
    ssh_client.close()

    # Populate the full-text-search column on the freshly loaded child table.
    with engine.connect() as conn:
        tsv_sql = f"""
        update {export_tb} set keyword_tsv = to_tsvector('english_amazonword', search_term);
        """
        print("================================执行sql================================")
        print(tsv_sql)
        conn.execute(tsv_sql)

    # Attach the child table to the yearly master as the [week, next_week)
    # partition, and copy the master's indexes onto it.
    DBUtil.add_pg_part(
        engine,
        source_tb_name=export_tb,
        part_master_tb=export_master_tb,
        part_val={"from": [week], "to": [next_week]},
        cp_index_flag=True,
    )

    # Record the export completion in the workflow tracking table (MySQL).
    workflow_sql = f"""
    REPLACE INTO selection.workflow_everyday
    (site_name, report_date, status, status_val, table_name, date_type, page, is_end, remark, export_db_type)
    VALUES
    ('{site_name}', '{date_info}', '导出PG数据库', 14, '{site_name}_aba_report_week', 'week', 'ABA搜索词周报告', '是', 'ABA搜索词周报告表', 'postgresql_cluster');
    """
    DBUtil.engine_exec_sql(DBUtil.get_db_engine('mysql', 'us'), workflow_sql)

    print("success")
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment