"""
@Author : HuangJian
@Description : 各站点店铺asin详情表-- 月抓取
@SourceTable : us_asin_detail_product_2023
@SinkTable : ods_asin_detail_product
@CreateTime : 2022/05/19 14:55
@UpdateTime : 2022/05/19 14:55
"""
import os
import sys
sys.path.append(os.path.dirname(sys.path[0]))
from utils.ssh_util import SSHUtil
from utils.db_util import DBUtil
from utils.common_util import CommonUtil
from utils.common_util import DateTypes
from utils.hdfs_utils import HdfsUtils
from utils.spark_util import SparkUtil
if __name__ == '__main__':
    site_name = CommonUtil.get_sys_arg(1, None)
    asin_type = CommonUtil.get_sys_arg(2, None)
    date_type = CommonUtil.get_sys_arg(3, None)
    date_info = CommonUtil.get_sys_arg(4, None)
    assert site_name is not None, "site_name must not be empty!"
    # asin_type is used in both the partition spec and the source table name, so check it as well
    assert asin_type is not None, "asin_type must not be empty!"
    assert date_type is not None, "date_type must not be empty!"
    assert date_info is not None, "date_info must not be empty!"
    hive_table = "ods_keepa_asin_trend"
    partition_dict = {
        "site_name": site_name,
        "asin_type": asin_type,
        "date_type": date_type,
        "date_info": date_info
    }
    # Build and verify the HDFS landing path for this partition
    hdfs_path = CommonUtil.build_hdfs_path(hive_table, partition_dict=partition_dict)
    print(f"hdfs_path is {hdfs_path}")
    import_table = f"{site_name}_keepa_trend_{asin_type}"
    db_type = 'postgresql'
    print("Connected database type:", db_type, " source table to sync:", import_table)
    sql_query = f"""
        select
            id,
            asin,
            title,
            img_url,
            listed_since,
            first_bsr_label,
            last_bsr_label,
            last_bsr_rank,
            last_price,
            last_count_reviews,
            currency,
            times_list,
            price_list,
            rank_list,
            first_bsr_list,
            last_bsr_list,
            count_reviews_list,
            variations,
            data_zoom_start,
            date_range,
            color,
            size,
            last_bsr_rating,
            rating_count,
            rating_count_list,
            min_list,
            max_list,
            current_list,
            avg_list,
            avg90_list,
            avg180_list,
            bsr_orders_list,
            bsr_orders_sale_list,
            created_time,
            updated_time
        from {import_table}
        where 1=1
        and \$CONDITIONS
    """
    # Validate the query schema and data against the Hive table before importing
    CommonUtil.check_schema_before_import(db_type=db_type,
                                          site_name=site_name,
                                          query=sql_query,
                                          hive_tb_name=hive_table,
                                          msg_usr=['chenyuanjie'])
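    # msg_usr presumably lists the users to notify if the schema check fails
    # (an assumption about the CommonUtil helper, not verified here).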
    # Generate the import shell script
    import_sh = CommonUtil.build_import_sh(site_name=site_name,
                                           db_type=db_type,
                                           query=sql_query,
                                           hdfs_path=hdfs_path,
                                           map_num=20,
                                           key='asin')
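    # map_num/key presumably translate to Sqoop's --num-mappers and --split-by
    # options, i.e. 20 parallel mappers splitting the query on the asin column
    # (an assumption about the CommonUtil wrapper).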
    # Delete any existing HDFS data before the import
    HdfsUtils.delete_hdfs_file(hdfs_path)
    # Create an SSH client object -- used to execute the import command remotely
    client = SSHUtil.get_ssh_client()
    SSHUtil.exec_command_async(client, import_sh, ignore_err=False)
    # Build the lzo index and repair the Hive partition metadata
    CommonUtil.after_import(hdfs_path=hdfs_path, hive_tb=hive_table)
    # Close the SSH connection
    client.close()