1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
import os
import re
import sys
sys.path.append(os.path.dirname(sys.path[0])) # parent directory, so `utils.*` imports resolve when run as a script
from utils.templates import Templates
from pyspark.sql import functions as F
from pyspark.sql.types import IntegerType
class DwtBulkMarket(Templates):
    """Aggregate bulk-market metrics per search term.

    Reads the dwd_bulk_market fact table for one (site, date_type, date_info)
    partition, joins asin titles from dim_cal_asin_history_detail, and counts
    per search term: total asins, self-owned asins, their proportion, and how
    many self-owned asins have "bulk" in the title. Output is partitioned by
    site_name / date_type / date_info and saved by the Templates.run() driver.
    """

    def __init__(self, site_name='us', date_type="week", date_info='2023-44'):
        super().__init__()
        self.site_name = site_name
        self.date_type = date_type
        self.date_info = date_info
        self.db_save = 'dwt_bulk_market'
        self.spark = self.create_spark_object(
            app_name=f"{self.db_save}: {self.site_name}, {self.date_type}, {self.date_info}")
        self.reset_partitions(partitions_num=5)
        self.partitions_by = ['site_name', 'date_type', 'date_info']
        # Placeholder DataFrames; populated in read_data()/handle_data().
        self.df_dwd = self.spark.sql("select 1+1;")
        self.df_dim = self.spark.sql("select 1+1;")
        self.df_joined = self.spark.sql("select 1+1;")
        self.df_save = self.spark.sql("select 1+1;")
        # Register the title matcher as SQL UDF 'u_match'; the returned Column
        # function is kept for DataFrame-API use in handle_data().
        self.u_match = self.spark.udf.register('u_match', self.udf_ele_mattch, IntegerType())

    def read_data(self):
        """Load the fact partition and the asin-title dimension (both cached)."""
        sql1 = f"""
            select
                search_term,
                asin,
                is_self_asin
            from
                dwd_bulk_market
            where
                site_name = '{self.site_name}'
            and date_type = '{self.date_type}'
            and date_info = '{self.date_info}';
        """
        print(sql1)
        self.df_dwd = self.spark.sql(sqlQuery=sql1).cache()
        sql2 = f"""
            select
                asin,
                lower(asin_title) as asin_title
            from
                dim_cal_asin_history_detail
            where
                site_name = '{self.site_name}';
        """
        print(sql2)
        self.df_dim = self.spark.sql(sqlQuery=sql2).cache()

    def handle_data(self):
        """Join titles onto the facts and aggregate per search_term."""
        # Left join: asins absent from the dim table keep a NULL asin_title,
        # which the UDF must tolerate (it returns 0 for NULL/empty titles).
        self.df_joined = self.df_dwd.join(self.df_dim, 'asin', 'left')
        self.df_joined = self.df_joined.withColumn("bulk_match", self.u_match(F.col("asin_title")))
        self.df_save = self.df_joined.groupby(['search_term'])\
            .agg(
                F.count("asin").alias("asin_count"),
                F.sum(F.when(F.col("is_self_asin") == "1", 1).otherwise(0)).alias("self_asin_count"),
                F.round((F.sum(F.when(F.col("is_self_asin") == "1", 1).otherwise(0)) / F.count("asin")), 4).alias("proportion"),
                F.sum(F.when(F.col("is_self_asin") == "1", F.col("bulk_match")).otherwise(0)).alias("self_asin_title_including_bulk"),
            )
        # Fill the partition columns expected by partitions_by.
        self.df_save = self.df_save.withColumn("site_name", F.lit(self.site_name))
        self.df_save = self.df_save.withColumn("date_type", F.lit(self.date_type))
        self.df_save = self.df_save.withColumn("date_info", F.lit(self.date_info))

    @staticmethod
    def udf_ele_mattch(match_text: str) -> int:
        """Return 1 if 'bulk' occurs as a standalone word in *match_text*, else 0.

        The word must not be immediately preceded by '+', '*', '-', '%' or '.'
        (so e.g. 'non-bulk' does not count). Matching is case-insensitive.
        NULL/empty titles (possible after the left join in handle_data) return 0
        instead of raising TypeError inside the Spark executor.
        """
        if not match_text:
            return 0
        pattern = re.compile(r'(?<!\+|\*|\-|\%|\.)\b(bulk)\b', flags=re.IGNORECASE)
        return 1 if pattern.search(match_text) else 0
if __name__ == '__main__':
    # CLI entry point: <site_name> <date_type> <date_info>, e.g. us week 2023-44.
    site_name = sys.argv[1]
    date_type = sys.argv[2]
    date_info = sys.argv[3]
    DwtBulkMarket(site_name=site_name, date_type=date_type, date_info=date_info).run()