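"""Sync script: join us_st_keepa_syn_2023 with the latest parent_asin per asin.

Reads the Keepa sync table and the ods_asin_variat variation table for one
site, keeps the most recent parent_asin per asin, left-joins it onto the
Keepa rows, and tags the result with site_name for partitioned saving.
The run() entry point is assumed to be provided by the Templates base class.
"""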
import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))  # add the parent directory so utils is importable

from utils.templates import Templates
from pyspark.sql import functions as F
from pyspark.sql.window import Window


class UsStKeepaSyn(Templates):

    def __init__(self, site_name='us'):
        super().__init__()
        self.site_name = site_name
        self.db_save = 'us_st_keepa_syn_2023_copy'
        self.spark = self.create_spark_object(
            app_name=f"{self.db_save}: {self.site_name}")
        self.reset_partitions(partitions_num=10)
        self.partitions_by = ['site_name']
        # Placeholder DataFrames, overwritten in read_data() and handle_data().
        self.df_asin = self.spark.sql("select 1+1")
        self.df_parent_asin = self.spark.sql("select 1+1")
        self.df_save = self.spark.sql("select 1+1")

    def read_data(self):
        # Full snapshot of the Keepa sync table for this site.
        sql = f"""
        select
            *
        from
            us_st_keepa_syn_2023
        where
            site_name = '{self.site_name}'
        """
        print(sql)
        self.df_asin = self.spark.sql(sqlQuery=sql).cache()
        # asin -> parent_asin mapping, with the update timestamp used
        # later to keep only the most recent mapping per asin.
        sql = f"""
        select
            asin,
            parent_asin,
            updated_time
        from
            ods_asin_variat
        where
            site_name = '{self.site_name}'
        """
        print(sql)
        self.df_parent_asin = self.spark.sql(sqlQuery=sql).cache()

    def handle_data(self):
        # Keep only the most recent parent_asin per asin. A window with
        # row_number() is used because orderBy() followed by
        # drop_duplicates() does not guarantee which duplicate survives
        # the shuffle.
        window = Window.partitionBy('asin').orderBy(F.col('updated_time').desc_nulls_last())
        self.df_parent_asin = (
            self.df_parent_asin
            .withColumn('row_num', F.row_number().over(window))
            .filter(F.col('row_num') == 1)
            .drop('row_num', 'updated_time')
            .cache()
        )
        # Attach parent_asin to every Keepa row; asins without a
        # variation record keep a null parent_asin.
        self.df_save = self.df_asin.join(self.df_parent_asin, 'asin', 'left')
        self.df_save.show(10)
        self.df_save = self.df_save.withColumn("site_name", F.lit(self.site_name))


if __name__ == '__main__':
    site_name = sys.argv[1] if len(sys.argv) > 1 else 'us'
    handle_obj = UsStKeepaSyn(site_name=site_name)
    handle_obj.run()
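# Example invocation (a sketch; the file name and submit command are
# assumptions, since the actual deployment depends on the cluster setup
# and on what Templates configures):
#   spark-submit us_st_keepa_syn.py us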