import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))  # add the parent directory to the import path
from utils.templates import Templates
from pyspark.sql import functions as F


class UsStKeepaSyn(Templates):

    def __init__(self, site_name='us'):
        super().__init__()
        self.site_name = site_name
        self.db_save = 'us_st_keepa_syn_2023_copy'
        self.spark = self.create_spark_object(
            app_name=f"{self.db_save}: {self.site_name}")
        self.reset_partitions(partitions_num=10)
        self.partitions_by = ['site_name']
        # Placeholder DataFrames; populated in read_data() / handle_data()
        self.df_asin = self.spark.sql("select 1+1;")
        self.df_parent_asin = self.spark.sql("select 1+1;")
        self.df_save = self.spark.sql("select 1+1;")

    def read_data(self):
        # Load the keepa sync records for the current site
        sql = f"""
        select * from us_st_keepa_syn_2023 where site_name = '{self.site_name}';
        """
        print(sql)
        self.df_asin = self.spark.sql(sqlQuery=sql).cache()
        # Load asin -> parent_asin variation mappings for the current site
        sql = f"""
        select asin, parent_asin, updated_time from ods_asin_variat where site_name = '{self.site_name}';
        """
        print(sql)
        self.df_parent_asin = self.spark.sql(sqlQuery=sql).cache()

    def handle_data(self):
        # Keep the most recently updated variation record for each asin
        self.df_parent_asin = self.df_parent_asin.orderBy(self.df_parent_asin.updated_time.desc_nulls_last())
        self.df_parent_asin = self.df_parent_asin.drop_duplicates(['asin']).cache()
        self.df_parent_asin = self.df_parent_asin.drop('updated_time')
        # Attach parent_asin to the keepa records and tag rows with the site partition column
        self.df_save = self.df_asin.join(self.df_parent_asin, 'asin', 'left')
        self.df_save.show(10)
        self.df_save = self.df_save.withColumn("site_name", F.lit(self.site_name))


if __name__ == '__main__':
    site_name = sys.argv[1]
    handle_obj = UsStKeepaSyn(site_name=site_name)
    handle_obj.run()