import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))  # add the parent directory to the module search path
from utils.templates import Templates
from pyspark.sql import functions as F


class DwdBulkMarket(Templates):

    def __init__(self, site_name='us', date_type="week", date_info='2023-44'):
        super().__init__()
        self.site_name = site_name
        self.date_type = date_type
        self.date_info = date_info
        self.db_save = 'dwd_bulk_market'
        self.spark = self.create_spark_object(
            app_name=f"{self.db_save}: {self.site_name}, {self.date_type}, {self.date_info}")
        self.reset_partitions(partitions_num=5)
        self.partitions_by = ['site_name', 'date_type', 'date_info']
        # Placeholder DataFrames; the real contents are assigned in read_data()/handle_data().
        # Note: spark.sql() rejects a trailing ";", so the statements below omit it.
        self.df_ac = self.spark.sql("select 1+1")
        self.df_bs = self.spark.sql("select 1+1")
        self.df_hr = self.spark.sql("select 1+1")
        self.df_sb = self.spark.sql("select 1+1")
        self.df_sp = self.spark.sql("select 1+1")
        self.df_zr = self.spark.sql("select 1+1")
        self.df_title = self.spark.sql("select 1+1")
        self.df_union = self.spark.sql("select 1+1")
        self.df_self_asin = self.spark.sql("select 1+1")
        self.df_save = self.spark.sql("select 1+1")

    def read_data(self):
        # The six ods_search_term_* tables share the same (search_term, asin) schema,
        # one table per search-result type (ac/bs/hr/sb/sp/zr).
        sql1 = f"""
            select search_term, asin from ods_search_term_ac
            where site_name = '{self.site_name}'
              and date_type = '{self.date_type}'
              and date_info = '{self.date_info}'
        """
        print(sql1)
        self.df_ac = self.spark.sql(sqlQuery=sql1).cache()
        sql2 = f"""
            select search_term, asin from ods_search_term_bs
            where site_name = '{self.site_name}'
              and date_type = '{self.date_type}'
              and date_info = '{self.date_info}'
        """
        print(sql2)
        self.df_bs = self.spark.sql(sqlQuery=sql2).cache()
        sql3 = f"""
            select search_term, asin from ods_search_term_hr
            where site_name = '{self.site_name}'
              and date_type = '{self.date_type}'
              and date_info = '{self.date_info}'
        """
        print(sql3)
        self.df_hr = self.spark.sql(sqlQuery=sql3).cache()
        sql4 = f"""
            select search_term, asin from ods_search_term_sb
            where site_name = '{self.site_name}'
              and date_type = '{self.date_type}'
              and date_info = '{self.date_info}'
        """
        print(sql4)
        self.df_sb = self.spark.sql(sqlQuery=sql4).cache()
        sql5 = f"""
            select search_term, asin from ods_search_term_sp
            where site_name = '{self.site_name}'
              and date_type = '{self.date_type}'
              and date_info = '{self.date_info}'
        """
        print(sql5)
        self.df_sp = self.spark.sql(sqlQuery=sql5).cache()
        sql6 = f"""
            select search_term, asin from ods_search_term_zr
            where site_name = '{self.site_name}'
              and date_type = '{self.date_type}'
              and date_info = '{self.date_info}'
        """
        print(sql6)
        self.df_zr = self.spark.sql(sqlQuery=sql6).cache()
        # Self-owned ASINs, deduplicated via group by; every row gets the flag '1'.
        sql7 = f"""
            select asin, '1' as is_self_asin from ods_self_asin
            where site_name = '{self.site_name}'
            group by asin
        """
        print(sql7)
        self.df_self_asin = self.spark.sql(sqlQuery=sql7).cache()
        # ASIN titles, lower-cased for case-insensitive downstream matching.
        sql8 = f"""
            select asin, lower(asin_title) as asin_title from dim_cal_asin_history_detail
            where site_name = '{self.site_name}'
        """
        print(sql8)
        self.df_title = self.spark.sql(sqlQuery=sql8).cache()

    def handle_data(self):
        # Stack the six search-term sources; union() keeps duplicates,
        # so (search_term, asin) pairs are deduplicated explicitly afterwards.
        self.df_union = self.df_ac \
            .union(self.df_bs) \
            .union(self.df_hr) \
            .union(self.df_sb) \
            .union(self.df_sp) \
            .union(self.df_zr)
        self.df_union = self.df_union.dropDuplicates(["search_term", "asin"])
        # Left join the self-ASIN flag; rows without a match get is_self_asin = '0'.
        # fillna is scoped to is_self_asin so null values in other columns are untouched.
        self.df_save = self.df_union \
            .join(self.df_self_asin, 'asin', 'left') \
            .fillna({'is_self_asin': '0'})
        self.df_save = self.df_save.join(self.df_title, 'asin', 'left')
        self.df_save.show(10)
        # Attach the partition columns declared in self.partitions_by.
        self.df_save = self.df_save.withColumn("site_name", F.lit(self.site_name))
        self.df_save = self.df_save.withColumn("date_type", F.lit(self.date_type))
        self.df_save = self.df_save.withColumn("date_info", F.lit(self.date_info))


if __name__ == '__main__':
    site_name = sys.argv[1]    # e.g. us
    date_type = sys.argv[2]    # e.g. week
    date_info = sys.argv[3]    # e.g. 2023-44
    handle_obj = DwdBulkMarket(site_name=site_name, date_type=date_type, date_info=date_info)
    # run() is inherited from the Templates base class.
    handle_obj.run()
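
# Example invocation (a sketch: the script filename and any spark-submit options
# are assumptions, not taken from this file):
#   spark-submit dwd_bulk_market.py us week 2023-44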