import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))  # add the parent directory to the import path
from utils.templates import Templates
from pyspark.sql import functions as F


class DwtBulkMarketPg(Templates):

    def __init__(self, site_name='us', date_type="week", date_info='2023-44'):
        super().__init__()
        self.site_name = site_name
        self.date_type = date_type
        self.date_info = date_info
        self.db_save = 'dwt_bulk_market_pg'
        self.spark = self.create_spark_object(
            app_name=f"{self.db_save}: {self.site_name}, {self.date_type}, {self.date_info}")
        self.reset_partitions(partitions_num=5)
        self.partitions_by = ['site_name', 'date_type', 'date_info']
        # placeholder DataFrames; populated in read_data()/handle_data()
        self.df_ods = self.spark.sql("select 1+1")
        self.df_dwt = self.spark.sql("select 1+1")
        self.df_save = self.spark.sql("select 1+1")

    def read_data(self):
        # search-term metrics from the ODS layer for the requested partition
        sql1 = f"""
        select search_term, search_volume, search_volume_avg3_month, search_volume_avg12_month,
               st_asin_count, st_asin_page1_review_count, st_depth, st_sponsored_ads, st_appearance
        from ods_bulk_market
        where site_name = '{self.site_name}'
          and date_type = '{self.date_type}'
          and date_info = '{self.date_info}'
        """
        print(sql1)
        self.df_ods = self.spark.sql(sqlQuery=sql1).cache()
        # ASIN aggregates from the DWT layer for the same partition
        sql2 = f"""
        select search_term, asin_count, self_asin_count, proportion
        from dwt_bulk_market
        where site_name = '{self.site_name}'
          and date_type = '{self.date_type}'
          and date_info = '{self.date_info}'
        """
        print(sql2)
        self.df_dwt = self.spark.sql(sqlQuery=sql2).cache()

    def handle_data(self):
        # left join keeps every ODS search term, even those with no DWT match
        self.df_save = self.df_ods.join(self.df_dwt, 'search_term', 'left')
        # fill the partition columns
        self.df_save = self.df_save.withColumn("site_name", F.lit(self.site_name))
        self.df_save = self.df_save.withColumn("date_type", F.lit(self.date_type))
        self.df_save = self.df_save.withColumn("date_info", F.lit(self.date_info))


if __name__ == '__main__':
    site_name = sys.argv[1]
    date_type = sys.argv[2]
    date_info = sys.argv[3]
    handle_obj = DwtBulkMarketPg(site_name=site_name, date_type=date_type, date_info=date_info)
    handle_obj.run()
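
# Usage sketch (an assumption: run() comes from the Templates base class and is
# presumed to chain read_data() -> handle_data() -> a save step that writes
# df_save to the db_save table, partitioned by partitions_by; the script name
# below is hypothetical and assumed to match db_save):
#
#   spark-submit dwt_bulk_market_pg.py us week 2023-44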