import os
import sys

os.environ["PYARROW_IGNORE_TIMEZONE"] = "1"
sys.path.append(os.path.dirname(sys.path[0]))  # add the parent directory so utils can be imported

from utils.templates import Templates


class DimAsinImgInfo(Templates):
    """Build dim_asin_img_info: join ASIN image rows from dim_asin_stable_info against
    dim_cal_asin_truncate and tag each row with a state flag
    (3 = image already present in the truncate table, 1 = not yet present)."""

    def __init__(self, site_name='us'):
        super().__init__()
        self.site_name = site_name
        self.db_save = 'dim_asin_img_info'
        self.spark = self.create_spark_object(app_name=f"{self.db_save}: {self.site_name}")
        # Placeholder DataFrames; populated in read_data() / handle_data()
        self.df_asin_img = self.spark.sql("select 1+1;")
        self.df_asin_truncate = self.spark.sql("select 1+1;")
        self.df_save = self.spark.sql("select 1+1;")
        self.partitions_by = ['site_name']
        self.reset_partitions(100)

    def read_data(self):
        # ASIN image info for the current site; rows without an image URL are dropped
        sql = f"select asin, asin_img_url, asin_img_path, asin_trun_1, asin_trun_2, asin_trun_3, asin_trun_4, " \
              f"asin_trun_5, asin_trun_6, asin_trun_7, asin_trun_8, asin_trun_9, date_info_img_url as date_info, " \
              f"site_name from dim_asin_stable_info where site_name='{self.site_name}';"
        print("sql:", sql)
        self.df_asin_img = self.spark.sql(sql).cache()
        self.df_asin_img.show(10, truncate=False)
        self.df_asin_img = self.df_asin_img.filter("asin_img_url is not null")

        # ASINs whose image already exists in the truncate table; mark them with state = 3
        sql = f"select asin, asin_img_url, 3 as state " \
              f"from dim_cal_asin_truncate where site_name='{self.site_name}' and asin_img_url is not null;"
        print("sql:", sql)
        self.df_asin_truncate = self.spark.sql(sql).cache()
        self.df_asin_truncate.show(10, truncate=False)
        print(self.df_asin_truncate.count())

    def handle_data(self):
        # Left join keeps every image row: rows matched in dim_cal_asin_truncate
        # carry state = 3; unmatched rows get the default state = 1
        self.df_save = self.df_asin_img.join(
            self.df_asin_truncate, on=['asin', 'asin_img_url'], how='left'
        )
        self.df_save = self.df_save.fillna({'state': 1})


if __name__ == '__main__':
    site_name = sys.argv[1]  # arg 1: site name, e.g. 'us'
    handle_obj = DimAsinImgInfo(site_name=site_name)
    handle_obj.run()  # run() is provided by the Templates base class
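
# ------------------------------------------------------------------
# For reference, a minimal sketch of the Templates contract this job
# relies on, inferred from its usage above. This is an assumption for
# illustration only; the repo's real utils/templates.py is authoritative,
# and the method bodies below (especially save_data) are hypothetical:
#
#   from pyspark.sql import SparkSession
#
#   class Templates:
#       def __init__(self):
#           self.db_save = None          # target table name
#           self.partitions_by = []      # partition columns for the write
#
#       def create_spark_object(self, app_name):
#           return SparkSession.builder.appName(app_name).enableHiveSupport().getOrCreate()
#
#       def reset_partitions(self, n):
#           self.partitions_num = n      # shuffle/output partition count
#
#       def run(self):
#           self.read_data()
#           self.handle_data()
#           self.save_data()             # presumably writes self.df_save
#                                        # partitioned by self.partitions_by
# ------------------------------------------------------------------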