dim_asin_buy_data.py
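"""
Build the dim_asin_buy_data dimension table: read each asin's buy_data from
ods_other_search_term_data (date_type = 'month'), keep the latest record per
asin by updated_time, and rewrite the site_name partition in Hive.
"""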
import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))
from utils.common_util import CommonUtil, DateTypes
from utils.hdfs_utils import HdfsUtils
from utils.spark_util import SparkUtil
from pyspark.sql.window import Window
from pyspark.sql import functions as F


class DimAsinBuyData(object):

    def __init__(self, site_name):
        self.site_name = site_name
        app_name = f"{self.__class__.__name__}:{site_name}"
        self.spark = SparkUtil.get_spark_session(app_name)
        self.hive_table = "dim_asin_buy_data"
        self.hdfs_path = f"/home/{SparkUtil.DEF_USE_DB}/dim/{self.hive_table}/site_name={self.site_name}"
        self.partitions_num = CommonUtil.reset_partitions(site_name, 10)
        # Placeholder DataFrames, assigned real values in run()
        self.df_asin_buy_data = self.spark.sql("select 1+1")
        self.df_save = self.spark.sql("select 1+1")

    def run(self):
        # Read each asin's buy_data from ods_other_search_term_data
        sql = f"""
        select
            asin, buy_data, updated_time
        from ods_other_search_term_data
        where site_name = '{self.site_name}'
          and date_type = 'month'
        """
        print(sql)
        self.df_asin_buy_data = self.spark.sql(sqlQuery=sql).cache()

        # Deduplicate: keep only the latest record (by updated_time) per asin
        window = Window.partitionBy(self.df_asin_buy_data.asin).orderBy(
            self.df_asin_buy_data.updated_time.desc_nulls_last()
        )
        self.df_asin_buy_data = self.df_asin_buy_data.withColumn("rk", F.row_number().over(window=window))
        self.df_asin_buy_data = self.df_asin_buy_data.filter("rk=1")

        # Add the remaining output fields
        self.df_save = self.df_asin_buy_data.select(
            F.col('asin'),
            F.col('buy_data'),
            F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:ss').alias('created_time'),
            F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:ss').alias('updated_time'),
            F.lit(self.site_name).alias("site_name")
        )

        self.df_save = self.df_save.repartition(self.partitions_num)
        partition_by = ["site_name"]
        print(f"清除hdfs目录中.....{self.hdfs_path}")
        HdfsUtils.delete_file_in_folder(self.hdfs_path)
        print(f"当前存储的表名为:{self.hive_table},分区为{partition_by}")
        self.df_save.write.saveAsTable(name=self.hive_table, format='hive', mode='append', partitionBy=partition_by)
        print("success")


if __name__ == '__main__':
    site_name = CommonUtil.get_sys_arg(1, None)
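    # Fail fast if the required CLI argument is missing (assumed convention:
    # site_name is a marketplace code such as "us")
    assert site_name is not None, "site_name is required, e.g. us"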
    obj = DimAsinBuyData(site_name)
    obj.run()
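
# Usage sketch (assumed invocation; cluster-specific spark-submit options omitted):
#   spark-submit dim_asin_buy_data.py us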