import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))
from utils.common_util import CommonUtil, DateTypes
from utils.hdfs_utils import HdfsUtils
from utils.spark_util import SparkUtil
from pyspark.sql.window import Window
from pyspark.sql import functions as F


class DimAsinBuyData(object):
    """Build the ``dim_asin_buy_data`` dimension table.

    Reads asin/buy_data rows from ``ods_other_search_term_data`` for one
    site, keeps only the newest record per ASIN (by ``updated_time``),
    stamps created/updated times, and rewrites the site's partition of the
    Hive table ``dim_asin_buy_data``.
    """

    def __init__(self, site_name):
        # Target site; also used as the Hive partition value.
        self.site_name = site_name
        app_name = f"{self.__class__.__name__}:{site_name}"
        self.spark = SparkUtil.get_spark_session(app_name)
        self.hive_table = "dim_asin_buy_data"
        # HDFS directory backing this site's partition; cleared before write.
        self.hdfs_path = f"/home/{SparkUtil.DEF_USE_DB}/dim/{self.hive_table}/site_name={self.site_name}"
        self.partitions_num = CommonUtil.reset_partitions(site_name, 10)
        # Placeholder DataFrames; the real data is assigned in run().
        self.df_asin_buy_data = self.spark.sql("select 1+1;")
        self.df_save = self.spark.sql("select 1+1;")

    def run(self):
        # Read asin buy data from ods_other_search_term_data (monthly slices).
        sql = f"""
        select 
            asin,buy_data,updated_time 
        from ods_other_search_term_data 
        where site_name = '{self.site_name}' 
        and date_type = 'month';
        """
        print(sql)
        self.df_asin_buy_data = self.spark.sql(sqlQuery=sql).cache()

        # Deduplicate: keep the most recent record per asin
        # (nulls last, so a row with a real updated_time wins).
        window = Window.partitionBy(self.df_asin_buy_data.asin).orderBy(
            self.df_asin_buy_data.updated_time.desc_nulls_last()
        )
        self.df_asin_buy_data = self.df_asin_buy_data.withColumn("rk", F.row_number().over(window=window))
        self.df_asin_buy_data = self.df_asin_buy_data.filter("rk=1")

        # Add audit columns and the partition column.
        # BUG FIX: the pattern previously used 'SS' (fraction-of-second in
        # Spark's datetime patterns); 'ss' is the correct second-of-minute field.
        self.df_save = self.df_asin_buy_data.select(
            F.col('asin'),
            F.col('buy_data'),
            F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:ss').alias('created_time'),
            F.date_format(F.current_timestamp(), 'yyyy-MM-dd HH:mm:ss').alias('updated_time'),
            F.lit(self.site_name).alias("site_name")
        )

        self.df_save = self.df_save.repartition(self.partitions_num)
        partition_by = ["site_name"]
        print(f"清除hdfs目录中.....{self.hdfs_path}")
        # Clear the old partition data on HDFS, then append the fresh rows
        # (append after delete == overwrite of this site's partition).
        HdfsUtils.delete_file_in_folder(self.hdfs_path)
        print(f"当前存储的表名为:{self.hive_table},分区为{partition_by}")
        self.df_save.write.saveAsTable(name=self.hive_table, format='hive', mode='append', partitionBy=partition_by)
        print("success")


if __name__ == '__main__':
    # CLI entry point: first positional argument is the target site name.
    arg_site_name = CommonUtil.get_sys_arg(1, None)
    DimAsinBuyData(arg_site_name).run()