# dim_asin_image_info.py
import os
import sys
import re

sys.path.append(os.path.dirname(sys.path[0]))  # 上级目录
from utils.templates import Templates
# from ..utils.templates import Templates
from pyspark.sql import functions as F
from pyspark.sql.window import Window
from pyspark.sql.types import StructType, StructField, IntegerType, StringType
from utils.common_util import CommonUtil
from utils.hdfs_utils import HdfsUtils


class DimAsinImageTmp(Templates):
    """Build the ``dim_asin_image_info`` dimension table.

    Reads ``ods_asin_image`` for one site/date partition, deduplicates rows per
    (asin, data_type, img_order_by) keeping the most recent record, and adds a
    numeric ``mapped_asin`` column derived from the ASIN string so downstream
    jobs can bucket by it. Orchestration (run order, save) is inherited from
    ``Templates`` — TODO confirm ``run()`` calls read_data/handle_data/save.
    """

    def __init__(self, site_name='us', date_type="month", date_info='2022-1'):
        """
        :param site_name: Amazon site code, e.g. 'us'.
        :param date_type: partition granularity: week/4_week/month/quarter.
        :param date_info: partition value, e.g. '2022-1'.
        """
        super().__init__()
        self.site_name = site_name
        self.date_type = date_type
        self.date_info = date_info
        # Target table name; also used for the Spark app name and HDFS path.
        self.db_save = 'dim_asin_image_info'
        # Initialize the SparkSession object.
        self.spark = self.create_spark_object(app_name=f"{self.db_save}: {self.site_name}, {self.date_type}, {self.date_info}")
        # Wrap the static helper as a Spark UDF returning IntegerType (the
        # helper's result is < 1e9, so it fits a 32-bit int).
        self.u_asin_to_number = F.udf(self.udf_asin_to_number, IntegerType())
        # Placeholder DataFrame; replaced by read_data().
        self.df_save = self.spark.sql("select 1+1;")
        # Output is partitioned by site only (date partitions intentionally not used).
        self.partitions_by = ['site_name']
        self.partitions_dict = {
            "site_name": site_name
        }
        self.reset_partitions(partitions_num=100)

    @staticmethod
    def udf_asin_to_number(asin):
        """Map a 10-character ASIN (digits and uppercase letters) to an int.

        The ASIN is interpreted as a base-36 number and reduced modulo 1e9,
        giving a value in [0, 999_999_999] — small enough for IntegerType.

        Returns None for a NULL input (Spark passes NULLs through UDFs; raising
        here would abort the whole job). Raises ValueError for a non-null ASIN
        that is not exactly 10 characters long.
        """
        if asin is None:
            # Propagate NULLs instead of crashing on len(None).
            return None
        if len(asin) != 10:
            raise ValueError("ASIN must be 10 characters long")

        def char_to_number(char):
            # '0'-'9' -> 0-9; 'A' -> 10 ... 'Z' -> 35 (assumes uppercase input).
            if char.isdigit():
                return int(char)
            return ord(char) - 55

        base = 36
        asin_number = 0
        for i, char in enumerate(reversed(asin)):
            asin_number += char_to_number(char) * (base ** i)

        # Modulo 1e9 keeps the result within IntegerType's 32-bit range.
        return asin_number % 1000000000

    def read_data(self):
        """Load the source partition and attach the mapped_asin column."""
        sql = f"select * from ods_asin_image where site_name='{self.site_name}' and date_type='{self.date_type}' and date_info in ('0000-00', '{self.date_info}');"
        print(f"sql:  {sql}")
        self.df_save = self.spark.sql(sql).cache()
        self.df_save.show(10)
        self.df_save = self.df_save.withColumn("mapped_asin", self.u_asin_to_number("asin"))

    def handle_data(self):
        """Deduplicate per image key, then clear the target HDFS partition.

        Keeps only the newest record (by created_at) for each
        (asin, data_type, img_order_by) combination.
        """
        window = Window.partitionBy(['asin', 'data_type', 'img_order_by']).orderBy(
            self.df_save.created_at.desc(),
        )
        self.df_save = self.df_save.withColumn(
            "row_number", F.row_number().over(window=window)
        )
        self.df_save = self.df_save.filter("row_number=1").drop("row_number")

        # Remove the existing HDFS partition so the inherited save step writes
        # a clean copy rather than appending duplicates.
        hdfs_path = CommonUtil.build_hdfs_path(self.db_save, partition_dict=self.partitions_dict)
        print(f"当前存储的表名为:{self.db_save},分区为{self.partitions_dict}")
        print(f"清除hdfs目录中.....{hdfs_path}")
        HdfsUtils.delete_file_in_folder(hdfs_path)
        self.df_save.show(10)


if __name__ == '__main__':
    # CLI arguments:
    #   1: site code (e.g. 'us')
    #   2: date type: week/4_week/month/quarter
    #   3: date value: year-week/year-month/year-quarter, e.g. '2022-1'
    arg_site = sys.argv[1]
    arg_type = sys.argv[2]
    arg_info = sys.argv[3]
    DimAsinImageTmp(
        site_name=arg_site,
        date_type=arg_type,
        date_info=arg_info,
    ).run()