# dwd_asin_variat.py
"""
author: 汪瑞(wangrui)
description: 基于ods_asin_variat进行数据清洗
table_read_name: ods_asin_variat
table_save_name: dwd_asin_variat
table_save_level: dwd
version: 3.0
created_date: 2023-01-03
updated_date: 2023-01-03
"""

import os
import sys

from pyspark.storagelevel import StorageLevel

sys.path.append(os.path.dirname(sys.path[0]))  # 上级目录
from utils.templates import Templates
# from ..utils.templates import Templates
#from AmazonSpider.pyspark_job.utils.templates import Templates
# 分组排序的udf窗口函数
from pyspark.sql.window import Window
from pyspark.sql import functions as F
from pyspark.sql.types import StringType, IntegerType


class DwdStAsinInfo(Templates):
    """Clean ASIN variation data for one site.

    Reads variation rows, deduplicates so each asin keeps only its most
    recently updated row, and prepares the result for saving into the
    ``dwd_asin_variat`` table partitioned by ``site_name``.

    NOTE(review): the module header says the source table is
    ``ods_asin_variat``, but ``read_data`` actually queries
    ``dim_asin_variation_info`` — confirm which is intended.
    """

    def __init__(self, site_name="us"):
        super().__init__()
        self.site_name = site_name
        # Target table name (dwd layer). Plain string — no interpolation needed.
        self.db_save = "dwd_asin_variat"
        self.spark = self.create_spark_object(app_name=f"{self.db_save} {self.site_name}, {self.date_info}")
        self.df_date = self.get_year_week_tuple()
        # Placeholder DataFrames; real content is assigned in read_data()/handle_data().
        self.df_save = self.spark.sql("select 1+1;")
        self.df_asin_variat = self.spark.sql("select 1+1;")
        self.week_counts = 1 if self.date_type == 'week' else len(self.year_week_tuple)
        self.partitions_by = ['site_name']
        # Output partition count scales with the size of the date range;
        # 20 is the fallback for any date_type not listed below.
        self.reset_partitions(20)
        if self.date_type in ["week"]:
            self.reset_partitions(100)
        elif self.date_type in ["month", "4_week"]:
            self.reset_partitions(350)
        elif self.date_type in ["quarter"]:
            self.reset_partitions(600)

    def read_data(self):
        """Load variation rows for this site into ``self.df_asin_variat`` (cached)."""
        print("1.1 读取ods_asin_variat表")
        sql = f"select " \
              f"asin, " \
              f"parent_asin, " \
              f"color as asin_color, " \
              f"size as asin_size, " \
              f"style as asin_style, " \
              f"created_time, " \
              f"updated_time " \
              f"from dim_asin_variation_info where site_name='{self.site_name}';"
        self.df_asin_variat = self.spark.sql(sqlQuery=sql).cache()
        self.df_asin_variat.show(10, truncate=False)

    def handle_variat(self):
        """Deduplicate: keep, per asin, the single row with the latest updated_time.

        Rows with NULL updated_time sort last, so a dated row always wins
        over an undated one.
        """
        self.df_asin_variat = self.df_asin_variat.select(
            "asin", "parent_asin", "asin_color", "asin_size", "asin_style",
            "created_time", "updated_time"
        ).dropDuplicates()
        variat_window = Window.partitionBy(["asin"]).orderBy(
            self.df_asin_variat.updated_time.desc_nulls_last()
        )
        self.df_asin_variat = self.df_asin_variat.withColumn(
            "variat_rank", F.row_number().over(window=variat_window)
        )
        # rank 1 == most recent row for each asin
        self.df_asin_variat = self.df_asin_variat.filter("variat_rank=1")
        self.df_save = self.df_asin_variat.drop("variat_rank")
        self.df_save.show(10, truncate=False)

    def handle_data(self):
        """Build the final frame: dedupe, then stamp the site_name partition column."""
        self.handle_variat()
        self.df_save = self.df_save.withColumn("site_name", F.lit(self.site_name))



if __name__ == '__main__':
    # CLI: python dwd_asin_variat.py <site_name>
    if len(sys.argv) < 2:
        # Fail with a clear usage message instead of a bare IndexError.
        sys.exit("usage: dwd_asin_variat.py <site_name>")
    site_name = sys.argv[1]  # arg 1: site, e.g. "us"
    handle_obj = DwdStAsinInfo(site_name=site_name)
    handle_obj.run()