# dim_asin_amorders_info.py
import os
import sys
import re

sys.path.append(os.path.dirname(sys.path[0]))  # 上级目录
from utils.templates import Templates
# from ..utils.templates import Templates
from pyspark.sql import functions as F
from pyspark.sql.window import Window
from pyspark.sql.types import StructType, StructField, IntegerType, StringType
# 导入udf公共方法
from yswg_utils.common_udf import udf_parse_bs_category
# from ..yswg_utils.common_udf import udf_parse_bs_category
from utils.spark_util import SparkUtil
from utils.hdfs_utils import HdfsUtils


class DimAsinAmordersInfo(Templates):
    """Build the dim_asin_amorders_info dimension table.

    Merges the Amazon monthly-orders string ("bought in past month") for each
    ASIN from two ods sources — the search-term page data
    (ods_other_search_term_data, label 2) and the ASIN detail page data
    (ods_asin_detail, label 1) — keeps the most recent record per ASIN, parses
    the order count into an integer, and writes the result partitioned by
    site_name / date_type / date_info.
    """

    def __init__(self, site_name='us', date_type="month", date_info='2022-1'):
        """
        :param site_name: marketplace code, e.g. us/uk/de
        :param date_type: date granularity: week/4_week/month/month_week/quarter
        :param date_info: date value matching date_type, e.g. '2022-1'
        """
        super().__init__()
        self.site_name = site_name
        self.date_type = date_type
        self.date_info = date_info
        # Target hive table name (also used in the Spark app name and HDFS path).
        self.db_save = 'dim_asin_amorders_info'
        # Initialize the SparkSession object.
        self.spark = self.create_spark_object(
            app_name=f"{self.db_save}: {self.site_name}, {self.date_type}, {self.date_info}")
        self.get_year_week_tuple()
        # Placeholder DataFrames; the real data is loaded in read_data().
        self.df_save = self.spark.sql("select 1+1;")
        self.df_asin_amazon_orders = self.spark.sql("select 1+1;")
        self.df_asin_detail = self.spark.sql("select 1+1;")
        self.partitions_by = ['site_name', 'date_type', 'date_info']
        self.reset_partitions(partitions_num=10)
        # Register the parsing UDF so it is usable from both SQL and the DataFrame API.
        self.u_parse_amazon_orders = self.spark.udf.register('u_parse_amazon_orders', self.udf_parse_amazon_orders, IntegerType())
        self.hdfs_path = f"/home/{SparkUtil.DEF_USE_DB}/dim/{self.db_save}/site_name={self.site_name}/date_type={self.date_type}/date_info={self.date_info}"

    def read_data(self):
        """Load the two source tables filtered to the requested site/date window.

        Partition selection:
          - us month, month_week, 4_week, week
          - uk/de month, 4_week, week
        us (and uk/de from 2024-05 on) store month/month_week partitions
        directly; everything else is resolved through the matching set of
        weekly partitions (self.year_week_tuple).
        """
        if self.site_name in ['us', 'uk', 'de']:
            if self.date_type in ['month', 'month_week']:
                if (self.site_name == 'us') or (self.site_name in ['uk', 'de'] and self.date_info >= '2024-05'):
                    params = f"date_type='{self.date_type}' and date_info = '{self.date_info}'"
                else:
                    params = f"date_type='week' and date_info in {self.year_week_tuple}"
            else:
                params = f"date_type='week' and date_info in {self.year_week_tuple}"
        else:
            params = f"date_type='week' and date_info in {self.year_week_tuple}"

        # Label 2: orders string coming from the search-term page.
        sql = f"select asin, buy_data as asin_amazon_orders_str, created_time, 2 as asin_amazon_orders_label " \
              f"from ods_other_search_term_data where site_name='{self.site_name}' and {params} and buy_data is not null;"  # and date_info>='2023-15'
        print(f"1. 读取ods_other_search_term_data表数据: sql -- {sql}")
        self.df_asin_amazon_orders = self.spark.sql(sqlQuery=sql).cache()
        self.df_asin_amazon_orders.show(10, truncate=False)
        # Label 1: orders string coming from the ASIN detail page.
        sql = f"select asin, buy_sales as asin_amazon_orders_str, created_at as created_time, 1 as asin_amazon_orders_label " \
              f"from ods_asin_detail where site_name='{self.site_name}' and {params} and buy_sales is not null;"  # and date_info>='2023-15'
        print(f"1. 读取df_asin_detail表数据: sql -- {sql}")
        self.df_asin_detail = self.spark.sql(sqlQuery=sql).cache()
        self.df_asin_detail.show(10, truncate=False)

    @staticmethod
    def udf_parse_amazon_orders(asin_amazon_orders_str):
        """Parse the monthly-orders field from the ASIN detail page.

        Extracts the leading count from strings like "500+ bought in past
        month" or "10k+ ...", expanding the 'k' suffix (k -> 000, so
        10k -> 10000). Returns None when the string does not contain exactly
        one such match.
        """
        # Raw string so \d and \+ are real regex escapes; k* keeps the original
        # "[k]{0,}" semantics.
        pattern = r"(\d+k*)\+"
        results_list = re.findall(pattern, str(asin_amazon_orders_str).lower())
        if len(results_list) == 1:
            result = int(results_list[0].replace("k", "000").replace(" ", ""))
        else:
            result = None
        return result

    def handle_data(self):
        """Dedupe per ASIN, parse the orders string, and stage partition columns."""
        # Dedupe the search-term page data: keep the newest non-null record per ASIN.
        window = Window.partitionBy(['asin']).orderBy(
            self.df_asin_amazon_orders.asin_amazon_orders_str.desc_nulls_last(),
            self.df_asin_amazon_orders.created_time.desc_nulls_last(),
        )
        self.df_asin_amazon_orders = self.df_asin_amazon_orders.withColumn("rk", F.row_number().over(window=window))
        self.df_asin_amazon_orders = self.df_asin_amazon_orders.filter("rk=1").drop("rk").cache()
        self.df_asin_amazon_orders.show(10, truncate=False)
        # Union both sources (columns may differ, hence allowMissingColumns).
        self.df_save = self.df_asin_detail.unionByName(self.df_asin_amazon_orders, allowMissingColumns=True)

        # Dedupe the combined data again: one row per ASIN across both sources.
        window = Window.partitionBy(['asin']).orderBy(
            self.df_save.asin_amazon_orders_str.desc_nulls_last(),
            self.df_save.created_time.desc_nulls_last(),
        )
        self.df_save = self.df_save.withColumn("rk", F.row_number().over(window=window))
        self.df_save = self.df_save.filter("rk=1").drop("rk").cache()
        # NOTE(review): window dedup was observed to still leave duplicates;
        # dropDuplicates kept here as a fallback option.
        # self.df_save = self.df_save.dropDuplicates(['asin'])

        # Parse the raw orders string into an integer column (done once; the
        # previous duplicated recomputation of this column was removed).
        self.df_save = self.df_save.withColumn('asin_amazon_orders', self.u_parse_amazon_orders('asin_amazon_orders_str'))
        self.df_save = self.df_save.withColumn("site_name", F.lit(self.site_name))
        self.df_save = self.df_save.withColumn("date_type", F.lit(self.date_type))
        self.df_save = self.df_save.withColumn("date_info", F.lit(self.date_info))
        self.df_save.show(10, truncate=False)
        self.df_save.filter("asin_amazon_orders is not null").show(10, truncate=False)
        print(f"清除hdfs目录中.....{self.hdfs_path}")
        HdfsUtils.delete_file_in_folder(self.hdfs_path)
        print(f"当前存储的表名为:{self.db_save},分区为{self.partitions_by}")
        # self.df_save.write.saveAsTable(name=self.db_save, format='hive', mode='append', partitionBy=self.partitions_by)
        # print("success")
        # quit()


if __name__ == '__main__':
    # Command-line arguments, in order:
    arg_site = sys.argv[1]       # 1: site, e.g. us/uk/de
    arg_date_type = sys.argv[2]  # 2: granularity: week/4_week/month/quarter
    arg_date_info = sys.argv[3]  # 3: year-week/year-month/year-quarter, e.g. 2022-1
    DimAsinAmordersInfo(
        site_name=arg_site,
        date_type=arg_date_type,
        date_info=arg_date_info,
    ).run()