# test_read_hive.py — ad-hoc smoke test for reading a Hive table through Spark.
import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))

from utils.common_util import CommonUtil
from utils.spark_util import SparkUtil

if __name__ == '__main__':
    # Smoke test: count rows of big_data_selection.dwd_bsr_asin_rank for one
    # partition, to verify Hive connectivity through SparkUtil.
    #
    # CLI args (all optional) select the partition. The defaults are the
    # values the query previously hard-coded, so running with no arguments
    # (or with matching ones) behaves exactly as before; previously any
    # argv values were read but silently ignored by the query.
    site_name = CommonUtil.get_sys_arg(1, 'us')
    date_type = CommonUtil.get_sys_arg(2, 'last30day')
    date_info = CommonUtil.get_sys_arg(3, '2023-02-27')

    print(site_name)
    print(date_type)
    print(date_info)

    spark = SparkUtil.get_spark_session("test")

    # NOTE(review): string-built SQL is acceptable here because the values
    # come from the operator's own command line, not untrusted input.
    sql = f"""
        select count(asin)
        from big_data_selection.dwd_bsr_asin_rank dbar
        where 1 = 1
          and site_name = '{site_name}'
          and date_type = '{date_type}'
          and date_info = '{date_info}'
    """

    df = spark.sql(sql)

    # Print the single-row count to stdout so the operator sees the result.
    df.show()

    print("success")