"""Smoke-test script: count ASINs in dwd_bsr_asin_rank for one partition.

Usage: script.py [site_name] [date_type] [date_info]
Defaults reproduce the original hard-coded query (us / last30day / 2023-02-27).
"""
import os
import sys

# Make the sibling `utils` package importable when run directly as a script.
sys.path.append(os.path.dirname(sys.path[0]))

from utils.common_util import CommonUtil
from utils.spark_util import SparkUtil

if __name__ == '__main__':
    # Optional CLI args; defaults preserve the original hard-coded partition,
    # so running with no arguments behaves exactly as before.
    site_name = CommonUtil.get_sys_arg(1, "us")
    date_type = CommonUtil.get_sys_arg(2, "last30day")
    date_info = CommonUtil.get_sys_arg(3, "2023-02-27")
    print(site_name)
    print(date_type)
    print(date_info)

    spark = SparkUtil.get_spark_session("test")
    # NOTE(review): values are interpolated directly into the SQL string.
    # Acceptable for a trusted internal test script, but switch to bound
    # parameters if these inputs ever come from an untrusted source.
    sql = f"""
    select count(asin)
    from big_data_selection.dwd_bsr_asin_rank dbar
    where 1 = 1
      and site_name = '{site_name}'
      and date_type = '{date_type}'
      and date_info = '{date_info}'
    """
    df = spark.sql(sql)
    df.show()
    print("success")