ods_seller_account_feedback.py
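"""
ODS ingestion job for seller account feedback.

Pulls rows from a site-specific PostgreSQL source table into the Hive table
ods_seller_account_feedback (partitioned by site_name / date_type / date_info)
by generating an import shell script and executing it over SSH.
"""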
import os
import sys

# Make the parent directory importable so the utils package resolves
sys.path.append(os.path.dirname(sys.path[0]))
from utils.ssh_util import SSHUtil
from utils.common_util import CommonUtil
from utils.common_util import DateTypes
from utils.hdfs_utils import HdfsUtils


if __name__ == '__main__':
    site_name = CommonUtil.get_sys_arg(1, None)
    date_type = CommonUtil.get_sys_arg(2, None)
    date_info = CommonUtil.get_sys_arg(3, None)
    assert site_name is not None, "site_name must not be empty!"
    assert date_type is not None, "date_type must not be empty!"
    assert date_info is not None, "date_info must not be empty!"
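    # Example invocation (argument values are illustrative):
    #   python ods_seller_account_feedback.py us month 2023-08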

    hive_table = "ods_seller_account_feedback"
    partition_dict = {
        "site_name": site_name,
        "date_type": date_type,
        "date_info": date_info
    }

    # Resolve and validate the HDFS landing path for this partition
    hdfs_path = CommonUtil.build_hdfs_path(hive_table, partition_dict=partition_dict)
    print(f"hdfs_path is {hdfs_path}")

    suffix = str(date_info).replace("-", "_")
    import_table = f"{site_name}_seller_account_feedback_{suffix}"
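    # e.g. site_name "us" with date_info "2023-08" yields the source table
    # name "us_seller_account_feedback_2023_08"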
    # Monthly partitions from 2023-08 onward are served by the postgresql_14
    # instance; earlier data still lives in the legacy postgresql instance.
    if date_type == DateTypes.month.name and date_info >= '2023-08':
        db_type = 'postgresql_14'
    else:
        db_type = 'postgresql'
    print("Connecting to database:", db_type)

    sql_query = f"""
        select 
            seller_id,
            site_name as country_name,
            count_30_day,
            count_1_year,
            count_lifetime,
            num,
            created_at,
            updated_at,
            seller_address
        from {import_table} 
        where 1=1
        and \$CONDITIONS
    """

    # Validate the source schema against the Hive table before importing
    CommonUtil.check_schema_before_import(db_type=db_type,
                                          site_name=site_name,
                                          query=sql_query,
                                          hive_tb_name=hive_table,
                                          msg_usr=['chenyuanjie'])

    # Generate the import shell script
    import_sh = CommonUtil.build_import_sh(site_name=site_name,
                                           db_type=db_type,
                                           query=sql_query,
                                           hdfs_path=hdfs_path)
    # Delete any existing HDFS data before importing
    HdfsUtils.delete_hdfs_file(hdfs_path)
    # Create an SSH client to run the generated import script on the remote host
    client = SSHUtil.get_ssh_client()
    SSHUtil.exec_command_async(client, import_sh, ignore_err=False)
    # Build the LZO index and repair the Hive table metadata
    CommonUtil.after_import(hdfs_path=hdfs_path, hive_tb=hive_table)
    # Close the SSH connection
    client.close()