# Import {site_name}_asin_image from PostgreSQL into the Hive ODS table
# ods_asin_image, partitioned by site_name / date_type / date_info.
import os
import sys

sys.path.append(os.path.dirname(sys.path[0]))
from utils.ssh_util import SSHUtil
from utils.common_util import CommonUtil
from utils.hdfs_utils import HdfsUtils

if __name__ == '__main__':
    # Required positional arguments: site_name, date_type, date_info
    site_name = CommonUtil.get_sys_arg(1, None)
    date_type = CommonUtil.get_sys_arg(2, None)
    date_info = CommonUtil.get_sys_arg(3, None)
    assert site_name is not None, "site_name must not be empty!"
    assert date_type is not None, "date_type must not be empty!"
    assert date_info is not None, "date_info must not be empty!"

    hive_tb = "ods_asin_image"
    # The sentinel date '0000-00' routes the import to the PostgreSQL cluster;
    # all other dates read from the postgresql_14 instance.
    db_type = "postgresql_cluster" if date_info == '0000-00' else "postgresql_14"
    partition_dict = {
        "site_name": site_name,
        "date_type": date_type,
        "date_info": date_info
    }
    hdfs_path = CommonUtil.build_hdfs_path(hive_tb, partition_dict=partition_dict)

    import_tb = f"{site_name}_asin_image"
    # \$CONDITIONS is left for Sqoop to substitute split predicates at runtime;
    # the backslash keeps the shell from expanding it first.
    query = f"""
    select asin,
           img_url,
           img_order_by,
           created_at,
           updated_at,
           data_type,
           mapped_asin
    from {import_tb}
    where 1 = 1
      and \$CONDITIONS
"""
    # Verify the source schema matches the Hive table before importing;
    # empty_flag signals the source table has no rows for this partition.
    empty_flag, check_flag = CommonUtil.check_schema_before_import(db_type=db_type,
                                                                   site_name=site_name,
                                                                   query=query,
                                                                   hive_tb_name=hive_tb,
                                                                   msg_usr=['fangxingjun'])
    assert check_flag, f"Schema check failed for Hive table {hive_tb}! Please check whether the query is wrong!"

    if not empty_flag:
        # Build the import shell command, splitting across 50 mappers
        # keyed on mapped_asin.
        sh = CommonUtil.build_import_sh(site_name=site_name,
                                        db_type=db_type,
                                        query=query,
                                        hdfs_path=hdfs_path,
                                        map_num=50,
                                        key='mapped_asin')
        # Delete any existing data at the HDFS path before importing
        HdfsUtils.delete_hdfs_file(hdfs_path)
        client = SSHUtil.get_ssh_client()
        SSHUtil.exec_command_async(client, sh, ignore_err=False)
        # Register the freshly imported partition with the Hive table
        CommonUtil.after_import(hdfs_path=hdfs_path, hive_tb=hive_tb)
        client.close()