#!/usr/bin/env bash
# author: 方星钧(ffman)
# describe: export Hive table dwt_aba_4_week --> MySQL {site}_last_4_week_aba_copy1
# params: $1: site_name ("all" expands to every supported site)
# version: 1.0
# create_date: 2022-5-20
# update_date: 2022-5-20

# Fail fast: abort on command failure, unset variables, or pipeline errors.
set -euo pipefail

# Path to the sqoop executable used for every export job.
sqoop=/opt/module/sqoop/bin/sqoop

# Resolve the list of sites to export from the first argument.
# "all" expands to every supported marketplace; any other non-empty value
# is treated as a single site code; a missing argument yields an empty
# list (matching the original behavior of exporting nothing).
if [[ "${1:-}" == "all" ]]; then
    site_name_array=(us uk de es fr it)
elif [[ -n "${1:-}" ]]; then
    site_name_array=("$1")
else
    site_name_array=()
fi
echo "site_name_array: ${site_name_array[*]:-}"




#######################################
# Export one site's Hive partition of dwt_aba_4_week into its MySQL table.
# Globals (read): sqoop, db, mysql_table, hive_table, site_name, cols
# Outputs:        launches a sqoop export job on the "spark" queue
# Returns:        sqoop's exit status
#######################################
export_data () {
        # SECURITY NOTE(review): DB credentials are hardcoded and visible in
        # `ps` output / logs; prefer --password-file or an env var. Left
        # unchanged here to preserve behavior.
        "$sqoop" export -D mapred.job.queue.name=spark \
        --connect "jdbc:mysql://rm-wz9yg9bsb2zf01ea4yo.mysql.rds.aliyuncs.com:3306/${db}?useUnicode=true&characterEncoding=utf-8" \
        --username adv_yswg \
        --password HmRCMUjt03M33Lze \
        --table "${mysql_table}" \
        --input-fields-terminated-by '\001' \
        --hcatalog-database big_data_selection \
        --hcatalog-table "${hive_table}" \
        --hcatalog-partition-keys site_name \
        --hcatalog-partition-values "${site_name}" \
        --input-null-string '\\N' \
        --input-null-non-string '\\N' \
        --columns "${cols}" \
        --num-mappers 3
}



# Drive one sqoop export per requested site.
for site_name in "${site_name_array[@]}"
        do
                # The US marketplace lives in the base "selection" database;
                # every other site uses a suffixed database name.
                if [[ "$site_name" == "us" ]]; then
                        db=selection
                else
                        db="selection_${site_name}"
                fi
                echo "db: ${db}"
                # Per-site export parameters consumed by export_data.
                mysql_table="${site_name}_last_4_week_aba_copy1"
                hive_table=dwt_aba_4_week
                cols="search_term,st_brand_id,st_rank,st_quantity_being_sold,st_search_num,st_is_first_text,st_ao_val,st_asin_orders_sum,st_asin1,st_click_share1,st_conversion_share1,st_asin2,st_click_share2,st_conversion_share2,st_asin3,st_click_share3,st_conversion_share3,week,year"
                echo "当前导出的hive表:${hive_table}, mysql表: ${mysql_table}"
                export_data
        done