#!/bin/bash
# Export ADS-layer ticket tables from HDFS to MySQL via DataX.
# Usage: <script> <table_name|all> [date]
#   date defaults to yesterday (the usual T+1 batch date).

# Root of the DataX installation; datax.py lives under $DATAX_HOME/bin.
# NOTE(review): in the original file this assignment was fused onto the
# shebang line, so DATAX_HOME was never set — split onto its own line.
DATAX_HOME=/opt/module/datax
# Require at least the table-name argument; abort with a non-zero
# status so callers (cron, Azkaban/DolphinScheduler) see the failure.
if [ $# -lt 1 ]; then
  echo "必须传入all/表名..."
  exit 1
fi

# Business date: use the second argument when given, otherwise
# default to yesterday.
if [ -n "$2" ]; then
  datestr=$2
else
  datestr=$(date -d '-1 day' +%F)
fi
#######################################
# Remove zero-length files under an HDFS directory so DataX does not
# choke on (or pointlessly read) empty part files before an export.
# Arguments: $1 - HDFS directory to scan recursively
#######################################
handle_export_path() {
  # NOTE(review): awk '{print $8}' parses ls output, so paths that
  # contain whitespace would be mangled — acceptable for warehouse
  # partition paths, but worth confirming.
  for i in $(hadoop fs -ls -R "$1" | awk '{print $8}'); do
    # 'hadoop fs -test -z' exits 0 when the file has zero length.
    if hadoop fs -test -z "$i"; then
      echo "$i 文件大小为0,正在删除"
      hadoop fs -rm -r -f "$i"
    fi
  done
}
#######################################
# Export one (or more) ADS tables: clear the target MySQL rows for the
# business date, drop empty HDFS files, then run the DataX job.
# Globals:   DATAX_HOME (read), datestr (read)
# Arguments: $1   - DataX job config (json)
#            $2   - HDFS export directory
#            $3.. - MySQL table name(s) to clear for $datestr
#######################################
export_data() {
  local datax_config=$1
  local export_dir=$2
  local tableNames=("${@:3}")

  # Make the export idempotent: delete any rows already loaded for
  # this business date before re-exporting.
  # NOTE(review): the MySQL password is hardcoded on the command line
  # (leaks via ps/shell history); consider --login-path or ~/.my.cnf.
  for table in "${tableNames[@]}"; do
    ssh hadoop102 "mysql -uroot -pmivbAs7Awc -e \"use ticket; delete from ${table} where end_date = '${datestr}'\""
  done

  handle_export_path "$export_dir"
  python "$DATAX_HOME/bin/datax.py" -p"-Dexportdir=$export_dir" "$datax_config"
}

# Dispatch on the requested table name (or "all" for every table).
case "$1" in
  "ads_ticket_respond_statistics")
    export_data "/opt/module/datax/job/export/ticket/ticket.ads_ticket_respond_statistics.json" "/warehouse/ticket/ads/ads_ticket_respond_statistics/dt=${datestr}" "ads_ticket_respond_statistics"
    ;;
  "ads_ticket_subpar_statistics")
    export_data "/opt/module/datax/job/export/ticket/ticket.ads_ticket_subpar_statistics.json" "/warehouse/ticket/ads/ads_ticket_subpar_statistics/dt=${datestr}" "ads_ticket_subpar_statistics"
    ;;
  "all")
    export_data "/opt/module/datax/job/export/ticket/ticket.ads_ticket_respond_statistics.json" "/warehouse/ticket/ads/ads_ticket_respond_statistics/dt=${datestr}" "ads_ticket_respond_statistics"
    export_data "/opt/module/datax/job/export/ticket/ticket.ads_ticket_subpar_statistics.json" "/warehouse/ticket/ads/ads_ticket_subpar_statistics/dt=${datestr}" "ads_ticket_subpar_statistics"
    ;;
esac