HBase incremental and full backup

1. Backup schedule: on Friday run a full backup of the data up to Thursday 23:59:59, on Monday run a full backup of the data up to Sunday 23:59:59, and on the other days run an incremental backup of the previous day's data (00:00:00 - 23:59:59).

01 2 * * 5 /opt/prodfull.sh >> /tmp/full.log 2>&1 &
01 2 * * 1 /opt/prodfull.sh >> /tmp/full.log 2>&1 &
#53 17 16 9 * /opt/demoday.sh > /tmp/day.log 2>&1 &
01 02 * * 2 /opt/prodday.sh >> /tmp/day.log 2>&1 &
01 02 * * 3 /opt/prodday.sh >> /tmp/day.log 2>&1 &
01 02 * * 4 /opt/prodday.sh >> /tmp/day.log 2>&1 &
01 02 * * 7 /opt/prodday.sh >> /tmp/day.log 2>&1 &
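
To put this schedule into effect, the lines above go into the crontab of the user that runs the backups. A minimal sketch, assuming the entries are kept in a file such as /opt/hbase-backup.cron (the file name is only an example):

# install the schedule for the current user (this replaces its existing crontab)
crontab /opt/hbase-backup.cron
# confirm the entries were registered
crontab -l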

2. Incremental backup script

#!/bin/bash
mydate=`date +%A`
yestoday=`date -d "1 day ago" +"%Y-%m-%d" `
#begin time format 2019-09-11 00:00:00, converted to epoch seconds
yestoday_begin_time=`date -d "$yestoday 00:00:00" +%s`
#yestoday_begin_time="1568627040"
echo $yestoday_begin_time
yestoday_end_time=`date -d "$yestoday 23:59:59" +%s`
#yestoday_end_time="1568627520"
echo $yestoday_end_time
function backup02_tables
{
    # check whether the export directory already exists
    /do1cloud/hadoop-3.0.3/bin/hdfs  dfs -ls /do1cloud/real_backup02/${1}_${mydate}  > /dev/null 2>&1
    olddir_check=$?

    # if the old export directory exists, remove it before re-exporting
    if [[ $olddir_check -eq 0 ]] ; then
        echo ${1}_${mydate}
        /do1cloud/hadoop-3.0.3/bin/hdfs  dfs -rm /do1cloud/real_backup02/${1}_$mydate/*
        /do1cloud/hadoop-3.0.3/bin/hdfs  dfs -rmdir /do1cloud/real_backup02/${1}_${mydate}
        # Export args: <tablename> <outputdir> [<versions> [<starttime> [<endtime>]]]; versions=1 keeps only the newest cell version
        /do1cloud/hbase-2.0.5/bin/hbase org.apache.hadoop.hbase.mapreduce.Export $1 /do1cloud/real_backup02/${1}_${mydate} 1 $yestoday_begin_time $yestoday_end_time
    else
        /do1cloud/hbase-2.0.5/bin/hbase org.apache.hadoop.hbase.mapreduce.Export $1 /do1cloud/real_backup02/${1}_${mydate} 1 $yestoday_begin_time $yestoday_end_time
    fi
    # copy the export from HDFS to local disk so it can be archived below
    /do1cloud/hadoop-3.0.3/bin/hdfs dfs -get /do1cloud/real_backup02/${1}_${mydate} /do1cloud/do14export/fortar/${1}_${mydate}_$yestoday_end_time
}

for table in `cat /opt/prodtables.txt`
do
    backup02_tables  $table
done

nowtime=`date +'%Y-%m-%d-%H-%M-%S'`
tar -zcf /do1cloud/do14export/fortar/tables_${nowtime}.tar.gz  /do1cloud/do14export/fortar/*_${mydate}*/
find /do1cloud/do14export/fortar/ -mtime +5 -exec rm -rf {} \;
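
Both the incremental and the full script take the list of tables to export from /opt/prodtables.txt, one HBase table name per line. The names below are placeholders rather than real production tables:

user_profile
order_record
device_event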

3. Full backup script

#!/bin/bash
mydate=`date +%A`
yestoday=`date -d "1 day ago" +"%Y-%m-%d"`
yestoday_begin_time=`date -d "$yestoday 00:00:00" +%s`
echo $yestoday_begin_time
yestoday_end_time=`date -d "$yestoday 23:59:59" +%s`
#yestoday_end_time='1568614500'
echo $yestoday_end_time
function backup_tables
{
    # check whether the export directory already exists
    /do1cloud/hadoop-3.0.3/bin/hdfs  dfs -ls /do1cloud/real_backup/${1}_${mydate}  > /dev/null 2>&1
    olddir_check=$?

    # if the old export directory exists, remove it before re-exporting
    if [[ $olddir_check -eq 0 ]] ; then
        echo ${1}_${mydate}
        /do1cloud/hadoop-3.0.3/bin/hdfs  dfs -rm /do1cloud/real_backup/${1}_$mydate/*
        /do1cloud/hadoop-3.0.3/bin/hdfs  dfs -rmdir /do1cloud/real_backup/${1}_${mydate}
        # Export args: <tablename> <outputdir> [<versions> [<starttime> [<endtime>]]]; export everything up to yesterday 23:59:59
        /do1cloud/hbase-2.0.5/bin/hbase org.apache.hadoop.hbase.mapreduce.Export $1 /do1cloud/real_backup/${1}_${mydate} 1 1 $yestoday_end_time
    else
        /do1cloud/hbase-2.0.5/bin/hbase org.apache.hadoop.hbase.mapreduce.Export $1 /do1cloud/real_backup/${1}_${mydate} 1 1 $yestoday_end_time
    fi
    # copy the export from HDFS to local disk so it can be archived below
    /do1cloud/hadoop-3.0.3/bin/hdfs dfs -get /do1cloud/real_backup/${1}_${mydate} /do1cloud/do14export/fortar/${1}_${mydate}_$yestoday_end_time
}

for table in `cat /opt/prodtables.txt`
do
    backup_tables  $table
done
echo "result: $?"
nowtime=`date +'%Y-%m-%d-%H-%M-%S'`
tar -zcf /do1cloud/do14export/fortar/tables_${nowtime}.tar.gz  /do1cloud/do14export/fortar/*_${mydate}*/
find /do1cloud/do14export/fortar/ -mtime +5 -exec rm -rf {} \;
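
To restore one of these backups, the sequence files written by Export can be loaded back with HBase's companion Import job. A minimal sketch for a single table; the name mytable and the Tuesday suffix are placeholders, and the target table must already exist with the same column families (create it in the hbase shell first if needed):

# load an exported directory back into an existing table
/do1cloud/hbase-2.0.5/bin/hbase org.apache.hadoop.hbase.mapreduce.Import mytable /do1cloud/real_backup/mytable_Tuesday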

 
