"MySQL uses xtrabackup for full incremental backup" mysql_backup.sh


"MySQL uses xtrabackup for full incremental backup" mysql_backup.sh

#!/bin/bash
#
# 2014/12/16
# v5.1
# incremental backup, with timestamp, compressed, with master & slave support.
#####################

s_port="$1"
s_action="$2"
s_slave="$3"

#####   configuration parameters   #######
f_my_cnf="/data/svr/mysql/my.cnf.${s_port}"
d_bak_base="/data/backup/mysql/${s_port}"
s_copies=6
s_password="xxx"

#########################  usage
function usage() {
    cat <<EOF
[-] Usage: $0 port [full|incr|full_tar|full_stream] [slave]
    $0 3306 full &          full backup, uncompressed; compresses the previous day's backup when done;
    $0 3306 incr &          incremental backup, uncompressed; compresses the previous day's backup when done;
    $0 3306 full_tar &      full backup, uncompressed; compresses this backup when done;
    $0 3306 full_stream &   full backup, compressed while it runs, stream=tar mode;
[-] for a slave instance:
    $0 3306 full slave &
    $0 3306 incr slave &
    $0 3306 full_tar slave &
    $0 3306 full_stream slave &
[-] configuration for crontab:
    +------------------------------+
    # [mysql]
    0 0 * * 6   $0 3306 full &
    0 0 * * 0-5 $0 3306 incr &
    +------------------------------+
EOF
    exit 2
}

######################
test "${s_port}" -gt 0
if [ $? -gt 0 ]; then
    echo "[+] tips: port not specified or wrong;"
    usage
    exit 3
fi

if [ -z "${s_slave}" ]; then
    s_args="--defaults-file=${f_my_cnf} --host=127.0.0.1 --port=${s_port} --user=root --password=${s_password}"
else
    s_args="--defaults-file=${f_my_cnf} --host=127.0.0.1 --port=${s_port} --user=root --password=${s_password} --slave-info --safe-slave-backup"
fi

d_bak_gz="${d_bak_base}/gz"
d_bak_tmp="${d_bak_base}/tmp"
d_bak_log="${d_bak_base}/log"
d_bak_full="${d_bak_base}/full"
f_bak_stream="${d_bak_full}/mysql-stream-$(date +%F).gz"

[ -d ${d_bak_gz} ]   || mkdir -p ${d_bak_gz}
[ -d ${d_bak_tmp} ]  || mkdir -p ${d_bak_tmp}
[ -d ${d_bak_log} ]  || mkdir -p ${d_bak_log}
[ -d ${d_bak_full} ] || mkdir -p ${d_bak_full}

######################  clean up old backups
function do_cleanup() {
    echo "[-] `date` delete old files over ${s_copies} days ..."
    find ${d_bak_gz}   -type f -name "*.gz"  -mtime +${s_copies} -print
    find ${d_bak_gz}   -type f -name "*.gz"  -mtime +${s_copies} -delete
    find ${d_bak_full} -type f -name "*.gz"  -mtime +${s_copies} -print
    find ${d_bak_full} -type f -name "*.gz"  -mtime +${s_copies} -delete
    find ${d_bak_log}  -type f -name "*.log" -mtime +${s_copies} -print
    find ${d_bak_log}  -type f -name "*.log" -mtime +${s_copies} -delete
    echo "[-] `date` done."
}

######################  compress the directories older than today, then clean up old backups
function do_tar_yesterday() {
    local s_yesterday=$(date -d "1 day ago" +%F)
    echo "[-] `date` waiting for file compression process:"
    cd ${d_bak_tmp}
    ls | grep -v "`date +%F`" | xargs -i tar zcf "{}.tar.gz" {} --remove-files
    echo "[-] `date` move: `ls *.gz` to: ${d_bak_gz}"
    mv *.gz ${d_bak_gz}
    do_cleanup
}

######################  compress the specified directory
function do_tar() {
    echo "[-] `date` waiting for file compression process:"
    cd ${d_bak_full}
    ls | grep -v ".gz" | xargs -i tar zcf "{}.tar.gz" {} --remove-files
    do_cleanup
}

######################  full backup, uncompressed; compresses the previous day's backup when done
function do_full() {
    echo "[+] `date` +------------------------Start--------------------+"
    innobackupex ${s_args} ${d_bak_tmp}
    echo "[-] `date` +------------------------cleanup------------------+"
    do_tar_yesterday
    echo "[-] `date` +------------------------the end------------------+"
}

######################  incremental backup, uncompressed; compresses the previous day's backup when done
function do_increment() {
    echo "[+] `date` +------------------------Start--------------------+"
    innobackupex --incremental ${s_args} ${d_bak_tmp}
    echo "[-] `date` +------------------------cleanup------------------+"
    do_tar_yesterday
    echo "[-] `date` +------------------------the end------------------+"
}

######################  full backup, uncompressed; compresses this backup when done
function do_full_tar() {
    echo "[+] `date` +------------------------Start--------------------+"
    innobackupex ${s_args} ${d_bak_full}
    echo "[-] `date` +------------------------cleanup------------------+"
    do_tar
    echo "[-] `date` +------------------------the end------------------+"
}

######################  full backup, compressed while it runs, stream=tar mode
function do_full_stream() {
    echo "[+] `date` +------------------------Start--------------------+"
    innobackupex --stream=tar ${s_args} ${d_bak_full} | gzip > ${f_bak_stream}
    echo "[-] `date` +------------------------cleanup------------------+"
    do_cleanup
    echo "[-] `date` +------------------------the end------------------+"
}

######################
case ${s_action} in
    full)
        do_full > "${d_bak_log}/$(date +%F_%H-%M-%S).log" 2>&1
        ;;
    incr)
        do_increment > "${d_bak_log}/$(date +%F_%H-%M-%S).log" 2>&1
        ;;
    full_tar)
        do_full_tar > "${d_bak_log}/$(date +%F_%H-%M-%S).log" 2>&1
        ;;
    full_stream)
        do_full_stream > "${d_bak_log}/$(date +%F_%H-%M-%S).log" 2>&1
        ;;
    *)
        usage
        ;;
esac


"MySQL uses xtrabackup for full incremental backup" mysql_backup.sh
