本文最后更新于 1667 天前,其中的信息可能已经有所发展或是发生改变。
编写原因
丢数据擦屁股太麻烦了
实现功能
- 自动备份网站(default: /var/www)
- 自动备份数据库(default: mysql)
- 自动上传(default: rclone)
前置
- rclone
- mega/onedrive等储存空间
- crontab
代码
#!/bin/bash
# Website + database backup: archives the web root, nginx vhost configs and
# all MySQL databases, encrypts the archive and uploads it with rclone.
#Set configs
fileName="QN_WEB_BACK_$(date +%Y%m%d).zip"     # date suffix is parsed later by delete_before_week
folderPath="/root/backup"                      # local staging folder for finished archives
filePath="$folderPath/$fileName"
remoteFolderPath="TO BE FILLED" #"Backup_Mega:/QN_WEB_BACK/"
originWebsitePath="/var/www/"                  # website files to back up
originConfigPath="/etc/nginx/sites-available/" # fixed typo ("sites-aviliable"); NOTE(review): unused below — backup copies sites-enabled directly
tempFolderName="webBack$(date +%Y%m%d)"
tempFolder="/tmp/$tempFolderName"              # working directory, removed after compression
encryptPass="TO BE FILLED"                     # zip password protecting the uploaded archive
#Set database
dbUser="TO BE FILLED"
dbPass="TO BE FILLED"
dbHost="TO BE FILLED"
#Backup website files
function backup_file(){
    # Copy the website tree and the enabled nginx vhost configs into the
    # temp working folder. -L dereferences the sites-enabled symlinks so the
    # actual config contents are archived, not dangling links.
    echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Backup Files"
    mkdir -p "$tempFolder" "$tempFolder/databases"
    cp -r "$originWebsitePath" "$tempFolder/websites_file"   # was hard-coded /var/www; honor the config variable
    cp -r -L /etc/nginx/sites-enabled "$tempFolder/nginx_conf"
}
#Backup databases
function backup_database(){
    # Dump every MySQL database except the *_schema and sys system databases,
    # one .sql file per database, into the temp working folder.
    echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Backup Databases"
    # Original line lacked the command substitution around the mysql call.
    dbList=$(mysql -u"$dbUser" -p"$dbPass" -h"$dbHost" -Bse "show databases" | grep -v "schema" | grep -v "sys")
    # $dbList is intentionally unquoted: it is a whitespace-separated list.
    for dbName in $dbList
    do
        mysqldump -u"$dbUser" -p"$dbPass" -h"$dbHost" --databases "$dbName" > "$tempFolder/databases/$dbName.sql"
    done
}
#Compress files
function compress_file(){
    # Tar the temp folder, bzip2-compress it (parallel pbzip2 on multi-core
    # hosts), then wrap the result in a password-protected zip. zip -0 stores
    # without re-compressing since the payload is already bzip2'd; -j drops
    # the directory path inside the archive.
    echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Compress"
    bz2File="$folderPath/QN_WEB_BACK_$(date +%Y%m%d).tar"
    processorNum=$(grep -c "processor" /proc/cpuinfo)
    freeMem=$(free | awk '/Mem/ {print $4}')
    freeMem=$((freeMem / 1000))              # KiB -> ~MB, for pbzip2 -m
    if [ "$processorNum" -gt 2 ]; then
        apt install -y pbzip2                # -y: don't block an unattended cron run
        processorNum=$((processorNum - 1))   # leave one core for the system
        tar -C "/tmp" -cvf "$bz2File" "$tempFolderName"
        if [ "$freeMem" -gt 500 ]; then
            pbzip2 "$bz2File" -c -b16 -m"$freeMem" -p"$processorNum" -k > "$bz2File.bz2"
        else
            pbzip2 "$bz2File" -c -p"$processorNum" -k > "$bz2File.bz2"
        fi
        rm -f -- "$bz2File"                  # pbzip2 -k keeps the input tar; original left it behind
        bz2File="$bz2File.bz2"
    else
        bz2File="$bz2File.bz2"
        tar -C "/tmp" -cjf "$bz2File" "$tempFolderName"
    fi
    zip -0 -j -P"$encryptPass" "$filePath" "$bz2File"
    rm -rf -- "$tempFolder" "$bz2File"
}
#Upload file with rclone
function upload_file(){
    # Push today's archive to the rclone remote. Exits non-zero when the
    # folder or archive is missing so a cron wrapper can notice the failure.
    if [ ! -d "$folderPath" ]; then          # -d, not -x: test existence, not search permission
        echo "[ERROR][$(date +%Y-%m-%d' '%H:%M:%S)] $folderPath does not exist."
        mkdir -p "$folderPath"               # create it so the next run can succeed
        exit 1
    fi
    if [ ! -f "$filePath" ]; then
        echo "[ERROR][$(date +%Y-%m-%d' '%H:%M:%S)] $filePath not found."
        exit 1
    else
        echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] $filePath has been found. Start uploading ......"
        rclone copy "$filePath" "$remoteFolderPath"
        echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Uploading end."
    fi
}
#Delete file for more than a week to save my poor space:(
function delete_before_week(){
    # Purge local *.zip archives whose YYYYMMDD file-name suffix is at least
    # 7 days old, and remove the matching copy from the rclone remote.
    local cutoffDate file extension baseName timeStr
    cutoffDate=$(date -d "-7 days" +%Y%m%d)   # GNU date; was misleadingly named beforeAMonth
    if [ ! -d "$folderPath" ]; then
        echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Folder not found. Nothing to do."
    else
        echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Iteration is beginning ......"
        for file in "$folderPath"/*
        do
            if [ -f "$file" ]; then
                extension=${file##*.}
                if [ "zip" = "$extension" ]; then
                    baseName=$(basename "$file" .zip)
                    timeStr=${baseName##*_}   # date suffix after the last '_'
                    if (( timeStr <= cutoffDate )); then
                        rm -f -- "$file"      # single file: -f, not -rf
                        echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Delete file $file."
                        echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] $file has been found. Start removing ......"
                        rclone delete "$remoteFolderPath${file##*/}"
                    fi
                else
                    echo "$file is not a zip file."
                fi
            else
                echo "$file is not a file."
            fi
        done
        echo "[INFO][$(date +%Y-%m-%d' '%H:%M:%S)] Iteration end ......"
    fi
}
# Entry point: run the backup pipeline stages in order.
backup_file
backup_database
compress_file
upload_file
delete_before_week