Building Automated Data Disaster-Recovery Backups with Qiniu
Data backup has always been a headache. I have tried quite a few backup tools, and there are now services like 多备份 that handle it for you, but they still fall short in places, with traffic limits and quotas that are too small. Today we will walk through writing a script that uses Qiniu for automated disaster-recovery backups. I chose Python here; I had considered PHP, but for backing up large files PHP felt underpowered.
- First, install the Qiniu SDK:
pip install qiniu
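The access_key and secret_key used in the script below come from the key management page of the Qiniu console. As a quick sanity check that the SDK is installed and importable, you can sign an upload token in an interactive session; this is only a local exercise of the SDK and does not verify the keys against Qiniu, and the key strings and bucket name here are placeholders:
from qiniu import Auth

# placeholders: paste the AK/SK from your Qiniu console and use your own bucket name
q = Auth('your-access-key', 'your-secret-key')
print q.upload_token('your-bucket')  # prints a signed upload token if the SDK is working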
- Then write the following Python script:
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import sys
from time import strftime, gmtime
import tarfile
from qiniu import Auth
from qiniu import put_file
import qiniu.config
import smtplib
from email.MIMEText import MIMEText
from email.MIMEMultipart import MIMEMultipart

# Qiniu credentials and target bucket
access_key = ''
secret_key = ''
bucket_name = ''

# MySQL connection settings
db_host = ''
db_user = ''
db_password = ''
db_name = ''
db_charset = 'utf8'

# Backup file name and paths (timestamps use UTC via gmtime)
file_name = 'api' + strftime("%Y-%m-%d", gmtime())
back_dir = '/home/backup/'
web_root = '/home/api/'
tar_file = back_dir.rstrip('/') + '/' + file_name + '.tar.gz'
db_file = back_dir.rstrip('/') + '/' + 'db.sql'

def log(status='Success', content=''):
    print '%s [%s] %s' % (strftime("%Y/%m/%d %H:%M:%S", gmtime()), status, content)

# Pack the web root into a gzipped tarball
tar = tarfile.open(tar_file, 'w|gz')
for root, dirs, files in os.walk(web_root):
    for file in files:
        tar.add(os.path.join(root, file))

# Dump the database (note the flag is --default-character-set) and add it to the tarball
sql_dump = os.system("mysqldump -h%s -u%s -p%s %s --default-character-set=%s > %s" \
                     % (db_host, db_user, db_password, db_name, db_charset, db_file))
if sql_dump == 0:
    tar.add(db_file)
else:
    log('Error', 'db dump fail')
tar.close()

if not os.path.exists(tar_file):
    log('Error', 'tar fail')
    sys.exit()

def send_Email(json_result):
    # email recipients
    mail_receiver = ['']
    # fill in host, user and pwd for your mail provider
    mail_host = ''
    mail_user = ''
    mail_pwd = ''
    mail_to = ','.join(mail_receiver)
    msg = MIMEMultipart()
    message = json_result
    body = MIMEText(message, _subtype='html', _charset='utf-8')
    msg.attach(body)
    msg['To'] = mail_to
    msg['From'] = mail_user
    msg['Subject'] = 'API backup notification'
    try:
        s = smtplib.SMTP()
        s.connect(mail_host)
        s.login(mail_user, mail_pwd)
        s.sendmail(mail_user, mail_receiver, msg.as_string())
        s.close()
        print 'success'
    except Exception as e:
        print e

# Upload the tarball to Qiniu
q = Auth(access_key, secret_key)
mime_type = "application/x-gzip"
params = {'x:a': 'a'}
localfile = tar_file
key = file_name + '.tar.gz'
token = q.upload_token(bucket_name, key)
progress_handler = lambda progress, total: progress
ret, info = put_file(token, key, localfile, params, mime_type, progress_handler=progress_handler)
print(info)

# put_file returns ret=None when the upload fails, so guard before indexing
if ret is not None and ret['key'] == key:
    print 'upload succeeded'
    send_Email('Backup succeeded. Backup file: ' + tar_file)
else:
    print 'upload failed'
    send_Email('Backup failed. Backup file: ' + tar_file)

# Clean up local files (the SQL dump may not exist if mysqldump failed)
if os.path.exists(db_file):
    os.remove(db_file)
os.remove(tar_file)
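Before handing the script over to cron, it may be worth running it once by hand and confirming that the tarball shows up in your bucket and the notification mail arrives, for example (the path matches the crontab entry below):
python /home/backup/apibackup.py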
- Finally, set up a crontab entry so the backup runs on a schedule:
crontab -e
30 2 * * * /usr/bin/python /home/backup/apibackup.py
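Since the script reports its status with print, cron will otherwise mail or discard that output. A variant of the same entry that appends it to a log file instead (the log path here is just an example) would be:
30 2 * * * /usr/bin/python /home/backup/apibackup.py >> /home/backup/backup.log 2>&1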