Python MySQL backup script
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2023/9/15 15:32
# @File : backup_db.py
# @Author : zk_linux
# @Software: PyCharm
# @Description: Daily mysqldump backup with per-table splitting, zip compression and retention cleanup.
import zipfile
import time
import configparser
import os
import datetime
import logging
import subprocess
import schedule

exclude_tables = "item"
backup_dir = '/server/backup_db'
database_list = ["demo", "lcd", "xxl_job"]
file_path = '/server/scripts/log/backup_db.log'
exclude = ("--ignore-table=demo.item --ignore-table=demo.ap_import_log --ignore-table=demo.ap_upgrade_log "
           "--ignore-table=demo.bind_log --ignore-table=demo.esl_firmware_log --ignore-table=demo.item_import_log "
           "--ignore-table=demo.mail_log --ignore-table=demo.operation_log --ignore-table=demo.operation_log_active "
           "--ignore-table=demo.shelves_pricetag_item_log --ignore-table=demo.sign_import_log "
           "--ignore-table=demo.store_inventory_log --ignore-table=demo.user_login_log")

# Create the log and backup directories before logging is configured,
# otherwise logging.basicConfig(filename=...) fails on a missing path.
dir_path = '/server/scripts/log/'
if not os.path.exists(dir_path):
    os.makedirs(dir_path)
if not os.path.exists(backup_dir):
    os.makedirs(backup_dir)

logging.basicConfig(level=logging.INFO,
                    filename=file_path,
                    filemode='a',
                    format='%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s')


class MysqlConfig:
    """Thin wrapper around config.ini; keys are looked up in the [MySQL] section."""

    def __init__(self, config_file):
        self.config_file = config_file
        self.config = configparser.ConfigParser()
        self.config.read(self.config_file)

    def __getitem__(self, key):
        return self.config['MySQL'][key]


class GetFileName:
    """Builds backup file names and the matching mysqldump commands for one database."""

    def __init__(self, database, mysql_config, backup_dir, exclude=""):
        self.database = database
        self.mysql_config = mysql_config
        self.backup_dir = backup_dir
        self.exclude = exclude
        # One timestamp per instance, so the file name embedded in the dump
        # command matches the name collected later for the zip step.
        self.timestamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")

    def generate_backup_file_name(self, prefix):
        """Full-database dump path, e.g. /server/backup_db/demo_20230918114451.sql"""
        return os.path.join(self.backup_dir, f'{prefix}_{self.timestamp}.sql')

    def generate_backup_item_file_name(self):
        """item-table dump path, e.g. /server/backup_db/demo_item_20230918114451.sql"""
        return os.path.join(self.backup_dir, f'{self.database}_item_{self.timestamp}.sql')

    def get_backup_item_file_name(self):
        backup_item_file_name = self.generate_backup_item_file_name()
        print(backup_item_file_name)
        return backup_item_file_name

    def get_backup_file_name(self):
        return self.generate_backup_file_name(str(self.database))

    def generate_backup_command(self, table_name=""):
        """Build the mysqldump command.

        table_name is either a single table name ("item") or a string of extra
        mysqldump options such as the --ignore-table list.
        """
        command = (f'mysqldump -h {self.mysql_config["master_host"]} '
                   f'-u {self.mysql_config["master_user"]} '
                   f'-p{self.mysql_config["master_password"]} {self.database}')
        command += f' {table_name}'
        if table_name == "item":
            command += f' > {self.generate_backup_item_file_name()}'
        else:
            command += f' > {self.generate_backup_file_name(self.database)}'
        return command


class BackupDatabaseRun:
    def __init__(self, database_list, mysql_config, backup_dir, exclude=""):
        self.database_list = database_list
        self.mysql_config = mysql_config
        self.backup_dir = backup_dir
        self.exclude = exclude

    def run_get_names(self):
        """Dump every database and return the list of .sql files that were written."""
        try:
            zip_file_name = []
            for database in self.database_list:
                get_backup_names = GetFileName(database, self.mysql_config, self.backup_dir, self.exclude)
                backup_demo_file_name = get_backup_names.generate_backup_file_name(database)
                if database.startswith("demo"):
                    # demo: dump everything except the log tables, then dump the item table separately
                    backup_command = get_backup_names.generate_backup_command(exclude)
                    zip_file_name.append(backup_demo_file_name)
                    subprocess.run(backup_command, shell=True, check=True)
                    logging.info(f'Back up demo library command: {backup_command}')
                    item_backup_file_name = get_backup_names.generate_backup_item_file_name()
                    item_backup_command = get_backup_names.generate_backup_command(exclude_tables)
                    subprocess.run(item_backup_command, shell=True, check=True)
                    logging.info(f'Back up the item table command: {item_backup_command}')
                    zip_file_name.append(item_backup_file_name)
                else:
                    backup_xxl_lcd_command = get_backup_names.generate_backup_command()
                    subprocess.run(backup_xxl_lcd_command, shell=True, check=True)
                    logging.info(f'Back up xxl_job/lcd library command: {backup_xxl_lcd_command}')
                    zip_file_name.append(backup_demo_file_name)
            return zip_file_name
        except subprocess.CalledProcessError as e:
            logging.error(f'Backup failed for database: {str(e)}')
            return []

    def zip_file(self):
        """Compress every dump into a .zip next to it and delete the original .sql."""
        sql_file_name = self.run_get_names()
        for file_name in sql_file_name:
            new_filename = os.path.splitext(file_name)[0] + '.zip'
            with zipfile.ZipFile(new_filename, 'w', zipfile.ZIP_DEFLATED, allowZip64=True) as zipf:
                zipf.write(file_name)
            logging.info(f'Original file: {file_name} --->> Target compressed file: {new_filename}')
            os.remove(file_name)


def deletefile(path):
    """Recursively delete .zip backups older than data_retention_days."""
    data_retention_days = int(mysql_config["data_retention_days"])
    for eachfile in os.listdir(path):
        filename = os.path.join(path, eachfile)
        if os.path.isfile(filename):
            lastmodifytime = os.stat(filename).st_mtime
            endfiletime = time.time() - 3600 * 24 * data_retention_days
            if endfiletime > lastmodifytime and filename[-4:] == ".zip":
                os.remove(filename)
                logging.info(f'Deleting backup file: {filename}')
        elif os.path.isdir(filename):
            deletefile(filename)


if __name__ == '__main__':
    mysql_config = MysqlConfig('config.ini')
    backup_runner = BackupDatabaseRun(database_list, mysql_config, backup_dir, exclude_tables)
    schedule.every().day.at(mysql_config["scheduled_task"]).do(backup_runner.zip_file)
    # Run retention cleanup on the same daily schedule; calling it after the
    # infinite loop would never be reached.
    schedule.every().day.at(mysql_config["scheduled_task"]).do(deletefile, backup_dir)
    logging.info("Wait for backup")
    while True:
        schedule.run_pending()
        time.sleep(1)
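The script expects a config.ini next to backup_db.py, read by MysqlConfig from a [MySQL] section; the keys it looks up are master_host, master_user, master_password, scheduled_task and data_retention_days. The original post does not include that file, so the following is only a sketch with assumed placeholder values:

[MySQL]
# all values below are placeholders -- adjust for your environment
master_host = 192.168.1.10
master_user = backup
master_password = your_password
# HH:MM string handed to schedule.every().day.at()
scheduled_task = 02:30
# .zip archives older than this many days are removed by deletefile()
data_retention_days = 7

With that file in place, running python backup_db.py keeps the process in the foreground, dumps and zips the databases at the configured time each day, and prunes expired archives under /server/backup_db.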