文件压缩同步脚本 (file-compression sync script: zips week-old dated log folders on Windows and uploads them to a Linux host over SFTP)

# -*- coding: UTF-8 -*-

import zipfile
import os
import re
from datetime import date
from datetime import timedelta
import time
import paramiko
import logging
from logging.handlers import TimedRotatingFileHandler
import shutil

username = "username"
hostname = "ip"
password = "password"


def initlog(logpath, logname="synclog"):
    """Create and configure the sync logger with daily file rotation.

    :param logpath: directory where the rotating log files are written
    :param logname: base name of the log file (default ``"synclog"``)
    :return: the configured ``logging.Logger``
    """
    logger = logging.getLogger(__name__)
    logger.setLevel(level=logging.INFO)
    log_path = os.path.join(r"{logpath}".format(logpath=logpath), logname)
    # when="MIDNIGHT", interval=1 -> roll over at 00:00, one file per day
    # backupCount=30 -> keep at most 30 rotated files
    file_handler = TimedRotatingFileHandler(filename=log_path, when="MIDNIGHT", interval=1, backupCount=30)
    # suffix yields rotated file names like synclog.2020-04-10.log
    file_handler.suffix = "%Y-%m-%d.log"
    # extMatch must match the names produced by `suffix`, otherwise expired
    # logs are never deleted.
    # BUG FIX: the dot is now escaped — the original r"...\d{2}.log$" matched
    # any character before "log", so unrelated files could be deleted.
    file_handler.extMatch = re.compile(r"^\d{4}-\d{2}-\d{2}\.log$")
    # log line format: timestamp, pid, level, origin, message
    file_handler.setFormatter(
        logging.Formatter(
            "[%(asctime)s] [%(process)d] [%(levelname)s] - %(module)s.%(funcName)s (%(filename)s:%(lineno)d) - %(message)s"
        ))
    # BUG FIX: guard against duplicate handlers (and thus duplicated log
    # lines) when initlog() is called more than once in a process.
    if not logger.handlers:
        logger.addHandler(file_handler)
    return logger


def get_zip_file(input_path, result):
    """Recursively collect every file path under *input_path* into *result*.

    Paths are joined with ``'/'`` (works on Windows too) so the downstream
    zip archive keeps the same relative layout.

    :param input_path: directory to walk
    :param result: list that receives the file paths (mutated in place)
    """
    # BUG FIX: the original used a global `logger` that only exists when the
    # script runs as __main__, raising NameError if this module is imported.
    log = logging.getLogger(__name__)
    log.info("======================开始深度遍历目录=================================")
    for entry in os.listdir(input_path):
        full = input_path + '/' + entry
        if os.path.isdir(full):
            # descend into sub-directories depth-first
            get_zip_file(full, result)
        else:
            result.append(full)
    log.info("======================结束深度遍历目录=================================")


def zip_file_path(input_path, output_path, output_name):
    """
    压缩文件,相对路径 — zip every file under *input_path* into one archive.

    :param input_path: 压缩的文件夹路径 (folder to compress, relative path)
    :param output_path: 输出的路径 (directory the archive is written to)
    :param output_name: 压缩包名称 (archive file name)
    :return: path of the created archive
    """
    # BUG FIX: use the module logger instead of a global `logger` that only
    # exists when the script runs as __main__.
    log = logging.getLogger(__name__)
    log.info("======================开始压缩文件=================================")
    filelists = []
    get_zip_file(input_path, filelists)
    # BUG FIX: `with` guarantees the archive is flushed and closed even if
    # write() raises; the original leaked the handle on error.
    with zipfile.ZipFile(output_path + '/' + output_name, 'w', zipfile.ZIP_DEFLATED) as archive:
        for file in filelists:
            archive.write(file)
    return output_path + r"/" + output_name


def zip_file_abspath(abspath, output_name, cust_output=''):
    """
    压缩绝对路径 — compress the folder at an absolute path.

    :param abspath:         压缩的文件夹的绝对路径 (absolute path of the folder)
    :param output_name:     输出文件名 (archive file name)
    :param cust_output:     输出路径, 可选 (optional output directory; defaults
                            to the parent directory of *abspath*)
    :return:                压缩文件的保存路径 (path of the created archive),
                            or None if the folder is not found in its parent.
    """
    path = abspath
    os.chdir(path)
    # parent directory of the target folder; archives land here by default
    output_path = os.path.abspath(os.path.join(os.getcwd(), ".."))
    os.chdir(output_path)
    for i in os.listdir():
        # match the folder name regardless of which separator `path` used
        if i == path.split('\\')[-1] or i == path.split('/')[-1]:
            input_path = '.\\' + i
            # BUG FIX: the original recursed into zip_file_abspath() with the
            # arguments swapped (cust_output passed as output_name), so a
            # custom output directory never worked.  Route both cases to
            # zip_file_path() with the correct output directory.
            if cust_output != '':
                return zip_file_path(input_path, cust_output, output_name)
            else:
                return zip_file_path(input_path, output_path, output_name)


def sync_file(hostname, username, password, logger, remotepath, zipfilepath, zipfilename):
    """
    Upload a local zip archive to a Linux host over SFTP, then delete both
    the local archive and the source directory it was built from.

    :param hostname: SSH host (port 22)
    :param username: SSH user
    :param password: SSH password
    :param logger: logger for progress messages
    :param remotepath: remote directory (must end with '/')
    :param zipfilepath: local path of the archive to upload
    :param zipfilename: archive file name on the remote side
    """
    tran = None
    try:
        logger.info("======================开始ssh连接=================================")
        tran = paramiko.Transport((hostname, 22))
        tran.connect(username=username, password=password)
        sftp = paramiko.SFTPClient.from_transport(tran)
        # put() returns the remote SFTPAttributes on success
        res = sftp.put(r"{}".format(zipfilepath), "{}{}".format(remotepath, zipfilename))
        logger.info("本地文件: {}  远程文件: {}{}".format(zipfilepath, remotepath, zipfilename))
        logger.info("sftp.put发送数据返回值: {}".format(res))
        if res:
            logger.info("同步日志: {}".format(zipfilename))
            # 删除本地压缩文件 — clean up local copies after a successful upload
            logger.info("==================开始删除本地压缩文件=========================")
            zipfilepath = zipfilepath.replace('/', "\\")
            logger.info("zipfilepath: {}".format(zipfilepath))
            os.remove(r'{}'.format(zipfilepath))
            # BUG FIX: str.strip(".zip") strips any of the characters
            # '.', 'z', 'i', 'p' from BOTH ends (e.g. "zip2021.zip" -> "2021"),
            # which could delete the wrong directory.  Cut the ".zip" suffix
            # explicitly instead.
            if zipfilepath.endswith(".zip"):
                source_dir = zipfilepath[:-len(".zip")]
            else:
                source_dir = zipfilepath
            shutil.rmtree(r'{}'.format(source_dir))

    except Exception as er:
        logger.error(er)
    finally:
        # BUG FIX: always release the SSH transport; the original leaked the
        # connection on both success and failure.
        if tran is not None:
            tran.close()


def zip_file_removesource(logpath, logger, remotepath):
    """
    Find sub-directories of *logpath* named after the date 7 days ago,
    zip each one and upload it via sync_file() (which also removes the
    local archive and source directory on success).

    :param logpath: local directory containing dated sub-directories
    :param logger: logger for progress messages
    :param remotepath: remote (Linux) directory to upload into
    """
    try:
        logger.info("========================查找要压缩的文件==============================")
        target_prefix = (date.today() - timedelta(days=7)).strftime("%Y-%m-%d")
        # BUG FIX: the original shelled out with
        # os.popen("dir /a:d /b > tmp.txt") and immediately read tmp.txt.
        # os.popen is asynchronous, so tmp.txt could be missing or partial,
        # and the temp file was never cleaned up.  os.listdir() + isdir()
        # is race-free and needs no temp file.
        for name in os.listdir(logpath):
            if not os.path.isdir(os.path.join(logpath, name)):
                continue
            if name.startswith(target_prefix):
                # BUG FIX: the {filename} placeholder was garbled in the
                # original, so the kwarg was silently ignored and the path
                # never contained the directory name.
                path = r'{logpath}\{filename}'.format(logpath=logpath, filename=name)
                zipName = name + ".zip"
                zipfilepath = zip_file_abspath(path, zipName)
                logger.info("")
                if zipfile.is_zipfile(zipfilepath):
                    sync_file(hostname=hostname, username=username, password=password, logger=logger,
                              remotepath=remotepath, zipfilepath=zipfilepath, zipfilename=zipName)
                else:
                    print(False)

    except Exception as er:
        logger.error("压缩文件错误")
        logger.error(er)


if __name__ == '__main__':
    # Each job pairs a local directory (whose dated sub-folders get
    # compressed) with the Linux destination path it is uploaded to.

    # Directory that receives this script's own rotating log files.
    logger = initlog(r"C:\/Users\Administrator\PycharmProjects\py365\Windows")

    # Example job list — add one dict per directory to compress and upload.
    jobs = [
        {
            "logpath": r'C:\Users\Administrator\PycharmProjects\py365\Windows\spider\download\danke\room_detail',
            "remotepath": "/tmp/spider/download/danke/room_detail/"
        },
        {
            "logpath": r'C:\Users\Administrator\PycharmProjects\py365\Windows\spider\download\danke\room',
            "remotepath": "/tmp/spider/download/danke/room/"
        }
    ]

    for job in jobs:
        zip_file_removesource(
            logpath=job.get("logpath"),
            logger=logger,
            remotepath=job.get("remotepath"),
        )

 

posted @ 2021-02-08 10:55  KubeSec  阅读(185)  评论(0编辑  收藏  举报