MySQL Binlog信息查看
查看Binlog相关信息
##=====================================## ## 在MySQL内部查看binlog文件列表 ## SHOW BINARY LOGS; ##=====================================## ##查看某个binlog文件中特定pos的操作 SHOW BINLOG EVENTS IN 'mysql-bin.000011' FROM 4742885 LIMIT 15; ##=====================================## ## 使用mysqlbinlog查看binlog ## ## 按时间过滤--start-datetime --stop-datetime,过滤时间格式为'2004-12-25 11:25:56' ## 按位置点过滤--start-position --stop-position,如果不能提供准确的pos值,则会报错 ## 按照GTID过滤--include-gtids / --exclude-gtids /export/servers/mysql/bin/mysqlbinlog -vv /export/data/mysql/data/mysql-bin.008735 /export/servers/mysql/bin/mysqlbinlog -vv /export/data/mysql/data/mysql-bin.008735 --start-datetime='2017-01-01 00:00:00' --stop-datetime='2017-01-02 00:00:00' /export/servers/mysql/bin/mysqlbinlog -vv /export/data/mysql/data/mysql-bin.008735 --start-position=194 --stop-position=201 /export/servers/mysql/bin/mysqlbinlog -vv /export/data/mysql/data/mysql-bin.008735 --include-gtids="2aa60248-d8cf-11e8-a5c5-fa1622b12630:22652-22659" ##=====================================## ## 查看binlog 文件大小和最后修改时间 ## ll -h --time-style='+%Y-%m-%d %H:%M:%S' /export/data/mysql/data/mysql-bin*
解析Binlog获取操作频率:
/export/servers/mysql/bin/mysqlbinlog --no-defaults --base64-output=decode-rows -vvv mysql-bin.001282 \ | awk '/###/ {if($0~/UPDATE|INSERT|DELETE/)count[$2" "$NF]++}END{for(i in count) print i,"\t",count[i]}' \ | column -t | sort -k3nr
对解析出的SQL文件进行解析,计算每张表操作次数和每秒操作次数:
1、先使用mysqlbinlog解析文件:
/export/servers/mysql/bin/mysqlbinlog --no-defaults --base64-output=decode-rows -vvv mysql-bin.001282 >/tmp/001282.sql
2、修改下面python文件中main部分的sql文件地址,然后执行
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# =============================================================================
# FileName: binlog_summary.py
# Desc: Parse a mysqlbinlog-decoded SQL dump (produced with
#       --base64-output=decode-rows -vvv) and write two summaries next to
#       this script: per-table access counts and per-second access counts.
# Author:
# Email:
# HomePage:
# Version:
# LastChange:
# History:
# =============================================================================
import os
from collections import Counter


def write_file(file_path, file_content):
    """Write *file_content* to *file_path*, overwriting any existing file."""
    # `with` guarantees the handle is closed even if write() raises,
    # fixing the open()/close() resource leak of the original version.
    with open(file_path, "w") as file_handler:
        file_handler.write(file_content)


def _count_table_map_lines(dump_sql_path, extract_key):
    """Count keys extracted from 'Table_map:' event lines in the dump.

    Scans the dump once; for every line that contains 'Table_map: ' at a
    position after column 0 (mysqlbinlog event headers start with
    '#<datetime> server id ...'), applies *extract_key* to the line and
    tallies the result. Returns a collections.Counter of key -> hits.
    The file handle is closed deterministically via `with`.
    """
    counter = Counter()
    with open(dump_sql_path) as file_handler:
        for file_line in file_handler:
            # find() > 0 (not >= 0) reproduces the original filter:
            # the marker must be present and not at the start of the line.
            if file_line.find("Table_map: ") > 0:
                counter[extract_key(file_line)] += 1
    return counter


def get_table_summary(dump_sql_path):
    """Summarize how often each table appears in the binlog dump.

    Writes 'binlog_table_summary.txt' (in this script's directory) listing
    tables sorted by access count, descending.
    """
    def table_name_of(file_line):
        # The table name sits between 'Table_map: ' and ' mapped to'.
        start_index = file_line.index("Table_map:") + len("Table_map: ")
        end_index = file_line.index("mapped to", start_index)
        return file_line[start_index:end_index].strip()

    table_counts = _count_table_map_lines(dump_sql_path, table_name_of)
    result_list = ["##=============按照表访问次数排序====================##"]
    # Counts are already ints, so no int() cast is needed for the sort key.
    for table_name, hit_count in sorted(
            table_counts.items(), key=lambda item: item[1], reverse=True):
        result_list.append("{0}======================{1}".format(table_name, hit_count))
    result_list.append("##=============================================##")
    result_file = os.path.join(os.path.dirname(__file__), "binlog_table_summary.txt")
    write_file(result_file, "\n".join(result_list))


def get_binlog_time_summary(dump_sql_path):
    """Summarize how many row events were logged per binlog timestamp.

    Writes 'binlog_time_summary.txt' (in this script's directory) listing
    timestamps sorted lexicographically, newest first.
    """
    def event_time_of(file_line):
        # Header lines look like '#<datetime> server id ...'; the timestamp
        # is everything between the leading '#' and 'server id'.
        start_index = 1
        end_index = file_line.index("server id", start_index)
        return file_line[start_index:end_index].strip()

    datetime_counts = _count_table_map_lines(dump_sql_path, event_time_of)
    result_list = ["##=============按照时间访问排序====================##"]
    for event_time, hit_count in sorted(
            datetime_counts.items(), key=lambda item: item[0], reverse=True):
        result_list.append("{0}======================{1}".format(event_time, hit_count))
    result_list.append("##=============================================##")
    result_file = os.path.join(os.path.dirname(__file__), "binlog_time_summary.txt")
    write_file(result_file, "\n".join(result_list))


if __name__ == '__main__':
    sql_path = "/tmp/001282.sql"
    print("正在解析数据文件{0},获取每表访问次数".format(sql_path))
    get_table_summary(dump_sql_path=sql_path)
    print("正在解析数据文件{0},获取每秒访问次数".format(sql_path))
    get_binlog_time_summary(dump_sql_path=sql_path)
    print("解析完成")