Collecting Elasticsearch stats with Python
Environment:
OS: CentOS 7
ES: 6.8.5
Python: 2.7
get_es_stats.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
from elasticsearch import Elasticsearch
import os, json, urllib, datetime, shutil
import pymysql
import traceback
import time
gl_mysql_server = "192.168.1.10"
gl_user_name = "hxl"
gl_password = "mysql"
gl_db_name = "db_cmdbtest"
gl_port = 3306
# Convert an epoch timestamp in milliseconds (as returned by the ES nodes stats API)
# into a local ("Beijing") time string.
def utc2bjtime(utc_time):
    localtime = time.localtime(utc_time / 1000)
    bj_time = time.strftime("%Y-%m-%d %H:%M:%S", localtime)
    return bj_time
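# Usage example for utc2bjtime (assumption: the host runs in the Asia/Shanghai timezone,
# UTC+8, so "Beijing time" is simply the server's local time):
#   utc2bjtime(1573430400000)  ->  '2019-11-11 08:00:00'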
def insert_data(data_dict):
    db = pymysql.connect(host=gl_mysql_server, user=gl_user_name, password=gl_password,
                         db=gl_db_name, port=gl_port, use_unicode=True, charset="utf8")
    # Get a cursor object for executing SQL statements
    cursor = db.cursor()
    # db.autocommit(0)  ## disable autocommit
    # os
    ipaddr = data_dict['ipaddr']
    st_timestamp = utc2bjtime(data_dict['st_timestamp'])
    cpu_percent = data_dict['cpu_percent']
    cpu_load_average_1m = data_dict['cpu_load_average_1m']
    cpu_load_average_5m = data_dict['cpu_load_average_5m']
    cpu_load_average_15m = data_dict['cpu_load_average_15m']
    mem_free_in_bytes = data_dict['mem_free_in_bytes']
    mem_used_in_bytes = data_dict['mem_used_in_bytes']
    mem_free_percent = data_dict['mem_free_percent']
    mem_used_percent = data_dict['mem_used_percent']
    # query
    search_query_total = data_dict['search_query_total']
    search_query_time_in_millis = data_dict['search_query_time_in_millis']
    search_query_current = data_dict['search_query_current']
    search_fetch_total = data_dict['search_fetch_total']
    search_fetch_time_in_millis = data_dict['search_fetch_time_in_millis']
    search_fetch_current = data_dict['search_fetch_current']
    # indexing
    indexing_total = data_dict['indexing_total']
    indexing_time_in_millis = data_dict['indexing_time_in_millis']
    indexing_current = data_dict['indexing_current']
    # refresh
    refresh_total = data_dict['refresh_total']
    refresh_total_time_in_millis = data_dict['refresh_total_time_in_millis']
    # io
    fs_read_operations = data_dict['fs_read_operations']
    fs_write_operations = data_dict['fs_write_operations']
    fs_read_kilobytes = data_dict['fs_read_kilobytes']
    fs_write_kilobytes = data_dict['fs_write_kilobytes']
    # jvm
    jvm_heap_used_in_bytes = data_dict['jvm_heap_used_in_bytes']
    jvm_heap_used_percent = data_dict['jvm_heap_used_percent']
    jvm_heap_committed_in_bytes = data_dict['jvm_heap_committed_in_bytes']
    jvm_heap_max_in_bytes = data_dict['jvm_heap_max_in_bytes']
    jvm_threads_count = data_dict['jvm_threads_count']
    jvm_threads_peak_count = data_dict['jvm_threads_peak_count']
    insert_sql = "insert into tb_es_node_stats(ipaddr,st_timestamp,cpu_percent,cpu_load_average_1m,cpu_load_average_5m,cpu_load_average_15m,mem_free_in_bytes,mem_used_in_bytes,mem_free_percent,mem_used_percent," \
                 "search_query_total,search_query_time_in_millis,search_query_current,search_fetch_total,search_fetch_time_in_millis,search_fetch_current," \
                 "indexing_total,indexing_time_in_millis,indexing_current," \
                 "refresh_total,refresh_total_time_in_millis," \
                 "fs_read_operations,fs_write_operations,fs_read_kilobytes,fs_write_kilobytes," \
                 "jvm_heap_used_in_bytes,jvm_heap_used_percent,jvm_heap_committed_in_bytes,jvm_heap_max_in_bytes,jvm_threads_count,jvm_threads_peak_count) " \
                 "values ('%s','%s','%s','%s','%s','%s','%s','%s','%s','%s'," \
                 "%s,%s,%s,%s,%s,%s," \
                 "%s,%s,%s," \
                 "%s,%s," \
                 "%s,%s,%s,%s," \
                 "%s,%s,%s,%s,%s,%s)" \
                 % (ipaddr, st_timestamp, cpu_percent, cpu_load_average_1m, cpu_load_average_5m, cpu_load_average_15m, mem_free_in_bytes, mem_used_in_bytes, mem_free_percent, mem_used_percent,
                    search_query_total, search_query_time_in_millis, search_query_current, search_fetch_total, search_fetch_time_in_millis, search_fetch_current,
                    indexing_total, indexing_time_in_millis, indexing_current,
                    refresh_total, refresh_total_time_in_millis,
                    fs_read_operations, fs_write_operations, fs_read_kilobytes, fs_write_kilobytes,
                    jvm_heap_used_in_bytes, jvm_heap_used_percent, jvm_heap_committed_in_bytes, jvm_heap_max_in_bytes, jvm_threads_count, jvm_threads_peak_count)
    print(insert_sql)
    print(st_timestamp)
    try:
        # Execute the INSERT statement
        cursor.execute(insert_sql)
        db.commit()
    except Exception as err:
        # Roll back if the insert fails for any reason
        print("SQL execution failed: %s" % err)
        db.rollback()
    cursor.close()
    db.close()
    return 0
# Fetch per-node metrics from the ES nodes stats API and insert one row per node
def get_nodes_stats():
    es_ip = '192.168.1.69'
    es_username = 'elastic'
    es_passwd = '123'
    es_port = 19200
    url = 'http://' + str(es_username) + ":" + str(es_passwd) + '@' + str(es_ip) + ':' + str(es_port)
    es = Elasticsearch(
        [url], request_timeout=30
    )
    # Each node becomes one row in table tb_es_node_stats
    try:
        nodes_stats_info = es.nodes.stats()
        for node_id, stats in nodes_stats_info['nodes'].items():
            tmp_dict = {
                # os
                'st_timestamp': stats['timestamp'],
                'ipaddr': stats['host'],
                'cpu_percent': stats['os']['cpu']['percent'],
                'cpu_load_average_1m': stats['os']['cpu']['load_average']["1m"],
                'cpu_load_average_5m': stats['os']['cpu']['load_average']["5m"],
                'cpu_load_average_15m': stats['os']['cpu']['load_average']["15m"],
                'mem_free_in_bytes': stats['os']['mem']['free_in_bytes'],
                'mem_used_in_bytes': stats['os']['mem']['used_in_bytes'],
                'mem_free_percent': stats['os']['mem']['free_percent'],
                'mem_used_percent': stats['os']['mem']['used_percent'],
                # query
                'search_query_total': stats['indices']['search']['query_total'],
                'search_query_time_in_millis': stats['indices']['search']['query_time_in_millis'],
                'search_query_current': stats['indices']['search']['query_current'],
                'search_fetch_total': stats['indices']['search']['fetch_total'],
                'search_fetch_time_in_millis': stats['indices']['search']['fetch_time_in_millis'],
                'search_fetch_current': stats['indices']['search']['fetch_current'],
                # indexing
                'indexing_total': stats['indices']['indexing']['index_total'],
                'indexing_time_in_millis': stats['indices']['indexing']['index_time_in_millis'],
                'indexing_current': stats['indices']['indexing']['index_current'],
                # refresh
                'refresh_total': stats['indices']['refresh']['total'],
                'refresh_total_time_in_millis': stats['indices']['refresh']['total_time_in_millis'],
                # io
                'fs_read_operations': stats['fs']['io_stats']['total']['read_operations'],
                'fs_write_operations': stats['fs']['io_stats']['total']['write_operations'],
                'fs_read_kilobytes': stats['fs']['io_stats']['total']['read_kilobytes'],
                'fs_write_kilobytes': stats['fs']['io_stats']['total']['write_kilobytes'],
                # jvm
                'jvm_heap_used_in_bytes': stats['jvm']['mem']['heap_used_in_bytes'],
                'jvm_heap_used_percent': stats['jvm']['mem']['heap_used_percent'],
                'jvm_heap_committed_in_bytes': stats['jvm']['mem']['heap_committed_in_bytes'],
                'jvm_heap_max_in_bytes': stats['jvm']['mem']['heap_max_in_bytes'],
                'jvm_threads_count': stats['jvm']['threads']['count'],
                'jvm_threads_peak_count': stats['jvm']['threads']['peak_count']
            }
            insert_data(tmp_dict)
    except Exception as e:
        print(e)


if __name__ == '__main__':
    get_nodes_stats()
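
The collector assumes that a tb_es_node_stats table already exists in db_cmdbtest; the original script does not include its DDL. The helper below (a hypothetical create_tb_es_node_stats.py, not part of the original post) is only a sketch: the column names are taken from the INSERT statement above, while the column types and the auto-increment id column are assumptions you may need to adapt.
create_tb_es_node_stats.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# One-off helper (sketch): create the tb_es_node_stats table the collector writes to.
# Column types are assumptions based on the values returned by the nodes stats API.
import pymysql

ddl = """
create table if not exists tb_es_node_stats (
  id bigint auto_increment primary key,
  ipaddr varchar(64),
  st_timestamp datetime,
  cpu_percent int,
  cpu_load_average_1m decimal(10,2),
  cpu_load_average_5m decimal(10,2),
  cpu_load_average_15m decimal(10,2),
  mem_free_in_bytes bigint,
  mem_used_in_bytes bigint,
  mem_free_percent int,
  mem_used_percent int,
  search_query_total bigint,
  search_query_time_in_millis bigint,
  search_query_current bigint,
  search_fetch_total bigint,
  search_fetch_time_in_millis bigint,
  search_fetch_current bigint,
  indexing_total bigint,
  indexing_time_in_millis bigint,
  indexing_current bigint,
  refresh_total bigint,
  refresh_total_time_in_millis bigint,
  fs_read_operations bigint,
  fs_write_operations bigint,
  fs_read_kilobytes bigint,
  fs_write_kilobytes bigint,
  jvm_heap_used_in_bytes bigint,
  jvm_heap_used_percent int,
  jvm_heap_committed_in_bytes bigint,
  jvm_heap_max_in_bytes bigint,
  jvm_threads_count int,
  jvm_threads_peak_count int
)
"""

# Same MySQL server and credentials as get_es_stats.py
db = pymysql.connect(host="192.168.1.10", user="hxl", password="mysql",
                     db="db_cmdbtest", port=3306, charset="utf8")
cursor = db.cursor()
cursor.execute(ddl)
db.commit()
cursor.close()
db.close()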
run_get_es_stats.sh
#!/bin/bash
/usr/bin/python /home/yeemiao/scripts/es/get_es_stats.py
crontab:
## Collect ES stats every 20 seconds. cron's finest granularity is one minute, so three
## staggered entries (no sleep, sleep 20, sleep 40) approximate a 20-second interval.
* * * * * /home/yeemiao/scripts/es/run_get_es_stats.sh
* * * * * sleep 20; /home/yeemiao/scripts/es/run_get_es_stats.sh
* * * * * sleep 40; /home/yeemiao/scripts/es/run_get_es_stats.sh
Dependencies required by the script:
pip install pymysql
pip install elasticsearch==6.8.2
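
Optional: a quick connectivity smoke test before adding the cron entries. This is not part of the original setup; it simply reuses the same addresses and credentials as get_es_stats.py to confirm that both the ES nodes stats API and MySQL are reachable from this host.
check_connections.py
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
# Smoke test (sketch): one call to the ES nodes stats API and one MySQL query.
from elasticsearch import Elasticsearch
import pymysql

es = Elasticsearch(['http://elastic:123@192.168.1.69:19200'], request_timeout=30)
# Should print the node ids of the cluster
print(es.nodes.stats()['nodes'].keys())

db = pymysql.connect(host="192.168.1.10", user="hxl", password="mysql",
                     db="db_cmdbtest", port=3306, charset="utf8")
cursor = db.cursor()
# The row count should keep growing once the cron entries are active
cursor.execute("select count(*) from tb_es_node_stats")
print(cursor.fetchone())
cursor.close()
db.close()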