安装elk日志监控系统
下载安装包稳定版本
elasticsearch-7.17.10-linux-x86_64.tar.gz  # https://www.elastic.co/downloads/past-releases/elasticsearch-7-17-10
logstash-7.17.10-linux-x86_64.tar.gz       # https://www.elastic.co/downloads/past-releases/logstash-7-17-10
kibana-7.17.10-linux-x86_64.tar.gz         # https://www.elastic.co/downloads/past-releases/kibana-7-17-10
jdk1.8.0_351.tar.gz                        # jdk8及以上版本
172.16.1.12:server端安装elasticsearch、kibana、jdk
172.16.1.2:agent端安装logstash、jdk ,每个主机都需要安装一个logstash
1、安装jdk
在172.16.1.12上执行
# tar zxvf jdk1.8.0_351.tar.gz -C /usr/local/  #解压
# vim /etc/profile  #系统环境变量,追加以下内容
export JAVA_HOME=/usr/local/jdk1.8.0_351
export JRE_HOME=$JAVA_HOME/jre
export PATH=$PATH:$JAVA_HOME/bin:$JRE_HOME/bin
export CLASSPATH=.:$JAVA_HOME/lib/dt.jar:$JAVA_HOME/lib/tools.jar
# source /etc/profile  #刷新加载
# java -version #验证
openjdk version "1.8.0_351"
OpenJDK Runtime Environment (build 1.8.0_351-b08)
OpenJDK 64-Bit Server VM (build 25.351-b08, mixed mode)
2、安装es
mkdir /usr/local/elk  #创建目录,方便管理
tar -zxvf elasticsearch-7.17.10-linux-x86_64.tar.gz -C /usr/local/elk  #解压
cd /usr/local/elk  #进入
mv elasticsearch-7.17.10 elasticsearch  #重命名
adduser es #创建用户
echo 1234 |passwd --stdin es #更改密码
mkdir /usr/local/elk/elasticsearch/data #创建数据存放目录
chown -R es:es /usr/local/elk/elasticsearch #将ES所解压的目录授予此对应的用户
centos系统配置
编辑vi /etc/security/limits.conf,追加以下内容;# 设置当前ES用户的最大文件数(这里也可以使用*,表示所有的用户)
echo "es soft nofile 65536" >> /etc/security/limits.conf
echo "es hard nofile 65536" >> /etc/security/limits.conf
# 注意:limits.conf中的用户名必须与上面创建的运行用户es一致,否则限制不会生效
vi /etc/sysctl.conf
# 修改下述配置, 如果没有就在文件末尾添加:
echo "vm.max_map_count=655360" >> /etc/sysctl.conf
sysctl -p
修改配置文件elasticsearch.yml
vim /usr/local/elk/elasticsearch/config/elasticsearch.yml添加以下内容:
cluster.name: my-application
node.name: node_01
path.data: /usr/local/elk/elasticsearch/data  #数据目录
path.logs: /usr/local/elk/elasticsearch/logs  #日志目录
network.host: 172.16.1.12  #es地址
http.port: 20003  #启动端口
bootstrap.system_call_filter: false
bootstrap.memory_lock: false
cluster.initial_master_nodes: ["node_01"]
添加es服务
vim /usr/lib/systemd/system/elasticsearch.service
[Unit]
Description=elasticsearch
After=network.target

[Service]
Type=forking
# 启动用户
User=es
# jdk位置
Environment="PATH=/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin:/usr/local/jdk1.8.0_351/bin"
# 启动指令
ExecStart=/usr/local/elk/elasticsearch/bin/elasticsearch -d
PrivateTmp=true
# 指定此进程可以打开的最大文件数
LimitNOFILE=65536
# 指定此进程可以打开的最大进程数
LimitNPROC=65536
# 最大虚拟内存
LimitAS=infinity
# 最大文件大小
LimitFSIZE=infinity
# 超时设置 0-永不超时
TimeoutStopSec=0
# SIGTERM是停止java进程的信号
KillSignal=SIGTERM
# 信号只发送给JVM
KillMode=process
# java进程不会被杀掉
SendSIGKILL=no
# 正常退出状态
SuccessExitStatus=143

[Install]
WantedBy=multi-user.target
启动es
systemctl daemon-reload #重新加载
systemctl enable elasticsearch.service #开机自启动
systemctl start elasticsearch.service #启动服务
systemctl status elasticsearch.service #查看服务状态
查看端口,进程
netstat -tunlp |grep 20003 tcp6 0 0 172.16.1.12:20003 :::* LISTEN 26703/java
3、安装 logstash
在被收集端安装logstash,提前安装jdk这里省略
上传,解压
tar -zxvf logstash-7.17.10-linux-x86_64.tar.gz -C /usr/local/elk/ #解压
cd /usr/local/elk/ #进入
mv logstash-7.17.10 logstash #重命名
创建日志搜索配置文件
vim /usr/local/elk/logstash/bin/config.conf
input {
  file {
    path => "/var/log/messages"            #日志目录
    type => "messages_log_172.16.1.2"      #自定义名称
    start_position => "beginning"          #默认从头读取
  }
  file {
    path => "/var/log/secure"              #日志目录
    type => "secure_log_172.16.1.2"        #自定义名称
    start_position => "beginning"
  }
}
output {
  if [type] == "messages_log_172.16.1.2" {
    elasticsearch {
      hosts => ["172.16.1.12:20003"]       #es的地址
      index => "messages_log_172.16.1.2-%{+YYYY.MM.dd}"
    }
  }
  if [type] == "secure_log_172.16.1.2" {
    elasticsearch {
      hosts => ["172.16.1.12:20003"]       #es的地址
      index => "secure_log_172.16.1.2-%{+YYYY.MM.dd}"
    }
  }
}
启动
vim /usr/local/elk/logstash/bin/startup.sh  #启动脚本(注意:logstash安装在/usr/local/elk/logstash下)
nohup /usr/local/elk/logstash/bin/logstash -f /usr/local/elk/logstash/bin/config.conf >/dev/null 2>&1 &
chmod +x /usr/local/elk/logstash/bin/startup.sh  #添加执行权限
sh /usr/local/elk/logstash/bin/startup.sh  #启动
查看进程
# ps aux |grep logstash root 4642 0.0 0.0 112824 988 pts/1 S+ 17:05 0:00 grep --color=auto logstash root 30439 1.8 1.8 10243832 2434084 ? Sl 5月16 27:13 /usr/local/elk/logstash/jdk/bin/java -Xms1g -Xmx1g -XX:+UseConcMarkSweepGC -XX:CMSInitiatingOccupancyFraction=75 -XX:+UseCMSInitiatingOccupancyOnly -Djava.awt.headless=true -Dfile.encoding=UTF-8 -Djdk.io.File.enableADS=true -Djruby.compile.invokedynamic=true -Djruby.jit.threshold=0 -Djruby.regexp.interruptible=true -XX:+HeapDumpOnOutOfMemoryError -Djava.security.egd=file:/dev/urandom -Dlog4j2.isThreadContextMapInheritable=true -cp /usr/local/elk/logstash/logstas-core/lib/jars/animal-sniffer-annotations-1.14.jar:/usr/local/elk/logstash/logstash-core/lib/jars/checker-compat-qual-2.0.0.jar:/usr/local/elk/logstashlogstash-core/lib/jars/commons-codec-1.14.jar:/usr/local/elk/logstash/logstash-core/lib/jars/commons-compiler-3.1.0.jar:/usr/local/elk/logstash/logstas-core/lib/jars/commons-logging-1.2.jar:/usr/local/elk/logstash/logstash-core/lib/jars/error_prone_annotations-2.1.3.jar:/usr/local/elk/logstash/logstas-core/lib/jars/google-java-format-1.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/guava-24.1.1-jre.jar:/usr/local/elk/logstash/logstash-core/lib/jars/j2objc-annotations-1.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/jackson-annotations-2.9.10.jar:/usr/local/elk/logstash/logstash-core/lib/jars/jackson-core-2.9.10.jar:/usr/local/elk/logstash/logstash-core/lib/jars/jackson-databind-2.9.10.8.jar:/usr/local/elk/logstash/logstash-core/lib/jars/jackson-dataformat-cbor-2.9.10.jar:/usr/local/elk/logstash/logstash-core/lib/jars/jackson-dataformat-yaml-2.9.10.jar:/usr/local/elk/logstash/logstash-core/lib/jars/janino-3.1.0.jar:/usr/local/elk/logstash/logstash-core/lib/jars/javassist-3.26.0-GA.jar:/usr/local/elk/logstash/logstash-core/lib/jars/jruby-complete-9.2.20.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/jsr305-1.3.9.jar:/usr/local/elk/logstash/logstash-core/lib/jars/log4j-1.2-api-2.17.1.jar:/usr/loca
l/elk/logstash/logstash-core/lib/jars/log4j-api-2.17.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/log4j-core-2.17.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/log4j-jcl-2.17.1.jar:/usr/local/elklogstash/logstash-core/lib/jars/log4j-slf4j-impl-2.17.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/logstash-core.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.core.commands-3.6.0.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.core.contenttype-3.4.100.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.core.expressions-3.4.300.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.core.filesystem-1.3.100.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.core.jobs-3.5.100.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.core.resources-3.7.100.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.core.runtime-3.7.0.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.equinox.app-1.3.100.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.equinox.common-3.6.0.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.equinox.preferences-3.4.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.equinox.registry-3.5.101.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.jdt.core-3.10.0.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.osgi-3.7.1.jar:/usr/local/elk/logstash/logstash-core/lib/jars/org.eclipse.text-3.5.101.jar:/usr/local/elk/logstash/logstash-core/lib/jars/reflections-0.9.11.jar:/usr/local/elk/logstash/logstash-core/lib/jars/slf4j-api-1.7.30.jar:/usr/local/elk/logstash/logstash-core/lib/jars/snakeyaml-1.33.jar org.logstash.Logstash -f config.conf
4、安装kibana
解压
tar zxvf kibana-7.17.10-linux-x86_64.tar.gz -C /usr/local/elk  #解压
cd /usr/local/elk  #进入
mv kibana-7.17.10 kibana  #重命名
编辑配置文件
vim /usr/local/elk/kibana/config/kibana.yml
server.port: 20006  #启动端口
server.host: "172.16.1.12"  #kibana地址
elasticsearch.hosts: ["http://172.16.1.12:20003"]  #es地址
kibana.index: ".kibana"  #默认
i18n.locale: "zh-CN"  #开启中文
wq 保存退出
启动kibana
vim /usr/local/elk/kibana/bin/startup.sh
nohup /usr/local/elk/kibana/bin/kibana --allow-root >/dev/null 2>&1 &
chmod +x /usr/local/elk/kibana/bin/startup.sh  #添加执行权限
sh /usr/local/elk/kibana/bin/startup.sh  #启动
检查端口
netstat -tunlp |grep 20006 tcp 0 0 172.16.1.12:20006 0.0.0.0:* LISTEN 30710/node
到这里,3个组件都正常启动的情况下,可以访问kibana页面啦
http://172.16.1.12:20006/
打开索引模式
左上角菜单--Management--Stack Management--Kibana--索引模式
创建索引模式
输入索引名称,匹配具体日志
输入具体的名称后,可以匹配到某个日志,注意:输入的名称需与中括号及中括号内的内容完全匹配,“下一步”按钮才能被点亮,否则无法进入下一步,后半部分日期部分可以不用输入
选择一个时间字段,如果日志数据中本身没有,可以使用@timestamp
进入日志面板
左上角菜单-Kibana-Discover,进入Kibana日志面板
Kibana日志面板使用
左上角菜单--Kibana--Discover,日志面板中:
- 可以切换索引来查看不同服务的日志
- 可以根据时间段筛选日志
- 可以自定义日志列表字段
- 可以通过Kibana特有的KQL(Kibana Query Language)检索日志