25.Spring Cloud Sleuth与ELK

1.ELK搭建

关于ELK环境搭建在之前的博客都已经提到了。

Elasticsearch安装Head插件

安装Logstash

安装Kibana

2.项目示例

  2.1添加依赖

 <!-- ELK: logstash-logback-encoder provides the JSON encoder used in logback-spring.xml
      so log events can be written in a format Logstash can ingest directly. -->
<dependency>
    <groupId>net.logstash.logback</groupId>
    <artifactId>logstash-logback-encoder</artifactId>
    <version>4.6</version>
</dependency>
<!-- Distributed tracing: Sleuth populates the MDC with X-B3-TraceId / X-B3-SpanId
     fields that the logging patterns below reference. -->
<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-starter-sleuth</artifactId>
</dependency>

 

2.2在src/main/resources新建logback-spring.xml,内容如下:

<?xml version="1.0" encoding="UTF-8"?>
<configuration>
  <!-- Pull in Spring Boot's Logback defaults (conversion words, base properties). -->
  <include resource="org/springframework/boot/logging/logback/defaults.xml" />
  <!-- Expose spring.application.name (set in bootstrap.properties) as ${springAppName}. -->
  <springProperty scope="context" name="springAppName" source="spring.application.name" />

  <!-- Example for logging into the build folder of your project -->
  <property name="LOG_FILE" value="${BUILD_FOLDER:-build}/${springAppName}" />
  <!-- Console/file pattern; the yellow bracketed group prints the Sleuth MDC fields:
       [appName,traceId,spanId,parentSpanId,exportable]. -->
  <property name="CONSOLE_LOG_PATTERN"
    value="%clr(%d{yyyy-MM-dd HH:mm:ss.SSS}){faint} %clr(${LOG_LEVEL_PATTERN:-%5p}) %clr([${springAppName:-},%X{X-B3-TraceId:-},%X{X-B3-SpanId:-},%X{X-B3-ParentSpanId:-},%X{X-Span-Export:-}]){yellow} %clr(${PID:- }){magenta} %clr(---){faint} %clr([%15.15t]){faint} %clr(%-40.40logger{39}){cyan} %clr(:){faint} %m%n${LOG_EXCEPTION_CONVERSION_WORD:-%wEx}" />

  <!-- Appender to log to console -->
  <appender name="console" class="ch.qos.logback.core.ConsoleAppender">
    <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
      <!-- Minimum logging level to be presented in the console logs -->
      <level>DEBUG</level>
    </filter>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>utf8</charset>
    </encoder>
  </appender>

  <!-- Appender to log to file (plain text, same pattern as the console) -->
  <appender name="flatfile" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <file>${LOG_FILE}</file>
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
      <!-- Roll daily, keep 7 days of compressed history. -->
      <fileNamePattern>${LOG_FILE}.%d{yyyy-MM-dd}.gz</fileNamePattern>
      <maxHistory>7</maxHistory>
    </rollingPolicy>
    <encoder>
      <pattern>${CONSOLE_LOG_PATTERN}</pattern>
      <charset>utf8</charset>
    </encoder>
  </appender>

  <!-- Appender to log to file in a JSON format; this .json file is the one
       the Logstash file input tails. -->
  <appender name="logstash" class="ch.qos.logback.core.rolling.RollingFileAppender">
    <file>${LOG_FILE}.json</file>
    <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
      <fileNamePattern>${LOG_FILE}.json.%d{yyyy-MM-dd}.gz</fileNamePattern>
      <maxHistory>7</maxHistory>
    </rollingPolicy>
    <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
      <providers>
        <timestamp>
          <timeZone>UTC</timeZone>
        </timestamp>
        <!-- One JSON object per event; trace/span fields come from the Sleuth MDC. -->
        <pattern>
          <pattern>
            {
              "severity": "%level",
              "service": "${springAppName:-}",
              "trace": "%X{X-B3-TraceId:-}",
              "span": "%X{X-B3-SpanId:-}",
              "parent": "%X{X-B3-ParentSpanId:-}",
              "exportable": "%X{X-Span-Export:-}",
              "pid": "${PID:-}",
              "thread": "%thread",
              "class": "%logger{40}",
              "rest": "%message"
            }
          </pattern>
        </pattern>
      </providers>
    </encoder>
  </appender>

  <root level="INFO">
    <appender-ref ref="console" />
    <appender-ref ref="logstash" />
    <!--<appender-ref ref="flatfile"/> -->
  </root>
</configuration>

 

2.3新建bootstrap.properties添加如下内容

# Name of this microservice; other services use this name to look it up and
# call it. It must be defined in bootstrap.properties (not application.properties)
# because logback-spring.xml reads it via the ${springAppName} springProperty
# during logging-system initialization.
spring.application.name=service-provide-trace-elk

由于使用了自定义的logback-spring.xml,并且该文件中含有变量(springAppName),spring.application.name属性必须在bootstrap.properties中设置。

2.4Logstash配置文件,命名为logstash.conf(E:\soft\logstash-6.3.2\bin   在安装目录的bin目录下新建logstash.conf),具体配置如下

# Logstash pipeline: tail the JSON log file produced by logstash-logback-encoder
# and index each event into the local Elasticsearch node.
input { 
   file {
   # Custom event type tag.
   type => "server"
    # Each line of the file is a JSON object; decode it into event fields.
    codec =>json
    path  =>"D:\personal_code\workspace_springcloud\918.spring-cloud-service-provide-trace-elk\build\service-provide-trace-elk.json"
  }
 }
 # NOTE(review): the input codec already parses the JSON into fields; this grok
 # pattern targets the plain-text console layout and likely will not match the
 # JSON events' "message" field — verify whether this filter is needed here.
 filter {
       grok {
              match => { "message" => "%{TIMESTAMP_ISO8601:timestamp}\s+%{LOGLEVEL:severity}\s+\[%{DATA:service},%{DATA:trace},%{DATA:span},%{DATA:exportable}\]\s+%{DATA:pid}\s+---\s+\[%{DATA:thread}\]\s+%{DATA:class}\s+:\s+%{GREEDYDATA:rest}" }
       }
}
# Send every event to the local Elasticsearch instance under a fixed index name.
output {
  elasticsearch { 
       hosts => ["localhost:9200"] 
	   index => "trace_test_log"
	   }
}

2.5启动注册中心,服务消费,服务提供者,elasticsearch,elasticsearch-head,logstash,kibana

elasticsearch-head启动页面:

kibana界面

通过消费者,访问服务提供者。可以看到追踪日志在不断的增加。

 

 

  

 微信公众号

 

 

posted @ 2020-01-13 17:13  盲目的拾荒者  阅读(1324)  评论(0编辑  收藏  举报