Logback: shipping logs to Kafka, MDC & traceId

 

logging.level.cn.com.dao=debug — MyBatis prints its SQL through the DAO package's logger, so that logger must be at debug to see the SQL.
For a full debug run, set the levels in logback-spring.xml to debug as well:
<root level="debug">
<appender-ref ref="CONSOLE"/>
</root>
<logger name="cn.com" level="debug">
<appender-ref ref="insurantFile"/>
</logger>

 

The Lombok dependency (used for @Slf4j below):

        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.18.8</version>
        </dependency>
The logback-spring.xml file:
<?xml version="1.0" encoding="UTF-8"?>
<configuration>
    <!-- Note: Spring Boot's base.xml already attaches CONSOLE and FILE appenders to root,
         so together with the CONSOLE appender below, console lines appear twice;
         drop this include if that duplication is unwanted -->
    <include resource="org/springframework/boot/logging/logback/base.xml" />
    <property name="log.path" value="F:\\study\\log" />
    <property name="projectname" value="insurance-service" />

    <appender name="CONSOLE" class="ch.qos.logback.core.ConsoleAppender">
        <target>System.out</target>
        <encoder charset="UTF-8">
            <pattern>%d [%thread] %-5p [%c] [%F:%L] [traceId=%X{traceId}] - %msg%n</pattern>
        </encoder>
    </appender>

    <appender name="errorAppender" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- 过滤日志 -->
        <filter class="ch.qos.logback.classic.filter.ThresholdFilter">
            <level>ERROR</level>
        </filter>
        <!-- prudent mode (safe concurrent writes from multiple JVMs) does not allow
             the <file> property on RollingFileAppender; the active file name
             comes from fileNamePattern instead -->
        <prudent>true</prudent>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log.path}/error/errorAppender.%d{yyyy-MM-dd}.log</fileNamePattern>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} error [%thread] %level %logger{35} - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <appender name="insurantFile" class="ch.qos.logback.core.rolling.RollingFileAppender">
        <!-- prudent mode: the <file> property is not allowed, see note above -->
        <prudent>true</prudent>
        <rollingPolicy class="ch.qos.logback.core.rolling.TimeBasedRollingPolicy">
            <fileNamePattern>${log.path}/insurant/insurant.%d{yyyy-MM-dd}.log</fileNamePattern>
        </rollingPolicy>
        <encoder>
            <pattern>%d{yyyy-MM-dd HH:mm:ss.SSS} insurant [%thread] [%X{traceId}] %level [%c] [%F:%L] - %msg%n</pattern>
            <charset>UTF-8</charset>
        </encoder>
    </appender>

    <logger name="cn.com.xmh.controller.TestController" level="INFO">
        <appender-ref ref="insurantFile"/>
    </logger>

    <!--<logger name="org.apache"  level="INFO">-->
        <!--<appender-ref ref="baseFile" />-->
    <!--</logger>-->

    <!-- additivity: when true (the default), a child logger writes not only to its own
         appenders but also to the root logger's appenders; set additivity="false" to suppress that -->
    <root level="INFO">
        <appender-ref ref="CONSOLE"/>
    </root>

</configuration>
With the Lombok plugin, @Slf4j generates the log field (the handler method below is illustrative, added so the snippet compiles):
@Controller
@RequestMapping("/cos")
@Slf4j
public class TestController {
    @GetMapping("/test") @ResponseBody
    public String test() {
        log.info("hello1*****************mytestlog");
        return "ok";
    }
}
Setting the traceId (servlet filter)
import java.io.IOException;
import java.util.UUID;
import javax.servlet.*;
import org.slf4j.MDC;

public class LogMdcFilter implements Filter {
    private static final String UNIQUE_ID = "traceId";
    @Override
    public void init(FilterConfig filterConfig) {
        System.out.println("init**********filter************************************");
    }
    @Override
    public void doFilter(ServletRequest request, ServletResponse response, FilterChain chain) throws IOException, ServletException {
        System.out.println("doFilter**********filter************************************");
        boolean bInsertMDC = insertMDC();
        try {
            chain.doFilter(request, response);
        } finally {
            if(bInsertMDC) {
                MDC.remove(UNIQUE_ID);
            }
        }
    }
    @Override
    public void destroy() {
    }
    private boolean insertMDC() {
        UUID uuid = UUID.randomUUID();
        String uniqueId = uuid.toString().replace("-", "");
        MDC.put(UNIQUE_ID, uniqueId);
        return true;
    }
}
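A possible extension (a hedged sketch, not in the original post): reuse an upstream traceId when the caller sends one, so a single traceId can follow a request across services. The X-Trace-Id header name is an assumption, and this variant also needs import javax.servlet.http.HttpServletRequest.

    // hypothetical variant of insertMDC(): prefer a traceId passed by the caller
    private boolean insertMDC(ServletRequest request) {
        String traceId = null;
        if (request instanceof HttpServletRequest) {
            // "X-Trace-Id" is an assumed header name, not from the original post
            traceId = ((HttpServletRequest) request).getHeader("X-Trace-Id");
        }
        if (traceId == null || traceId.isEmpty()) {
            traceId = UUID.randomUUID().toString().replace("-", "");
        }
        MDC.put(UNIQUE_ID, traceId);
        return true;
    }

doFilter would then call insertMDC(request) instead of insertMDC().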

@Configuration
public class MyConfig extends WebMvcConfigurationSupport {

    @Bean
    public FilterRegistrationBean<LogMdcFilter> filterRegistrationBean() {
        FilterRegistrationBean<LogMdcFilter> registrationBean = new FilterRegistrationBean<>();
        registrationBean.setFilter(new LogMdcFilter());

        // URL patterns the filter is applied to
        List<String> urls = new ArrayList<>();
        urls.add("/*");
        registrationBean.setUrlPatterns(urls);

        return registrationBean;
    }
}

Setting the traceId for async (@Async) tasks
@Aspect
@Component
public class LogMdcAspect {
    private static final String UNIQUE_ID = "traceId";

    @Pointcut("@annotation(org.springframework.scheduling.annotation.Async)")
    public void logPointCut() {
    }

    @Around("logPointCut()")
    public Object around(ProceedingJoinPoint point) throws Throwable {
        // the aspect runs on the async thread, so a fresh traceId is minted there
        MDC.put(UNIQUE_ID, UUID.randomUUID().toString().replace("-", ""));
        try {
            return point.proceed(); // execute the target method
        } finally {
            MDC.remove(UNIQUE_ID); // clean up even if the method throws
        }
    }
}
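Note the aspect above generates a new traceId on the async thread rather than carrying over the caller's. If you want the submitting thread's traceId to propagate into @Async tasks, a common alternative is a Spring TaskDecorator that copies the MDC map. A minimal sketch, not from the original post (the bean setup and class name are assumptions):

import java.util.Map;

import org.slf4j.MDC;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableAsync;
import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor;

@Configuration
@EnableAsync
public class AsyncMdcConfig {

    @Bean
    public ThreadPoolTaskExecutor taskExecutor() {
        ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
        executor.setTaskDecorator(runnable -> {
            // capture the submitting thread's MDC (including traceId)
            Map<String, String> context = MDC.getCopyOfContextMap();
            return () -> {
                if (context != null) {
                    MDC.setContextMap(context);
                }
                try {
                    runnable.run();
                } finally {
                    MDC.clear(); // don't leak context into pooled threads
                }
            };
        });
        executor.initialize();
        return executor;
    }
}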

  

logback-kafka-appender

        <dependency>
            <groupId>org.springframework.boot</groupId>
            <artifactId>spring-boot-starter-web</artifactId>
            <version>2.6.8</version>
        </dependency>
        <!-- the following dependencies are needed so that
             com.github.danielwegener.logback.kafka.KafkaAppender can ship log events to Kafka -->
        <dependency>
            <groupId>com.github.danielwegener</groupId>
            <artifactId>logback-kafka-appender</artifactId>
            <version>0.2.0-RC2</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-classic</artifactId>
            <version>1.2.3</version>
        </dependency>
        <dependency>
            <groupId>ch.qos.logback</groupId>
            <artifactId>logback-core</artifactId>
            <version>1.2.3</version>
        </dependency>
        <dependency>
            <groupId>net.logstash.logback</groupId>
            <artifactId>logstash-logback-encoder</artifactId>
            <version>6.4</version>
        </dependency>

The Kafka section of logback-spring.xml:

<?xml version="1.0" encoding="UTF-8"?>
<configuration scan="true" scanPeriod="60 seconds" debug="false">
    <contextName>manage</contextName>
    <springProperty scope="context" name="bootstrapServers" source="spring.kafka.bootstrap-servers"/>

    <!-- ship log events to Kafka -->
    <appender name="kafkaAppender" class="com.github.danielwegener.logback.kafka.KafkaAppender">
        <encoder class="net.logstash.logback.encoder.LoggingEventCompositeJsonEncoder">
            <providers class="net.logstash.logback.composite.loggingevent.LoggingEventJsonProviders">
                <pattern>
                    <pattern>
                        {
                        "date":"%d{yyyy-MM-dd HH:mm:ss.SSS}",
                        "thread":"%thread",
                        "level":"%level",
                        "logger":"%logger{15}",
                        "line":"%line",
                        "msg": "%msg",
                        "stack_trace":"%exception"
                        }
                    </pattern>
                </pattern>
            </providers>
        </encoder>
        <topic>szdz-xmh-simple-log</topic>
        <keyingStrategy class="com.github.danielwegener.logback.kafka.keying.NoKeyKeyingStrategy"/>
        <deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy"/>
        <!-- bootstrapServers is resolved from the Spring Boot property spring.kafka.bootstrap-servers -->
        <producerConfig>bootstrap.servers=${bootstrapServers}</producerConfig>
        <producerConfig>retries=1</producerConfig>
        <producerConfig>batch.size=16389</producerConfig>
        <producerConfig>buffer.memory=33554432</producerConfig>
        <producerConfig>max.request.size=2097152</producerConfig>
    </appender>
    <!-- asynchronous delivery so logging never blocks business threads -->
    <appender name="ASYNC" class="com.xmh.manage.NoDiscardAsyncAppender">
        <appender-ref ref="kafkaAppender" />
    </appender>

    <root level="info">
        <appender-ref ref="ASYNC"/>
    </root>


The ASYNC appender above references this custom subclass. By default, Logback's AsyncAppender starts discarding TRACE/DEBUG/INFO events once its queue is 80% full; overriding isDiscardable keeps every event:

package com.xmh.manage;

import ch.qos.logback.classic.AsyncAppender;
import ch.qos.logback.classic.spi.ILoggingEvent;

public class NoDiscardAsyncAppender extends AsyncAppender {
    @Override
    protected boolean isDiscardable(ILoggingEvent event) {
        // never discard, even under queue pressure
        return false;
    }
}
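To verify that events actually land on the topic, a throwaway consumer works. A minimal sketch, assuming a broker at localhost:9092, the kafka-clients jar on the classpath, and the topic name configured above:

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

public class LogTopicChecker {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // assumption: local broker
        props.put("group.id", "log-check");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");
        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("szdz-xmh-simple-log"));
            // each record value is one JSON log event produced by the encoder above
            ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(5));
            for (ConsumerRecord<String, String> r : records) {
                System.out.println(r.value());
            }
        }
    }
}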

 
