Writing Logs to Kafka with a Custom Logger
1. Add the open-source component logback-kafka-appender
<!-- kafka-appender -->
<dependency>
    <groupId>com.github.danielwegener</groupId>
    <artifactId>logback-kafka-appender</artifactId>
    <version>0.2.0-RC2</version>
</dependency>
<dependency>
    <groupId>ch.qos.logback</groupId>
    <artifactId>logback-classic</artifactId>
</dependency>
2. Add the core configuration to logback-spring.xml
<!-- Kafka appender -->
<appender name="kafkaAppender" class="com.github.danielwegener.logback.kafka.KafkaAppender">
    <encoder class="ch.qos.logback.classic.encoder.PatternLayoutEncoder">
        <pattern>%msg%n</pattern>
    </encoder>
    <topic>${topic.name}</topic>
    <keyingStrategy class="com.github.danielwegener.logback.kafka.keying.NoKeyKeyingStrategy" />
    <deliveryStrategy class="com.github.danielwegener.logback.kafka.delivery.AsynchronousDeliveryStrategy" />
    <producerConfig>retries=3</producerConfig>
    <producerConfig>linger.ms=1000</producerConfig>
    <producerConfig>bootstrap.servers=${mvn.log.kafkaAppender}</producerConfig>
    <producerConfig>client.id=delta</producerConfig>
    <producerConfig>compression.type=snappy</producerConfig>
</appender>

<!-- Async appender that queues log events -->
<appender name="KAFKA-ASYNC" class="ch.qos.logback.classic.AsyncAppender">
    <appender-ref ref="kafkaAppender" />
    <!-- Never block on enqueue: when the queue is full, log events are discarded instead of blocking the caller. -->
    <neverBlock>true</neverBlock>
</appender>

<!-- Route logs from the ModifyLog class to Kafka only (additivity="false" keeps them out of other appenders) -->
<logger name="delta.api.domain.customization.ModifyLog" level="INFO" additivity="false">
    <appender-ref ref="KAFKA-ASYNC"/>
</logger>
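With this configuration, every log line emitted by the ModifyLog logger lands on the configured topic as a plain string, because the %msg%n pattern keeps only the message body. The ${topic.name} and ${mvn.log.kafkaAppender} placeholders are presumably resolved at build time (the mvn. prefix suggests Maven resource filtering) and must point at your actual topic and broker list. Below is a minimal verification sketch, assuming kafka-clients is on the classpath; the broker address, the group id, and the topic name "modify-log" are placeholders to be replaced with whatever ${mvn.log.kafkaAppender} and ${topic.name} resolve to:

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

public class ModifyLogTopicChecker {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092"); // assumption: replace with your broker list
        props.put("group.id", "modify-log-check");        // hypothetical group id, used only for this check
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("auto.offset.reset", "earliest");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            consumer.subscribe(Collections.singletonList("modify-log")); // assumption: your ${topic.name}
            for (int i = 0; i < 5; i++) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofSeconds(2));
                for (ConsumerRecord<String, String> record : records) {
                    // Each record value is the raw JSON written by operatorLogger.info(...)
                    System.out.println(record.value());
                }
            }
        }
    }
}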
3. Usage
Declare the logger in the class. Passing ModifyLog.class to getLogger() gives the logger the name delta.api.domain.customization.ModifyLog, which is exactly the logger configured above, so everything written through it is routed to Kafka:
private Logger operatorLogger = LoggerFactory.getLogger(ModifyLog.class);
Build and write the audit record:
ModifyLog modifyLog = ModifyLog.builder()
.traceId(IdUtil.randomUUID())
.appName(ApplicationRunner.APP_NAME)
.erp(LoginContext.getLoginContext().getPin())
.modifyTime(System.currentTimeMillis())
.entityName(request.getDefinition())
.operate(OperatorType.CREATE.name())
.before(EMPTY)
.after(JsonUtils.toJSONString(request))
.build();
operatorLogger.info(JsonUtils.toJSONString(modifyLog));
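The original post does not show the ModifyLog entity itself. A minimal sketch of what it likely looks like, assuming a Lombok @Builder POJO whose fields match the builder calls above (the real class sits in delta.api.domain.customization and may carry additional fields):

import lombok.Builder;
import lombok.Data;

// Sketch of the audit-log entity; field set inferred from the builder calls above.
@Data
@Builder
public class ModifyLog {
    private String traceId;    // unique id for tracing one modification
    private String appName;    // application that produced the change
    private String erp;        // operator account (pin of the logged-in user)
    private long modifyTime;   // modification timestamp in milliseconds
    private String entityName; // name of the entity/definition being changed
    private String operate;    // operation type, e.g. CREATE
    private String before;     // JSON snapshot before the change (empty for CREATE)
    private String after;      // JSON snapshot after the change
}

Serializing the whole object with JsonUtils.toJSONString and logging that single string keeps each Kafka message self-contained, so downstream consumers only need to parse one JSON document per record.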