依赖
<!-- Spring for Apache Kafka: provides KafkaTemplate and @KafkaListener support
     (version is managed by the Spring Boot dependency BOM, hence no <version>). -->
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
配置
spring:
  kafka:
    bootstrap-servers: localhost:9092
    consumer:
      # Offset reset policy when no committed offset exists for the group:
      # none: throw an exception if no previously committed offset is found
      # earliest: resume from the committed offset if present; otherwise consume from the beginning
      # latest: resume from the committed offset if present; otherwise consume only newly arriving records
      # NOTE: the property must be the single kebab-case key `auto-offset-reset`;
      # nesting it as `auto: offset: reset:` binds to `...consumer.auto.offset.reset`,
      # which is not a KafkaProperties field and is silently ignored.
      auto-offset-reset: earliest
      # Interval in ms between automatic offset commits (only used when enable-auto-commit=true)
      auto-commit-interval: 1000
      # Periodically auto-commit consumed offsets
      enable-auto-commit: true
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      group-id: defaultGroupId
    producer:
      # Any value > 0 makes the client retransmit records whose send failed
      retries: 3
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
消息发送
@Component
@Slf4j
public class KafkaUtils {
@Autowired
private KafkaTemplate kafkaTemplate;
/**
* 发送kafka消息
*
* @param topicName
* @param jsonMessage
*/
public void send(String topicName, String jsonMessage) {
ListenableFuture send = kafkaTemplate.send(topicName, jsonMessage);
}
}
消息接收
// NOTE(review): @Configuration does register this class as a bean so the
// listener is discovered, but @Component would express the intent more
// directly since the class declares no @Bean methods — confirm before changing.
@Configuration
@Slf4j
public class Consumer {

    /**
     * Consumes messages from the "austinBusiness" topic, using the group-id
     * and String deserializers configured under spring.kafka.consumer.
     *
     * NOTE(review): logs ordinary traffic at WARN level — presumably to keep
     * messages visible under default log thresholds; consider INFO for
     * production use.
     */
    @KafkaListener(topics = "austinBusiness")
    public void consume(String message) {
        log.warn("kafka message:{}", message);
    }
}