SpringBoot---Kafka
1、实战
<!-- https://mvnrepository.com/artifact/org.apache.kafka/kafka -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <!-- kafka_2.12 = Kafka client built against Scala 2.12 -->
    <artifactId>kafka_2.12</artifactId>
    <version>2.3.1</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.springframework.cloud/spring-cloud-starter-stream-kafka -->
<dependency>
    <groupId>org.springframework.cloud</groupId>
    <artifactId>spring-cloud-starter-stream-kafka</artifactId>
    <version>3.0.0.RELEASE</version>
</dependency>
package com.an.kafka.config;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka producer configuration.
 *
 * <p>Wires a String-key / String-value {@link ProducerFactory} and the
 * {@link KafkaTemplate} used to publish messages.
 *
 * @author anpeiyong
 * @since 2019/12/12
 */
@Configuration
@EnableKafka
public class KafkaProducerConfig {

    /** Factory producing String/String Kafka producers from {@link #producerConfigs()}. */
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    /**
     * Producer client settings.
     *
     * @return mutable map of {@link ProducerConfig} keys to values
     */
    @Bean
    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        // Broker address is hard-coded here; consider externalizing to application.yml.
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.8.172:9092");
        // No retries: a failed send is not re-attempted.
        props.put(ProducerConfig.RETRIES_CONFIG, 0);
        // Batch size in bytes (16 KB) and linger of 1 ms before flushing a batch.
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
        props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
        // Total producer buffer memory: 32 MB.
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    /** Template for sending String/String records; shared and thread-safe. */
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 | package com.an.kafka.producer; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.kafka.core.KafkaTemplate; import org.springframework.web.bind.annotation.GetMapping; import org.springframework.web.bind.annotation.PathVariable; import org.springframework.web.bind.annotation.RequestMapping; import org.springframework.web.bind.annotation.RestController; /** * @description: Kafka生产者 * @author: anpeiyong * @date: Created in 2019/12/12 14:54 * @since: */ @RestController @RequestMapping ( "kafkaProducer" ) public class KafkaProducer { @Autowired private KafkaTemplate kafkaTemplate; @GetMapping ( "send/{msg}" ) public String send( @PathVariable ( "msg" ) String msg){ kafkaTemplate.send( "an" , "antest" , msg); //topic_name是指发送信息的目标主题 return "success" ; } } |
package com.an.kafka.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

import java.util.HashMap;
import java.util.Map;

/**
 * Kafka consumer configuration.
 *
 * <p>Wires a String-key / String-value {@link ConsumerFactory} and the
 * listener-container factory backing {@code @KafkaListener} methods.
 *
 * @author anpeiyong
 * @since 2019/12/12
 */
@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    /** Container factory used by {@code @KafkaListener}-annotated methods. */
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    /** Factory producing String/String Kafka consumers from {@link #consumerConfigs()}. */
    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    /**
     * Consumer client settings.
     *
     * @return mutable map of {@link ConsumerConfig} keys to values
     */
    @Bean
    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        // Broker address is hard-coded here; consider externalizing to application.yml.
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "192.168.8.172:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-consumer-group");
        // Offsets auto-committed every second; at-least-once delivery is not guaranteed.
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "1000");
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    /**
     * Listener properties bean.
     *
     * <p>NOTE(review): a bare {@code new KafkaProperties.Listener()} carries only
     * defaults and is unused by the factories above — likely removable; kept to
     * preserve the existing bean contract.
     */
    @Bean
    public KafkaProperties.Listener listener() {
        return new KafkaProperties.Listener();
    }
}
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 | package com.an.kafka.consumer; import org.apache.kafka.clients.consumer.ConsumerRecord; import org.springframework.kafka.annotation.KafkaListener; import org.springframework.stereotype.Component; /** * @description: kafka消费者 * @author: anpeiyong * @date: Created in 2019/12/12 15:10 * @since: */ @Component public class KafkaConsumer { @KafkaListener (topics = "an" ) public void listen (ConsumerRecord record){ System.out.printf( "topic = %s, offset = %d, value = %s \n" , record.topic(), record.offset(), record.value()); } } |
结果:
topic = an, offset = 0, value = hello
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· AI与.NET技术实操系列(二):开始使用ML.NET
· 记一次.NET内存居高不下排查解决与启示
· 探究高空视频全景AR技术的实现原理
· 理解Rust引用及其生命周期标识(上)
· 浏览器原生「磁吸」效果!Anchor Positioning 锚点定位神器解析
· DeepSeek 开源周回顾「GitHub 热点速览」
· 物流快递公司核心技术能力-地址解析分单基础技术分享
· .NET 10首个预览版发布:重大改进与新特性概览!
· AI与.NET技术实操系列(二):开始使用ML.NET
· .NET10 - 预览版1新功能体验(一)