Kafka
Pom（Maven 依赖，加入 pom.xml）
<dependency> <groupId>org.apache.kafka</groupId> <artifactId>kafka-clients</artifactId> <version>0.11.0.0</version> </dependency>
生产者（Producer 示例）
Properties properties = new Properties(); // properties.put("bootstrap.servers", // "192.168.2.200:9092,192.168.2.157:9092,192.168.2.233:9092,192.168.2.194:9092,192.168.2.122:9092"); // properties.put("bootstrap.servers", // "192.168.2.200:9092,192.168.2.233:9092,192.168.2.122:9092"); properties.put("bootstrap.servers", "127.0.0.1:9092"); properties.put("acks", "all"); properties.put("retries", 3); properties.put("batch.size", 16384); properties.put("linger.ms", 1); properties.put("buffer.memory", 33554432); properties.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); properties.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); Producer<String, String> producer = null; Gson gson = new Gson(); try { producer = new KafkaProducer<String, String>(properties); for (int i = 0; i < 2; i++) {// topID无所谓 producer.send(new ProducerRecord<String, String>("test1", gson.toJson(new obj("test", i + "age")))); } } catch (Exception e) { e.printStackTrace(); } finally { producer.close(); } }
消费者（Consumer 示例）
Properties properties = new Properties(); // properties.put("bootstrap.servers", "192.168.2.200:9092,192.168.2.157:9092,192.168.2.233:9092,192.168.2.194:9092,192.168.2.122:9092"); // properties.put("bootstrap.servers", "192.168.2.200:9092,192.168.2.233:9092,192.168.2.122:9092"); properties.put("bootstrap.servers", "127.0.0.1:9092"); properties.put("group.id", "GroupB"); // properties.put("zookeeper.connect", "192.168.2.200:2181,192.168.2.157:2181,192.168.2.233:2181,192.168.2.194:2181,192.168.2.122:2181"); // properties.put("zookeeper.connect", "192.168.2.200:2181,192.168.2.233:2181,192.168.2.122:2181"); properties.put("zookeeper.connect", "127.0.0.1:2181"); properties.put("zookeeper.session.timeout.ms", "40000"); properties.put("zookeeper.sync.time.ms", "20000"); properties.put("auto.commit.interval.ms", "100000"); properties.put("key.deserializer","org.apache.kafka.common.serialization.StringDeserializer"); properties.put("value.deserializer","org.apache.kafka.common.serialization.StringDeserializer"); KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(properties); consumer.subscribe(Arrays.asList("test1")); while(true) { ConsumerRecords<String, String> records = consumer.poll(100); for (ConsumerRecord<String,String> consumerRecord : records) { System.out.println(consumerRecord.offset()+"\t"+consumerRecord.key()+"\t"+consumerRecord.value()); } } }
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 记一次.NET内存居高不下排查解决与启示
· 探究高空视频全景AR技术的实现原理
· 理解Rust引用及其生命周期标识(上)
· 浏览器原生「磁吸」效果!Anchor Positioning 锚点定位神器解析
· 没有源码,如何修改代码逻辑?
· 全程不用写代码,我用AI程序员写了一个飞机大战
· DeepSeek 开源周回顾「GitHub 热点速览」
· 记一次.NET内存居高不下排查解决与启示
· MongoDB 8.0这个新功能碉堡了,比商业数据库还牛
· .NET10 - 预览版1新功能体验(一)