kafka 入门示例代码
本地搭建好docker kafka环境后,java代码:
package org.example;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * Minimal Kafka consumer example: subscribes to one topic and prints every
 * record it receives until the JVM is asked to shut down (e.g. Ctrl-C).
 */
public class SimpleConsumer {
    public static void main(String[] args) {
        String topicName = "test";

        Properties props = new Properties();
        // ConsumerConfig constants avoid typos in the raw config-key strings.
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(topicName));

        // The original `while (true)` made `consumer.close()` unreachable except
        // via an unexpected exception. A shutdown hook + wakeup() lets the poll
        // loop terminate cleanly so the consumer leaves its group promptly.
        final Thread mainThread = Thread.currentThread();
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            consumer.wakeup(); // makes the blocked poll() throw WakeupException
            try {
                mainThread.join(); // wait until close() has run
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }));

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("Received message: key = %s, value = %s, partition = %d%n",
                            record.key(), record.value(), record.partition());
                }
            }
        } catch (WakeupException e) {
            // Expected during shutdown; nothing to handle.
        } finally {
            consumer.close();
        }
    }
}
发送消息的代码:
package org.example;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * Minimal Kafka producer example: sends ten keyed String messages to a topic
 * and logs the partition/offset assigned to each via the async send callback.
 */
public class SimpleProducer {
    public static void main(String[] args) {
        String topicName = "test";

        Properties props = new Properties();
        // ProducerConfig constants avoid typos in the raw config-key strings.
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // Producer is AutoCloseable: try-with-resources replaces try/finally,
        // and close() flushes any records still buffered by the async sends.
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record =
                        new ProducerRecord<>(topicName, "key" + i, "message" + i);
                // send() is asynchronous; the callback fires once the broker acks.
                producer.send(record, (metadata, exception) -> {
                    if (exception != null) {
                        exception.printStackTrace();
                    } else {
                        System.out.println("Sent message to " + metadata.topic()
                                + " partition: " + metadata.partition()
                                + " with offset: " + metadata.offset());
                    }
                });
            }
        }
    }
}
为了让 Kafka 生产者使用用户名和密码进行身份验证,你需要在 Kafka 客户端配置中添加一些与 SASL 认证相关的属性。假设你使用的是 SASL/PLAIN 机制,以下是如何修改代码以包含用户名和密码的示例:
package org.example;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;

import java.util.Properties;

/**
 * Kafka producer example with SASL/PLAIN authentication: sends ten keyed
 * String messages to a topic on a broker that requires username/password.
 *
 * <p>Credentials are read from the KAFKA_USERNAME / KAFKA_PASSWORD environment
 * variables so they are not hardcoded in source; the original placeholder
 * values remain as fallbacks for the tutorial.
 */
public class SimpleProducer {
    public static void main(String[] args) {
        String topicName = "test";

        Properties props = new Properties();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringSerializer");

        // SASL authentication settings.
        props.put("security.protocol", "SASL_PLAINTEXT"); // or "SASL_SSL"
        props.put("sasl.mechanism", "PLAIN");

        // Never hardcode credentials in source; prefer environment variables
        // (or a secrets manager). Falls back to the tutorial placeholders.
        String username = System.getenv().getOrDefault("KAFKA_USERNAME", "your-username");
        String password = System.getenv().getOrDefault("KAFKA_PASSWORD", "your-password");
        String jaasTemplate =
                "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";";
        props.put("sasl.jaas.config", String.format(jaasTemplate, username, password));

        // try-with-resources: close() also flushes buffered records.
        try (Producer<String, String> producer = new KafkaProducer<>(props)) {
            for (int i = 0; i < 10; i++) {
                ProducerRecord<String, String> record =
                        new ProducerRecord<>(topicName, "key" + i, "message" + i);
                producer.send(record, (metadata, exception) -> {
                    if (exception != null) {
                        exception.printStackTrace();
                    } else {
                        System.out.println("Sent message to " + metadata.topic()
                                + " partition: " + metadata.partition()
                                + " with offset: " + metadata.offset());
                    }
                });
            }
        }
    }
}
package org.example;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

import java.time.Duration;
import java.util.Collections;
import java.util.Properties;

/**
 * Kafka consumer example with SASL/PLAIN authentication: subscribes to one
 * topic on a broker that requires username/password and prints every record.
 *
 * <p>Credentials are read from the KAFKA_USERNAME / KAFKA_PASSWORD environment
 * variables so they are not hardcoded in source; the original placeholder
 * values remain as fallbacks for the tutorial.
 */
public class SimpleConsumer {
    public static void main(String[] args) {
        String topicName = "test";

        Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "test-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG,
                "org.apache.kafka.common.serialization.StringDeserializer");

        // SASL authentication settings.
        props.put("security.protocol", "SASL_PLAINTEXT"); // or "SASL_SSL"
        props.put("sasl.mechanism", "PLAIN");

        // Never hardcode credentials in source; prefer environment variables
        // (or a secrets manager). Falls back to the tutorial placeholders.
        String username = System.getenv().getOrDefault("KAFKA_USERNAME", "your-username");
        String password = System.getenv().getOrDefault("KAFKA_PASSWORD", "your-password");
        String jaasTemplate =
                "org.apache.kafka.common.security.plain.PlainLoginModule required username=\"%s\" password=\"%s\";";
        props.put("sasl.jaas.config", String.format(jaasTemplate, username, password));

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(topicName));

        // The original `while (true)` made `consumer.close()` unreachable except
        // via an unexpected exception. A shutdown hook + wakeup() lets the poll
        // loop terminate cleanly so the consumer leaves its group promptly.
        final Thread mainThread = Thread.currentThread();
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            consumer.wakeup(); // makes the blocked poll() throw WakeupException
            try {
                mainThread.join(); // wait until close() has run
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
        }));

        try {
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(100));
                for (ConsumerRecord<String, String> record : records) {
                    System.out.printf("Received message: key = %s, value = %s, partition = %d%n",
                            record.key(), record.value(), record.partition());
                }
            }
        } catch (WakeupException e) {
            // Expected during shutdown; nothing to handle.
        } finally {
            consumer.close();
        }
    }
}
注意:
- 这里的 `security.protocol` 可以是 `SASL_PLAINTEXT` 或 `SASL_SSL`,视你的集群配置而定。
- 需要根据实际情况替换 `"your-username"` 和 `"your-password"`,不要把真实凭据提交到代码仓库。
- 如果你的 Kafka 集群使用了 SSL,你还需要配置 SSL 相关属性,例如 `ssl.truststore.location` 和 `ssl.truststore.password`。
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 全程不用写代码,我用AI程序员写了一个飞机大战
· MongoDB 8.0这个新功能碉堡了,比商业数据库还牛
· 记一次.NET内存居高不下排查解决与启示
· 白话解读 Dapr 1.15:你的「微服务管家」又秀新绝活了
· DeepSeek 开源周回顾「GitHub 热点速览」
2019-08-20 如何通过“流量线索”进行恶意程序感染分析
2019-08-20 下载恶意pcap包的网站汇总