1. Start the processes
[node123]systemctl stop firewalld                                                  # stop the firewall ([node123] means: run on node1, node2 and node3)
[node123]zkServer.sh start                                                         # start ZooKeeper on each node
[node123]kafka-server-start.sh /opt/app/kafka-0.11.0.0/config/server.properties &  # start the Kafka broker on each node, in the background
[node1]kafka-console-consumer.sh --zookeeper node1:2181 --topic demo               # start a console consumer for the "demo" topic on node1
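The consumer and producer below both use a topic named demo. If the topic does not already exist (and the brokers are not relying on automatic topic creation), it can be created first. The command below is a sketch; the partition and replication counts are assumptions, not values from the original setup:

[node1]kafka-topics.sh --zookeeper node1:2181 --create --topic demo --partitions 3 --replication-factor 2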
2. Java: add the Maven dependencies
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>kafkacode</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>0.11.0.0</version>
        </dependency>

        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_2.12</artifactId>
            <version>0.11.0.0</version>
        </dependency>
    </dependencies>
</project>
3. Java code
Consumer: consuming records that carry a key
package new_consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.util.Arrays;
import java.util.Properties;

public class ConsumerSelf {
    public static void main(String[] args) {
        Properties prop = new Properties();

        // Kafka broker list
        prop.setProperty("bootstrap.servers", "192.168.200.111:9092,192.168.200.112:9092,192.168.200.113:9092");
        // consumer group id
        prop.setProperty("group.id", "uek");
        // commit offsets automatically, once per second
        prop.put("enable.auto.commit", "true");
        prop.put("auto.commit.interval.ms", "1000");
        // deserializers must match the producer's serializers: Integer key, String value
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.IntegerDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<Integer, String> consumer = new KafkaConsumer<Integer, String>(prop);
        consumer.subscribe(Arrays.asList("demo"));

        while (true) {
            // block for up to 2 seconds waiting for new records
            ConsumerRecords<Integer, String> records = consumer.poll(2000);
            for (ConsumerRecord<Integer, String> record : records) {
                System.out.println("key of this record: " + record.key());
                System.out.println("value of this record: " + record.value());
                System.out.println("partition of this record: " + record.partition());
            }
        }
    }
}
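The consumer above commits offsets automatically once per second. As a variant (a minimal sketch, not part of the original code; the class name ManualCommitConsumer is made up here), auto-commit can be disabled and offsets committed with commitSync() only after the records returned by each poll() have been processed:

package new_consumer;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import java.util.Arrays;
import java.util.Properties;

// Sketch: same consumer as above, but committing offsets manually.
public class ManualCommitConsumer {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", "192.168.200.111:9092,192.168.200.112:9092,192.168.200.113:9092");
        prop.setProperty("group.id", "uek");
        // turn auto-commit off; offsets are committed only after processing
        prop.put("enable.auto.commit", "false");
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.IntegerDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<Integer, String> consumer = new KafkaConsumer<Integer, String>(prop);
        consumer.subscribe(Arrays.asList("demo"));

        while (true) {
            ConsumerRecords<Integer, String> records = consumer.poll(2000);
            for (ConsumerRecord<Integer, String> record : records) {
                System.out.println("key=" + record.key() + ", value=" + record.value() + ", partition=" + record.partition());
            }
            // commit the offsets of everything returned by this poll()
            consumer.commitSync();
        }
    }
}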
Producer: producing records that carry a key
package new_partition_pro;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;
import java.util.Properties;

public class NewProducerPartitionAndCallBack {
    public static void main(String[] args) {

        Properties prop = new Properties();
        // Kafka broker list
        prop.put("bootstrap.servers", "192.168.200.111:9092,192.168.200.112:9092,192.168.200.113:9092");
        // Integer key, String value
        prop.put("key.serializer", "org.apache.kafka.common.serialization.IntegerSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        // custom partitioner (see the CustomPartition sketch after this class)
        prop.put("partitioner.class", "new_partition_pro.CustomPartition");

        KafkaProducer<Integer, String> producer = new KafkaProducer<Integer, String>(prop);

        for (int i = 0; i < 100; i++) {
            // send to topic "demo" with an Integer key and a String value
            ProducerRecord<Integer, String> record = new ProducerRecord<Integer, String>("demo", i, "hello" + i);

            final int finalI = i;

            // the callback is invoked once the broker has acknowledged (or failed) the record
            producer.send(record, new Callback() {
                public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                    if (e != null) {
                        // the send failed; recordMetadata is null in that case
                        e.printStackTrace();
                        return;
                    }
                    System.out.println("partition of this record: " + recordMetadata.partition() + "---offset: " + recordMetadata.offset());
                    System.out.println("current key: " + finalI);
                }
            });
        }

        // make sure all buffered records are sent before the JVM exits
        producer.flush();
        producer.close();
    }
}
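The producer registers new_partition_pro.CustomPartition as its partitioner, but that class is not listed in this section. The sketch below shows what such a partitioner could look like against the Kafka 0.11 Partitioner interface; the key-modulo routing is an assumption, not necessarily what the original CustomPartition does:

package new_partition_pro;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;
import java.util.Map;

// Sketch of a custom partitioner; the key-modulo logic here is an assumption.
public class CustomPartition implements Partitioner {

    public int partition(String topic, Object key, byte[] keyBytes,
                         Object value, byte[] valueBytes, Cluster cluster) {
        int numPartitions = cluster.partitionsForTopic(topic).size();
        if (key == null) {
            // no key: fall back to partition 0
            return 0;
        }
        // keyed records: non-negative key hash modulo the partition count
        return (key.hashCode() & Integer.MAX_VALUE) % numPartitions;
    }

    public void close() {
        // nothing to clean up
    }

    public void configure(Map<String, ?> configs) {
        // no custom configuration needed
    }
}

With a partitioner like this, the partition printed by the producer callback and by the consumer follows the key value rather than the default hash partitioning.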