Case Requirements
/**
 * Send every line typed into the console to Kafka for storage.
 * Each input line has the following form:
 *   s001 zs 20 男
 *   s002 ls 21 女
 * The data must be sent to the Kafka topic "student"; each record must use the student ID
 * as the key and the full student record as the value.
 * The "student" topic must have exactly two partitions: partition 0 stores records whose
 * gender is 男 (male), and partition 1 stores records whose gender is 女 (female).
 * Then create a consumer to consume the data in the topic;
 * consuming both partitions is sufficient.
 *
 * Producer requirements:
 * 1. The data sent by the producer is typed into the console (keyboard input).
 * 2. The producer uses a custom partitioning mechanism when sending data.
 * 3. The producer produces key/value records, with the student ID as the key and the student record as the value.
 *
 * Consumer requirements:
 * Being able to consume the data in the Kafka "student" topic normally is sufficient.
 */

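The requirements assume a "student" topic that already exists with two partitions. It can be created with the kafka-topics command-line tool, or programmatically; below is a minimal sketch using the Kafka AdminClient (assuming a kafka-clients version that ships the AdminClient API, i.e. 0.11 or later). The class name and the replication factor of 1 are illustrative assumptions; the broker list is copied from the producer and consumer below.

package practice;

import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.admin.NewTopic;

import java.util.Collections;
import java.util.Properties;

public class CreateStudentTopic {
    public static void main(String[] args) throws Exception {
        Properties prop = new Properties();
        // Same broker list as the producer/consumer in this exercise.
        prop.put("bootstrap.servers", "192.168.200.111:9092,192.168.200.112:9092,192.168.200.113:9092");

        AdminClient admin = AdminClient.create(prop);
        // "student" topic with 2 partitions; replication factor 1 is an assumption.
        NewTopic student = new NewTopic("student", 2, (short) 1);
        admin.createTopics(Collections.singletonList(student)).all().get();
        admin.close();
    }
}
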
Producer
package practice;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Properties;

public class Producer {
    public static void main(String[] args) {

        Properties prop = new Properties();
        // Kafka broker addresses
        prop.put("bootstrap.servers", "192.168.200.111:9092,192.168.200.112:9092,192.168.200.113:9092");
        // Serializers for the record key and value
        prop.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        prop.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        // Use the custom partitioner that routes records by gender
        prop.put("partitioner.class", "practice.CustomPartition");

        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(prop);

        // Read lines from the console and send each one to the "student" topic,
        // keyed by the student ID (the first field of the line).
        BufferedReader reader = new BufferedReader(new InputStreamReader(System.in));
        String line = null;
        try {
            while ((line = reader.readLine()) != null) {
                System.out.println(line);

                String[] str = line.split(" ");
                final String sno = str[0];
                ProducerRecord<String, String> record = new ProducerRecord<String, String>("student", sno, line);
                producer.send(record, new Callback() {
                    public void onCompletion(RecordMetadata recordMetadata, Exception e) {
                        System.out.println("Partition of this record: " + recordMetadata.partition() + "---offset: " + recordMetadata.offset());
                        System.out.println("Key: " + sno + "-----topic: " + recordMetadata.topic());
                    }
                });
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            // Flush any buffered records and release client resources
            producer.close();
        }
    }
}
Custom Partitioner
package practice;

import org.apache.kafka.clients.producer.Partitioner;
import org.apache.kafka.common.Cluster;

import java.util.Map;

public class CustomPartition implements Partitioner {
    public int partition(String topic, Object key, byte[] keyBytes, Object value, byte[] valueBytes, Cluster cluster) {
        // The value is the whole student line, e.g. "s001 zs 20 男";
        // the last field is the gender, which decides the partition.
        String line = value.toString();
        String[] fields = line.split(" ");
        String sex = fields[fields.length - 1];

        // Compare strings with equals(); == compares references and would (almost) always be false.
        if ("男".equals(sex)) {
            return 0;
        } else {
            return 1;
        }
    }

    public void close() {

    }

    public void configure(Map<String, ?> map) {

    }
}
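A quick way to sanity-check the routing logic is to call partition() directly with the two sample lines from the requirements. A minimal sketch follows; the class name is illustrative, and the null arguments are acceptable only because this particular partitioner reads nothing but the value.

package practice;

public class CustomPartitionCheck {
    public static void main(String[] args) {
        CustomPartition p = new CustomPartition();
        // Only the value argument matters for this partitioner, so the other arguments can be null here.
        System.out.println(p.partition("student", "s001", null, "s001 zs 20 男", null, null)); // expected: 0
        System.out.println(p.partition("student", "s002", null, "s002 ls 21 女", null, null)); // expected: 1
    }
}
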
Consumer
package practice;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

import java.util.Arrays;
import java.util.Properties;

public class Customer {
    public static void main(String[] args) {
        Properties prop = new Properties();
        // Kafka broker addresses
        prop.setProperty("bootstrap.servers", "192.168.200.111:9092,192.168.200.112:9092,192.168.200.113:9092");
        // Consumer group id
        prop.setProperty("group.id", "uek");
        // Commit offsets automatically every second
        prop.put("enable.auto.commit", "true");
        prop.put("auto.commit.interval.ms", "1000");
        // Deserializers for the record key and value
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(prop);
        // With a single consumer in the group, subscribing assigns both partitions of "student" to it.
        consumer.subscribe(Arrays.asList("student"));
        while (true) {
            // Poll for up to 2 seconds for new records
            ConsumerRecords<String, String> records = consumer.poll(2000);
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("Key of this record: " + record.key());
                System.out.println("Value of this record: " + record.value());
                System.out.println("Partition of this record: " + record.partition());
            }
        }
    }
}
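The requirement only asks that both partitions be read, and with subscribe() the lone consumer in group "uek" receives both automatically. If you want to pin the consumer to the two partitions explicitly instead of relying on group assignment, the standard alternative is manual assignment via assign(). Below is a minimal sketch under that assumption; the class name is illustrative and the configuration is copied from Customer above.

package practice;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

import java.util.Arrays;
import java.util.Properties;

public class AssignedCustomer {
    public static void main(String[] args) {
        Properties prop = new Properties();
        prop.setProperty("bootstrap.servers", "192.168.200.111:9092,192.168.200.112:9092,192.168.200.113:9092");
        prop.setProperty("group.id", "uek");
        prop.put("enable.auto.commit", "true");
        prop.put("auto.commit.interval.ms", "1000");
        prop.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        prop.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<String, String>(prop);
        // assign() bypasses group partition assignment and reads exactly these two partitions.
        consumer.assign(Arrays.asList(
                new TopicPartition("student", 0),
                new TopicPartition("student", 1)));
        while (true) {
            ConsumerRecords<String, String> records = consumer.poll(2000);
            for (ConsumerRecord<String, String> record : records) {
                System.out.println(record.partition() + " | " + record.key() + " | " + record.value());
            }
        }
    }
}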