Calling Kafka from the Java API

ZooKeeper and Kafka are already running.

The single-node broker's IP is 192.168.80.128.

Add the following dependencies to the Maven pom.xml. kafka-clients provides the new producer API, and kafka_2.11 provides the old high-level consumer API used below:

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.8.2.1</version>
</dependency>

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka_2.11</artifactId>
    <version>0.8.2.1</version>
</dependency>

 

The producer, KafkaProducerDemo.java, is as follows:

package com.anjubao.weixin.web.weChat;

import java.util.Properties;

import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;

/**
  @project:standardProject
  @class:KafkaProducerDemo.java
  @author:fuanyu E-mail:fuanyu163@163.com
  @date:2022-11-09 14:20:14
 */
public class KafkaProducerDemo {

    public static void main(String[] args) throws InterruptedException {
        /* 1. Connect to the cluster via a Properties configuration.
         * 2. Send data: topic + value.
         */
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.80.128:9092");
        props.put("acks", "all");
        props.put("retries", 0);
        props.put("batch.size", 16384);
        props.put("linger.ms", 1);
        props.put("buffer.memory", 33554432);
        props.put("key.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer",
                "org.apache.kafka.common.serialization.StringSerializer");

        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<String, String>(props);
        System.out.println("producer created");

        for (int i = 0; i < 10; i++) {
            // Sending data requires a ProducerRecord; the minimal constructor takes String topic, V value.
            System.out.println("sending message " + i);
            kafkaProducer.send(new ProducerRecord<String, String>("topic", "order message " + i));
            Thread.sleep(100);
        }

        // Flush any buffered records and release resources before the JVM exits.
        kafkaProducer.close();
    }

}
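Note that send() is asynchronous: it returns a Future immediately and the record is batched and transmitted in the background, so the demo above never checks whether delivery actually succeeded. If delivery needs to be confirmed, you can block on the returned Future or pass a Callback. The class below is a minimal sketch of both styles against the same broker and topic as the demo; the class name KafkaProducerAckDemo is an illustrative choice, not part of the original post.

package com.anjubao.weixin.web.weChat;

import java.util.Properties;
import java.util.concurrent.Future;

import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

public class KafkaProducerAckDemo {

    public static void main(String[] args) throws Exception {
        Properties props = new Properties();
        props.put("bootstrap.servers", "192.168.80.128:9092");
        props.put("acks", "all");
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");

        KafkaProducer<String, String> producer = new KafkaProducer<String, String>(props);

        // 1. Synchronous style: block on the Future until the broker acknowledges the record.
        Future<RecordMetadata> future =
                producer.send(new ProducerRecord<String, String>("topic", "sync message"));
        RecordMetadata metadata = future.get();
        System.out.println("acked: partition=" + metadata.partition() + " offset=" + metadata.offset());

        // 2. Asynchronous style: the callback runs when the send succeeds or fails.
        producer.send(new ProducerRecord<String, String>("topic", "async message"), new Callback() {
            public void onCompletion(RecordMetadata md, Exception e) {
                if (e != null) {
                    e.printStackTrace();
                } else {
                    System.out.println("acked: partition=" + md.partition() + " offset=" + md.offset());
                }
            }
        });

        producer.close();
    }
}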

The consumer, KafkaConsumerDemo.java:

package com.anjubao.weixin.web.weChat;

import java.util.List;
import java.util.Properties;
import java.util.concurrent.TimeUnit;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.ConsumerIterator;
import kafka.consumer.KafkaStream;
import kafka.consumer.Whitelist;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.MessageAndMetadata;

/**
  @project:standardProject
  @class:KafkaConsumerDemo.java
  @author:fuanyu E-mail:fuanyu163@163.com
  @date:2022-11-09 14:22:30
 */
public class KafkaConsumerDemo {

    public static void main(String[] args) throws Exception {
        Properties properties = new Properties();
        properties.put("zookeeper.connect", "192.168.80.128:2181");
        properties.put("auto.commit.enable", "true");
        properties.put("auto.commit.interval.ms", "60000");
        properties.put("group.id", "test");

        ConsumerConfig consumerConfig = new ConsumerConfig(properties);

        // The old high-level consumer connects through ZooKeeper.
        ConsumerConnector javaConsumerConnector = Consumer.createJavaConsumerConnector(consumerConfig);
        System.out.println("consumer connected");

        // Topic filter: subscribe to every topic matching the whitelist pattern.
        Whitelist whitelist = new Whitelist("topic");
        List<KafkaStream<byte[], byte[]>> partitions = javaConsumerConnector.createMessageStreamsByFilter(whitelist);

        if (partitions == null || partitions.isEmpty()) {
            System.out.println("empty!");
            TimeUnit.SECONDS.sleep(1);
            return;
        }

        // Consume messages: hasNext() blocks until a new message arrives on the stream.
        for (KafkaStream<byte[], byte[]> partition : partitions) {
            ConsumerIterator<byte[], byte[]> iterator = partition.iterator();
            while (iterator.hasNext()) {
                MessageAndMetadata<byte[], byte[]> next = iterator.next();
                System.out.println("partition:" + next.partition());
                System.out.println("offset:" + next.offset());
                System.out.println("received message:" + new String(next.message(), "utf-8"));
            }
        }
    }

}
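The consumer above blocks in hasNext() until messages arrive and normally runs until the JVM is killed. For a cleaner exit, the old high-level consumer exposes ConsumerConnector.shutdown(), which can be called from a JVM shutdown hook; with auto-commit enabled it also commits the current offsets before the process dies. The class below is a minimal sketch of that pattern, reusing the properties from KafkaConsumerDemo; the class name KafkaConsumerShutdownDemo is an illustrative choice.

package com.anjubao.weixin.web.weChat;

import java.util.Properties;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.javaapi.consumer.ConsumerConnector;

public class KafkaConsumerShutdownDemo {

    public static void main(String[] args) {
        Properties properties = new Properties();
        properties.put("zookeeper.connect", "192.168.80.128:2181");
        properties.put("auto.commit.enable", "true");
        properties.put("group.id", "test");

        final ConsumerConnector connector =
                Consumer.createJavaConsumerConnector(new ConsumerConfig(properties));

        // Register a shutdown hook so Ctrl+C stops the streams cleanly:
        // shutdown() wakes up blocked iterators and, with auto-commit enabled,
        // commits the current offsets before the process exits.
        Runtime.getRuntime().addShutdownHook(new Thread() {
            public void run() {
                System.out.println("shutting down consumer...");
                connector.shutdown();
            }
        });

        // ... create message streams and consume as in KafkaConsumerDemo ...
    }
}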

 

To test, run KafkaConsumerDemo.java first, then run KafkaProducerDemo.java.

In the consumer's output, each received record is printed with its partition, offset, and message content.

 
