最简洁的kafka开发实例
问题导读
1.如何启动kafka?
2.如何通过代码实现生产者例子 ?
3.如何通过代码实现消费者例子 ?
1.启动kafka。
//启动zookeeper server (结尾加 & 是为了让进程在后台运行,从而可以继续使用当前命令行):
bin/zookeeper-server-start.sh config/zookeeper.properties &
//启动kafka server:
bin/kafka-server-start.sh config/server.properties &
2.新建一个生产者例子
import java.util.Properties;
import kafka.javaapi.producer.Producer;
import kafka.producer.KeyedMessage;
import kafka.producer.ProducerConfig;
/**
 * Minimal Kafka 0.8 producer example: sends a single keyed String message
 * to topic "test" and closes the producer.
 */
public class KafkaTest {
    public static void main(String[] args) {
        Properties props = new Properties();
        // The 0.8 producer discovers the cluster from the broker list;
        // a "zk.connect" entry is ignored by ProducerConfig, so it is omitted here.
        props.put("metadata.broker.list", "10.103.22.47:9092");
        props.put("serializer.class", "kafka.serializer.StringEncoder");
        // acks=1: the leader must acknowledge the write before send() returns.
        props.put("request.required.acks", "1");

        ProducerConfig config = new ProducerConfig(props);
        Producer<String, String> producer = new Producer<String, String>(config);
        try {
            String key = "192.168.2.3";
            String msg = "this is a messageuuu!";
            // topic, key (drives partitioning), payload
            KeyedMessage<String, String> data =
                new KeyedMessage<String, String>("test", key, msg);
            producer.send(data);
        } finally {
            // Always release the producer's network resources, even if send() throws.
            producer.close();
        }
    }
}
3.新建一个消费者例子
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;

import kafka.consumer.Consumer;
import kafka.consumer.ConsumerConfig;
import kafka.consumer.KafkaStream;
import kafka.javaapi.consumer.ConsumerConnector;
import kafka.message.Message;
import kafka.message.MessageAndMetadata;
/**
 * Minimal Kafka 0.8 high-level consumer example: opens two streams on topic
 * "test" and prints every message on a fixed thread pool. Runs until killed,
 * as is typical for a long-lived consumer process.
 */
public class ConsumerSample {
    public static void main(String[] args) {
        // Consumer properties: ZooKeeper quorum and consumer group id.
        Properties props = new Properties();
        props.put("zookeeper.connect", "10.103.22.47:2181");
        props.put("zookeeper.connectiontimeout.ms", "1000000");
        props.put("group.id", "test_group");

        // Create the connection to the cluster.
        ConsumerConfig consumerConfig = new ConsumerConfig(props);
        ConsumerConnector connector =
            Consumer.createJavaConsumerConnector(consumerConfig);

        // Request 2 streams for topic "test" (one per worker thread below).
        Map<String, Integer> topicThreadCounts = new HashMap<String, Integer>();
        topicThreadCounts.put("test", 2);
        Map<String, List<KafkaStream<byte[], byte[]>>> topicMessageStreams =
            connector.createMessageStreams(topicThreadCounts);
        List<KafkaStream<byte[], byte[]>> streams = topicMessageStreams.get("test");

        // One thread per stream; each blocks on its stream's iterator forever.
        ExecutorService threadPool = Executors.newFixedThreadPool(2);
        for (final KafkaStream<byte[], byte[]> stream : streams) {
            threadPool.submit(new Runnable() {
                public void run() {
                    // Without explicit decoders, message() already yields the raw
                    // byte[] payload — casting it to kafka.message.Message (as the
                    // original code did) would throw ClassCastException.
                    for (MessageAndMetadata<byte[], byte[]> msgAndMetadata : stream) {
                        System.out.println("topic: " + msgAndMetadata.topic());
                        // Decode with an explicit charset instead of the platform default.
                        String content =
                            new String(msgAndMetadata.message(), StandardCharsets.UTF_8);
                        System.out.println("message content: " + content);
                    }
                }
            });
        }
    }
}