搭建 kafka 集群
下载
# Download the Kafka 3.1.0 release (Scala 2.13 build).
# NOTE(review): old releases are eventually moved off the mirror network to
# archive.apache.org/dist/kafka/ — this dlcdn.apache.org URL may stop working.
wget https://dlcdn.apache.org/kafka/3.1.0/kafka_2.13-3.1.0.tgz
tar -zxvf kafka_2.13-3.1.0.tgz
mv kafka_2.13-3.1.0 /usr/local/kafka
cd /usr/local/kafka/
# Create the data directory that log.dirs in server.properties points at.
mkdir logs
# Edit the broker configuration (the settings shown below).
vim config/server.properties
#主要修改以下6个地方:
#1) broker.id 需要保证每一台 kafka 都有一个独立的 broker.id
#2) log.dirs 数据存放的目录
#3) zookeeper.connect zookeeper的连接地址信息
#4) delete.topic.enable 是否直接删除topic
#5) host.name 主机的名称
#6) 修改:
# PLAINTEXT://master-node:9092 PLAINTEXT://node-1:9092 PLAINTEXT://node-2:9092
# Listener this broker binds to — one value per host:
# master-node: PLAINTEXT://master-node:9092, node-1: PLAINTEXT://node-1:9092,
# node-2: PLAINTEXT://node-2:9092
listeners=PLAINTEXT://master-node:9092
# Unique id of this broker within the cluster; the three machines use 0, 1 and 2.
broker.id=0
# Thread pools for network handling and disk I/O.
num.network.threads=3
num.io.threads=8
# Socket buffer sizes (bytes) and the maximum request size the broker accepts.
socket.send.buffer.bytes=102400
socket.receive.buffer.bytes=102400
socket.request.max.bytes=104857600
# Where producer data is stored on disk, organized per topic/partition.
log.dirs=/usr/local/kafka/logs
# Defaults for new topics and the internal offsets/transaction topics.
num.partitions=1
num.recovery.threads.per.data.dir=1
offsets.topic.replication.factor=1
transaction.state.log.replication.factor=1
transaction.state.log.min.isr=1
# Retention: keep data 168 h (7 days), roll segments at 1 GiB,
# check for expired segments every 300 000 ms (5 min).
log.retention.hours=168
log.segment.bytes=1073741824
log.retention.check.interval.ms=300000
# ZooKeeper ensemble — the three hosts are master-node, node-1 and node-2.
zookeeper.connect=master-node:2181,node-1:2181,node-2:2181
zookeeper.connection.timeout.ms=6000
group.initial.rebalance.delay.ms=0
# Actually delete topics when requested instead of only marking them.
delete.topic.enable=true
# One of master-node / node-1 / node-2 per machine.
# NOTE(review): host.name is a legacy setting superseded by
# listeners/advertised.listeners — confirm Kafka 3.1 still accepts it.
host.name=master-node
自启脚本
#!/bin/sh
#
# chkconfig: 345 99 01
# description: Kafka
#
# File : Kafka
#
# Description: Starts and stops the Kafka server
#
source /etc/rc.d/init.d/functions
KAFKA_HOME=/usr/local/kafka
KAFKA_USER=root
export LOG_DIR=/data/kafka/runlog
[ -e /etc/sysconfig/kafka ] && . /etc/sysconfig/kafka
# See how we were called.
case "$1" in
start)
echo -n "Starting Kafka:"
# Make sure the log directory exists before redirecting into it.
mkdir -p "$LOG_DIR"
/sbin/runuser -s /bin/sh $KAFKA_USER -c "nohup $KAFKA_HOME/bin/kafka-server-start.sh $KAFKA_HOME/config/server.properties > $LOG_DIR/server.out 2> $LOG_DIR/server.err &"
echo " done."
exit 0
;;
stop)
echo -n "Stopping Kafka: "
# \$2 must be escaped: inside this double-quoted string the single quotes do
# NOT stop the outer shell from expanding $2 (this script's second argument,
# normally empty), which would turn the awk program into '{print }' and feed
# entire ps lines to kill.
/sbin/runuser -s /bin/sh $KAFKA_USER -c "ps -ef | grep kafka.Kafka | grep -v grep | awk '{print \$2}' | xargs kill"
echo " done."
exit 0
;;
hardstop)
echo -n "Stopping (hard) Kafka: "
# Same \$2 escaping as in stop; -9 forces an immediate kill.
/sbin/runuser -s /bin/sh $KAFKA_USER -c "ps -ef | grep kafka.Kafka | grep -v grep | awk '{print \$2}' | xargs kill -9"
echo " done."
exit 0
;;
status)
# Backticks run in this shell, so the single-quoted $2 is safe here.
c_pid=`ps -ef | grep kafka.Kafka | grep -v grep | awk '{print $2}'`
if [ "$c_pid" = "" ] ; then
echo "Stopped"
exit 3
else
echo "Running $c_pid"
exit 0
fi
;;
restart)
# 'stop' and 'start' are case labels, not shell functions or commands —
# the original plain 'stop; start' would fail with "command not found".
"$0" stop
"$0" start
;;
*)
echo "Usage: kafka {start|stop|hardstop|status|restart}"
exit 1
;;
esac
chkconfig kafka on
Java API 测试
<dependencies>
    <!-- Kafka Java client. Version aligned with the 3.1.0 broker installed
         above (the original 1.0.0 was years behind the cluster).
         https://mvnrepository.com/artifact/org.apache.kafka/kafka-clients -->
    <dependency>
        <groupId>org.apache.kafka</groupId>
        <artifactId>kafka-clients</artifactId>
        <version>3.1.0</version>
    </dependency>
</dependencies>
<build>
    <plugins>
        <!-- Compile for Java 11 with UTF-8 sources. -->
        <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-compiler-plugin</artifactId>
            <version>3.8.1</version>
            <configuration>
                <source>11</source>
                <target>11</target>
                <encoding>UTF-8</encoding>
            </configuration>
        </plugin>
    </plugins>
</build>
/**
 * Minimal Kafka producer demo: publishes a random greeting to the
 * {@code demo} topic every 500 ms until the process is terminated.
 */
public class ProducerDemo {
    /** Topic name, also read by {@code ConsumerDemo}. */
    public static String topic = "demo";

    public static void main(String[] args) throws Exception {
        final Properties properties = new Properties();
        // Bootstrap against all three brokers of the cluster.
        properties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, "master-node:9092,node-1:9092,node-2:9092");
        properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        KafkaProducer<String, String> kafkaProducer = new KafkaProducer<>(properties);
        // Create the RNG once, instead of allocating a new Random per message
        // as the original loop body did.
        final Random random = new Random();
        try {
            while (true) {
                String msg = "Hello, " + random.nextInt(100);
                ProducerRecord<String, String> record = new ProducerRecord<>(topic, msg);
                kafkaProducer.send(record);
                System.out.println("消息发送成功: " + msg);
                Thread.sleep(500);
            }
        } finally {
            // Flush buffered records and release network resources on exit.
            kafkaProducer.close();
        }
    }
}
/**
 * Minimal Kafka consumer demo: joins group {@code demo}, subscribes to
 * {@code ProducerDemo.topic}, and prints every record it receives, forever.
 */
public class ConsumerDemo {
    public static void main(String[] args) {
        final Properties props = new Properties();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "master-node:9092,node-1:9092,node-2:9092");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "demo");

        KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props);
        consumer.subscribe(Collections.singletonList(ProducerDemo.topic));

        // Endless poll loop: print each record of every fetched batch.
        while (true) {
            ConsumerRecords<String, String> batch = consumer.poll(100);
            for (ConsumerRecord<String, String> rec : batch) {
                System.out.println(String.format("topic:%s,offset:%d,消息:%s", rec.topic(), rec.offset(), rec.value()));
            }
        }
    }
}