Add the dependency
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
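The examples below use the native Kafka client API (org.apache.kafka.clients.*), which spring-kafka pulls in transitively. If the project is not a Spring project, declaring kafka-clients directly is enough; the version below is only a placeholder and should match your broker/client version.

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <!-- placeholder version: use the client version that matches your cluster -->
    <version>2.4.1</version>
</dependency>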
Kafka producer
import java.util.Properties;
import java.util.Random;
import java.util.Scanner;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.StringSerializer;

public class KafkaProducerExample
{
    public static void main(String[] args)
    {
        // Point the JVM at the Kerberos configuration before the client starts.
        System.setProperty("java.security.krb5.conf", "kerberos/krb5.conf");

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "aaaa:9092,bbbb:9092,cccc:9092");
        // Per-client JAAS configuration: authenticate with the keytab instead of a ticket cache.
        properties.setProperty("sasl.jaas.config",
                "com.sun.security.auth.module.Krb5LoginModule required "
                + "useKeyTab=true "
                + "storeKey=true "
                + "keyTab=\"kerberos/rcp.keytab\" "
                + "principal=\"hadoop@HADOOP.COM\";");
        properties.setProperty("sasl.kerberos.service.name", "kafka");
        properties.setProperty("sasl.mechanism", "GSSAPI");
        properties.setProperty("security.protocol", "SASL_PLAINTEXT");

        // Pass the serializers directly; ProducerConfig.addSerializerToConfig is deprecated
        // and removed in newer client versions.
        KafkaProducer<String, String> producer =
                new KafkaProducer<>(properties, new StringSerializer(), new StringSerializer());

        // Read tokens from stdin and send each one as a record with a random key.
        Random random = new Random();
        Scanner scanner = new Scanner(System.in);
        while (scanner.hasNext())
        {
            String next = scanner.next();
            ProducerRecord<String, String> record =
                    new ProducerRecord<>("rcp_test_002", random.nextInt() + "", next);
            producer.send(record);
            producer.flush();
        }
        producer.close();
    }
}
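Both examples point java.security.krb5.conf at kerberos/krb5.conf and authenticate with the keytab kerberos/rcp.keytab (paths are relative to the working directory). For reference, a minimal krb5.conf might look like the sketch below; the realm HADOOP.COM comes from the principal in the code, while the KDC host name is an assumption to be replaced with your own:

[libdefaults]
    default_realm = HADOOP.COM

[realms]
    # kdc.hadoop.com is an assumed host name; point it at your real KDC
    HADOOP.COM = {
        kdc = kdc.hadoop.com:88
        admin_server = kdc.hadoop.com
    }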
Kafka consumer
import java.time.Duration;
import java.util.Collections;
import java.util.Properties;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.serialization.StringDeserializer;

public class KafkaConsumerExample
{
    public static void main(String[] args)
    {
        // Point the JVM at the Kerberos configuration before the client starts.
        System.setProperty("java.security.krb5.conf", "kerberos/krb5.conf");

        Properties properties = new Properties();
        properties.setProperty("bootstrap.servers", "aaaa:9092,bbbb:9092,cccc:9092");
        properties.setProperty("sasl.jaas.config",
                "com.sun.security.auth.module.Krb5LoginModule required "
                + "useKeyTab=true "
                + "storeKey=true "
                + "keyTab=\"kerberos/rcp.keytab\" "
                + "principal=\"hadoop@HADOOP.COM\";");
        properties.setProperty("sasl.kerberos.service.name", "kafka");
        properties.setProperty("sasl.mechanism", "GSSAPI");
        properties.setProperty("security.protocol", "SASL_PLAINTEXT");
        // Start from the earliest offset the first time this group reads the topic.
        properties.setProperty("auto.offset.reset", "earliest");
        properties.setProperty("group.id", "test_003");

        // Pass the deserializers directly; ConsumerConfig.addDeserializerToConfig is deprecated
        // and removed in newer client versions.
        KafkaConsumer<String, String> kafkaConsumer =
                new KafkaConsumer<>(properties, new StringDeserializer(), new StringDeserializer());
        kafkaConsumer.subscribe(Collections.singleton("rcp_test_002"));

        // Poll forever and print each record's value.
        for (;;)
        {
            ConsumerRecords<String, String> records = kafkaConsumer.poll(Duration.ofMillis(1000L));
            for (ConsumerRecord<String, String> record : records)
            {
                System.out.println(record.value());
            }
        }
    }
}
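The loop above polls forever and never closes the consumer, so the process has to be killed and the group only rebalances after the session times out. A common pattern (shown as a sketch below, not part of the original example; runUntilShutdown is a hypothetical helper) is to call wakeup() from a JVM shutdown hook, which makes the blocked poll() throw WakeupException so the consumer can be closed cleanly:

import java.time.Duration;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

public class ConsumerShutdownSketch
{
    // Hypothetical helper: runs the same poll loop as the example above,
    // but exits and closes the consumer when the JVM begins shutting down.
    static void runUntilShutdown(KafkaConsumer<String, String> consumer)
    {
        // wakeup() is thread-safe and forces a blocked poll() to throw WakeupException.
        Runtime.getRuntime().addShutdownHook(new Thread(consumer::wakeup));
        try
        {
            for (;;)
            {
                ConsumerRecords<String, String> records = consumer.poll(Duration.ofMillis(1000L));
                for (ConsumerRecord<String, String> record : records)
                {
                    System.out.println(record.value());
                }
            }
        }
        catch (WakeupException e)
        {
            // expected during shutdown, nothing to do
        }
        finally
        {
            consumer.close();
        }
    }
}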