Kafka cluster, Part 7: Operating Kafka from Java (with password authentication)
Series navigation
Part 5: Modifying the __consumer_offsets replica count in a Kafka cluster
Now that the Kafka environment is set up, how do we access it from code?
An earlier post in this series covered operating a Kafka cluster without password authentication; this one shows how to connect when the cluster requires SASL/PLAIN username and password authentication.
1. Environment
Library: kafka-clients-0.11.0.1.jar
JDK: 1.7
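The post uses the bare jar; if you manage dependencies with Maven instead, the equivalent coordinates would be (a sketch, not part of the original setup):

<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>kafka-clients</artifactId>
    <version>0.11.0.1</version>
</dependency>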
2. Kafka configuration class
package password;

import java.util.Properties;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.clients.producer.KafkaProducer;

/*
 * Kafka configuration with username/password (SASL/PLAIN) authentication
 */
public class KafkaUtil {
    // Broker list. Without authentication use the PLAINTEXT:// prefix;
    // with SASL authentication use SASL_PLAINTEXT://
    public static final String servers =
            "SASL_PLAINTEXT://192.168.0.104:9092,SASL_PLAINTEXT://192.168.0.105:9092,SASL_PLAINTEXT://192.168.0.106:9092";

    // Producer configuration for the Kafka cluster
    public static KafkaProducer<String, String> getProducer(String username, String passwd) {
        Properties props = new Properties();
        props.put("bootstrap.servers", servers);
        props.put("acks", "1");
        props.put("retries", 0);
        props.put("batch.size", 0); // 0 disables batching; the client default is 16384
        props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
        props.setProperty("security.protocol", "SASL_PLAINTEXT");
        props.setProperty("sasl.mechanism", "PLAIN");
        String jaas = "org.apache.kafka.common.security.plain.PlainLoginModule required\n"
                + "username=\"" + username + "\"\n"
                + "password=\"" + passwd + "\";";
        props.setProperty("sasl.jaas.config", jaas);
        return new KafkaProducer<String, String>(props);
    }

    // Consumer configuration for the Kafka cluster
    public static KafkaConsumer<String, String> getConsumer(String groupId, String username, String passwd) {
        Properties props = new Properties();
        props.put("bootstrap.servers", servers);
        props.put("auto.offset.reset", "earliest"); // required if you want to read data produced before the group existed
        props.put("group.id", groupId);
        props.put("enable.auto.commit", "false");
        props.put("auto.commit.interval.ms", "100"); // has no effect while auto commit is off
        props.put("max.partition.fetch.bytes", "10240"); // bytes fetched per partition, ~10 KB, roughly 20 records per poll here
        props.put("session.timeout.ms", "30000");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("security.protocol", "SASL_PLAINTEXT");
        props.setProperty("sasl.mechanism", "PLAIN");
        String jaas = "org.apache.kafka.common.security.plain.PlainLoginModule required\n"
                + "username=\"" + username + "\"\n"
                + "password=\"" + passwd + "\";";
        props.setProperty("sasl.jaas.config", jaas);
        return new KafkaConsumer<String, String>(props);
    }
}
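As an alternative to the sasl.jaas.config property set in KafkaUtil, the same credentials can live in a standalone JAAS file passed to the JVM with -Djava.security.auth.login.config. A minimal sketch, assuming a file named kafka_client_jaas.conf (the name and path are illustrative):

KafkaClient {
    org.apache.kafka.common.security.plain.PlainLoginModule required
    username="producer"
    password="prod-pwd";
};

Start the JVM with -Djava.security.auth.login.config=/path/to/kafka_client_jaas.conf. If both are present, the per-client sasl.jaas.config property takes precedence over the static file; the property approach used in this post also lets different clients in the same JVM use different credentials.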
3. Producer class: ProducerClient
package password;

import java.util.concurrent.Future;
import org.apache.kafka.clients.producer.Callback;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/*
 * Producer
 */
public class ProducerClient {

    private static Producer<String, String> producer = KafkaUtil.getProducer("producer", "prod-pwd");

    public static void main(String[] args) {
        sendToKafka();
    }

    public static void sendToKafka() {
        for (int i = 0; i < 5000; i++) {
            try {
                final ProducerRecord<String, String> record = new ProducerRecord<String, String>(
                        "testTopic", "d+key-" + i, "{\"name\":\"haha\",\"id\":\"218CF4630C2F8795\"}");
                Future<RecordMetadata> send = producer.send(record, new Callback() {
                    @Override
                    public void onCompletion(RecordMetadata metadata, Exception e) {
                        if (e != null) {
                            e.printStackTrace();
                        }
                    }
                });
                System.out.println("sendToKafka - sent to Kafka: " + "d+key-" + i);
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
        producer.close();
    }
}
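The callback above handles send errors asynchronously. If you need to block until the broker acknowledges each record, send() returns a Future you can wait on. A minimal sketch reusing KafkaUtil from above (the class name SyncProducerClient is hypothetical; credentials are this post's samples):

package password;

import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.clients.producer.RecordMetadata;

/* Synchronous send: block on the Future returned by send() */
public class SyncProducerClient {
    public static void main(String[] args) throws Exception {
        Producer<String, String> producer = KafkaUtil.getProducer("producer", "prod-pwd");
        try {
            ProducerRecord<String, String> record =
                    new ProducerRecord<String, String>("testTopic", "sync-key", "sync-value");
            // get() blocks until the broker acknowledges (per the acks setting) or throws
            RecordMetadata meta = producer.send(record).get();
            System.out.println("acked: partition=" + meta.partition() + ", offset=" + meta.offset());
        } finally {
            producer.close();
        }
    }
}

Blocking on every record sacrifices throughput, so the callback style in ProducerClient is usually preferred for bulk sends.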
4. Consumer class: ConsumerClient
package password;

import java.util.Arrays;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;

/*
 * Consumer
 */
public class ConsumerClient {

    public static KafkaConsumer<String, String> consumer = null;

    public static void main(String[] args) {
        fetchKafka();
    }

    public static void fetchKafka() {
        consumer = KafkaUtil.getConsumer("testGroup", "producer", "prod-pwd"); // group id, username, password
        consumer.subscribe(Arrays.asList("testTopic")); // topics

        int i = 0;
        while (true) {
            ConsumerRecords<String, String> records;
            try {
                records = consumer.poll(Long.MAX_VALUE); // timeout in milliseconds
            } catch (Exception e) {
                e.printStackTrace();
                continue;
            }
            for (ConsumerRecord<String, String> record : records) {
                System.out.println("fetched from partition " + record.partition()
                        + ", offset: " + record.offset()
                        + ", key: " + record.key()
                        + ", value: " + record.value());
                i++;
                System.out.println(i);
            }
            try {
                consumer.commitSync(); // manual commit, matches enable.auto.commit=false
            } catch (Exception e) {
                e.printStackTrace();
                continue;
            }
        }
    }
}
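The poll loop above runs forever. For a clean shutdown, the standard pattern for this client version is to call consumer.wakeup() from another thread (it is the only KafkaConsumer method safe to call concurrently), catch the resulting WakeupException, and close the consumer in a finally block. A minimal sketch (the class name ShutdownAwareConsumer is hypothetical; topic, group, and credentials reuse this post's samples):

package password;

import java.util.Arrays;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.errors.WakeupException;

/* Consumer with graceful shutdown via wakeup() */
public class ShutdownAwareConsumer {
    public static void main(String[] args) {
        final KafkaConsumer<String, String> consumer =
                KafkaUtil.getConsumer("testGroup", "producer", "prod-pwd");
        final Thread mainThread = Thread.currentThread();
        // On JVM shutdown, make a blocked poll() throw WakeupException,
        // then wait for the main thread to finish closing the consumer
        Runtime.getRuntime().addShutdownHook(new Thread() {
            @Override
            public void run() {
                consumer.wakeup();
                try {
                    mainThread.join();
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }
        });
        try {
            consumer.subscribe(Arrays.asList("testTopic"));
            while (true) {
                ConsumerRecords<String, String> records = consumer.poll(1000);
                for (ConsumerRecord<String, String> record : records) {
                    System.out.println("offset=" + record.offset() + ", value=" + record.value());
                }
                consumer.commitSync();
            }
        } catch (WakeupException e) {
            // expected on shutdown; fall through to close()
        } finally {
            consumer.close();
        }
    }
}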