kafka实现SASL_PLAINTEXT权限认证·集成springboot篇
消费者模块实现
1、首先创建 kafka_client_jaas.conf 文件
KafkaClient { org.apache.kafka.common.security.plain.PlainLoginModule required username="admin" password="admin"; };
注意:客户端(消费者/生产者)的 JAAS 文件使用 KafkaClient 段,只需提供本客户端登录用的 username/password;形如 user_admin="admin"、user_alice="alice" 的条目是 Broker 端 kafka_server_jaas.conf(KafkaServer 段)中定义合法用户的写法,放在客户端配置里不会生效。
我们使用的是多环境部署,在名字后面追加环境区分:kafka_client_jaas_dev.conf kafka_client_jaas_test.conf kafka_client_jaas_prod.conf 。这使得每个环境的账户密码不一样,更加灵活
2、bean 配置
@Configuration @EnableKafka public class KafkaConsumerConfig { @Value("${kafka.consumer.servers}") private String servers; @Value("${kafka.consumer.enable.auto.commit}") private boolean enableAutoCommit; @Value("${kafka.consumer.session.timeout}") private String sessionTimeout; @Value("${kafka.consumer.auto.commit.interval}") private String autoCommitInterval; @Value("${kafka.consumer.group.id}") private String groupId; @Value("${kafka.consumer.auto.offset.reset}") private String autoOffsetReset; @Value("${kafka.consumer.concurrency}") private int concurrency; @Value("${kafkaSecurityStatus}") private int kafkaSecurityStatus; @Bean public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() { ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>(); factory.setConsumerFactory(consumerFactory()); factory.setConcurrency(concurrency); factory.getContainerProperties().setPollTimeout(1500); return factory; } public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerBatchFactory() { ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>(); factory.setConsumerFactory(consumerFactory()); factory.setConcurrency(concurrency); factory.setBatchListener(true); factory.getContainerProperties().setPollTimeout(1500); return factory; } public ConsumerFactory<String, String> consumerFactory() { return new DefaultKafkaConsumerFactory<>(consumerConfigs()); } public Map<String, Object> consumerConfigs() { Map<String, Object> propsMap = new HashMap<>(); propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers); propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit); propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval); propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout); 
propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class); propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, groupId); propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset); // 灵活配置是否启用权限认证开关 if(kafkaSecurityStatus == 1){ propsMap.put("security.protocol", "SASL_PLAINTEXT"); propsMap.put("sasl.mechanism", "PLAIN"); } return propsMap; } }
3、加载conf
我这里因为需要根据启动环境加载不同的conf,就在启动类拿到启动的参数时加载
public class XXXApplication { private static Logger log = LoggerFactory.getLogger(XXXApplication.class); public static void main(String[] args) { String profile = System.getProperty("spring.profiles.active"); //加载kafka 权限认证的配置信息 String kafkaPath = "classpath:kafka_client_jaas_" + profile + ".conf"; log.info("=====profile:" + profile + " || kafkaPath" + kafkaPath); System.setProperty("java.security.auth.login.config", kafkaPath); SpringApplication.run(XXXApplication.class, args); } }
4、启动命令
不可用:nohup java -jar XXX.jar --spring.profiles.active=prod >>/dev/null &
由于原来使用的--spring.profiles.active=prod 不能成功加载,调整为如下命令
可用:nohup java -Dspring.profiles.active=prod -jar XXX.jar >>/dev/null &
生产者模块
其他配置与消费者类似,这里不多介绍了
bean配置如下:
@Configuration @EnableKafka public class KafkaProducerConfig { @Value("${kafka.producer.servers}") private String servers; @Value("${kafka.producer.retries}") private int retries; @Value("${kafka.producer.batch.size}") private int batchSize; @Value("${kafka.producer.linger}") private int linger; @Value("${kafka.producer.buffer.memory}") private int bufferMemory; @Value("${kafkaSecurityStatus}") private int kafkaSecurityStatus; public Map<String, Object> producerConfigs() { Map<String, Object> props = new HashMap<>(); props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers); props.put(ProducerConfig.RETRIES_CONFIG, retries); props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize); props.put(ProducerConfig.LINGER_MS_CONFIG, linger); props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory); props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class); props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class); // 灵活配置开关是否启用权限认证 if(kafkaSecurityStatus == 1){ props.put("security.protocol", "SASL_PLAINTEXT"); props.put("sasl.mechanism", "PLAIN"); } return props; } public ProducerFactory<String, String> producerFactory() { return new DefaultKafkaProducerFactory<>(producerConfigs()); } @Bean public KafkaTemplate<String, String> kafkaTemplate() { return new KafkaTemplate<String, String>(producerFactory()); } }
至此结束!