Kafka client example (with Kerberos authentication enabled)

1. POM configuration

<!-- Kafka support -->
  <dependency>
      <groupId>org.apache.kafka</groupId>
      <artifactId>kafka-clients</artifactId>
      <version>2.3.0</version>
  </dependency>
  <dependency>
      <groupId>org.springframework.kafka</groupId>
      <artifactId>spring-kafka</artifactId>
  </dependency>
  <dependency>
      <groupId>org.springframework.kafka</groupId>
      <artifactId>spring-kafka-test</artifactId>
      <scope>test</scope>
  </dependency>

2. Kafka configuration

import java.util.HashMap;
import java.util.Map;
 
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.security.auth.SecurityProtocol;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
 
/**
 * @Description: Kafka consumer configuration
 * @author op
 *
 */
@Configuration
public class KafkaConfig {
     
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;
  
    @Value("${spring.kafka.consumer.enable-auto-commit}")
    private Boolean autoCommit;
  
    @Value("${spring.kafka.consumer.auto-commit-interval-ms}")
    private Integer autoCommitInterval;
  
    @Value("3000")
    private Integer maxNum;
     
    @Value("1048576")//最大数据大小为10M
    private Integer maxBytes;
  
  
    @Value("${spring.kafka.consumer.auto-offset-reset}")
    private String autoOffsetReset;
  
    @Value("${spring.kafka.consumer.key-deserializer}")
    private String keyDeserializer;
     
    @Value("${spring.kafka.consumer.value-deserializer}")
    private String valDeserializer;
    /**
     * Listener container factory for batch consumption
     * @return the container factory
     */
    @Bean
    public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<Integer, String>> kafkaListenerFactory() {
        ConcurrentKafkaListenerContainerFactory<Integer, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(new DefaultKafkaConsumerFactory<>(consumerConfigs()));
        // Enable batch consumption; the batch size is controlled by ConsumerConfig.MAX_POLL_RECORDS_CONFIG
        factory.setBatchListener(true);
        // Number of concurrent listener threads (10 consumers)
        factory.setConcurrency(10);
        return factory;
    }
     
    @Bean
    public Map<String, Object> consumerConfigs() { 
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, autoCommit);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, keyDeserializer);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, valDeserializer);
        props.put(ConsumerConfig.MAX_PARTITION_FETCH_BYTES_CONFIG, maxBytes); // max bytes fetched per partition per request
        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, maxNum); // max records per poll
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        // Kerberos security authentication
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_PLAINTEXT.name);
        props.put(SaslConfigs.SASL_MECHANISM, "GSSAPI");
        props.put(SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "kafka");
        return props;
    }
}
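
The config above only covers the consumer side. If the application also needs to publish messages, the producer takes the same three Kerberos properties. The sketch below is an illustrative assumption, not part of the original post: the class name KafkaProducerConfig and the bean names producerFactory / kafkaTemplate are mine.

import java.util.HashMap;
import java.util.Map;
 
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.security.auth.SecurityProtocol;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
 
/**
 * Hypothetical producer-side counterpart to KafkaConfig,
 * reusing the same Kerberos settings (not from the original post).
 */
@Configuration
public class KafkaProducerConfig {
 
    @Value("${spring.kafka.bootstrap-servers}")
    private String bootstrapServers;
 
    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        // Same Kerberos security settings as the consumer side
        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, SecurityProtocol.SASL_PLAINTEXT.name);
        props.put(SaslConfigs.SASL_MECHANISM, "GSSAPI");
        props.put(SaslConfigs.SASL_KERBEROS_SERVICE_NAME, "kafka");
        return new DefaultKafkaProducerFactory<>(props);
    }
 
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}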

3. Consumer

import java.util.List;
 
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import org.springframework.util.CollectionUtils;
import lombok.extern.slf4j.Slf4j;
 
/**
 * @Description: Kafka consumer listener
 * @author op
 *
 */
@Slf4j
@Component
public class KafkaConsumerListener {
     
    // Configure the topic(s), consumer group, and container factory to listen with
    @KafkaListener(
            groupId = "your-group-id",
            topics = "test",
            containerFactory = "kafkaListenerFactory"
    )
    public void newbulkWebsiteAlert(List<ConsumerRecord<?, ?>> records) {
        if (CollectionUtils.isEmpty(records)) {
            return;
        }
        try {
            for (ConsumerRecord<?, ?> record : records) {
                String value = record.value().toString();
                // business logic goes here, operating on value
            }
        } catch (Exception e) {
            log.error("Exception while consuming Kafka messages: " + e.getMessage(), e);
        }
    }
}
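
To verify the setup end to end, a message can be published to the same test topic the listener subscribes to. A minimal sketch, assuming the hypothetical kafkaTemplate bean from the producer sketch above:

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Component;
 
/**
 * Hypothetical sender used to smoke-test the Kerberos-authenticated client
 * (not from the original post).
 */
@Component
public class KafkaSendExample {
 
    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;
 
    public void sendTestMessage() {
        // Publish to the same "test" topic the listener above subscribes to
        kafkaTemplate.send("test", "hello from a Kerberos-authenticated client");
    }
}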
    

4. Configuration

spring.kafka.bootstrap-servers=ip:port
spring.kafka.consumer.properties.group.id=your-group-id
spring.kafka.consumer.enable-auto-commit=true
spring.kafka.consumer.auto-commit-interval-ms=1000
spring.kafka.consumer.auto-offset-reset=earliest
spring.kafka.consumer.properties.session.timeout.ms=120000
spring.kafka.consumer.properties.request.timeout.ms=180000
spring.kafka.consumer.key-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.consumer.value-deserializer=org.apache.kafka.common.serialization.StringDeserializer
spring.kafka.listener.missing-topics-fatal=false
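
As an alternative to hard-coding the Kerberos settings in KafkaConfig, Spring Boot can pass arbitrary client properties through the spring.kafka.properties.* prefix, and Kafka clients also accept an inline JAAS configuration via sasl.jaas.config. A sketch of that approach (the keytab path and principal below are placeholders, not from the original post):

# Alternative: pass the same Kerberos settings as plain client properties
spring.kafka.properties.security.protocol=SASL_PLAINTEXT
spring.kafka.properties.sasl.mechanism=GSSAPI
spring.kafka.properties.sasl.kerberos.service.name=kafka
# Optionally replace the external jaas.conf file with an inline JAAS config
spring.kafka.properties.sasl.jaas.config=com.sun.security.auth.module.Krb5LoginModule required useKeyTab=true storeKey=true keyTab="xxx/kafka.keytab" principal="your-principal";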

5. When starting the service, add the JVM options that point to the Kerberos configuration files, e.g. java -jar -Djava.security.krb5.conf=xxx/krb5.conf -Djava.security.auth.login.config=xxx/jaas.conf xxx.jar &

-Djava.security.krb5.conf=xxx/krb5.conf
-Djava.security.auth.login.config=xxx/jaas.conf
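
If the launch command cannot be changed, the same system properties can also be set programmatically, as long as this happens before the first Kafka client is created. A minimal sketch, assuming a standard Spring Boot entry point (the class name Application is illustrative):

import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
 
@SpringBootApplication
public class Application {
 
    public static void main(String[] args) {
        // Must run before any Kafka client initializes JAAS/Kerberos;
        // the paths below are placeholders, matching the JVM options above
        System.setProperty("java.security.krb5.conf", "xxx/krb5.conf");
        System.setProperty("java.security.auth.login.config", "xxx/jaas.conf");
        SpringApplication.run(Application.class, args);
    }
}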

  

Note that serviceName and principal in the jaas.conf file are combined when they are read, and the keytab credentials live under the Kafka directory. If serviceName is present, principal only needs to hold the account credentials; when there is no serviceName field, principal must be written as kafka/<account credentials>.
KafkaClient {
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  storeKey=true
  serviceName="kafka"
  keyTab="D:/logs/kafka.keytab"
  principal="your-principal";
};
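
To illustrate the note above: per the author's description, the variant without a serviceName field moves the service name into the principal, along the lines of the following (the account name is a placeholder):

KafkaClient {
  com.sun.security.auth.module.Krb5LoginModule required
  useKeyTab=true
  storeKey=true
  keyTab="D:/logs/kafka.keytab"
  principal="kafka/your-account";
};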
