Spring Boot + Kafka: sending JSON data to a queue and consuming it

Maven dependency configuration: add the following to pom.xml

<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
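When the project inherits from the Spring Boot parent POM, no version tag is needed; Spring Boot's dependency management supplies it. The broker address and (de)serializers are then configured in application.yml. A minimal sketch, assuming a broker at localhost:9092 (replace with your real bootstrap servers):

spring:
  kafka:
    bootstrap-servers: localhost:9092   # assumption: replace with your broker list
    producer:
      key-serializer: org.apache.kafka.common.serialization.StringSerializer
      value-serializer: org.apache.kafka.common.serialization.StringSerializer
    consumer:
      group-id: tToOss
      key-deserializer: org.apache.kafka.common.serialization.StringDeserializer
      value-deserializer: org.apache.kafka.common.serialization.StringDeserializer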

Configure Kafka in code. First define the topics as an enum: each constant's name() is used as the topic name, and the constructor arguments give the partition count and replication factor (the @JsonFormat annotation makes the enum serialize as a plain string in JSON).

import com.fasterxml.jackson.annotation.JsonFormat;

@JsonFormat(shape = JsonFormat.Shape.STRING)
public enum AdKafkaTopic {
    // Each constant is a topic: name() is the topic name; the arguments are (partitions, replication factor)
    xxxxxx_name(1, 1),
    campaign_name_rule(1, 1);


    final int numPartitions;
    final short replicationFactor;

    AdKafkaTopic(int numPartitions, int replicationFactor) {
        this.numPartitions = numPartitions;
        this.replicationFactor = (short) replicationFactor;
    }

    public int getNumPartitions() {
        return numPartitions;
    }

    public short getReplicationFactor() {
        return replicationFactor;
    }
}

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.beans.BeansException;
import org.springframework.beans.factory.support.DefaultListableBeanFactory;
import org.springframework.context.ApplicationContext;
import org.springframework.context.ApplicationContextAware;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.context.annotation.Configuration;

@Configuration
public class KafkaConfig implements ApplicationContextAware {

    // Build a NewTopic from the enum definition
    NewTopic createFrom(AdKafkaTopic t) {
        return new NewTopic(t.name(), t.getNumPartitions(), t.getReplicationFactor());
    }

    @Override
    public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
        // Cast to ConfigurableApplicationContext to get access to the bean factory
        ConfigurableApplicationContext configurableApplicationContext = (ConfigurableApplicationContext) applicationContext;

        DefaultListableBeanFactory defaultListableBeanFactory = (DefaultListableBeanFactory) configurableApplicationContext.getBeanFactory();
        // Register one NewTopic bean per enum constant; KafkaAdmin creates any missing topics on startup
        for (AdKafkaTopic t : AdKafkaTopic.values()) {
            defaultListableBeanFactory.registerSingleton("newTopic_" + t.name(), createFrom(t));
        }
    }
}
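Registering singletons by hand works, but Spring Kafka's KafkaAdmin (auto-configured by Spring Boot) also picks up any plain NewTopic @Bean and creates missing topics on startup. A simpler alternative sketch (KafkaSimpleTopicConfig is a hypothetical name):

import org.apache.kafka.clients.admin.NewTopic;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class KafkaSimpleTopicConfig {

    // KafkaAdmin collects all NewTopic beans and creates any that don't exist yet
    @Bean
    public NewTopic campaignNameRuleTopic() {
        AdKafkaTopic t = AdKafkaTopic.campaign_name_rule;
        return new NewTopic(t.name(), t.getNumPartitions(), t.getReplicationFactor());
    }
}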

import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

@Service
public class KafkaDataSender {
    private final KafkaTemplate<String, String> template;

    public KafkaDataSender(KafkaTemplate<String, String> template) {
        this.template = template;
    }

    // Send a message; non-String payloads are serialized to JSON first
    <T> void sendKafkaMsg(AdKafkaTopic topic, String key, T value) {
        if (value instanceof String) {
            this.template.send(topic.name(), key, (String) value);
        } else {
            this.template.send(topic.name(), key, JsonUtil.toJson(value));
        }
    }

    // Convenience wrapper; xxx stands for the entity class being sent
    public void sendSaleCampaign(xxx dto) {
        sendKafkaMsg(AdKafkaTopic.campaign_name_rule, dto.getCampaignId(), dto);
    }
}
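JsonUtil is the project's own helper class and isn't shown in the post; a minimal sketch of what it could look like, assuming Jackson (only the method name toJson is taken from the usage above):

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;

public final class JsonUtil {
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private JsonUtil() {
    }

    // Serialize any object to a JSON string, wrapping Jackson's checked exception
    public static String toJson(Object value) {
        try {
            return MAPPER.writeValueAsString(value);
        } catch (JsonProcessingException e) {
            throw new IllegalStateException("JSON serialization failed", e);
        }
    }
}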

 

With everything above configured, send messages wherever needed:

    @Autowired
    private KafkaDataSender kafkaDataSender;

    public void pullSaleCampaign(xxx dto) {
        try {
            kafkaDataSender.sendSaleCampaign(dto);
            log.info("pushed to Kafka queue successfully: {}", JsonUtil.toJson(dto));
        } catch (Exception e) {
            log.error("push to Kafka queue failed: {}", JsonUtil.toJson(dto), e);
        }
    }
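One caveat: KafkaTemplate.send() is asynchronous, so the try/catch above only catches errors thrown while the record is queued locally, not broker-side delivery failures. A sketch of attaching a delivery callback, assuming Spring Kafka 3.x where send() returns a CompletableFuture (2.x returns a ListenableFuture instead) and a logger like the @Slf4j one used elsewhere in the post:

template.send(topic.name(), key, payload)
        .whenComplete((result, ex) -> {
            if (ex != null) {
                // delivery failed after producer retries
                log.error("Kafka delivery failed for key {}", key, ex);
            } else {
                // result.getRecordMetadata() carries topic/partition/offset of the written record
                log.info("delivered to {}-{}@{}", result.getRecordMetadata().topic(),
                        result.getRecordMetadata().partition(), result.getRecordMetadata().offset());
            }
        });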

 

 

That covers sending data to a Kafka queue.

Related Linux commands

# Check remotely that the broker is reachable and list topic metadata

kafkacat -b 8.219.216.229:9092 -L | grep 'xxx'

# Produce a message to the queue remotely
echo "Hello, Kafka!" | kafkacat -b 172.21.184.8:9092 -t xxx
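To read messages back for a quick check, kafkacat can also consume (-C) from the earliest offset:

# Consume the topic's messages remotely, starting from the beginning
kafkacat -b 172.21.184.8:9092 -t xxx -C -o beginning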

 

Consuming queue data

package com.giikin.ads.collector.process2.processor;

import com.giikin.ads.collector.oss.OssAppendWriteService;
import com.giikin.ads.collector.oss.OssTable;
import lombok.extern.slf4j.Slf4j;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

@Component
@Slf4j
//@DependsOn(value = "kafkaTopicConfig")
public class SourceDataProcess {
    private final OssAppendWriteService ossAppendWriteService;

    public SourceDataProcess(OssAppendWriteService ossAppendWriteService) {
        this.ossAppendWriteService = ossAppendWriteService;
    }

    // Batch listener: receives a list of records per poll, groups them by topic, and writes each group to OSS
    @KafkaListener(
            id = "tiktokToOss",
            topics = {
                    "xxx", "xxx2"
            },
            groupId = "tToOss",
            containerFactory = "batchFactory"
    )
    public void writeTiktokSource(List<ConsumerRecord<String, String>> records) {
        Map<String, List<ConsumerRecord<String, String>>> collect =
                records.stream().collect(Collectors.groupingBy(ConsumerRecord::topic));
        collect.forEach((topic, datas) -> {
            List<String> values = datas.stream().map(ConsumerRecord::value).collect(Collectors.toList());
            switch (topic) {
                case "xxx":
                    ossAppendWriteService.writeSync(values, OssTable.xxx);
                    break;
                case "xxx2":
                    // placeholder for the second topic's handling
                    break;
                default:
                    break;
            }
        });
    }
}
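The listener above references a containerFactory named "batchFactory" that the post never defines. A minimal sketch of such a bean, assuming string keys/values and Spring Boot's auto-configured ConsumerFactory (the bean name must match the containerFactory attribute):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;

@Configuration
public class KafkaBatchConfig {

    // Delivers records to listener methods in batches (List<ConsumerRecord<...>>)
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> batchFactory(
            ConsumerFactory<String, String> consumerFactory) {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory);
        factory.setBatchListener(true); // enable batch delivery for @KafkaListener methods
        return factory;
    }
}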

 
