Integrating Kafka with Spring Boot
1. Add the dependencies to pom.xml
<dependency>
    <groupId>org.springframework.kafka</groupId>
    <artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
    <groupId>com.typesafe</groupId>
    <artifactId>config</artifactId>
    <version>1.2.1</version>
</dependency>
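The controller in step 5 also relies on spring-web (via spring-boot-starter-web), fastjson, and commons-lang3. If these are not already in the project, dependencies along the following lines would be needed; the versions shown here are assumptions, not taken from the original post.

<!-- Assumed additional dependencies for the controller in step 5 -->
<dependency>
    <groupId>org.springframework.boot</groupId>
    <artifactId>spring-boot-starter-web</artifactId>
</dependency>
<dependency>
    <groupId>com.alibaba</groupId>
    <artifactId>fastjson</artifactId>
    <version>1.2.83</version>
</dependency>
<dependency>
    <groupId>org.apache.commons</groupId>
    <artifactId>commons-lang3</artifactId>
    <version>3.9</version>
</dependency>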
2. Create the producer configuration class
package com.youfan.config;

import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
@EnableKafka
public class KafkaProducerConfig {

    // Producer settings are read from the properties file shown in step 3.
    @Value("${kafka.producer.servers}")
    private String servers;
    @Value("${kafka.producer.retries}")
    private int retries;
    @Value("${kafka.producer.batch.size}")
    private int batchSize;
    @Value("${kafka.producer.linger}")
    private int linger;
    @Value("${kafka.producer.buffer.memory}")
    private int bufferMemory;

    public Map<String, Object> producerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ProducerConfig.RETRIES_CONFIG, retries);
        props.put(ProducerConfig.BATCH_SIZE_CONFIG, batchSize);
        props.put(ProducerConfig.LINGER_MS_CONFIG, linger);
        props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, bufferMemory);
        props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return props;
    }

    public ProducerFactory<String, String> producerFactory() {
        return new DefaultKafkaProducerFactory<>(producerConfigs());
    }

    // Exposes the KafkaTemplate that the controller in step 5 uses to publish messages.
    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }
}
3. Add the configuration to the properties file
kafka.consumer.zookeeper.connect=192.168.227.129:2181
kafka.consumer.servers=192.168.227.129:9092
kafka.consumer.enable.auto.commit=true
kafka.consumer.session.timeout=6000
kafka.consumer.auto.commit.interval=100
kafka.consumer.auto.offset.reset=latest
kafka.consumer.topic=test
kafka.consumer.group.id=test
kafka.consumer.concurrency=10

kafka.producer.servers=192.168.227.129:9092
kafka.producer.retries=0
kafka.producer.batch.size=4096
kafka.producer.linger=1
kafka.producer.buffer.memory=40960
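The file above also defines kafka.consumer.* settings, but the original post only shows the producer side. A minimal consumer configuration that reads the same keys might look like the following sketch; KafkaConsumerConfig and its bean names are assumptions, not code from the original.

package com.youfan.config;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;

import java.util.HashMap;
import java.util.Map;

@Configuration
@EnableKafka
public class KafkaConsumerConfig {

    @Value("${kafka.consumer.servers}")
    private String servers;
    @Value("${kafka.consumer.enable.auto.commit}")
    private boolean enableAutoCommit;
    @Value("${kafka.consumer.session.timeout}")
    private String sessionTimeout;
    @Value("${kafka.consumer.auto.commit.interval}")
    private String autoCommitInterval;
    @Value("${kafka.consumer.group.id}")
    private String groupId;
    @Value("${kafka.consumer.auto.offset.reset}")
    private String autoOffsetReset;
    @Value("${kafka.consumer.concurrency}")
    private int concurrency;

    public Map<String, Object> consumerConfigs() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, servers);
        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, enableAutoCommit);
        props.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, sessionTimeout);
        props.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, autoCommitInterval);
        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        props.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, autoOffsetReset);
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return props;
    }

    public ConsumerFactory<String, String> consumerFactory() {
        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
    }

    // Container factory used by @KafkaListener methods; concurrency comes from kafka.consumer.concurrency.
    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        factory.setConcurrency(concurrency);
        return factory;
    }
}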
4. Write a helper that reads the topic names from a config file
package com.youfan.utils;

import com.typesafe.config.Config;
import com.typesafe.config.ConfigFactory;

public class ReadProperties {

    // Loads kafka.properties from the classpath once at class-load time.
    public final static Config config = ConfigFactory.load("kafka.properties");

    public static String getKey(String key) {
        return config.getString(key).trim();
    }

    // Reads a key from an arbitrary classpath config file.
    public static String getKey(String key, String filename) {
        Config config = ConfigFactory.load(filename);
        return config.getString(key).trim();
    }
}
//kafka.properties contents
//attentionProductLog=attentionProductLog
//buyCartProductLog=buyCartProductLog
//collectProductLog=collectProductLog
//scanProductLog=scanProductLog
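For reference, this is how the controller in the next step resolves its topic names; a one-line usage sketch assuming kafka.properties (with the entries above) is on the classpath:

// Returns "attentionProductLog" as configured in kafka.properties
String attentionTopic = ReadProperties.getKey("attentionProductLog");

// Or read from another classpath resource via the two-argument overload
String scanTopic = ReadProperties.getKey("scanProductLog", "kafka.properties");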
5. Use Kafka in the controller
package com.youfan.Control;

import com.alibaba.fastjson.JSONObject;
import com.youfan.entity.ResultMessage;
import com.youfan.log.AttentionProductLog;
import com.youfan.log.BuyCartProductLog;
import com.youfan.log.CollectProductLog;
import com.youfan.log.ScanProductLog;
import com.youfan.utils.ReadProperties;
import org.apache.commons.lang3.StringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestMethod;
import org.springframework.web.bind.annotation.RestController;

import javax.servlet.http.HttpServletRequest;
import java.util.Date;

@RestController
@RequestMapping("infolog")
public class InfoInControl {

    // Topic names are resolved from kafka.properties via the helper from step 4.
    private final String attentionProductLogTopic = ReadProperties.getKey("attentionProductLog");
    private final String buyCartProductLogTopic = ReadProperties.getKey("buyCartProductLog");
    private final String collectProductLogTopic = ReadProperties.getKey("collectProductLog");
    private final String scanProductLogTopic = ReadProperties.getKey("scanProductLog");

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @RequestMapping(value = "helloworld", method = RequestMethod.GET)
    public String helloworld(HttpServletRequest req) {
        String ip = req.getRemoteAddr();
        ResultMessage resultMessage = new ResultMessage();
        resultMessage.setMessage("hello:" + ip);
        resultMessage.setStatus("success");
        return JSONObject.toJSONString(resultMessage);
    }

    /**
     * Expected payload formats:
     * AttentionProductLog:{productid:productid....}
     * BuyCartProductLog:{productid:productid....}
     * CollectProductLog:{productid:productid....}
     * ScanProductLog:{productid:productid....}
     */
    @RequestMapping(value = "receivelog", method = RequestMethod.POST)
    public String receivelog(String recevicelog, HttpServletRequest req) {
        if (StringUtils.isBlank(recevicelog)) {
            return null;
        }
        // The payload is "<log class name>:<json>", so split only on the first colon.
        String[] rearrays = recevicelog.split(":", 2);
        String classname = rearrays[0];
        String data = rearrays[1];
        String resulmesage = "";
        if ("AttentionProductLog".equals(classname)) {
            AttentionProductLog attentionProductLog = JSONObject.parseObject(data, AttentionProductLog.class);
            resulmesage = JSONObject.toJSONString(attentionProductLog);
            kafkaTemplate.send(attentionProductLogTopic, resulmesage + "##1##" + new Date().getTime());
        } else if ("BuyCartProductLog".equals(classname)) {
            BuyCartProductLog buyCartProductLog = JSONObject.parseObject(data, BuyCartProductLog.class);
            resulmesage = JSONObject.toJSONString(buyCartProductLog);
            kafkaTemplate.send(buyCartProductLogTopic, resulmesage + "##1##" + new Date().getTime());
        } else if ("CollectProductLog".equals(classname)) {
            CollectProductLog collectProductLog = JSONObject.parseObject(data, CollectProductLog.class);
            resulmesage = JSONObject.toJSONString(collectProductLog);
            kafkaTemplate.send(collectProductLogTopic, resulmesage + "##1##" + new Date().getTime());
        } else if ("ScanProductLog".equals(classname)) {
            ScanProductLog scanProductLog = JSONObject.parseObject(data, ScanProductLog.class);
            resulmesage = JSONObject.toJSONString(scanProductLog);
            kafkaTemplate.send(scanProductLogTopic, resulmesage + "##1##" + new Date().getTime());
        }
        ResultMessage resultMessage = new ResultMessage();
        resultMessage.setMessage(resulmesage);
        resultMessage.setStatus("success");
        return JSONObject.toJSONString(resultMessage);
    }
}
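For completeness, a hedged client-side sketch of how a log event could be posted to this endpoint. The host/port, the class name, and the JSON field are assumptions for illustration; the original only documents the "ClassName:{...}" prefix format and the "recevicelog" parameter name.

import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;

public class InfoLogClientDemo {
    public static void main(String[] args) throws Exception {
        // Payload format expected by /infolog/receivelog: "<LogClassName>:<json>"
        String payload = "ScanProductLog:{\"productid\":1}";

        // The controller binds the plain String parameter "recevicelog" from a request parameter,
        // so send it as a form-encoded field.
        String form = "recevicelog=" + URLEncoder.encode(payload, StandardCharsets.UTF_8);

        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8080/infolog/receivelog")) // host and port are assumptions
                .header("Content-Type", "application/x-www-form-urlencoded")
                .POST(HttpRequest.BodyPublishers.ofString(form))
                .build();

        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.body()); // JSON-serialized ResultMessage returned by the controller
    }
}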
This post is from cnblogs (博客园), by 小白啊小白,Fighting. Please credit the original link when reposting: https://www.cnblogs.com/ywjfx/p/11197646.html