Flink custom Kafka source: process the stream and forward it to the next Kafka topic
1. Dependency configuration
POM file: https://www.cnblogs.com/robots2/p/16048648.html
Standalone Kafka installation: https://www.cnblogs.com/robots2/p/15797379.html
See my blog for other setup details.
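For reference, here is a minimal sketch of the Maven dependencies this example relies on; the version and Scala suffix below are my assumptions, so check the linked POM post for the exact setup:

    <!-- Versions are assumptions; match them to your cluster -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-streaming-java_2.12</artifactId>
        <version>1.13.1</version>
    </dependency>
    <!-- Kafka connector providing FlinkKafkaConsumer/FlinkKafkaProducer -->
    <dependency>
        <groupId>org.apache.flink</groupId>
        <artifactId>flink-connector-kafka_2.12</artifactId>
        <version>1.13.1</version>
    </dependency>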
2. Flink code
package net.xdclass.class6.app;

import java.util.Properties;

import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.functions.MapFunction;
import org.apache.flink.api.common.serialization.SimpleStringSchema;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaConsumer;
import org.apache.flink.streaming.connectors.kafka.FlinkKafkaProducer;
/**
 * @desc Kafka source with custom processing, forwarding the output to another Kafka topic.
 * Local web UI: http://127.0.0.1:8081/#/overview
 * Produce: ./kafka-console-producer.sh --broker-list <ip-address>:9092 --topic test
 *          (newer Kafka versions use --bootstrap-server instead of --broker-list)
 * The Flink job transforms each record and writes it to another Kafka topic.
 * Consume: ./kafka-console-consumer.sh --bootstrap-server <ip-address>:9092 --topic test-output
 */
public class FLink08KafkaCustomSourceApp {

    public static void main(String[] args) throws Exception {

        // Run locally with the Flink web UI enabled
        final StreamExecutionEnvironment env =
                StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(new Configuration());
        // StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

        // Unified stream/batch: let Flink pick the execution mode automatically
        env.setRuntimeMode(RuntimeExecutionMode.AUTOMATIC);
        Properties props = new Properties();
        // Kafka broker address
        props.setProperty("bootstrap.servers", "<ip-address>:9092");
        // Consumer group name
        props.setProperty("group.id", "FLink-test-group");
        // String key/value deserializers
        props.setProperty("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.setProperty("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        // Offset reset rule: where to start when no committed offset exists
        props.setProperty("auto.offset.reset", "latest");
        // Auto-commit offsets back to Kafka every 2 seconds; committing less often
        // reduces broker round-trips and improves throughput. (With Flink
        // checkpointing enabled, offsets are committed on checkpoints instead.)
        props.setProperty("enable.auto.commit", "true");
        props.setProperty("auto.commit.interval.ms", "2000");
        // A background thread checks for new Kafka partitions every 10 seconds
        props.setProperty("flink.partition-discovery.interval-millis", "10000");
        // Topic name, deserialization schema, consumer properties
        FlinkKafkaConsumer<String> consumer =
                new FlinkKafkaConsumer<>("test", new SimpleStringSchema(), props);
        // Start from the group's committed offsets; if none are recorded,
        // fall back to the auto.offset.reset setting
        consumer.setStartFromGroupOffsets();

        // Register the Kafka consumer as the source
        DataStream<String> kafkaDS = env.addSource(consumer);
        kafkaDS.print("kafka:");
        // Transformation: prepend a custom prefix to every record
        DataStream<String> mapDS = kafkaDS.map(new MapFunction<String, String>() {
            @Override
            public String map(String value) throws Exception {
                return "custom prefix: " + value;
            }
        });
        // Topic name, serialization schema, properties. Only bootstrap.servers is
        // relevant here; the producer ignores the consumer-specific keys in props.
        FlinkKafkaProducer<String> producer =
                new FlinkKafkaProducer<>("test-output", new SimpleStringSchema(), props);
        mapDS.addSink(producer);

        // Job name
        env.execute("CustomKafkaSourceApp");
    }
}
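Side note: from Flink 1.14 on, FlinkKafkaConsumer and FlinkKafkaProducer are deprecated in favor of the unified KafkaSource and KafkaSink. Below is a minimal sketch of the same pipeline on the newer API, assuming Flink 1.14+; the broker address and topic names follow the example above, and the class name is just a placeholder:

    import org.apache.flink.api.common.eventtime.WatermarkStrategy;
    import org.apache.flink.api.common.serialization.SimpleStringSchema;
    import org.apache.flink.api.common.typeinfo.Types;
    import org.apache.flink.connector.kafka.sink.KafkaRecordSerializationSchema;
    import org.apache.flink.connector.kafka.sink.KafkaSink;
    import org.apache.flink.connector.kafka.source.KafkaSource;
    import org.apache.flink.connector.kafka.source.enumerator.initializer.OffsetsInitializer;
    import org.apache.flink.streaming.api.datastream.DataStream;
    import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
    import org.apache.kafka.clients.consumer.OffsetResetStrategy;

    public class FLink08KafkaNewApiApp {
        public static void main(String[] args) throws Exception {
            StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();

            // Source: read topic "test", starting from the group's committed offsets,
            // falling back to the latest offset (mirrors auto.offset.reset=latest above)
            KafkaSource<String> source = KafkaSource.<String>builder()
                    .setBootstrapServers("<ip-address>:9092")
                    .setTopics("test")
                    .setGroupId("FLink-test-group")
                    .setStartingOffsets(OffsetsInitializer.committedOffsets(OffsetResetStrategy.LATEST))
                    .setValueOnlyDeserializer(new SimpleStringSchema())
                    .build();
            DataStream<String> kafkaDS =
                    env.fromSource(source, WatermarkStrategy.noWatermarks(), "kafka-source");

            // Same transformation as above: prepend a prefix to every record
            DataStream<String> mapDS = kafkaDS
                    .map(value -> "custom prefix: " + value)
                    .returns(Types.STRING);

            // Sink: write to topic "test-output"
            KafkaSink<String> sink = KafkaSink.<String>builder()
                    .setBootstrapServers("<ip-address>:9092")
                    .setRecordSerializer(KafkaRecordSerializationSchema.builder()
                            .setTopic("test-output")
                            .setValueSerializationSchema(new SimpleStringSchema())
                            .build())
                    .build();
            mapDS.sinkTo(sink);

            env.execute("KafkaNewApiApp");
        }
    }

Note that from Flink 1.15 the connector artifact is published without the Scala suffix (flink-connector-kafka), so adjust the POM accordingly if you upgrade.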