一、Maven dependencies
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-clients</artifactId>
    <version>1.18.1</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-streaming-java</artifactId>
    <version>1.18.1</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-runtime-web</artifactId>
    <version>1.18.1</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-table-planner_2.12</artifactId>
    <version>1.18.1</version>
</dependency>
<dependency>
    <groupId>org.apache.flink</groupId>
    <artifactId>flink-connector-jdbc</artifactId>
    <version>3.1.2-1.18</version>
</dependency>
<dependency>
    <groupId>mysql</groupId>
    <artifactId>mysql-connector-java</artifactId>
    <version>8.0.29</version>
</dependency>
二、MysqlReader.java
package com.example.flinkcdcjob.dataBatch;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import java.lang.reflect.Field;
import java.sql.*;
import java.util.ArrayList;
import java.util.List;
public class MysqlReader extends RichSourceFunction<List<User>> {
    private Connection connection = null;
    private PreparedStatement ps = null;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        // Open the JDBC connection and prepare the query against the source table.
        Class.forName("com.mysql.cj.jdbc.Driver");
        connection = DriverManager.getConnection("jdbc:mysql://rm-cn-lbj3n5u3s000ap3o.rwlb.rds.aliyuncs.com:3306", "dengxiaohui", "DdXxHh@123321");
        ps = connection.prepareStatement("select id,username,password from test_db.user");
    }
    @Override
    public void run(SourceContext<List<User>> sourceContext) throws Exception {
        ResultSet resultSet = ps.executeQuery();
        List<User> userList = new ArrayList<>();
        int count = 0;
        while (resultSet.next()) {
            User object = new User();
            ResultSetMetaData metaData = resultSet.getMetaData();
            int columnCount = metaData.getColumnCount();
            // Map each column onto the User field of the same name via reflection.
            for (int i = 0; i < columnCount; i++) {
                String columnName = metaData.getColumnName(i + 1);
                Field declaredField = User.class.getDeclaredField(columnName);
                declaredField.setAccessible(true);
                if (null != resultSet.getObject(columnName) && !(resultSet.getObject(columnName).toString()).isEmpty()) {
                    declaredField.set(object, resultSet.getObject(columnName));
                } else {
                    declaredField.set(object, null);
                }
            }
            userList.add(object);
            count++;
            // Emit a batch every 5000 rows and start a fresh list so the emitted batch is not mutated afterwards.
            if (count % 5000 == 0) {
                sourceContext.collect(userList);
                userList = new ArrayList<>();
            }
        }
        System.out.println("Rows read from the source table: " + count);
        // Emit the remaining rows, if any.
        if (!userList.isEmpty()) {
            sourceContext.collect(userList);
        }
    }
    @Override
    public void cancel() {
        // The query is finite; there is no long-running loop to interrupt here.
    }

    @Override
    public void close() throws Exception {
        super.close();
        // Release JDBC resources when the task shuts down.
        if (ps != null) {
            ps.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
}
三、MysqlWriter.java
package com.example.flinkcdcjob.dataBatch;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.streaming.api.functions.sink.RichSinkFunction;
import java.lang.reflect.Field;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.text.SimpleDateFormat;
import java.util.Date;
import java.util.List;
public class MysqlWriter extends RichSinkFunction<List<User>> {
    private Connection connection = null;
    private PreparedStatement preparedStatement = null;
    private int totalNum = 0;

    @Override
    public void open(Configuration parameters) throws Exception {
        super.open(parameters);
        if (connection == null) {
            Class.forName("com.mysql.cj.jdbc.Driver");
            connection = DriverManager.getConnection("jdbc:mysql://rm-cn-lbj3n5u3s000ap3o.rwlb.rds.aliyuncs.com:3306", "dengxiaohui", "DdXxHh@123321");
            connection.setAutoCommit(false);
        }
        // Empty the sink table before the full load, then prepare the batch insert.
        try (PreparedStatement truncate = connection.prepareStatement("truncate table sink_db.user")) {
            truncate.execute();
        }
        preparedStatement = connection.prepareStatement("insert into sink_db.user values (?,?,?)");
    }
    @Override
    public void invoke(List<User> value, Context context) throws Exception {
        try {
            for (int i = 0; i < value.size(); i++) {
                // Bind each User field to the corresponding insert parameter via reflection.
                Field[] fields = value.get(i).getClass().getDeclaredFields();
                int location = 0;
                for (Field field : fields) {
                    location++;
                    field.setAccessible(true);
                    if (null != (field.get(value.get(i))) && !(field.get(value.get(i)).toString()).isEmpty()) {
                        preparedStatement.setObject(location, (field.get(value.get(i))));
                    } else {
                        preparedStatement.setObject(location, null);
                    }
                }
                preparedStatement.addBatch();
                totalNum++;
            }
            preparedStatement.executeBatch();
            preparedStatement.clearBatch();
            SimpleDateFormat simpleDateFormat = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
            System.out.println("now time is: " + simpleDateFormat.format(new Date()) + ", rows inserted so far: " + totalNum);
            connection.commit();
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
    @Override
    public void close() throws Exception {
        super.close();
        if (preparedStatement != null) {
            preparedStatement.close();
        }
        if (connection != null) {
            connection.close();
        }
    }
}
四、Entity class for the table: User.java
package com.example.flinkcdcjob.dataBatch;
import lombok.Data;
import java.math.BigInteger;
@Data
public class User {
    public BigInteger id;
    public String username;
    public String password;
}
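Note that MySQL Connector/J returns java.lang.Long for a signed BIGINT column and java.math.BigInteger only for BIGINT UNSIGNED, and the reflective Field.set call in MysqlReader requires the field type to match what ResultSet.getObject returns; if the id column is a plain BIGINT, declare the field as Long instead.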
五、Main class: MysqlCdcMysql.java
package com.example.flinkcdcjob.dataBatch;
import org.apache.flink.streaming.api.datastream.DataStreamSource;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import java.util.List;
public class MysqlCdcMysql {
    public static void main(String[] args) throws Exception {
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment().setParallelism(1);
        DataStreamSource<List<User>> dataStream = env.addSource(new MysqlReader());
        dataStream.addSink(new MysqlWriter());
        env.execute("Flink read MySQL data and write MySQL");
    }
}
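Since flink-runtime-web is on the classpath, the same pipeline can optionally be started with Flink's local Web UI. The sketch below is illustrative only: the class name MysqlCdcMysqlWithWebUI and the port 8081 are assumptions, not part of the original job.

package com.example.flinkcdcjob.dataBatch;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.RestOptions;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
public class MysqlCdcMysqlWithWebUI {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumed port; change it if 8081 is already taken on this machine.
        conf.set(RestOptions.BIND_PORT, "8081");
        // createLocalEnvironmentWithWebUI starts an embedded cluster whose Web UI is served by flink-runtime-web.
        final StreamExecutionEnvironment env = StreamExecutionEnvironment.createLocalEnvironmentWithWebUI(conf).setParallelism(1);
        env.addSource(new MysqlReader()).addSink(new MysqlWriter());
        env.execute("Flink read MySQL data and write MySQL (local Web UI)");
    }
}

While the job runs, the UI is then reachable at http://localhost:8081.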