data.csv 内容:
1,Tom,15
2,Lily,13
3,Mike,21
4,John,20
5,Emma,18
6,Sophia,19
7,David,22
8,James,16
9,Olivia,17
10,Robert,23
11,Emily,14
12,Daniel,25
13,Amelia,24
代码1(直接输出结果):
package com.auguigu.demo;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import java.nio.file.Paths;
public class TextSql {

    /**
     * Registers input/data.csv as a Flink SQL filesystem source table and
     * prints the (name, uid) projection straight from the Table API result.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or run
     */
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps the printed rows in file order.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // The filesystem connector needs a concrete path; resolve the
        // project-relative location to an absolute one before embedding it.
        String absolutePath = Paths.get("input/data.csv").toAbsolutePath().toString();
        tableEnv.executeSql(sourceDdl(absolutePath));

        // executeSql(...).print() drives the query itself; no env.execute() needed.
        tableEnv.executeSql("select name,uid from csv_source").print();
    }

    /** Builds the CREATE TABLE DDL for the CSV-backed {@code csv_source} table. */
    private static String sourceDdl(String absolutePath) {
        return "CREATE TABLE csv_source ("
                + " `uid` INT,"
                + " `name` STRING,"
                + " `age` INT"
                + ") WITH ("
                + " 'connector' = 'filesystem',"
                + " 'path' = '" + absolutePath + "',"
                + " 'format' = 'csv',"
                + " 'csv.field-delimiter' = ',',"
                + " 'csv.ignore-parse-errors' = 'true'"
                + ")";
    }
}
代码1输出结果:
+----+--------------------------------+-------------+
| op | name | uid |
+----+--------------------------------+-------------+
| +I | Tom | 1 |
| +I | Lily | 2 |
| +I | Mike | 3 |
| +I | John | 4 |
| +I | Emma | 5 |
| +I | Sophia | 6 |
| +I | David | 7 |
| +I | James | 8 |
| +I | Olivia | 9 |
| +I | Robert | 10 |
| +I | Emily | 11 |
| +I | Daniel | 12 |
| +I | Amelia | 13 |
+----+--------------------------------+-------------+
13 rows in set
Process finished with exit code 0
代码2(使用 DataStream 输出结果):
package com.xxx.demo;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.types.Row;
import java.nio.file.Paths;
public class TextSql {

    /**
     * Registers input/data.csv as a Flink SQL filesystem source table,
     * bridges the (name, uid) query result to a {@code DataStream<Row>},
     * and prints each row from the streaming job.
     *
     * @param args unused command-line arguments
     * @throws Exception if the Flink job fails to start or run
     */
    public static void main(String[] args) throws Exception {
        // The filesystem connector needs a concrete path; resolve the
        // project-relative location to an absolute one before embedding it.
        String absoluteCsvPath = Paths.get("input/data.csv").toAbsolutePath().toString();

        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        // Single parallelism keeps the printed rows in file order.
        env.setParallelism(1);
        StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env);

        // Register the CSV-backed source table.
        tableEnv.executeSql(
                "CREATE TABLE csv_source ("
                        + " `uid` INT,"
                        + " `name` STRING,"
                        + " `age` INT"
                        + ") WITH ("
                        + " 'connector' = 'filesystem',"
                        + " 'path' = '" + absoluteCsvPath + "',"
                        + " 'format' = 'csv',"
                        + " 'csv.field-delimiter' = ',',"
                        + " 'csv.ignore-parse-errors' = 'true'"
                        + ")");

        // Bridge Table -> DataStream and attach a print sink.
        DataStream<Row> rows =
                tableEnv.toDataStream(tableEnv.sqlQuery("select name,uid from csv_source"), Row.class);
        rows.print();

        // Unlike executeSql(...).print(), a DataStream sink only runs when
        // the job is explicitly submitted.
        env.execute();
    }
}
代码2输出结果:
+I[Tom, 1]
+I[Lily, 2]
+I[Mike, 3]
+I[John, 4]
+I[Emma, 5]
+I[Sophia, 6]
+I[David, 7]
+I[James, 8]
+I[Olivia, 9]
+I[Robert, 10]
+I[Emily, 11]
+I[Daniel, 12]
+I[Amelia, 13]
Process finished with exit code 0
pom.xml:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.example</groupId>
<artifactId>flink-demo1</artifactId>
<version>1.0-SNAPSHOT</version>
<properties>
<java.version>11</java.version>
<flink.version>1.17.2</flink.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-api-java-bridge</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-planner-loader</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-table-runtime</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-kafka</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-files</artifactId>
<version>${flink.version}</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-csv</artifactId>
<version>${flink.version}</version>
</dependency>
</dependencies>
</project>
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 阿里最新开源QwQ-32B,效果媲美deepseek-r1满血版,部署成本又又又降低了!
· SQL Server 2025 AI相关能力初探
· AI编程工具终极对决:字节Trae VS Cursor,谁才是开发者新宠?
· 开源Multi-agent AI智能体框架aevatar.ai,欢迎大家贡献代码
· Manus重磅发布:全球首款通用AI代理技术深度解析与实战指南
2021-06-11 基于token的身份验证的原理