练习:采用 Flink Table API 或 SQL 查询所有的商品信息并打印到控制台。
1,玩具,玩具手枪,28888.2,12
2,玩具,遥控汽车,3222.0,5
3,玩具,洋娃娃,68.0,8
4,玩具,遥控飞机,3222.0,6
5,电器,风扇,55.8,10
6,电器,空调,99999.2,3
7,电器,平底锅,666.0,1
import lombok.AllArgsConstructor;
import lombok.Data;
import lombok.NoArgsConstructor;

/**
 * POJO mapping one line of table.csv: id,type,name,price,num.
 * Lombok {@code @Data} generates getters/setters, equals/hashCode/toString;
 * the no-arg and all-args constructors are required by Flink's
 * DataStream/Table conversions and by the CSV mapping code.
 */
@Data
@NoArgsConstructor
@AllArgsConstructor
public class Shop {
    private String id;      // product id, kept as String (CSV column 1)
    private String type;    // category, e.g. "玩具" / "电器"
    private String name;    // product name
    private Double price;   // unit price — NOTE(review): Double for money; BigDecimal would be safer
    private Integer num;    // quantity (CSV column 5)
}
1 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 2 import org.apache.flink.table.api.*; 3 import org.apache.flink.table.descriptors.Csv; 4 import org.apache.flink.table.descriptors.FileSystem; 5 import org.apache.flink.table.descriptors.Schema; 6 import org.apache.flink.types.Row; 7 import org.apache.flink.util.CloseableIterator; 8 9 public class Flink_Batch_Table { 10 public static void main(String[] args) { 11 12 //flink环境 13 StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 14 env.setParallelism(1); 15 16 EnvironmentSettings build = EnvironmentSettings.newInstance().useBlinkPlanner().inBatchMode().build(); 17 TableEnvironment tableEnv = TableEnvironment.create(build); 18 19 //获取资源文件路径 20 String filePath = Flink_Batch_Table.class.getClassLoader().getResource("table.csv").getPath(); 21 22 //connect获取table(batchTable) 23 tableEnv.connect(new FileSystem().path(filePath)) 24 .withFormat(new Csv()) 25 .withSchema(new Schema() 26 .field("id", DataTypes.STRING()) 27 .field("type", DataTypes.STRING()) 28 .field("name", DataTypes.STRING()) 29 .field("price", DataTypes.DOUBLE()) 30 .field("num", DataTypes.INT())) 31 .createTemporaryTable("batchTable"); 32 33 //查询所有商品信息并打印 34 Table batchTable = tableEnv.from("batchTable"); 35 Table table = batchTable.select("id,type,name,price,num"); 36 TableResult res = table.execute(); 37 CloseableIterator<Row> collect = res.collect(); 38 while (collect.hasNext()){ 39 Row next = collect.next(); 40 System.out.println(next); 41 } 42 43 //统计各个商品的平均价格 44 Table table1 = tableEnv.sqlQuery("select id,(sum(price)/sum(num)) av from batchTable group by id"); 45 TableResult res1 = table1.execute(); 46 CloseableIterator<Row> collect1 = res1.collect(); 47 while (collect1.hasNext()){ 48 Row next = collect1.next(); 49 System.out.println(next); 50 } 51 52 //统计电器里面大于平底锅价格的有哪些 53 Table table2 = tableEnv.sqlQuery("select * from batchTable where type = '电器' and price > (select price from 
batchTable where name = '平底锅')"); 54 CloseableIterator<Row> collect2 = table2.execute().collect(); 55 while (collect2.hasNext()){ 56 System.out.println(collect2.next()); 57 } 58 59 //统计电器里面大于平底锅价格的有哪些 60 Table table3 = tableEnv.from("batchTable"); 61 //where() 里 不能加子句 62 Table where = table3.select("*").where("type = '电器'"); 63 CloseableIterator<Row> collect3 = where.execute().collect(); 64 while (collect3.hasNext()){ 65 System.out.println(collect3.next()); 66 } 67 68 // 统计哪个商品类型的总价格是最大的,要求列出商品类型名称及总价格 69 Table table4 = tableEnv.sqlQuery("select type,sum(price) aa from batchTable group by type order by aa desc limit 1"); 70 CloseableIterator<Row> collect4 = table4.execute().collect(); 71 while (collect4.hasNext()){ 72 System.out.println(collect4.next()); 73 } 74 75 //统计商品个数不等于12和5的商品信息 76 Table table5 = tableEnv.sqlQuery("select * from batchTable where num<>12 and num<>5"); 77 CloseableIterator<Row> collect5 = table5.execute().collect(); 78 while (collect5.hasNext()){ 79 System.out.println(collect5.next()); 80 } 81 82 //统计包含遥控的所有商品信息 83 Table table6 = tableEnv.sqlQuery("select * from batchTable where name like '%遥控%'"); 84 CloseableIterator<Row> collect6 = table6.execute().collect(); 85 while (collect6.hasNext()){ 86 System.out.println(collect6.next()); 87 } 88 89 } 90 }
1 import org.apache.flink.api.common.functions.MapFunction; 2 import org.apache.flink.api.java.tuple.Tuple2; 3 import org.apache.flink.streaming.api.datastream.DataStream; 4 import org.apache.flink.streaming.api.datastream.DataStreamSource; 5 import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; 6 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; 7 import org.apache.flink.table.api.EnvironmentSettings; 8 import org.apache.flink.table.api.Table; 9 import org.apache.flink.table.api.bridge.java.StreamTableEnvironment; 10 import org.apache.flink.types.Row; 11 import org.apache.flink.util.CloseableIterator; 12 13 public class Flink_Stream_Table { 14 public static void main(String[] args) { 15 16 //flink环境 17 StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); 18 env.setParallelism(1); 19 20 EnvironmentSettings build = EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build(); 21 StreamTableEnvironment tableEnv = StreamTableEnvironment.create(env,build); 22 23 //获取资源文件路径 24 String filePath = Flink_Batch_Table.class.getClassLoader().getResource("table.csv").getPath(); 25 26 //stream获取table(streamTable) 27 DataStreamSource<String> source = env.readTextFile(filePath); 28 SingleOutputStreamOperator<Shop> map = source.map(new MapFunction<String, Shop>() { 29 @Override 30 public Shop map(String s) throws Exception { 31 String[] split = s.split(","); 32 return new Shop(split[0], split[1], split[2], Double.valueOf(split[3]), Integer.valueOf(split[4])); 33 } 34 }); 35 36 //将 DataStream 转为 Table将 DataStream 转为 Table 37 Table streamTable = tableEnv.fromDataStream(map); 38 //结果打印1 39 tableEnv.toDataStream(streamTable).print(); 40 //结果打印2 41 DataStream<Shop> shopDataStream = tableEnv.toAppendStream(streamTable, Shop.class); 42 shopDataStream.print(); 43 44 //将 DataStream 注册为一个 Table 临时表 45 tableEnv.createTemporaryView("streamTable",map); 46 //统计各个商品类型的平均价格 47 Table table = 
tableEnv.sqlQuery("select type,(sum(price)/sum(num)) av from streamTable group by `type`"); 48 //结果打印1 49 CloseableIterator<Row> collect = table.execute().collect(); 50 while (collect.hasNext()){ 51 System.out.println(collect.next()); 52 } 53 //结果打印2 (聚合函数,数据变动,不能toAppendStream打印) 54 DataStream<Tuple2<Boolean, Row>> tuple2DataStream = tableEnv.toRetractStream(table, Row.class); 55 tuple2DataStream.print(); 56 57 try { 58 env.execute(); 59 } catch (Exception e) { 60 e.printStackTrace(); 61 } 62 63 } 64 }
pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>work3-4</artifactId>
    <version>1.0-SNAPSHOT</version>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <flink.version>1.13.0</flink.version>
        <scala.binary.version>2.11</scala.binary.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-core</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-java</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-clients_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.projectlombok</groupId>
            <artifactId>lombok</artifactId>
            <version>1.16.22</version>
        </dependency>
        <!-- Table API artifacts: versions now reference ${flink.version} and
             ${scala.binary.version} instead of the hard-coded 1.13.0/2.11, so
             a future version bump stays consistent across all dependencies. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner-blink_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- NOTE(review): the legacy planner below is unused — the code calls
             useBlinkPlanner() — and having both planners on the classpath can
             cause class conflicts; consider removing it. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-planner_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-api-scala-bridge_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-runtime-blink_${scala.binary.version}</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-table-common</artifactId>
            <version>${flink.version}</version>
        </dependency>
        <!-- Was hard-coded to 1.10.0, mismatching flink.version 1.13.0; the
             CSV format must match the Flink runtime version. -->
        <dependency>
            <groupId>org.apache.flink</groupId>
            <artifactId>flink-csv</artifactId>
            <version>${flink.version}</version>
        </dependency>
    </dependencies>
</project>
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 【自荐】一款简洁、开源的在线白板工具 Drawnix
· 没有Manus邀请码?试试免邀请码的MGX或者开源的OpenManus吧
· 园子的第一款AI主题卫衣上架——"HELLO! HOW CAN I ASSIST YOU TODAY
· 无需6万激活码!GitHub神秘组织3小时极速复刻Manus,手把手教你使用OpenManus搭建本
· C#/.NET/.NET Core优秀项目和框架2025年2月简报