|NO.Z.00042|——————————|BigDataEnd|——|Hadoop&Spark.V03|——|Spark.v03|spark sql|DataFrame & Dataset Creation|
1. Creating DataFrames & Datasets
### --- Creating DataFrames & Datasets
~~~ There is no need to draw a sharp line between DF and DS: a DataFrame is simply a special Dataset (DataFrame = Dataset[Row]), and a transformation on a DS can yield a DF: ds.transformation => df
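~~~ # A quick illustration of the point above (a hedged sketch assuming a spark-shell session, where spark and spark.implicits._ are already in scope): DataFrame is literally the type alias Dataset[Row], so the two convert back and forth freely.
scala> val df = spark.range(3).toDF("id")   // Dataset => DataFrame
scala> val dsBack = df.as[Long]             // DataFrame => Dataset[Long], via an implicit encoder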
### --- Creating a Dataset from range
scala> val numDS = spark.range(5, 100, 5)
numDS: org.apache.spark.sql.Dataset[Long] = [id: bigint]
~~~ # orderBy is a transformation; desc is a function; show is an action
scala> numDS.orderBy(desc("id")).show(5)
+---+
| id|
+---+
| 95|
| 90|
| 85|
| 80|
| 75|
+---+
only showing top 5 rows
~~~ # Summary statistics
scala> numDS.describe().show
+-------+------------------+
|summary| id|
+-------+------------------+
| count| 19|
| mean| 50.0|
| stddev|28.136571693556885|
| min| 5|
| max| 95|
+-------+------------------+
~~~ # Print the schema
scala> numDS.printSchema
root
|-- id: long (nullable = false)
~~~ # The same computation on the underlying RDD
scala> numDS.rdd.map(_.toInt).stats
res12: org.apache.spark.util.StatCounter = (count: 19, mean: 50.000000, stdev: 27.386128, max: 95.000000, min: 5.000000)
~~~ # Check the number of partitions
scala> numDS.rdd.getNumPartitions
res13: Int = 3
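~~~ # range also has an overload that takes an explicit partition count; a minimal sketch (the value 4 is an arbitrary choice):
scala> val numDS2 = spark.range(5, 100, 5, 4)   // (start, end, step, numPartitions)
scala> numDS2.rdd.getNumPartitions              // should now be 4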
### --- Creating a Dataset from a collection
~~~ Dataset = RDD[case class]
scala> case class Person(name:String, age:Int, height:Int)
defined class Person
~~~ # Note the element type of the Seq
scala> val seq1 = Seq(Person("Jack", 28, 184), Person("Tom", 10, 144), Person("Andy", 16, 165))
seq1: Seq[Person] = List(Person(Jack,28,184), Person(Tom,10,144), Person(Andy,16,165))
scala> val ds1 = spark.createDataset(seq1)
ds1: org.apache.spark.sql.Dataset[Person] = [name: string, age: int ... 1 more field]
~~~ # Print the schema
scala> ds1.printSchema
root
|-- name: string (nullable = true)
|-- age: integer (nullable = false)
|-- height: integer (nullable = false)
scala> ds1.show
+----+---+------+
|name|age|height|
+----+---+------+
|Jack| 28| 184|
| Tom| 10| 144|
|Andy| 16| 165|
+----+---+------+
scala> val seq2 = Seq(("Jack", 28, 184), ("Tom", 10, 144), ("Andy", 16, 165))
seq2: Seq[(String, Int, Int)] = List((Jack,28,184), (Tom,10,144), (Andy,16,165))
scala> val ds2 = spark.createDataset(seq2)
ds2: org.apache.spark.sql.Dataset[(String, Int, Int)] = [_1: string, _2: int ... 1 more field]
scala> ds2.show
+----+---+---+
| _1| _2| _3|
+----+---+---+
|Jack| 28|184|
| Tom| 10|144|
|Andy| 16|165|
+----+---+---+
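~~~ # A hedged aside: the tuple Dataset's _1/_2/_3 columns can be given real names with toDF, trading the tuple type for a DataFrame:
scala> val ds2Named = ds2.toDF("name", "age", "height")   // Dataset[(String, Int, Int)] => DataFrame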
### --- Creating a DataFrame from a collection
~~~ DataFrame = RDD[Row] + Schema
scala> val lst = List(("Jack", 28, 184), ("Tom", 10, 144), ("Andy", 16, 165))
lst: List[(String, Int, Int)] = List((Jack,28,184), (Tom,10,144), (Andy,16,165))
~~~ # Convenient when renaming individual columns
scala> val df1 = spark.createDataFrame(lst).
| withColumnRenamed("_1", "name1").
| withColumnRenamed("_2", "age1").
| withColumnRenamed("_3", "height1")
~~~ Output
df1: org.apache.spark.sql.DataFrame = [name1: string, age1: int ... 1 more field]
scala> df1.orderBy("age1").show(10)
+-----+----+-------+
|name1|age1|height1|
+-----+----+-------+
| Tom| 10| 144|
| Andy| 16| 165|
| Jack| 28| 184|
+-----+----+-------+
~~~ # desc is a function; using it in IDEA requires importing org.apache.spark.sql.functions._
scala> import org.apache.spark.sql.functions._
import org.apache.spark.sql.functions._
scala> df1.orderBy(desc("age1")).show(10)
+-----+----+-------+
|name1|age1|height1|
+-----+----+-------+
| Jack| 28| 184|
| Andy| 16| 165|
| Tom| 10| 144|
+-----+----+-------+
~~~ # Rename all columns of the DF at once
scala> val df2 = spark.createDataFrame(lst).toDF("name", "age", "height")
df2: org.apache.spark.sql.DataFrame = [name: string, age: int ... 1 more field]
### --- Converting an RDD to a DataFrame
~~~ DataFrame = RDD[Row] + Schema
scala> import org.apache.spark.sql.Row
import org.apache.spark.sql.Row
scala> import org.apache.spark.sql.types._
import org.apache.spark.sql.types._
scala> val arr = Array(("Jack", 28, 184), ("Tom", 10, 144), ("Andy", 16, 165))
arr: Array[(String, Int, Int)] = Array((Jack,28,184), (Tom,10,144), (Andy,16,165))
scala> val rdd1 = sc.makeRDD(arr).map(f=>Row(f._1, f._2, f._3))
rdd1: org.apache.spark.rdd.RDD[org.apache.spark.sql.Row] = MapPartitionsRDD[30] at map at <console>:40
scala> val schema = StructType( StructField("name", StringType, false) ::
| StructField("age", IntegerType, false) ::
| StructField("height", IntegerType, false) ::
| Nil)
schema: org.apache.spark.sql.types.StructType = StructType(StructField(name,StringType,false), StructField(age,IntegerType,false), StructField(height,IntegerType,false))
scala> val schema1 = (new StructType).
| add("name", "string", false).
| add("age", "int", false).
| add("height", "int", false)
schema1: org.apache.spark.sql.types.StructType = StructType(StructField(name,StringType,false), StructField(age,IntegerType,false), StructField(height,IntegerType,false))
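~~~ # A third option is building the schema from a DDL string (a sketch; StructType.fromDDL ships with the Spark 2.x line used here, and DDL fields default to nullable = true, unlike the explicit false flags above):
scala> val schemaDdl = StructType.fromDDL("name string, age int, height int")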
~~~ # RDD => DataFrame requires an explicit schema
scala> val rddToDF = spark.createDataFrame(rdd1, schema)
rddToDF: org.apache.spark.sql.DataFrame = [name: string, age: int ... 1 more field]
scala> rddToDF.orderBy(desc("name")).show(false)
+----+---+------+
|name|age|height|
+----+---+------+
|Tom |10 |144 |
|Jack|28 |184 |
|Andy|16 |165 |
+----+---+------+
scala> import org.apache.spark.sql.Row
import org.apache.spark.sql.Row
scala> import org.apache.spark.sql.types._
import org.apache.spark.sql.types._
scala> val arr1 = Array(("Jack", 28, null), ("Tom", 10, 144), ("Andy", 16, 165))
arr1: Array[(String, Int, Any)] = Array((Jack,28,null), (Tom,10,144), (Andy,16,165))
scala> val rdd1 = sc.makeRDD(arr1).map(f=>Row(f._1, f._2, f._3))
rdd1: org.apache.spark.rdd.RDD[org.apache.spark.sql.Row] = MapPartitionsRDD[36] at map at <console>:44
scala> val structType = StructType(StructField("name", StringType, false) ::
| StructField("age", IntegerType, false) ::
| StructField("height", IntegerType, false) :: Nil)
structType: org.apache.spark.sql.types.StructType = StructType(StructField(name,StringType,false), StructField(age,IntegerType,false), StructField(height,IntegerType,false))
~~~ # false means the field must not be null
scala> val schema1 = structType
schema1: org.apache.spark.sql.types.StructType = StructType(StructField(name,StringType,false), StructField(age,IntegerType,false), StructField(height,IntegerType,false))
scala> val df1 = spark.createDataFrame(rdd1, schema1)
df1: org.apache.spark.sql.DataFrame = [name: string, age: int ... 1 more field]
~~~ # The next statement fails (one of the fields is null)
scala> df1.show
[Stage 7:> (0 + 1) / 1]21/10/20 14:46:06 WARN TaskSetManager: Lost task 0.0 in stage 7.0 (TID 19, 192.168.1.122, executor 1): java.lang.RuntimeException: Error while encoding: java.lang.RuntimeException: The 2th field 'height' of input row cannot be null.
~~~ # true allows the field to be null, so the statement runs normally
scala> val schema2 = StructType( StructField("name", StringType, false) ::
| StructField("age", IntegerType, false) ::
| StructField("height", IntegerType, true) :: Nil)
schema2: org.apache.spark.sql.types.StructType = StructType(StructField(name,StringType,false), StructField(age,IntegerType,false), StructField(height,IntegerType,true))
scala> val df2 = spark.createDataFrame(rdd1, schema2)
df2: org.apache.spark.sql.DataFrame = [name: string, age: int ... 1 more field]
scala> df2.show
+----+---+------+
|name|age|height|
+----+---+------+
|Jack| 28| null|
| Tom| 10| 144|
|Andy| 16| 165|
+----+---+------+
~~~ # Needed in IDEA; spark-shell already has it in scope
scala> import spark.implicits._
import spark.implicits._
scala> val arr2 = Array(("Jack", 28, 150), ("Tom", 10, 144), ("Andy", 16, 165))
arr2: Array[(String, Int, Int)] = Array((Jack,28,150), (Tom,10,144), (Andy,16,165))
scala> val rddToDF = sc.makeRDD(arr2).toDF("name", "age", "height")
rddToDF: org.apache.spark.sql.DataFrame = [name: string, age: int ... 1 more field]
scala> case class Person(name:String, age:Int, height:Int)
defined class Person
scala> val arr2 = Array(("Jack", 28, 150), ("Tom", 10, 144), ("Andy", 16, 165))
arr2: Array[(String, Int, Int)] = Array((Jack,28,150), (Tom,10,144), (Andy,16,165))
scala> import org.apache.spark.rdd.RDD
import org.apache.spark.rdd.RDD
scala> val rdd2: RDD[Person] = spark.sparkContext.makeRDD(arr2).map(f=>Person(f._1, f._2, f._3))
scala> val ds2 = rdd2.toDS() // reflection-based inference: Spark derives the schema from the case class definition
scala> val df2 = rdd2.toDF() // reflection-based inference
scala> ds2.printSchema
root
 |-- name: string (nullable = true)
 |-- age: integer (nullable = false)
 |-- height: integer (nullable = false)
scala> df2.printSchema
root
 |-- name: string (nullable = true)
 |-- age: integer (nullable = false)
 |-- height: integer (nullable = false)
scala> df2.orderBy(desc("name")).show(10)
+----+---+------+
|name|age|height|
+----+---+------+
| Tom| 10|   144|
|Jack| 28|   150|
|Andy| 16|   165|
+----+---+------+
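~~~ # With reflection-based inference, a nullable column comes from an Option field; a minimal sketch using a hypothetical Person2:
scala> case class Person2(name: String, age: Int, height: Option[Int])
scala> val ds4 = Seq(Person2("Jack", 28, None), Person2("Tom", 10, Some(144))).toDS()
scala> ds4.printSchema   // height: integer (nullable = true)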
### --- Converting an RDD to a Dataset
~~~ Dataset = RDD[case class]
~~~ DataFrame = RDD[Row] + Schema
scala> val ds3 = spark.createDataset(rdd2)
scala> ds3.show(10)
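~~~ # The reverse direction is also available: an untyped DataFrame becomes a typed Dataset with as[...], matching columns to case class fields by name; a sketch:
scala> val ds5 = df2.as[Person]
scala> ds5.show(10)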
### --- Creating a DataFrame from a file (CSV example)
scala> val df1 = spark.read.csv("data/people1.csv")
df1: org.apache.spark.sql.DataFrame = [_c0: string, _c1: string ... 1 more field]
scala> df1.printSchema()
root
|-- _c0: string (nullable = true)
|-- _c1: string (nullable = true)
|-- _c2: string (nullable = true)
scala> df1.show()
+-----+---+---------+
| _c0|_c1| _c2|
+-----+---+---------+
| name|age| job|
|Jorge| 30|Developer|
| Bob| 32|Developer|
+-----+---+---------+
scala> val df2 = spark.read.csv("data/people2.csv")
df2: org.apache.spark.sql.DataFrame = [_c0: string]
scala> df2.printSchema()
root
|-- _c0: string (nullable = true)
scala> df2.show()
+------------------+
| _c0|
+------------------+
| name;age;job|
|Jorge;30;Developer|
| Bob;32;Developer|
+------------------+
~~~ # Specifying options; passing the schema as a DDL string requires Spark 2.3.0+
scala> val schema = "name string, age int, job string"
schema: String = name string, age int, job string
scala> val df3 = spark.read.
     |   options(Map(("delimiter", ";"), ("header", "true"))).
     |   schema(schema).
     |   csv("data/people2.csv")
df3: org.apache.spark.sql.DataFrame = [name: string, age: int ... 1 more field]
scala> df3.printSchema()
scala> df3.show
~~~ # Automatic type inference
scala> val df4 = spark.read.
     |   option("delimiter", ";").
     |   option("header", "true").
     |   option("inferschema", "true").
     |   csv("data/people2.csv")
df4: org.apache.spark.sql.DataFrame = [name: string, age: int ... 1 more field]
scala> df4.printSchema()
scala> df4.show
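~~~ # On Spark versions before 2.3.0 the same read can be written with a StructType instead of a DDL string; a minimal sketch:
scala> val csvSchema = StructType(StructField("name", StringType) ::
     |   StructField("age", IntegerType) ::
     |   StructField("job", StringType) :: Nil)
scala> val df5 = spark.read.
     |   option("delimiter", ";").
     |   option("header", "true").
     |   schema(csvSchema).
     |   csv("data/people2.csv")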
2. Programmatic implementation
### --- Programmatic implementation
package cn.yanqi.sparksql

import org.apache.spark.sql.{DataFrame, Row, SparkSession}

case class Person(name: String, age: Int, height: Int)

object Demo1 {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession
      .builder()
      .appName("Demo1")
      .master("local[*]")
      .getOrCreate()
    val sc = spark.sparkContext
    sc.setLogLevel("warn")

    // Creating DataFrames & Datasets
    import spark.implicits._
//    val arr2 = Array(("Jack", 28, 150), ("Tom", 10, 144), ("Andy", 16, 165))
//    val rddToDF: DataFrame = sc.makeRDD(arr2).toDF("name", "age", "height")
//    rddToDF.orderBy("age").show(10)
//    rddToDF.orderBy(desc("age")).show(10)

//    val arr2 = Array(("Jack", 28, 150), ("Tom", 10, 144), ("Andy", 16, 165))
//    val rdd2: RDD[Person] = spark.sparkContext.makeRDD(arr2).map(f => Person(f._1, f._2, f._3))
//    val ds2 = rdd2.toDS() // reflection-based inference: Spark derives the schema from the case class definition
//    val df2 = rdd2.toDF() // reflection-based inference
//    ds2.printSchema
//    df2.printSchema
//    ds2.orderBy(desc("name")).show(10)
//    df2.orderBy(desc("name")).show(10)

    val df1: DataFrame = spark.read.csv("data/people1.csv")
    df1.printSchema()
    df1.show()

    val df2: DataFrame = spark.read.csv("data/people2.csv")
    df2.printSchema()
    df2.show()

    // Specify read options
    val df3: DataFrame = spark.read
      .options(Map(("header", "true"), ("inferschema", "true")))
      .csv("data/people1.csv")
    df3.printSchema()
    df3.show()

    // Schema as a DDL string: Spark 2.3.0+
    val schemaStr = "name string, age int, job string"
    val df4: DataFrame = spark.read
      .option("header", "true")
      .option("delimiter", ";")
      .schema(schemaStr)
      .csv("data/people2.csv")
    df4.printSchema()
    df4.show()

    spark.close()
  }
}
### --- Build and run
~~~ # Prepare the data files: data/people1.csv and data/people2.csv
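~~~ # Judging from the outputs in this section, the two files contain:
~~~ # data/people1.csv (comma-delimited):
~~~ #   name,age,job
~~~ #   Jorge,30,Developer
~~~ #   Bob,32,Developer
~~~ # data/people2.csv (semicolon-delimited):
~~~ #   name;age;job
~~~ #   Jorge;30;Developer
~~~ #   Bob;32;Developer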
~~~ # Run output
~~~ # (The IDEA-generated java launch command, including the full Maven dependency classpath for Spark 2.4.5 / Scala 2.12, is omitted here; it runs cn.yanqi.sparksql.Demo1.)
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
root
|-- _c0: string (nullable = true)
|-- _c1: string (nullable = true)
|-- _c2: string (nullable = true)
+-----+---+---------+
| _c0|_c1| _c2|
+-----+---+---------+
| name|age| job|
|Jorge| 30|Developer|
| Bob| 32|Developer|
+-----+---+---------+
root
|-- _c0: string (nullable = true)
+------------------+
| _c0|
+------------------+
| name;age;job|
|Jorge;30;Developer|
| Bob;32;Developer|
+------------------+
root
|-- name: string (nullable = true)
|-- age: integer (nullable = true)
|-- job: string (nullable = true)
+-----+---+---------+
| name|age| job|
+-----+---+---------+
|Jorge| 30|Developer|
| Bob| 32|Developer|
+-----+---+---------+
root
|-- name: string (nullable = true)
|-- age: integer (nullable = true)
|-- job: string (nullable = true)
+-----+---+---------+
| name|age| job|
+-----+---+---------+
|Jorge| 30|Developer|
| Bob| 32|Developer|
+-----+---+---------+
Process finished with exit code 0