Spark SQL load/save

Java

import org.apache.spark.SparkConf;
import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.Dataset;
import org.apache.spark.sql.Row;
import org.apache.spark.sql.SaveMode;
import org.apache.spark.sql.SparkSession;

public class LoadAndSaveDemo {
    private static SparkConf conf = new SparkConf().setAppName("loadandsavedemo").setMaster("local");
    private static JavaSparkContext jsc = new JavaSparkContext(conf);
    private static SparkSession session = new SparkSession(jsc.sc());

    public static void main(String[] args) {

        // Read; when no format is specified, parquet is assumed by default
        Dataset<Row> dataset = session.read().load("./src/main/java/cn/tele/spark_sql/dataset/users.parquet");

        dataset.show();

        dataset.createOrReplaceTempView("users");

        // Save to a directory; when no format is specified, the output is written as parquet
        // session.sql("select * from users").write().save("./src/main/java/cn/tele/spark_sql/dataset/users");

        // Or:
        // dataset.select(dataset.col("")).write().save("");

        // Read JSON
        Dataset<Row> json = session.read().format("json")
                .load("./src/main/java/cn/tele/spark_sql/dataset/students.json");
        json.show();

        // json.select(json.col("name")).write().format("json").save("./src/main/java/cn/tele/spark_sql/dataset/students");

        // Specify the SaveMode
        json.select(json.col("name")).write().format("json").mode(SaveMode.Append)
                .save("./src/main/java/cn/tele/spark_sql/dataset/students");

        session.stop();
        jsc.close();
    }
}

Scala

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, SaveMode}

object LoadAndSaveDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("loadandsavedemo").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    /* Read without a format (parquet by default) and save back, again as parquet by default
    val df = sqlContext.read.load("./src/main/scala/cn/tele/spark_sql/dataframe/users.parquet")
    df.show()

    df.write.save("./src/main/scala/cn/tele/spark_sql/dataframe/users") */

    // Read with an explicit format, then save with an explicit format and SaveMode
    val df = sqlContext.read.format("json").load("./src/main/scala/cn/tele/spark_sql/dataframe/students.json")
    df.write.format("json").mode(SaveMode.Append).save("./src/main/scala/cn/tele/spark_sql/dataframe/students")

    sc.stop()
  }
}

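Both versions above only use SaveMode.Append. For reference, here is a minimal Scala sketch (same SQLContext style as above; the /tmp output paths are just placeholders) of the four save modes a DataFrameWriter accepts:

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.{SQLContext, SaveMode}

object SaveModeDemo {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("savemodedemo").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    val df = sqlContext.read.format("json").load("./src/main/scala/cn/tele/spark_sql/dataframe/students.json")

    // ErrorIfExists (the default): fail if the target path already exists
    df.write.mode(SaveMode.ErrorIfExists).format("json").save("/tmp/students_error")

    // Append: add the new files alongside whatever is already at the path
    df.write.mode(SaveMode.Append).format("json").save("/tmp/students_append")

    // Overwrite: delete the existing data at the path first, then write
    df.write.mode(SaveMode.Overwrite).format("json").save("/tmp/students_overwrite")

    // Ignore: do nothing (no write, no error) if the path already exists
    df.write.mode(SaveMode.Ignore).format("json").save("/tmp/students_ignore")

    sc.stop()
  }
}

ErrorIfExists is what you get when no mode is set, which is why the earlier save calls without mode() would fail on a second run against the same directory.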
 
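The SQLContext used in the Scala version and the direct new SparkSession(sc) construction in the Java version are the older entry points; since Spark 2.0 the recommended way is SparkSession.builder. A minimal sketch of the same load/save flow with it (the object name and paths are just illustrative):

import org.apache.spark.sql.{SaveMode, SparkSession}

object LoadAndSaveWithSession {
  def main(args: Array[String]): Unit = {
    // Build (or reuse) a SparkSession; no separate SparkContext/SQLContext is needed
    val spark = SparkSession.builder()
      .appName("loadandsavedemo")
      .master("local")
      .getOrCreate()

    // Same default behaviour: load without a format reads parquet
    val users = spark.read.load("./src/main/scala/cn/tele/spark_sql/dataframe/users.parquet")
    users.show()

    // Explicit format plus save mode, as in the demos above
    val students = spark.read.format("json").load("./src/main/scala/cn/tele/spark_sql/dataframe/students.json")
    students.write.format("json").mode(SaveMode.Append).save("./src/main/scala/cn/tele/spark_sql/dataframe/students")

    spark.stop()
  }
}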
