
Converting an RDD to a DataFrame in Spark

SparkRDDToDF

package com.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

object Demo06RDDtoDF {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo06RDDtoDF")
      .master("local")
      .config("spark.sql.shuffle.partitions", 2) // keep shuffle partitions low for a local demo (the default is 200)
      .getOrCreate()

    import spark.implicits._

    // Read the raw text file; each line is assumed to look like: id,name,age,gender,clazz
    val stuRDD: RDD[String] = spark.sparkContext.textFile("bigdata19-spark/data/students.txt")

    // RDD to DataFrame
    // Method 1: specify the column names manually with toDF;
    // the tuple arity must match the number of names given
    val stuRddToDF: DataFrame = stuRDD.map(line => {
      val splits: Array[String] = line.split(",")
      (splits(0), splits(1), splits(2).toInt, splits(3), splits(4))
    }).toDF("id", "name", "age", "gender", "clazz")

    stuRddToDF.show()
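
    // Illustrative addition (not in the original post): printSchema shows the
    // column names and the types inferred from the tuple
    stuRddToDF.printSchema()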

    // Method 2: map each line to a case class; toDF() derives
    // the column names and types from the case class fields
    val stuRddToDF2: DataFrame = stuRDD.map(line => {
      val strings: Array[String] = line.split(",")
      StuRDDToDF(strings(0), strings(1), strings(2).toInt, strings(3), strings(4))
    }).toDF()
    stuRddToDF2.show()

    // DataFrame to RDD
    // Calling the .rdd method directly yields an RDD in which every record is a Row object
    val rdd: RDD[Row] = stuRddToDF.rdd
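
    // Illustrative usage (not in the original post): Row fields can be read
    // back by position or by name
    rdd.map(row => (row.getString(0), row.getAs[String]("name"))).foreach(println)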
  }

}
case class StuRDDToDF(id:String,name:String,age:Int,gender:String,clazz:String)
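
A third route, not used above but part of the standard Spark SQL API, is spark.createDataFrame with an explicit StructType schema. The sketch below assumes the same five-column students.txt layout as the original code; the object name Demo06RDDtoDFSchema is made up for illustration.

package com.sql

import org.apache.spark.rdd.RDD
import org.apache.spark.sql.types.{IntegerType, StringType, StructField, StructType}
import org.apache.spark.sql.{DataFrame, Row, SparkSession}

// Hypothetical companion example: build the DataFrame from an explicit schema
object Demo06RDDtoDFSchema {
  def main(args: Array[String]): Unit = {

    val spark: SparkSession = SparkSession
      .builder()
      .appName("Demo06RDDtoDFSchema")
      .master("local")
      .getOrCreate()

    val stuRDD: RDD[String] = spark.sparkContext.textFile("bigdata19-spark/data/students.txt")

    // Map each line into a Row whose field order matches the schema below
    val rowRDD: RDD[Row] = stuRDD.map(line => {
      val splits: Array[String] = line.split(",")
      Row(splits(0), splits(1), splits(2).toInt, splits(3), splits(4))
    })

    // Declare the column names and types explicitly instead of relying on toDF
    val schema: StructType = StructType(Seq(
      StructField("id", StringType),
      StructField("name", StringType),
      StructField("age", IntegerType),
      StructField("gender", StringType),
      StructField("clazz", StringType)
    ))

    val stuDF: DataFrame = spark.createDataFrame(rowRDD, schema)
    stuDF.show()
  }
}

Declaring the schema up front is handy when the column types need to be controlled precisely or when no case class or tuple mapping is available.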

 

posted on 2022-10-28 19:54 by 不想写代码的小玉