SparkSQL demo

1. Sample data: data1.txt

xiaoming,25,chengdu
xiaohua,23,beijing
liuyang,16,hangzhou
xiaoqiang,19,zhejiang
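
Each line is a comma-separated record of name, age, and city. The demo below maps every line onto a Person case class; as a minimal standalone sketch of that mapping (using the first sample line above):

case class Person(name: String, age: Int, addr: String)

val fields = "xiaoming,25,chengdu".split(",")
// The age column is trimmed and converted to Int; the other two fields stay as strings.
val person = Person(fields(0), fields(1).trim.toInt, fields(2))
// person == Person(xiaoming,25,chengdu)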

2. Demo code

package com.test.sparksql

import org.apache.spark.sql.SQLContext
import org.apache.spark.{SparkConf, SparkContext}

/**
  * Created by admin on 2017/12/26-16:45.
  * Description : Loads data1.txt into a DataFrame and queries it with Spark SQL.
  */
object SparkSqlApplication {

  case class Person(name:String, age:Int, addr:String)

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("Spark Sql Test").setMaster("local")
    val sc = new SparkContext(conf)
    val sqlContext = new SQLContext(sc)

    // Brings sql() and the RDD-to-DataFrame conversion (toDF) into scope.
    import sqlContext._
    import sqlContext.implicits._

    // Parse each comma-separated line into a Person and convert the RDD to a DataFrame.
    val people = sc.textFile("D:\\SparkSqlDataTest\\data1.txt")
      .map(_.split(","))
      .map(p => Person(p(0), p(1).trim.toInt, p(2)))
      .toDF()

    // Register the DataFrame as a temporary table so it can be queried with SQL.
    people.registerTempTable("people")
    // Query the table, sorted by age in ascending order.
    val sortedPeople = sql("SELECT name, age, addr FROM people ORDER BY age")

    // Format each Row as a string, collect to the driver, and print.
    sortedPeople.map(t => "name:" + t(0) + " age:" + t(1) + " addr:" + t(2)).collect().foreach(println)

    sc.stop()
  }
}
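
The demo above uses the Spark 1.x API (SQLContext and registerTempTable). As a minimal sketch only, the same program on Spark 2.x and later, where SparkSession replaces SQLContext and createOrReplaceTempView replaces registerTempTable, might look like the following; the object name SparkSqlApplication2 is illustrative and the local path is reused from above:

package com.test.sparksql

import org.apache.spark.sql.SparkSession

object SparkSqlApplication2 {

  case class Person(name: String, age: Int, addr: String)

  def main(args: Array[String]): Unit = {
    // SparkSession bundles SparkContext and SQLContext since Spark 2.0.
    val spark = SparkSession.builder()
      .appName("Spark Sql Test")
      .master("local")
      .getOrCreate()

    import spark.implicits._

    val people = spark.sparkContext.textFile("D:\\SparkSqlDataTest\\data1.txt")
      .map(_.split(","))
      .map(p => Person(p(0), p(1).trim.toInt, p(2)))
      .toDF()

    // createOrReplaceTempView is the Spark 2.x replacement for registerTempTable.
    people.createOrReplaceTempView("people")

    val sorted = spark.sql("SELECT name, age, addr FROM people ORDER BY age")

    // Collect the rows to the driver and print them in the same format as the demo above.
    sorted.collect().foreach(r => println("name:" + r(0) + " age:" + r(1) + " addr:" + r(2)))

    spark.stop()
  }
}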


3. Output

name:liuyang age:16 addr:hangzhou
name:xiaoqiang age:19 addr:zhejiang
name:xiaohua age:23 addr:beijing
name:xiaoming age:25 addr:chengdu

