// SparkSQL: counting rows in a Hive table via HiveContext

package com.bnls.test.parse
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.streaming.{Seconds, StreamingContext}

object ParseSQL {

  /** Entry point: runs a row-count query against a Hive table through Spark SQL
    * and prints the result. One-shot batch job — no streaming involved.
    */
  def main(args: Array[String]): Unit = {

    // Local mode with two worker threads; Kryo serialization for faster shuffles.
    // NOTE: the key was previously misspelled as "spark.serilizer", so Kryo was
    // silently never enabled (Spark ignores unknown config keys).
    val sparkConf = new SparkConf()
      .setMaster("local[2]")
      .setAppName("parsehabse")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.rdd.compress", "true")

    val sc = new SparkContext(sparkConf)
    val hiveContext = new HiveContext(sc)

    // count(1) yields a single bigint column aliased "rn".
    val sql = "select count(1) as rn from ods_hbase_salesdb.ec_sales_pay_detail"
    val resultDataFrame = hiveContext.sql(sql)
    resultDataFrame.show()

    // The result row has exactly one column, at index 0, of type Long.
    // The original code called row.getString(1): wrong index (Row fields are
    // 0-based) and wrong type (count(...) is bigint, not string) — it would
    // throw at runtime.
    resultDataFrame.rdd.foreach { row =>
      println(row.getLong(0))
    }

    // The StreamingContext previously created here had no input DStreams or
    // output operations registered, so ssc.start() would have thrown
    // IllegalArgumentException ("No output operations registered").
    // This is a batch job; stop the SparkContext cleanly instead.
    sc.stop()
  }
}
// posted @ 2018-12-04 15:20 by 何国秀_xue (blog footer from original source: 139 reads, 0 comments)