导航

搭建sparksql的hive测试环境

Posted on 2016-03-31 15:59  ggzone  阅读(145)  评论(0)  编辑  收藏  举报

sbt依赖

// Build definition for the SparkSQL + Hive smoke test.
name := "Pi"
version := "1.0"
scalaVersion := "2.10.6"

// Use %% consistently so sbt appends the Scala binary suffix (_2.10)
// automatically, keeping it in sync with scalaVersion instead of
// hard-coding "spark-hive_2.10" while spark-core already uses %%.
libraryDependencies ++= Seq(
  "org.apache.spark" %% "spark-core" % "1.5.2",
  "org.apache.spark" %% "spark-hive" % "1.5.2",
  "joda-time" % "joda-time" % "2.9.2"
)

// Domestic mirror for faster resolution; NOTE(review): plain-http
// resolvers are rejected by modern sbt — switch to https if upgrading.
resolvers += "OS China" at "http://maven.oschina.net/content/groups/public/"
import org.apache.spark._
import org.apache.spark.sql.SQLContext
import org.apache.spark.sql.hive.HiveContext


/**
  * Created by code-pc on 16/3/14.
  */
/**
  * Created by code-pc on 16/3/14.
  *
  * Smoke test for a SparkSQL + Hive environment: starts a local
  * SparkContext, opens a HiveContext, and prints the tables visible
  * in the Hive metastore.
  */
object Pi {

  def main(args: Array[String]): Unit = {

    // Run locally with 5 threads; no cluster required for this test.
    val conf = new SparkConf().setMaster("local[5]").setAppName("AndrzejApp")
    val sc = new SparkContext(conf)

    try {
      // HiveContext reads hive-site.xml from the classpath if present;
      // otherwise it falls back to a local Derby-backed metastore.
      val hiveContext = new HiveContext(sc)
      val tables = hiveContext.sql("show tables")
      println("hello")
      tables.collect().foreach(println)
    } finally {
      // Original code leaked the SparkContext; always release it,
      // even when the query above throws.
      sc.stop()
    }
  }
}

（此处原文附有运行结果截图，图片未能随正文一同保留。）