package com.bnls.test.parse
import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext
import org.apache.spark.streaming.{Seconds, StreamingContext}
/**
 * Local-mode Spark driver used to experiment with reading HBase-backed
 * Hive tables. Currently only exercises a stepped range loop; the Hive /
 * Streaming read is kept below as a commented-out experiment.
 */
object ParseHbase {
  def main(args: Array[String]): Unit = {
    // BUGFIX: the config key was misspelled as "spark.serilizer", so Spark
    // silently ignored it and Kryo was never enabled. Correct key is
    // "spark.serializer".
    val sparkConf = new SparkConf().setMaster("local[2]").setAppName("parsehabse")
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.rdd.compress", "true")
    val sc = new SparkContext(sparkConf)
    // Print 1, 3, 5, 7, 9 — a stepped inclusive range (step = 2).
    // val v = 1.to(10,2)
    for (v <- 1.to(10, 2)) {
      println(v)
    }
    // NOTE(review): experiment kept for reference — reads an HBase-backed
    // Hive table via HiveContext inside a 60s streaming context. Not active.
    /*val hiveContext = new HiveContext(sc)
    val ssc = new StreamingContext(sc, Seconds(60))
    val sql = "select * from ods_hbase_salesdb.ec_sales_pay_detail"
    val resultDataFram = hiveContext.sql(sql)
    //resultDataFram.show()
    resultDataFram.rdd.foreach(row=>{
      val a = row.getString(1)
      println(a)
    })
    ssc.start()
    ssc.awaitTermination()*/
  }
}