Spark Example 1 — WordCount
import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/** Spark example 1: the classic word count.
  *
  * Reads a whitespace-delimited text file (default: data/wc.txt, or the path
  * given as the first command-line argument), counts occurrences of each
  * word, and prints the resulting (word, count) pairs.
  */
object WordCount {
  // Explicit main instead of `extends App`: the App trait's delayed
  // initialization has well-known ordering pitfalls for entry points.
  def main(args: Array[String]): Unit = {
    // "local" master runs Spark in-process — fine for this demo.
    val conf = new SparkConf().setMaster("local").setAppName("wc")
    val sc: SparkContext = new SparkContext(conf)
    try {
      // Allow overriding the input path from the command line;
      // defaults to the original hard-coded path.
      val path = if (args.nonEmpty) args(0) else "data/wc.txt"
      val lines: RDD[String] = sc.textFile(path)
      // split(" ") would yield empty tokens on consecutive spaces and count
      // "" as a word; split on any whitespace run and drop empties instead.
      val words: RDD[String] = lines.flatMap(_.split("\\s+").filter(_.nonEmpty))
      val counts: RDD[(String, Int)] = words.map((_, 1)).reduceByKey(_ + _)
      // NOTE: foreach(println) executes on the executors; with a local
      // master the output appears on this JVM's stdout.
      counts.foreach(println)
    } finally {
      // Ensure the SparkContext is released even if the job throws.
      sc.stop()
    }
  }
}
This post is from cnblogs (博客园), author: jsqup. Please credit the original link when republishing: https://www.cnblogs.com/jsqup/p/16123610.html