Spark Example 1 --- WordCount

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}
/** Classic Spark word-count example.
  *
  * Reads `data/wc.txt`, splits each line on spaces, counts occurrences of
  * every word, and prints each `(word, count)` pair to the console.
  *
  * Uses an explicit `main` instead of the `App` trait: `App` runs the body
  * via delayed initialization, which has well-known initialization-order
  * pitfalls and interacts badly with frameworks that reflectively invoke
  * `main`.
  */
object WordCount {

  def main(args: Array[String]): Unit = {
    // Local single-threaded master; suitable for this self-contained demo.
    val conf = new SparkConf().setMaster("local").setAppName("wc")
    val sc = new SparkContext(conf)
    try {
      // One RDD element per line of the input file.
      val lines: RDD[String] = sc.textFile("data/wc.txt")
      // Tokenize: split each line on single spaces into individual words.
      val words: RDD[String] = lines.flatMap(_.split(" "))
      // Pair every word with 1, then sum counts per key.
      val counts: RDD[(String, Int)] = words.map((_, 1)).reduceByKey(_ + _)
      // NOTE: foreach executes on the executors; with master "local" the
      // output appears in this process's console.
      counts.foreach(println)
    } finally {
      // Always release the SparkContext, even if the job throws.
      sc.stop()
    }
  }
}
posted @   jsqup  阅读(25)  评论(0编辑  收藏  举报
相关博文:
阅读排行:
· TypeScript + Deepseek 打造卜卦网站:技术与玄学的结合
· Manus的开源复刻OpenManus初探
· AI 智能体引爆开源社区「GitHub 热点速览」
· 三行代码完成国际化适配,妙~啊~
· .NET Core 中如何实现缓存的预热?
点击右上角即可分享
微信分享提示