4 - Spark Study Notes 4

  • SparkCore-WordCount
package com.lotuslaw.spark.core.wc

import org.apache.spark.rdd.RDD
import org.apache.spark.{SparkConf, SparkContext}

import scala.collection.mutable

/**
 * @author: lotuslaw
 * @version: V1.0
 * @package: com.lotuslaw.spark.core.wc
 * @create: 2021-12-02 10:08
 * @description:
 */
object Spark01_WordCount2 {

  def main(args: Array[String]): Unit = {
    // Create the Spark configuration object
    val sparkConf = new SparkConf().setMaster("local[*]").setAppName("WordCount")

    // Create the Spark context (the connection object)
    val sc = new SparkContext(sparkConf)

    wordCount1(sc).collect().foreach(println)

    // Close the connection
    sc.stop()
  }

  // groupBy
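  // Groups the words themselves by key; every word instance is shuffled and
  // the group sizes become the counts. No map-side combining happens.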
  def wordCount1(sc: SparkContext): RDD[(String, Int)] = {
    val rdd = sc.makeRDD(List("Hello Spark", "Hello Scala"))
    val words = rdd.flatMap(_.split(" "))
    val group = words.groupBy(word => word)
    val wordCount = group.mapValues(iter => iter.size)
    wordCount
  }

  // groupByKey
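  // Shuffles every (word, 1) pair and materializes all values per key before
  // counting, so it moves more data than the combine-based variants below.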
  def wordCount2(sc: SparkContext): RDD[(String, Int)] = {
    val rdd = sc.makeRDD(List("Hello Spark", "Hello Scala"))
    val words = rdd.flatMap(_.split(" "))
    val wordOne = words.map((_, 1))
    val group = wordOne.groupByKey()
    val wordCount = group.mapValues(iter => iter.size)
    wordCount
  }

  // reduceByKey
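  // Pre-aggregates values on the map side before the shuffle, so less data
  // crosses the network. The idiomatic choice for word count.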
  def wordCount3(sc: SparkContext): RDD[(String, Int)] = {
    val rdd = sc.makeRDD(List("Hello Spark", "Hello Scala"))
    val words = rdd.flatMap(_.split(" "))
    val wordOne = words.map((_, 1))
    val wordCount = wordOne.reduceByKey(_ + _)
    wordCount
  }

  // aggregateByKey
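  // Takes a zero value plus separate intra-partition (seqOp) and
  // inter-partition (combOp) functions; here both are simple addition.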
  def wordCount4(sc: SparkContext): RDD[(String, Int)] = {
    val rdd = sc.makeRDD(List("Hello Spark", "Hello Scala"))
    val words = rdd.flatMap(_.split(" "))
    val wordOne = words.map((_, 1))
    val wordCount = wordOne.aggregateByKey(0)(_ + _, _ + _)
    wordCount
  }

  // foldByKey
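  // A simplified aggregateByKey for the case where the intra- and
  // inter-partition functions are the same.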
  def wordCount5(sc: SparkContext): RDD[(String, Int)] = {
    val rdd = sc.makeRDD(List("Hello Spark", "Hello Scala"))
    val words = rdd.flatMap(_.split(" "))
    val wordOne = words.map((_, 1))
    val wordCount = wordOne.foldByKey(0)(_ + _)
    wordCount
  }

  // combineByKey
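  // The most general byKey aggregation: createCombiner turns the first value
  // for a key into an accumulator, mergeValue folds further values in within
  // a partition, and mergeCombiners merges accumulators across partitions.
  // The explicit Int annotations are needed because the accumulator type
  // cannot be inferred from createCombiner alone.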
  def wordCount6(sc: SparkContext): RDD[(String, Int)] = {
    val rdd = sc.makeRDD(List("Hello Spark", "Hello Scala"))
    val words = rdd.flatMap(_.split(" "))
    val wordOne = words.map((_, 1))
    val wordCount = wordOne.combineByKey(
      v => v,
      (x: Int, y: Int) => x + y,
      (x: Int, y: Int) => x + y
    )
    wordCount
  }

  // countByKey
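  // An action, not a transformation: counts occurrences of each key and
  // returns a plain Map to the driver instead of an RDD.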
  def wordCount7(sc: SparkContext): collection.Map[String, Long] = {
    val rdd = sc.makeRDD(List("Hello Spark", "Hello Scala"))
    val words = rdd.flatMap(_.split(" "))
    val wordOne = words.map((_, 1))
    val wordCount = wordOne.countByKey()
    wordCount
  }

  // countByValue
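  // Also an action: counts each distinct element directly, so the (word, 1)
  // mapping step is unnecessary.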
  def wordCount8(sc: SparkContext): collection.Map[String, Long] = {
    val rdd = sc.makeRDD(List("Hello Scala", "Hello Spark"))
    val words = rdd.flatMap(_.split(" "))
    val wordCount = words.countByValue()
    wordCount
  }

  // reduce, aggregate, fold
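  // Wraps each word in a single-entry mutable Map and merges the maps with
  // the reduce action; aggregate and fold can merge them the same way.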
  def wordCount91011(sc: SparkContext): mutable.Map[String, Long] = {
    val rdd = sc.makeRDD(List("Hello Scala", "Hello Spark"))
    val words = rdd.flatMap(_.split(" "))

    val mapWord = words.map(
      word => mutable.Map[String, Long]((word, 1))
    )

    mapWord.reduce(
      (map1, map2) => {
        map2.foreach {
          case (word, count) =>
            val newCount = map1.getOrElse(word, 0L) + count
            map1.update(word, newCount)
        }
        map1
      }
    )
  }

}
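For reference, every variant above computes the same counts for the sample input. Running the object as-is (main calls wordCount1) should print the pairs below, though the ordering may vary from run to run since the data is distributed:

(Hello,2)
(Spark,1)
(Scala,1)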

Author: lotuslaw

Source: https://www.cnblogs.com/lotuslaw/p/15640169.html

License: This work is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International (CC BY-NC-SA 4.0) license.
