Spark读取文件统计行数

 

package sy3

import org.apache.spark.SparkContext
import org.apache.spark.SparkConf

object tjhs {
  /** Entry point: counts the number of lines in a text file with Spark
    * and prints the result.
    *
    * @param args command-line arguments (unused)
    */
  def main(args: Array[String]): Unit = {
    // Local file path; switch to the HDFS URI below to read from the cluster instead.
    val logFile = "E:/IntelliJ IDEA 2019.3.3/WorkSpace/MyScala/src/main/scala/sy3/test1"
    //val logFile = "hdfs://192.168.249.128:9000/mapreduce/input1/in1.txt"
    val conf = new SparkConf().setAppName("Simple Application")
    val sc = new SparkContext(conf)
    try {
      // Load the file as an RDD of lines, requesting a minimum of 2 partitions.
      val logData = sc.textFile(logFile, 2)
      val num = logData.count()
      // Message translates to: "This file has %d lines!"
      println("这个文件有 %d 行!".format(num))
    } finally {
      // Always release the SparkContext, even if the job fails,
      // so executors and cluster resources are not leaked.
      sc.stop()
    }
  }
}

 

posted @ 2021-01-07 15:38  .HAHA  阅读(1621)  评论(0)  编辑  收藏  举报