Spark: compute the total read count of all articles published by each author

 

import org.apache.spark.SparkConf
import org.apache.spark.SparkContext
import org.apache.spark.sql.catalyst.expressions.Second
import javafx.scene.shape.Line
import org.apache.spark.SparkConf
import org.dmg.pmml.True
import org.apache.spark.SparkConf
import scala.util.matching.Regex
import java.io.FileNotFoundException
import breeze.io.TextReader.FileReader
import org.apache.spark.SparkConf
import org.apache.commons.math3.geometry.euclidean.twod.Line
import scala.tools.nsc.doc.model.Val

object spark_test {

  /**
   * Spark driver that sums per-author article read counts.
   *
   * Reads a whitespace-separated text file where (per the original code)
   * field 1 is the author and field 2 is a read count — NOTE(review):
   * exact input schema inferred from the indices used; confirm against the
   * crawler that produces the file.
   *
   * @param args optional: args(0) overrides the input file path
   *             (defaults to the original hard-coded path).
   */
  def main(args: Array[String]): Unit = {
    // Allow the input path to be passed on the command line; keep the
    // original hard-coded file as the default for backward compatibility.
    val inputPath = if (args.nonEmpty) args(0) else "/Users/lihu/Desktop/crawle/tap.txt"

    val conf = new SparkConf().setAppName("name").setMaster("local")
    val sc = new SparkContext(conf)
    try {
      val lines = sc.textFile(inputPath, 1)
      lines
        .map { record =>
          // Split once instead of twice; fields(2).toInt replaces the
          // redundant Integer.parseInt(String.valueOf(...)) round-trip.
          val fields = record.split(" ")
          (fields(1), fields(2).toInt)
        }
        .reduceByKey(_ + _) // total read count per author
        .collect()
        .foreach(println)
    } finally {
      // Always release the SparkContext, even if the job fails.
      sc.stop()
    }
  }
}

 

Posted on 2017-02-05 15:41 by sunyaxue — views: 238, comments: 0

(Navigation)