Running WordCount in spark-shell
-- Step 1: Start the Spark shell
cd $SPARK_HOME
bin/spark-shell --master spark://master:7077 --executor-memory 512M
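
Once the shell comes up, it is worth confirming it is attached to the intended master before running anything. sc is the SparkContext the shell creates automatically; the expected value below simply mirrors the --master flag used above:

scala> sc.master  // should return spark://master:7077 if the shell connected to the standalone master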
-- Step 2: Run the WordCount statements
scala> val textFile = sc.textFile("file:///home/hadoop/mooc_data/wc.txt")
scala> val counts = textFile.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
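
RDD transformations are lazy, so the two lines above only build the lineage; nothing executes until an action runs. A quick sanity check before writing to HDFS is to pull a small sample back to the driver (take is a standard RDD action; the sample size of 5 is an arbitrary choice, not from the original):

scala> counts.take(5).foreach(println)  // triggers the job and prints a few (word, count) pairs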
scala> counts.saveAsTextFile("hdfs://master:8020/mooc_data")
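
Note that saveAsTextFile throws an error if the target directory already exists, so the output path must be new on each run. As a minimal sketch for verifying the result, the part files can be read back into the shell, assuming the same HDFS address as above:

scala> sc.textFile("hdfs://master:8020/mooc_data").collect().foreach(println)  // reads every part file under the output directory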