import org.apache.spark.streaming._
import org.apache.spark.SparkConf
// local[3] runs Spark with 3 threads; a receiver-based stream needs at least 2
// (one for the receiver itself, the rest for processing)
val conf = new SparkConf().setAppName("WordCountOnLine").setMaster("local[3]")
// Create a StreamingContext with a 5-second batch interval
val ssc = new StreamingContext(conf, Seconds(5))
// Listen on a port and receive the data stream
// In a separate terminal on the host "spark1", run: nc -lk 9999
// then keep typing lines of input
val receiverInputStream = ssc.socketTextStream("spark1", 9999)
// Split each received line into words
val words = receiverInputStream.flatMap(_.split(" "))
// Map each word to a (word, 1) pair
val pairs = words.map(word => (word, 1))
// Count occurrences of each word within the batch
val word_count = pairs.reduceByKey(_ + _)
// Print the results of each batch to the console
word_count.print()
// Start the computation
ssc.start()
// Wait for the computation to terminate
ssc.awaitTermination()
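
// The listing above is written in spark-shell / script style (top-level vals).
// Below is a minimal sketch of the same job packaged as a standalone application
// object, e.g. for use with spark-submit; the object name and the host/port are
// carried over from the snippet above and are otherwise assumptions.
object WordCountOnLine {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("WordCountOnLine").setMaster("local[3]")
    val ssc = new StreamingContext(conf, Seconds(5))

    // Same pipeline as above: split lines into words, count per 5-second batch
    val wordCounts = ssc.socketTextStream("spark1", 9999)
      .flatMap(_.split(" "))
      .map(word => (word, 1))
      .reduceByKey(_ + _)

    wordCounts.print()

    ssc.start()
    ssc.awaitTermination()
  }
}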