Spark practice problem
Sample log data, one record per line (site, user id, timestamp):

site1,user1,2016-11-20 02:12:22
site1,user2,2016-11-28 04:12:22
site1,user3,2016-11-20 11:12:22
site1,user3,2016-11-23 11:12:22
site2,user4,2016-11-20 15:12:22
site3,user5,2016-11-29 08:12:22
site3,user6,2016-11-22 08:12:22
site4,user7,2016-11-20 10:12:22
site4,user7,2016-11-24 11:12:22
Aggregate the logs from the last 7 days. Each result is keyed by (date, hour, site),
with value (pv (number of page views), uv (number of unique visitors, deduplicated by visitor id)).
For example, the record site1,user3,2016-11-20 11:12:22 contributes to the key (2016-11-20, 11, site1).
The results must be stored in HBase. Sketch the solution in Spark pseudocode.
import java.text.SimpleDateFormat

import org.apache.spark.{SparkConf, SparkContext}

object tst {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setAppName("test").setMaster("local[3]")
    val sc = new SparkContext(conf)
    val currentDay = "2016-11-20"
    val sdf = new SimpleDateFormat("yyyy-MM-dd")

    val rdd = sc.textFile("D:\\test\\spark\\笔试题\\siteuser.txt").map(line => {
      val fields = line.split(",")
      val date = fields(2).split(" ")(0)
      val hour = fields(2).split(" ")(1).substring(0, 2)
      // e.g. "site1,user1,2016-11-20 02:12:22" -> (site1, user1, 2016-11-20, 02)
      (fields(0), fields(1), date, hour)
    }).filter(t => {
      // keep only records from the last 7 days; intervalDays is a custom helper (sketched below)
      TimeUtils.intervalDays(currentDay, t._3, sdf, sdf) < 7
    }).cache() // without cache(), the transformations above would be recomputed when uvrdd is evaluated

    // pv: total number of hits per (date, hour, site)
    val pvrdd = rdd.groupBy(t => (t._3, t._4, t._1)).map(t => (t._1, t._2.size))
    println(pvrdd.collect().toBuffer)

    // uv: number of distinct users per (date, hour, site); grouping by the full
    // (site, user, date, hour) tuple deduplicates repeat visits by the same user
    // within an hour, then each distinct tuple counts exactly once toward its key
    val uvrdd = rdd.groupBy(t => t).map(t => ((t._1._3, t._1._4, t._1._1), 1)).reduceByKey(_ + _)
    println(uvrdd.collect().toBuffer)

    sc.stop()
  }
}
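The code calls TimeUtils.intervalDays, whose implementation is not shown in the source. Below is a minimal sketch of what such a helper could look like, assuming it returns the absolute difference in days between two date strings; the name and signature are taken from the call site above:

import java.text.SimpleDateFormat
import java.util.concurrent.TimeUnit

object TimeUtils {
  // Parse both date strings with their respective formats and return the
  // absolute difference in whole days.
  def intervalDays(d1: String, d2: String,
                   f1: SimpleDateFormat, f2: SimpleDateFormat): Long = {
    val millis = math.abs(f1.parse(d1).getTime - f2.parse(d2).getTime)
    TimeUnit.MILLISECONDS.toDays(millis)
  }
}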
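The problem also asks for the results to be stored in HBase, which the solution above leaves out. A minimal sketch of that step using TableOutputFormat and saveAsNewAPIHadoopDataset follows; the table name site_stats and column family stats are hypothetical, and the snippet would sit in main before sc.stop():

import org.apache.hadoop.hbase.HBaseConfiguration
import org.apache.hadoop.hbase.client.Put
import org.apache.hadoop.hbase.io.ImmutableBytesWritable
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat
import org.apache.hadoop.hbase.util.Bytes
import org.apache.hadoop.mapreduce.Job

// Configure the output job to write into the (hypothetical) HBase table "site_stats".
val hbaseConf = HBaseConfiguration.create()
hbaseConf.set(TableOutputFormat.OUTPUT_TABLE, "site_stats")
val job = Job.getInstance(hbaseConf)
job.setOutputFormatClass(classOf[TableOutputFormat[ImmutableBytesWritable]])
job.setOutputKeyClass(classOf[ImmutableBytesWritable])
job.setOutputValueClass(classOf[Put])

// Join pv and uv on the shared (date, hour, site) key and write one row per key,
// with pv and uv as columns in the (hypothetical) "stats" column family.
pvrdd.join(uvrdd).map { case ((date, hour, site), (pv, uv)) =>
  val put = new Put(Bytes.toBytes(s"$date|$hour|$site")) // row key: date|hour|site
  put.addColumn(Bytes.toBytes("stats"), Bytes.toBytes("pv"), Bytes.toBytes(pv))
  put.addColumn(Bytes.toBytes("stats"), Bytes.toBytes("uv"), Bytes.toBytes(uv))
  (new ImmutableBytesWritable, put) // the key is ignored by TableOutputFormat
}.saveAsNewAPIHadoopDataset(job.getConfiguration)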
Learn computing well, and you can go anywhere in the world without fear.