Hadoop 2.x WebUV Example

I. Basic Website Metrics (statistics computed from the logs generated by user behavior on a website)

1. PV: Page Views (every request for a page produces one record, regardless of IP; usually aggregated per day (most common), per week, per month, ...)
2. UV: Unique Visitors (based on cookies; multiple visits by the same user within one day count as a single visitor)
3. VV: Visit Views (the number of visits, based on sessions; from the moment a visitor opens the site until all of its pages are closed counts as one visit)
4. IP: the number of distinct IP addresses (multiple visits from the same IP count only once)
5. Website traffic usually refers to the volume of visits to a site, described by metrics such as the number of visitors and the number of pages they view
   For hosting providers, traffic instead means the amount of data transferred while users access the site
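
To make the PV/UV distinction concrete, here is a minimal standalone sketch (the log lines and visitor ids are invented for illustration) that counts page views and unique visitors for one day of a toy access log:

PvUvDemo.java (illustrative sketch)
===================================
import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class PvUvDemo {

	public static void main(String[] args) {
		// Hypothetical one-day access log, one "page \t visitorId" entry per line
		List<String> log = Arrays.asList(
				"/index\tu001",
				"/detail\tu001",   // same visitor again: +1 PV, +0 UV
				"/index\tu002",
				"/index\tu001");
		long pv = log.size();                      // PV: every record counts
		Set<String> visitors = new HashSet<String>();
		for (String line : log) {
			visitors.add(line.split("\t")[1]);     // UV: deduplicate by visitor id
		}
		System.out.println("PV=" + pv + " UV=" + visitors.size());   // PV=4 UV=2
	}
}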

II. UV Statistics Example (i.e., how many distinct visitors accessed the site per province per day)

1. Analyze the requirements
    1> What does the input data look like? Find what the records have in common, and recall what map, shuffle, and reduce each need to do
    2> List explicitly what output data we want
2. Points to note when planning the implementation
    1> What delimiter separates the fields, and do we need a custom data type?
    2> Roughly: filter out invalid records,
       combine the fields we need using a custom data type,
       then accumulate records per province (the deduplication stage)
    3> Alternatively, no custom data type is needed: just concatenate the field values into a Text key,
       then, in the reduce method, build a HashMap that increments a counter per date and province,
       and assemble the desired output in cleanup (a small sketch of this key-and-dedup idea follows this list; the full job is in Section III)
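
Before the full MapReduce job, here is a minimal standalone sketch of that idea (the composite keys are made up for illustration): the mapper emits one "date \t provinceId _ guid" key per record, the shuffle collapses identical keys (the same visitor in the same province on the same day), and the reducer counts one per surviving key.

UvKeySketch.java (illustrative sketch)
======================================
import java.util.Arrays;
import java.util.Map;
import java.util.TreeMap;
import java.util.TreeSet;

public class UvKeySketch {

	public static void main(String[] args) {
		// Hypothetical mapper output keys in the form "date \t provinceId _ guid"
		String[] mapKeys = {
				"2016-10-28\t1_guidA",
				"2016-10-28\t1_guidA",   // same visitor again -> collapsed by the shuffle
				"2016-10-28\t1_guidB",
				"2016-10-28\t2_guidA"};
		// The shuffle groups and sorts identical keys; a sorted set models that here
		TreeSet<String> grouped = new TreeSet<String>(Arrays.asList(mapKeys));
		// Reducer side: one call per unique key, so count 1 per (date, province)
		Map<String, Integer> uv = new TreeMap<String, Integer>();
		for (String key : grouped) {
			String dateProvince = key.split("_")[0];   // strip the guid suffix
			Integer pre = uv.get(dateProvince);
			uv.put(dateProvince, pre == null ? 1 : pre + 1);
		}
		System.out.println(uv);  // province 1 -> UV 2, province 2 -> UV 1 on 2016-10-28
	}
}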

III. UV Statistics Code Example

WebUvMr.java
============
package com.bigdata_senior.WebUvMr;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;

import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WebUvMr {

	//Mapper: emits one composite key per valid record, in the form "date \t provinceId _ guid"
	private static class UvMapper extends Mapper<LongWritable, Text, Text, NullWritable>{

		private Text mapOutKey = new Text();

		@Override
		public void map(LongWritable key, Text value, Context context)
				throws IOException, InterruptedException {

			// Each log line is tab-separated; records with fewer than 30 fields are invalid
			String lineValue = value.toString();
			String[] strValue = lineValue.split("\t");
			if(strValue.length < 30){
				return;
			}
			// Field 5: the visitor's guid (cookie id)
			String guidIdValue = strValue[5];
			if(StringUtils.isBlank(guidIdValue)){
				return;
			}
			// Field 17: tracking time; the first 10 characters are the date (yyyy-MM-dd)
			String trackTimeValue = strValue[17];
			if(StringUtils.isBlank(trackTimeValue)){
				return;
			}
			String dateValue = trackTimeValue.substring(0, 10);
			// Field 23: province id; skip records where it is blank or not numeric
			int provinceIdValue;
			try{
				if(StringUtils.isBlank(strValue[23])){
					return;
				}
				provinceIdValue = Integer.valueOf(strValue[23]);
			}catch(Exception e){
				return;
			}

			// Identical keys (same day, province and visitor) are merged by the shuffle
			mapOutKey.set(dateValue + "\t" + provinceIdValue + "_" + guidIdValue);
			context.write(mapOutKey, NullWritable.get());
		}
	}
	
	//Reducer: each reduce() call receives one unique "date \t provinceId _ guid" key,
	//so counting one per call yields the UV for that date and province
	private static class UvReducer extends Reducer<Text, NullWritable, Text, LongWritable>{

		// key: "date \t provinceId", value: UV count accumulated across reduce() calls
		// (assumes the distinct date/province pairs fit in memory)
		private Map<String,Integer> uvMap;
		private Text outputKey = new Text();
		private LongWritable outputValue = new LongWritable();

		@Override
		protected void setup(Context context) throws IOException,
				InterruptedException {
			uvMap = new HashMap<String,Integer>();
		}

		@Override
		public void reduce(Text key, Iterable<NullWritable> values, Context context)
				throws IOException, InterruptedException {

			// Strip the guid suffix, keeping only "date \t provinceId"
			String dateProvince = key.toString().split("_")[0];
			if(uvMap.containsKey(dateProvince)){
				uvMap.put(dateProvince, uvMap.get(dateProvince) + 1);
			}else{
				uvMap.put(dateProvince, 1);
			}
		}

		@Override
		protected void cleanup(Context context) throws IOException,
				InterruptedException {

			// Emit the accumulated counts once every key has been processed
			Set<String> dateProvinceSet = uvMap.keySet();
			for(String dateProvince : dateProvinceSet){
				outputKey.set(dateProvince);
				outputValue.set(uvMap.get(dateProvince));
				context.write(outputKey, outputValue);
			}
		}
	}
	
	//Driver
	public int run(String[] args) throws Exception {
		
		Configuration configuration = new Configuration();
		Job job = Job.getInstance(configuration, this.getClass().getSimpleName());
		job.setJarByClass(this.getClass());
		
		//input
		Path inPath = new Path(args[0]);
		FileInputFormat.addInputPath(job,inPath);
		
		//output
		Path outPath = new Path(args[1]);
		FileOutputFormat.setOutputPath(job, outPath);
		
		//mapper
		job.setMapperClass(UvMapper.class);
		job.setMapOutputKeyClass(Text.class);
		job.setMapOutputValueClass(NullWritable.class);
		
		//reducer
		job.setReducerClass(UvReducer.class);
		job.setOutputKeyClass(Text.class);
		job.setOutputValueClass(LongWritable.class);

		// A single reduce task (the default) keeps all composite keys in one in-memory map;
		// with more reduce tasks the counts for a date/province would be split across outputs
		job.setNumReduceTasks(1);
		
		//submit job
		boolean isSuccess = job.waitForCompletion(true);
		
		return isSuccess ? 0 : 1;
	}
	
	public static void main(String[] args) throws Exception {
		
		// Fall back to hard-coded HDFS paths when no arguments are supplied (for local testing)
		if(args.length < 2){
			args = new String[]{
				"hdfs://hadoop09-linux-01.ibeifeng.com:8020/user/liuwl/tmp/webuv/input",
				"hdfs://hadoop09-linux-01.ibeifeng.com:8020/user/liuwl/tmp/webuv/output4"
			};
		}
		//run job
		int status = new WebUvMr().run(args);
		System.exit(status);
	}
}
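
To submit the job, package the class into a jar and run it with, for example, hadoop jar webuvmr.jar com.bigdata_senior.WebUvMr.WebUvMr <input> <output> (the jar name here is only an example); when no paths are passed, the hard-coded HDFS paths in main are used. Note that the output directory must not already exist, otherwise FileOutputFormat will fail the job.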