【HBase Learning Part 5】HBase MapReduce

Environment
  VM: VMware 10
  Linux: CentOS-6.5-x86_64
  Client: Xshell 4
  FTP: Xftp 4
  JDK 8
  hadoop-2.6.5
  hbase-0.98.12.1-hadoop2
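
This post walks through a classic word count, but with a twist: the job reads plain text from HDFS while the reducer extends HBase's TableReducer, so each result is written as a row in an HBase table instead of to an HDFS output directory.

First, the mapper. It splits each input line on spaces and emits a (word, 1) pair for every token: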

 

package wc;

import java.io.IOException;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

public class WCMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    @Override
    protected void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {
        // split each input line on spaces and emit a (word, 1) pair per token
        String[] strs = value.toString().split(" ");
        for (String str : strs) {
            context.write(new Text(str), new IntWritable(1));
        }
    }
}
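
Next, the reducer. Instead of a plain Reducer it extends TableReducer: it sums the counts for each word and emits a Put, which the HBase output format applies to the target table.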

 

package wc;

import java.io.IOException;

import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;

public class WCReducer extends TableReducer<Text, IntWritable, ImmutableBytesWritable> {

    @Override
    protected void reduce(Text text, Iterable<IntWritable> iterable, Context context)
            throws IOException, InterruptedException {

        // sum the per-word counts emitted by the mapper
        int sum = 0;
        for (IntWritable it : iterable) {
            sum += it.get();
        }
        // write the MR result to HBase: row key = word, column cf:ct = count
        Put put = new Put(text.toString().getBytes());
        put.add("cf".getBytes(), "ct".getBytes(), String.valueOf(sum).getBytes());
        // the row key travels inside the Put, so the output key can be null
        context.write(null, put);

    }
}
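
Finally, the driver. TableMapReduceUtil.initTableReducerJob wires the reducer, the output format, and the connection to the "wc" table in a single call; the input is an ordinary text directory on HDFS (here, a Hive warehouse path).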

 

package wc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;

public class WCRunner {

    public static void main(String[] args) throws Exception {

        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://node1:8020");
        conf.set("hbase.zookeeper.quorum", "node1,node2,node3");
        Job job = Job.getInstance(conf);
        job.setJarByClass(WCRunner.class);

        // set the mapper and its output key/value types
        job.setMapperClass(WCMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);

        // wire the reducer to the HBase table "wc".
        // The last argument is addDependencyJars: false means the HBase
        // dependency jars are not shipped to the cluster, i.e. run locally.
        TableMapReduceUtil.initTableReducerJob("wc", WCReducer.class, job, null, null, null, null, false);
        FileInputFormat.addInputPath(job, new Path("/user/hive/warehouse/wc/"));
        job.waitForCompletion(true);
    }
}
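
One prerequisite the driver does not handle: the output table "wc" with column family "cf" must already exist, or the job will fail at write time. It can be created from the HBase shell (create 'wc', 'cf') or programmatically. Below is a minimal sketch using the 0.98-era HBaseAdmin API against the same ZooKeeper quorum; the class name CreateWcTable is only for illustration.

package wc;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

// hypothetical helper, not part of the original post
public class CreateWcTable {

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "node1,node2,node3");
        HBaseAdmin admin = new HBaseAdmin(conf);
        // create the "wc" table with column family "cf" if it is missing
        if (!admin.tableExists("wc")) {
            HTableDescriptor desc = new HTableDescriptor(TableName.valueOf("wc"));
            desc.addFamily(new HColumnDescriptor("cf"));
            admin.createTable(desc);
        }
        admin.close();
    }
}

After the job finishes, running scan 'wc' in the HBase shell should show one row per word, with the count stored in cf:ct.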

 

posted @ 2019-03-01 17:52  cac2020