// Export HBase data to the cluster (把hbase数据导出到集群上)

package com.asp;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
public class Demo6 {
	public static Configuration conf = HBaseConfiguration.create();
	static {
		Configuration HBASE_CONFIG = new Configuration();
		HBASE_CONFIG.set("hbase.zookeeper.quorum",
				"BJ-YZ-103R-63-38,BJ-YZ-103R-63-39,BJ-YZ-103R-63-40");
		HBASE_CONFIG.set("hbase.zookeeper.property.clientPort", "2181");
		conf = HBaseConfiguration.create(HBASE_CONFIG);
	}
	public static class THMapper extends TableMapper<Text, Text> {
	
		private Text text = new Text();
		public void map(ImmutableBytesWritable row, Result value,
				Context context) {
			String rows = new String(row.get());
			byte[] t1=	value.getValue(Bytes.toBytes("c1"),Bytes.toBytes("name"));
			if (t1==null){
				t1=Bytes.toBytes("");
			}
			String s1=new String(t1);
			byte[] t2=	value.getValue(Bytes.toBytes("c1"),Bytes.toBytes("age"));
			if (t2==null){
				t2=Bytes.toBytes("");
			}
			String s2=new String(t2);
			StringBuffer sb=new StringBuffer();
			sb.append(s1).append("@_@"+s2);
			StringBuffer sb1=new StringBuffer();
			sb1.append(rows+"@_@");
			text.set(sb.toString());
			try {//context.write(new  Text(NullWritable.get().toString()), text);
				//context.write(NullWritable.get(), text);
				//context.write(new Text().set(sb1.toString()) , text);
			   context.write(new Text(sb1.toString()) , text);
			} catch (IOException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			} catch (InterruptedException e) {
				// TODO Auto-generated catch block
				e.printStackTrace();
			}
		}
	}
	public static Job createSubmittableJob(Configuration conf)
			throws IOException {
		Job job = new Job(conf, "hello");
		job.setJarByClass(Demo6.class);
		Path out = new Path("/tmp/test/");
		job.setOutputFormatClass(TextOutputFormat.class);
		FileOutputFormat.setOutputPath(job, out);
		job.setMapperClass(THMapper.class);
		Scan scan = new Scan();
		TableMapReduceUtil.initTableMapperJob("test", scan, THMapper.class, Text.class, Text.class, job);
		 try {
			job.waitForCompletion(true);
		} catch (InterruptedException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		} catch (ClassNotFoundException e) {
			// TODO Auto-generated catch block
			e.printStackTrace();
		}
		return job;
	}
	public static void main(String[] args) throws Exception {
		Job job = createSubmittableJob(conf);
		System.exit(job.waitForCompletion(true) ? 0 : 1);
	}
}

  

// Source: blog post by 沙漠里的小鱼, posted 2014-04-22 17:17