MapReduce: Reading Data from HBase, Processing It, and Writing It Back to HBase
The code is as follows:
package com.hbase.mapreduce;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Mutation;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableInputFormat;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.hbase.mapreduce.TableOutputFormat;
import org.apache.hadoop.hbase.mapreduce.TableReducer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

/**
 * @author FengZhen
 * @create 2018-09-17
 * Read from HBase, process, and write back into HBase.
 * zip -d HBaseToHBase.jar 'META-INF/.SF' 'META-INF/.RSA' 'META-INF/*SF'
 */
public class HBaseToHBase extends Configured implements Tool {

    private static String addr = "HDP233,HDP232,HDP231";
    private static String port = "2181";

    public enum Counters { ROWS, COLS, VALID, ERROR, EMPTY, NOT_EMPTY }

    static class ParseMapper extends TableMapper<ImmutableBytesWritable, Put> {

        private byte[] columnFamily = null;

        @Override
        protected void setup(Mapper<ImmutableBytesWritable, Result, ImmutableBytesWritable, Put>.Context context)
                throws IOException, InterruptedException {
            columnFamily = Bytes.toBytes(context.getConfiguration().get("conf.columnfamily"));
        }

        @Override
        protected void map(ImmutableBytesWritable key, Result value,
                Mapper<ImmutableBytesWritable, Result, ImmutableBytesWritable, Put>.Context context)
                throws IOException, InterruptedException {
            context.getCounter(Counters.ROWS).increment(1);
            String hbaseValue = null;
            Put put = new Put(key.get());
            for (Cell cell : value.listCells()) {
                context.getCounter(Counters.COLS).increment(1);
                hbaseValue = Bytes.toString(CellUtil.cloneValue(cell));
                if (hbaseValue.length() > 0) {
                    // split the cell value and store the two parts as 'top' and 'detail'
                    String top = hbaseValue.substring(0, hbaseValue.length() / 2);
                    String detail = hbaseValue.substring(hbaseValue.length() / 2, hbaseValue.length() - 1);
                    put.addColumn(columnFamily, Bytes.toBytes("top"), Bytes.toBytes(top));
                    put.addColumn(columnFamily, Bytes.toBytes("detail"), Bytes.toBytes(detail));
                    context.getCounter(Counters.NOT_EMPTY).increment(1);
                } else {
                    put.addColumn(columnFamily, Bytes.toBytes("empty"), Bytes.toBytes(hbaseValue));
                    context.getCounter(Counters.EMPTY).increment(1);
                }
            }
            try {
                context.write(key, put);
                context.getCounter(Counters.VALID).increment(1);
            } catch (Exception e) {
                e.printStackTrace();
                context.getCounter(Counters.ERROR).increment(1);
            }
        }
    }

    static class ParseTableReducer extends TableReducer<ImmutableBytesWritable, Put, ImmutableBytesWritable> {
        @Override
        protected void reduce(ImmutableBytesWritable key, Iterable<Put> values,
                Reducer<ImmutableBytesWritable, Put, ImmutableBytesWritable, Mutation>.Context context)
                throws IOException, InterruptedException {
            for (Put put : values) {
                context.write(key, put);
            }
        }
    }

    public int run(String[] arg0) throws Exception {
        String table = arg0[0];
        String column = arg0[1];
        String destTable = arg0[2];

        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", addr);
        configuration.set("hbase.zookeeper.property.clientPort", port);

        Scan scan = new Scan();
        if (null != column) {
            byte[][] colkey = KeyValue.parseColumn(Bytes.toBytes(column));
            if (colkey.length > 1) {
                scan.addColumn(colkey[0], colkey[1]);
                configuration.set("conf.columnfamily", Bytes.toString(colkey[0]));
                configuration.set("conf.columnqualifier", Bytes.toString(colkey[1]));
            } else {
                scan.addFamily(colkey[0]);
                configuration.set("conf.columnfamily", Bytes.toString(colkey[0]));
            }
        }

        Job job = Job.getInstance(configuration);
        job.setJobName("HBaseToHBase");
        job.setJarByClass(HBaseToHBase.class);
        job.getConfiguration().set(TableInputFormat.INPUT_TABLE, table);
        job.getConfiguration().set(TableOutputFormat.OUTPUT_TABLE, destTable);
        job.setMapperClass(ParseMapper.class);
        job.setMapOutputKeyClass(ImmutableBytesWritable.class);
        job.setMapOutputValueClass(Put.class);
//        job.setReducerClass(ParseTableReducer.class);
        job.setOutputKeyClass(ImmutableBytesWritable.class);
        job.setOutputValueClass(Put.class);
        job.setInputFormatClass(TableInputFormat.class);
        TableInputFormat.addColumns(scan, KeyValue.parseColumn(Bytes.toBytes(column)));
        job.setOutputFormatClass(TableOutputFormat.class);
        // map-only job: the mapper writes Puts directly to the output table
        job.setNumReduceTasks(0);
        // Using TableMapReduceUtil throws a class-not-found error:
        // Caused by: java.lang.ClassNotFoundException: com.yammer.metrics.core.MetricsRegistry
//        TableMapReduceUtil.initTableMapperJob(table, scan, ParseMapper.class, ImmutableBytesWritable.class, Put.class, job);
//        TableMapReduceUtil.initTableReducerJob(table, IdentityTableReducer.class, job);
        return job.waitForCompletion(true) ? 0 : 1;
    }

    public static void main(String[] args) throws Exception {
        String[] params = new String[] { "test_table_mr", "data:info", "test_table_dest" };
        int exitCode = ToolRunner.run(new HBaseToHBase(), params);
        System.exit(exitCode);
    }
}
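The job writes its Puts through TableOutputFormat, which does not create the destination table, so both tables should exist before the job is submitted. A minimal HBase shell sketch for the table and column-family names hard-coded in main (the sample row key and value are only assumptions for testing):

# source table with column family 'data', plus one sample row (row key/value are made up)
create 'test_table_mr', 'data'
put 'test_table_mr', 'row1', 'data:info', 'hello hbase mapreduce'

# destination table must exist before the job runs; TableOutputFormat will not create it
create 'test_table_dest', 'data'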
Packaging and testing
zip -d HBaseToHBase.jar 'META-INF/.SF' 'META-INF/.RSA' 'META-INF/*SF'
hadoop jar HBaseToHBase.jar com.hbase.mapreduce.HBaseToHBase
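Once the job completes, the output can be checked from the HBase shell; for the hard-coded destination table that would be something like:

# each non-empty source cell should produce 'top' and 'detail' columns (or 'empty') in the 'data' family
scan 'test_table_dest'

The ROWS / COLS / VALID counters defined in the Counters enum also appear in the job's counter summary and give a quick sanity check.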
Problems encountered
At first I used TableMapReduceUtil, but it threw the following error:
Exception in thread "main" java.lang.NoClassDefFoundError: com/yammer/metrics/core/MetricsRegistry
	at org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addHBaseDependencyJars(TableMapReduceUtil.java:732)
	at org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(TableMapReduceUtil.java:777)
	at org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.initTableMapperJob(TableMapReduceUtil.java:212)
	at org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.initTableMapperJob(TableMapReduceUtil.java:168)
	at org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.initTableMapperJob(TableMapReduceUtil.java:291)
	at org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.initTableMapperJob(TableMapReduceUtil.java:92)
	at com.hbase.mapreduce.HBaseToHBase.run(HBaseToHBase.java:108)
	at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:76)
	at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:90)
	at com.hbase.mapreduce.HBaseToHBase.main(HBaseToHBase.java:115)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at org.apache.hadoop.util.RunJar.run(RunJar.java:233)
	at org.apache.hadoop.util.RunJar.main(RunJar.java:148)
Caused by: java.lang.ClassNotFoundException: com.yammer.metrics.core.MetricsRegistry
	at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
	... 16 more
Solution: drop TableMapReduceUtil and configure the job step by step instead (setting the input/output formats and table names on the Job directly, as the code above does); this avoids the missing-class problem.
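An alternative, if you would rather keep TableMapReduceUtil, is to put HBase's MapReduce dependencies (including the metrics jar it tries to load) on the client classpath before submitting. This is only a sketch of that option, not what the post finally used; depending on the HBase version, `hbase classpath` can be used in place of `hbase mapredcp`:

# add the jars HBase needs for MapReduce to Hadoop's classpath, then submit as before
export HADOOP_CLASSPATH=$(hbase mapredcp):$HADOOP_CLASSPATH
hadoop jar HBaseToHBase.jar com.hbase.mapreduce.HBaseToHBase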