A Rookie's Blog

Though the fierce wind rises, never give up on life.



RowCountMapper

The mapper emits the pair ("rowCount", 1) for every row it scans; a reducer that sums the values produces the table's total row count.

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

// Emits ("rowCount", 1) once per scanned row.
public class RowCountMapper extends TableMapper<Text, LongWritable> {
    private static final Text KEY = new Text("rowCount");
    private static final LongWritable ONE = new LongWritable(1);

    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
            throws IOException, InterruptedException {
        context.write(KEY, ONE);
    }
}
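On its own the mapper does nothing; it needs a MapReduce driver to bind it to the table. The post doesn't include one, so the sketch below is an assumption: it reuses the shujuku table from the later snippets, writes to a hypothetical output path /tmp/rowcount-output, and lets Hadoop's stock LongSumReducer add up the 1s the mapper emits.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer;

public class RowCountDriver {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();
        Job job = Job.getInstance(config, "hbase-row-count");
        job.setJarByClass(RowCountDriver.class);

        Scan scan = new Scan();
        scan.setCaching(500);        // fetch more rows per RPC round trip
        scan.setCacheBlocks(false);  // recommended for MapReduce scans

        // Bind the table scan to RowCountMapper.
        TableMapReduceUtil.initTableMapperJob(
                "shujuku", scan, RowCountMapper.class,
                Text.class, LongWritable.class, job);

        // Hadoop's built-in LongSumReducer totals the 1s from the mapper.
        job.setReducerClass(LongSumReducer.class);
        job.setNumReduceTasks(1);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        // Hypothetical output path; any writable HDFS directory works.
        FileOutputFormat.setOutputPath(job, new Path("/tmp/rowcount-output"));

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

With a single reducer, the output directory ends up holding one part file with a single line of the form "rowCount <total>".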
Counting can also be done client-side, without MapReduce: open a connection, scan the whole table, and count the Results as they stream back.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

// Counts rows client-side by scanning the whole table.
public class CountRows {
    public static void main(String[] args) throws Exception {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();

        // try-with-resources closes the connection, table, and scanner.
        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("shujuku"));
             ResultScanner scanner = table.getScanner(new Scan())) {

            int rowCount = 0;
            for (Result ignored : scanner) {
                rowCount++;
            }
            System.out.println("Row count: " + rowCount);
        }
    }
}
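A plain Scan returns every cell of every row, which is wasteful when only the count matters. A common optimization, not in the original snippet and offered here as a sketch, is FirstKeyOnlyFilter, which returns just one cell per row so far less data crosses the wire:

import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;

public class FastCount {
    // Counts rows while fetching only the first cell of each row.
    static long count(Table table) throws IOException {
        Scan scan = new Scan();
        scan.setFilter(new FirstKeyOnlyFilter()); // one cell per row is enough to count it
        scan.setCaching(500);                     // more rows per RPC round trip
        long rows = 0;
        try (ResultScanner scanner = table.getScanner(scan)) {
            for (Result ignored : scanner) {
                rows++;
            }
        }
        return rows;
    }
}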
To clear the table, scan every row key, build a Delete for each, and submit them as one batch.

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

import java.util.ArrayList;
import java.util.List;

// Deletes every row by scanning for row keys and issuing a batched Delete.
public class ClearTableData {
    public static void main(String[] args) throws Exception {
        org.apache.hadoop.conf.Configuration config = HBaseConfiguration.create();

        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("shujuku"));
             ResultScanner scanner = table.getScanner(new Scan())) {

            List<Delete> deletes = new ArrayList<>();
            for (Result result : scanner) {
                deletes.add(new Delete(result.getRow()));
            }

            // Batched delete; for very large tables, flush in chunks instead
            // of accumulating every Delete in memory at once.
            table.delete(deletes);
            System.out.println("All records deleted successfully.");
        }
    }
}
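Deleting row by row works, but it is slow and memory-hungry on large tables. If the goal is simply to empty the table, the Admin API's truncateTable is a simpler alternative; a minimal sketch, assuming the same shujuku table (truncation requires the table to be disabled first):

import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class TruncateTable {
    public static void main(String[] args) throws Exception {
        try (Connection connection = ConnectionFactory.createConnection(HBaseConfiguration.create());
             Admin admin = connection.getAdmin()) {
            TableName name = TableName.valueOf("shujuku");
            admin.disableTable(name);          // truncate only works on a disabled table
            admin.truncateTable(name, false);  // false = do not preserve region splits
        }
    }
}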

 
