Make-up Experiment

This make-up lab works against the HBase table shujuku and consists of three small programs: a MapReduce mapper that counts rows, a standalone client that counts rows with a Scan, and a utility that clears all records from the table.

First, the MapReduce version. The mapper emits one ("rowCount", 1) pair per table row:
import java.io.IOException;

import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.mapreduce.TableMapper;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;

// Map phase of the row count job: emits one ("rowCount", 1) pair per HBase row.
public class RowCountMapper extends TableMapper<Text, LongWritable> {
    private static final LongWritable ONE = new LongWritable(1);
    private static final Text ROW_COUNT_KEY = new Text("rowCount");

    @Override
    protected void map(ImmutableBytesWritable key, Result value, Context context)
            throws IOException, InterruptedException {
        // The row key itself is not needed: every row contributes a single 1
        // under the shared "rowCount" key, which the reducer then sums.
        context.write(ROW_COUNT_KEY, ONE);
    }
}
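
The mapper cannot run by itself; it needs a reducer that sums the emitted ones and a driver that binds the job to the table. The sketch below shows one way this could be wired up, assuming the same table shujuku. The class names RowCountDriver and RowCountReducer, the job name, the Scan caching settings, and the output path /tmp/rowcount-output are illustrative choices, not part of the original lab; TableMapReduceUtil.initTableMapperJob is the standard HBase helper that configures the table scan as the job input.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class RowCountDriver {

    // Sums the 1s emitted by RowCountMapper into a single ("rowCount", total) record.
    public static class RowCountReducer extends Reducer<Text, LongWritable, Text, LongWritable> {
        @Override
        protected void reduce(Text key, Iterable<LongWritable> values, Context context)
                throws IOException, InterruptedException {
            long sum = 0;
            for (LongWritable v : values) {
                sum += v.get();
            }
            context.write(key, new LongWritable(sum));
        }
    }

    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        Job job = Job.getInstance(conf, "hbase-row-count");
        job.setJarByClass(RowCountDriver.class);

        Scan scan = new Scan();
        scan.setCaching(500);        // fetch rows from the region server in batches
        scan.setCacheBlocks(false);  // recommended for full-table MapReduce scans

        // Feed the rows of "shujuku" into RowCountMapper.
        TableMapReduceUtil.initTableMapperJob(
                "shujuku", scan, RowCountMapper.class,
                Text.class, LongWritable.class, job);

        job.setReducerClass(RowCountReducer.class);
        job.setNumReduceTasks(1);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        FileOutputFormat.setOutputPath(job, new Path("/tmp/rowcount-output"));  // illustrative output path

        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
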
Next, the standalone client. It counts the rows of shujuku by iterating over a full table scan:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

// Counts the rows of table "shujuku" with a client-side scan.
public class CountRows {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();

        // try-with-resources closes the connection, table and scanner even if the scan fails.
        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("shujuku"))) {

            Scan scan = new Scan();
            long rowCount = 0;
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    rowCount++;
                }
            }

            System.out.println("Row count: " + rowCount);
        }
    }
}
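
The scan above pulls every cell of every row to the client just to count them. As a lighter variant (a sketch, not part of the original lab), a FirstKeyOnlyFilter can be attached to the scan so that each row returns only its first cell; the class name CountRowsFiltered is illustrative:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;

// Same row count, but each scanned row carries only its first cell over the wire.
public class CountRowsFiltered {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();

        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("shujuku"))) {

            Scan scan = new Scan();
            scan.setFilter(new FirstKeyOnlyFilter());  // server-side: keep only the first cell per row

            long rowCount = 0;
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    rowCount++;
                }
            }

            System.out.println("Row count: " + rowCount);
        }
    }
}
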
Finally, the utility that clears the table. It scans all row keys and issues one Delete per row:

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.Table;

// Clears table "shujuku" by deleting every row it currently contains.
public class ClearTableData {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();

        try (Connection connection = ConnectionFactory.createConnection(config);
             Table table = connection.getTable(TableName.valueOf("shujuku"))) {

            // Collect a Delete for every existing row key.
            List<Delete> deletes = new ArrayList<>();
            try (ResultScanner scanner = table.getScanner(new Scan())) {
                for (Result result : scanner) {
                    deletes.add(new Delete(result.getRow()));
                }
            }

            // Send all deletes to the server in one batch call.
            table.delete(deletes);

            System.out.println("All records deleted successfully.");
        }
    }
}
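
As a side note, when the goal is simply to empty a table, the Admin API can truncate it in one call, which is what the HBase shell's truncate command does: the table is dropped and recreated instead of being deleted row by row. A minimal sketch, assuming the same table shujuku (the class name TruncateTable is illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

// Empties table "shujuku" by truncating it through the Admin API.
public class TruncateTable {
    public static void main(String[] args) throws Exception {
        Configuration config = HBaseConfiguration.create();

        try (Connection connection = ConnectionFactory.createConnection(config);
             Admin admin = connection.getAdmin()) {

            TableName tn = TableName.valueOf("shujuku");
            admin.disableTable(tn);          // the table must be disabled before truncating
            admin.truncateTable(tn, false);  // false = do not preserve the existing region splits

            System.out.println("Table truncated.");
        }
    }
}

This removes all data (including old cell versions) much faster than issuing per-row Deletes, but it needs admin rights and the table is briefly unavailable while it is disabled and recreated.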


posted on 2024-12-03 23:05 by hhmzd233