Using the HBase Java API
1. Create a Maven project
The pom.xml file is as follows:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <groupId>org.example</groupId>
    <artifactId>HbaseApi</artifactId>
    <version>1.0-SNAPSHOT</version>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.1.4</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-auth</artifactId>
            <version>3.1.4</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-client -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>2.2.6</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>2.2.6</version>
        </dependency>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
            <scope>test</scope>
        </dependency>
        <dependency>
            <groupId>org.testng</groupId>
            <artifactId>testng</artifactId>
            <version>6.14.3</version>
            <scope>test</scope>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.0</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                    <encoding>UTF-8</encoding>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-shade-plugin</artifactId>
                <version>2.2</version>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>shade</goal>
                        </goals>
                        <configuration>
                            <filters>
                                <filter>
                                    <artifact>*:*</artifact>
                                    <excludes>
                                        <exclude>META-INF/*.SF</exclude>
                                        <exclude>META-INF/*.DSA</exclude>
                                        <exclude>META-INF/*.RSA</exclude>
                                    </excludes>
                                </filter>
                            </filters>
                        </configuration>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
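A side note on configuration: every example below hard-codes the ZooKeeper quorum, with node01/node02/node03 standing in for your own cluster. An alternative is to place the cluster's hbase-site.xml under src/main/resources; HBaseConfiguration.create() reads it from the classpath, so no addresses appear in code. A minimal sketch of that variant (the ConnectionCheck class name is made up for illustration):
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

import java.io.IOException;

public class ConnectionCheck {
    public static void main(String[] args) throws IOException {
        // No quorum set here: HBaseConfiguration.create() loads hbase-default.xml
        // plus any hbase-site.xml found on the classpath (e.g. src/main/resources)
        Configuration conf = HBaseConfiguration.create();
        try (Connection connection = ConnectionFactory.createConnection(conf)) {
            System.out.println("Connected: " + !connection.isClosed());
        }
    }
}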
2. Create a table
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

import java.io.IOException;

public class Test01 {
    /**
     * 1. Create the myuser table
     * 2. The table has two column families, f1 and f2
     */
    public static void main(String[] args) throws IOException {
        /*
         * 1. Get a connection
         * 2. Get a client object
         * 3. Operate on the database
         * 4. Close everything
         */
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        // Create the connection object
        Connection connection = ConnectionFactory.createConnection(configuration);
        // Get the Admin object to perform DDL operations on the database, here to create a table
        Admin admin = connection.getAdmin();
        // Specify the table name
        final TableName myuser = TableName.valueOf("myuser");
        TableDescriptorBuilder tableDescriptorBuilder = TableDescriptorBuilder.newBuilder(myuser);
        // Create two column families
        ColumnFamilyDescriptor f1 = ColumnFamilyDescriptorBuilder.of("f1");
        ColumnFamilyDescriptor f2 = ColumnFamilyDescriptorBuilder.of("f2");
        // Attach the two column families to the table
        tableDescriptorBuilder.setColumnFamily(f1);
        tableDescriptorBuilder.setColumnFamily(f2);
        // Build the descriptor used to create the table
        TableDescriptor build = tableDescriptorBuilder.build();
        // Have admin issue the create-table command
        admin.createTable(build);
        admin.close();
        connection.close();
    }
}
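Running Test01 a second time fails with TableExistsException because the table is already there. If that matters for your workflow, one option is to ask Admin whether the table exists first and let try-with-resources handle the cleanup. A sketch of that variation (CreateTableIfAbsent is an illustrative class name, not part of the original example):
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

import java.io.IOException;

public class CreateTableIfAbsent {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        // try-with-resources closes admin and connection even if createTable throws
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Admin admin = connection.getAdmin()) {
            TableName tableName = TableName.valueOf("myuser");
            if (admin.tableExists(tableName)) {
                System.out.println("Table already exists, skipping creation");
                return;
            }
            TableDescriptor descriptor = TableDescriptorBuilder.newBuilder(tableName)
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f1"))
                    .setColumnFamily(ColumnFamilyDescriptorBuilder.of("f2"))
                    .build();
            admin.createTable(descriptor);
        }
    }
}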
3. Insert data
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

import java.io.IOException;

public class InsertData {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);
        // Get the table we want to work with; it is also an operation object
        Table table = connection.getTable(TableName.valueOf("myuser"));
        // Specify the rowkey; this Put writes to the row with rowkey 0001
        Put put = new Put("0001".getBytes());
        put.addColumn("f1".getBytes(), "name".getBytes(), "zhangsan".getBytes());
        put.addColumn("f2".getBytes(), "age".getBytes(), "12".getBytes());
        table.put(put);
        table.close();
        connection.close();
    }
}
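The example writes one row with a single Put. Table.put also accepts a List<Put>, which batches several rows into one call. A small sketch along the same lines (the class name and the 0002/0003 sample rows are made up for illustration):
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

public class InsertBatch {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf("myuser"))) {
            List<Put> puts = new ArrayList<>();
            // Rows 0002 and 0003 are sample data for illustration only
            Put p2 = new Put(Bytes.toBytes("0002"));
            p2.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"), Bytes.toBytes("lisi"));
            p2.addColumn(Bytes.toBytes("f2"), Bytes.toBytes("age"), Bytes.toBytes("20"));
            puts.add(p2);
            Put p3 = new Put(Bytes.toBytes("0003"));
            p3.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"), Bytes.toBytes("wangwu"));
            p3.addColumn(Bytes.toBytes("f2"), Bytes.toBytes("age"), Bytes.toBytes("30"));
            puts.add(p3);
            // A single call sends all the Puts to the server
            table.put(puts);
        }
    }
}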
4. Get query (exact lookup by rowkey)
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.text.SimpleDateFormat;
import java.util.*;

/**
 * @author huangchao
 * @date 2021/5/9
 */
public class GetData {
    /**
     * Querying with a Get
     */
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);
        Table table = connection.getTable(TableName.valueOf("myuser"));
        // Specify the rowkey through the Get object
        Get get = new Get("0001".getBytes());
        // Restrict the query to all columns of the f1 and f2 column families
        get.addFamily("f1".getBytes());
        get.addFamily("f2".getBytes());
        // To query a single column only, e.g. the name column of column family f1:
        // get.addColumn("f1".getBytes(), "name".getBytes());
        Result result = table.get(get);
        List<Cell> cells = result.listCells();
        SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
        if (cells != null) {
            for (Cell cell : cells) {
                // Column family name
                byte[] familyName = CellUtil.cloneFamily(cell);
                // Column (qualifier) name
                byte[] columnName = CellUtil.cloneQualifier(cell);
                // Rowkey
                byte[] rowkey = CellUtil.cloneRow(cell);
                // Cell value
                byte[] cellValue = CellUtil.cloneValue(cell);
                System.out.println(sdf.format(cell.getTimestamp()));
                System.out.println(Bytes.toString(familyName));
                System.out.println(Bytes.toString(columnName));
                System.out.println(Bytes.toString(rowkey));
                System.out.println(Bytes.toString(cellValue));
                System.out.println("--------------");
            }
        }
        table.close();
        connection.close();
    }
}
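When only one known column is needed, Result.getValue(family, qualifier) avoids iterating over listCells(). A brief sketch under the same assumptions as above (same cluster and table; the class name is illustrative):
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class GetSingleColumn {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf("myuser"))) {
            Get get = new Get(Bytes.toBytes("0001"));
            // Only fetch f1:name instead of the whole row
            get.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
            Result result = table.get(get);
            // getValue returns null if the row or column does not exist
            byte[] name = result.getValue(Bytes.toBytes("f1"), Bytes.toBytes("name"));
            System.out.println(name == null ? "not found" : Bytes.toString(name));
        }
    }
}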
5. Scan query (range lookup by rowkey)
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;
import java.util.List;

/**
 * @author huangchao
 * @date 2021/5/9
 */
public class ScanData {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        Connection connection = ConnectionFactory.createConnection(configuration);
        Table table = connection.getTable(TableName.valueOf("myuser"));
        Scan scan = new Scan();
        // Only scan the f1 column family
        scan.addFamily("f1".getBytes());
        // Rowkey range: the start row is inclusive, the stop row is exclusive
        scan.withStartRow("0001".getBytes());
        scan.withStopRow("0002".getBytes());
        ResultScanner scanner = table.getScanner(scan);
        for (Result result : scanner) {
            List<Cell> cells = result.listCells();
            for (Cell cell : cells) {
                // Rowkey
                byte[] rowKey = CellUtil.cloneRow(cell);
                // Column family
                byte[] family = CellUtil.cloneFamily(cell);
                // Column (qualifier)
                byte[] qualifier = CellUtil.cloneQualifier(cell);
                // Value
                byte[] value = CellUtil.cloneValue(cell);
                System.out.println(Bytes.toString(family));
                System.out.println(Bytes.toString(qualifier));
                System.out.println(Bytes.toString(rowKey));
                System.out.println(Bytes.toString(value));
                System.out.println("--------------");
            }
        }
        scanner.close();
        table.close();
        connection.close();
    }
}
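Omitting withStartRow/withStopRow scans the entire table, which can get expensive on large tables; Scan.setLimit caps how many rows come back. A short sketch of that variant (the class name and the limit of 10 are illustrative choices):
package com.hbase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

import java.io.IOException;

public class ScanLimited {
    public static void main(String[] args) throws IOException {
        Configuration configuration = HBaseConfiguration.create();
        configuration.set("hbase.zookeeper.quorum", "node01:2181,node02:2181,node03:2181");
        try (Connection connection = ConnectionFactory.createConnection(configuration);
             Table table = connection.getTable(TableName.valueOf("myuser"))) {
            Scan scan = new Scan();
            // No start/stop row: full-table scan, but capped at 10 rows
            scan.setLimit(10);
            // Only return the f1:name column
            scan.addColumn(Bytes.toBytes("f1"), Bytes.toBytes("name"));
            try (ResultScanner scanner = table.getScanner(scan)) {
                for (Result result : scanner) {
                    String rowKey = Bytes.toString(result.getRow());
                    String name = Bytes.toString(result.getValue(Bytes.toBytes("f1"), Bytes.toBytes("name")));
                    System.out.println(rowKey + " -> " + name);
                }
            }
        }
    }
}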