### Version Information
<version.hbase>2.1.0-cdh6.2.1</version.hbase>
<version.hadoop>3.0.0-cdh6.2.1</version.hadoop>
<version.hive.jdbc>2.1.1-cdh6.2.1</version.hive.jdbc>
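These are Maven version properties for the CDH 6.2.1 artifacts. A sketch of how they would typically be wired into the pom.xml follows; the Cloudera repository URL and the artifact list are the usual ones for CDH and may need adjusting for your project:

```xml
<!-- Sketch only: repository and dependencies that consume the properties above. -->
<repositories>
    <repository>
        <id>cloudera</id>
        <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
</repositories>

<dependencies>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${version.hadoop}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hbase</groupId>
        <artifactId>hbase-client</artifactId>
        <version>${version.hbase}</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hive</groupId>
        <artifactId>hive-jdbc</artifactId>
        <version>${version.hive.jdbc}</version>
    </dependency>
</dependencies>
```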
### HDFS Access
private Configuration createHDFSConfiguration(String cluster) {
    Configuration configuration = new Configuration();
    try {
        configuration.addResource(new Path(Objects.requireNonNull(PathUtil.getResourcePath("hdfsconf/" + cluster + "/core-site.xml")).toString()));
        configuration.addResource(new Path(Objects.requireNonNull(PathUtil.getResourcePath("hdfsconf/" + cluster + "/hdfs-site.xml")).toString()));
        // Prevent a conflict with the FileSystem service entry in hadoop-common after packaging
        configuration.set("fs.hdfs.impl", "org.apache.hadoop.hdfs.DistributedFileSystem");
        // Kerberos
        System.setProperty("java.security.krb5.conf", KerberosCheckUtil.getKrb5Conf());
    } catch (Exception e) {
        logger.error(cluster + " get hdfs configuration error!", e);
        throw new MyCheckException(cluster + " get hdfs configuration error!");
    }
    return configuration;
}
public HDFSDao(String cluster) {
    init();
    try {
        Configuration hdfsConf = createHDFSConfiguration(cluster);
        UserGroupInformation.setConfiguration(hdfsConf);
        if (UserGroupInformation.isLoginKeytabBased() && UserGroupInformation.getLoginUser().getUserName().equals(KerberosCheckUtil.principal)) {
            logger.info("hdfs:" + cluster + ", user [{}] is already logged in!", KerberosCheckUtil.principal);
        } else {
            UserGroupInformation.loginUserFromKeytab(KerberosCheckUtil.principal, KerberosCheckUtil.getKeyTabFile());
            logger.info("hdfs:" + cluster + ", user [{}] login succeeded!", KerberosCheckUtil.principal);
            // Renew the TGT periodically (every 5 hours) so long-running processes keep a valid ticket
            new Timer().scheduleAtFixedRate(new TimerTask() {
                @Override
                public void run() {
                    try {
                        UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                        logger.info("hdfs:" + cluster + ", user [{}] is relogged in from keytab", KerberosCheckUtil.principal);
                    } catch (IOException e) {
                        logger.error("kerberos reloginFromKeytab error", e);
                    }
                }
            }, 0, 300 * 60 * 1000);
        }
        fileSystem = FileSystem.get(hdfsConf);
    } catch (IOException e) {
        logger.error("hdfs kerberos login error, " + KerberosCheckUtil.getKeyTabFile(), e);
        throw new MyCheckException("kerberos login error, " + KerberosCheckUtil.getKeyTabFile());
    }
}
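With the keytab login in place, the FileSystem handle behaves like any other HDFS client. A minimal usage sketch follows; the class and method names here are illustrative, not part of the original HDFSDao:

```java
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical helpers built on the FileSystem created in the constructor above.
public class HdfsUsageSketch {

    private final FileSystem fileSystem;

    public HdfsUsageSketch(FileSystem fileSystem) {
        this.fileSystem = fileSystem;
    }

    // List the names of the entries directly under a directory.
    public List<String> listDir(String dir) throws IOException {
        List<String> names = new ArrayList<>();
        for (FileStatus status : fileSystem.listStatus(new Path(dir))) {
            names.add(status.getPath().getName());
        }
        return names;
    }

    // Check whether a path exists on the cluster.
    public boolean exists(String path) throws IOException {
        return fileSystem.exists(new Path(path));
    }
}
```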
### HBase Access
public HBaseDao(String cluster) {
    init();
    hbConf = createHbaseConfiguration(cluster);
    try {
        // Kerberos
        if (getIsKerberosOnByCluster(cluster)) {
            logger.debug("[KEYTAB]" + KerberosCheckUtil.getKeyTabFile());
            logger.debug("[KRB5]" + KerberosCheckUtil.getKrb5Conf());
            try {
                UserGroupInformation.setConfiguration(hbConf);
                if (UserGroupInformation.isLoginKeytabBased() && UserGroupInformation.getLoginUser().getUserName().equals(KerberosCheckUtil.principal)) {
                    logger.info("hbase:" + cluster + ", user [{}] is already logged in!", KerberosCheckUtil.principal);
                } else {
                    UserGroupInformation.loginUserFromKeytab(KerberosCheckUtil.principal, KerberosCheckUtil.getKeyTabFile());
                    logger.info("hbase:" + cluster + ", user [{}] login succeeded!", KerberosCheckUtil.principal);
                    // Renew the TGT every 5 hours
                    new Timer().scheduleAtFixedRate(new TimerTask() {
                        @Override
                        public void run() {
                            try {
                                UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                                logger.info("hbase:" + cluster + ", user [{}] is relogged in from keytab", KerberosCheckUtil.principal);
                            } catch (IOException e) {
                                logger.error("kerberos reloginFromKeytab error", e);
                            }
                        }
                    }, 0, 300 * 60 * 1000);
                }
            } catch (IOException e) {
                logger.error("kerberos login error, " + KerberosCheckUtil.getKeyTabFile(), e);
                throw new MyCheckException("kerberos login error, " + KerberosCheckUtil.getKeyTabFile());
            }
        }
        //executor = Executors.newFixedThreadPool(20);
        conn = ConnectionFactory.createConnection(hbConf);
        //conn = ConnectionFactory.createConnection(conf, executor);
    } catch (IOException e) {
        logger.error("create hbase connection error", e);
        throw new MyCheckException("failed to create hbase connection: " + e.getMessage());
    }
}
private Configuration createHbaseConfiguration(String cluster) {
    Configuration hbaseConf = HBaseConfiguration.create();
    // Adjust a few cluster-specific settings
    String hbaseIp = getHbaseIpByCluster(cluster);
    hbaseConf.set("hbase.zookeeper.quorum", hbaseIp + ":2181");
    hbaseConf.set("hbase.master", hbaseIp + ":60000");
    // Avoid hanging on slow RPCs
    hbaseConf.set("hbase.rpc.timeout", "10000"); // 10s
    hbaseConf.set("hbase.client.retries.number", "2");
    hbaseConf.set("hbase.client.operation.timeout", "10000");
    // Kerberos
    if (getIsKerberosOnByCluster(cluster)) {
        hbaseConf.set("hadoop.security.authentication", "kerberos");
        hbaseConf.set("hbase.security.authentication", "kerberos");
        //hbaseConf.set("hbase.master", getHbaseIpByCluster(cluster) + ":16000");
        hbaseConf.set("hbase.master.kerberos.principal", "hbase/_HOST@CVBG.COM");
        hbaseConf.set("hbase.regionserver.kerberos.principal", "hbase/_HOST@CVBG.COM");
        System.setProperty("javax.security.auth.useSubjectCredsOnly", "false");
        //System.setProperty("sun.security.krb5.debug", "true");
        System.setProperty("java.security.krb5.conf", KerberosCheckUtil.getKrb5Conf());
    }
    return hbaseConf;
}
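Once the Connection is created, tables are read through the standard HBase client API. A brief sketch; the table, column family, and qualifier names are placeholders for illustration:

```java
import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class HBaseUsageSketch {

    // Read a single cell; "conn" is the Connection created in HBaseDao.
    // The table name, row key, family "cf" and qualifier "col" are placeholders.
    public static String getCell(Connection conn, String tableName, String rowKey) throws IOException {
        try (Table table = conn.getTable(TableName.valueOf(tableName))) {
            Get get = new Get(Bytes.toBytes(rowKey));
            get.addColumn(Bytes.toBytes("cf"), Bytes.toBytes("col"));
            Result result = table.get(get);
            byte[] value = result.getValue(Bytes.toBytes("cf"), Bytes.toBytes("col"));
            return value == null ? null : Bytes.toString(value);
        }
    }
}
```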
### Hive Access
private Configuration createKerberosConfiguration() {
    System.setProperty("java.security.krb5.conf", KerberosCheckUtil.getKrb5Conf());
    Configuration hdfsConf = new Configuration();
    hdfsConf.setBoolean("hadoop.security.authorization", true);
    hdfsConf.set("hadoop.security.authentication", "kerberos");
    return hdfsConf;
}
public HiveDao(String cluster) {
    init();
    // Kerberos authentication
    Configuration conf = createKerberosConfiguration();
    try {
        UserGroupInformation.setConfiguration(conf);
        if (UserGroupInformation.isLoginKeytabBased() && UserGroupInformation.getLoginUser().getUserName().equals(KerberosCheckUtil.principal)) {
            logger.info("hive:" + cluster + ", user [{}] is already logged in!", KerberosCheckUtil.principal);
        } else {
            UserGroupInformation.loginUserFromKeytab(KerberosCheckUtil.principal, KerberosCheckUtil.getKeyTabFile());
            logger.info("hive:" + cluster + ", user [{}] login succeeded!", KerberosCheckUtil.principal);
            // Renew the TGT every 5 hours
            new Timer().scheduleAtFixedRate(new TimerTask() {
                @Override
                public void run() {
                    try {
                        UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                        logger.info("hive:" + cluster + ", user [{}] is relogged in from keytab", KerberosCheckUtil.principal);
                    } catch (IOException e) {
                        logger.error("kerberos reloginFromKeytab error", e);
                    }
                }
            }, 0, 300 * 60 * 1000);
        }
    } catch (Exception e) {
        logger.error("kerberos login error, " + KerberosCheckUtil.getKeyTabFile(), e);
        throw new MyCheckException("kerberos login error, " + KerberosCheckUtil.getKeyTabFile());
    }
    // Set up the connection pool
    dataSource = createConnectionPool(cluster);
}
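createConnectionPool is not shown above; with Kerberos enabled the essential point is that the Hive JDBC URL carries the HiveServer2 service principal, and the connection is opened after the keytab login has run. A minimal sketch using plain DriverManager rather than a real pool; the host, port, database, and principal are placeholders:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class HiveJdbcSketch {

    public static void main(String[] args) throws Exception {
        // Assumes UserGroupInformation.loginUserFromKeytab has already run,
        // as in the HiveDao constructor above.
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Placeholder host/port/database; ";principal=" must match the
        // HiveServer2 service principal of the target cluster.
        String url = "jdbc:hive2://hiveserver2-host:10000/default;principal=hive/_HOST@CVBG.COM";

        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show databases")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}
```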
### Impala Access
public ImpalaDbDao(String cluster) {
    init();
    // Kerberos authentication
    Configuration conf = createKerberosConfiguration();
    try {
        UserGroupInformation.setConfiguration(conf);
        if (UserGroupInformation.isLoginKeytabBased() && UserGroupInformation.getLoginUser().getUserName().equals(KerberosCheckUtil.principal)) {
            logger.info("impala:" + cluster + ", user [{}] is already logged in!", KerberosCheckUtil.principal);
        } else {
            UserGroupInformation.loginUserFromKeytab(KerberosCheckUtil.principal, KerberosCheckUtil.getKeyTabFile());
            logger.info("impala:" + cluster + ", user [{}] login succeeded!", KerberosCheckUtil.principal);
            // Renew the TGT every 5 hours
            new Timer().scheduleAtFixedRate(new TimerTask() {
                @Override
                public void run() {
                    try {
                        UserGroupInformation.getLoginUser().checkTGTAndReloginFromKeytab();
                        logger.info("impala:" + cluster + ", user [{}] is relogged in from keytab", KerberosCheckUtil.principal);
                    } catch (IOException e) {
                        logger.error("kerberos reloginFromKeytab error", e);
                    }
                }
            }, 0, 300 * 60 * 1000);
        }
    } catch (Exception e) {
        logger.error("kerberos login error, " + KerberosCheckUtil.getKeyTabFile(), e);
        throw new MyCheckException("kerberos login error, " + KerberosCheckUtil.getKeyTabFile());
    }
    // Set up the connection pool
    dataSource = createConnectionPool(cluster);
}
private Configuration createKerberosConfiguration() {
    System.setProperty("java.security.krb5.conf", KerberosCheckUtil.getKrb5Conf());
    Configuration hdfsConf = new Configuration();
    hdfsConf.setBoolean("hadoop.security.authorization", true);
    hdfsConf.set("hadoop.security.authentication", "kerberos");
    return hdfsConf;
}
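For Impala, one common option on CDH is to reuse the Hive JDBC driver (matching the hive-jdbc dependency listed above) against the impalad HiveServer2-compatible port, typically 21050, with the Impala service principal in the URL. Again a hedged sketch with placeholder host and principal; a Cloudera Impala JDBC driver with its own URL scheme would work as well:

```java
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ImpalaJdbcSketch {

    public static void main(String[] args) throws Exception {
        // Assumes the keytab login from the ImpalaDbDao constructor has already run.
        Class.forName("org.apache.hive.jdbc.HiveDriver");

        // Placeholder host and principal; 21050 is Impala's HiveServer2-compatible port.
        String url = "jdbc:hive2://impalad-host:21050/default;principal=impala/impalad-host@CVBG.COM";

        try (Connection conn = DriverManager.getConnection(url);
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("show tables")) {
            while (rs.next()) {
                System.out.println(rs.getString(1));
            }
        }
    }
}
```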
Note: KerberosCheckUtil.getKeyTabFile() returns the path to the keytab file; KerberosCheckUtil.getKrb5Conf() returns the path to the krb5.conf Kerberos configuration file.
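The utility itself is not shown in this post; a minimal sketch consistent with how it is called above might look like this, where the principal value and file locations are assumptions, not the original configuration:

```java
// Hypothetical KerberosCheckUtil matching the calls used above; the principal
// and file paths are made-up examples.
public class KerberosCheckUtil {

    // Principal used for all keytab logins.
    public static final String principal = "myuser@CVBG.COM";

    // Absolute path to the keytab file for that principal.
    public static String getKeyTabFile() {
        return "/etc/security/keytabs/myuser.keytab";
    }

    // Absolute path to the krb5.conf describing the KDC and realm.
    public static String getKrb5Conf() {
        return "/etc/krb5.conf";
    }
}
```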