HW Big Data

Security authentication

HW HBase security authentication (performed when creating the HBaseHolder)

// Resolve the Kerberos principal, keytab and krb5.conf locations from the feature context
// (requires org.apache.hadoop.conf.Configuration, java.io.IOException and the cluster's LoginUtil helper).
String userPrincipal = FeatureContext.INSTANCE.getOrElse(Constants.SPARK.USER, "username");
String userKeytabPath = FeatureContext.INSTANCE.getOrElse(Constants.SPARK.USER_KEYTAB_PATH, "/opt/FIclient/user.keytab");
String krb5ConfPath = FeatureContext.INSTANCE.getOrElse(Constants.SPARK.KRB5_CONF_PATH, "/opt/FIclient/KrbClient/kerberos/var/krb5kdc/krb5.conf");

// Load the cluster client configuration files from the classpath and relax the HBase timeouts.
Configuration conf = new Configuration();
conf.addResource(FeatureContext.class.getClassLoader().getResourceAsStream("core-site.xml"));
conf.addResource(FeatureContext.class.getClassLoader().getResourceAsStream("hdfs-site.xml"));
conf.addResource(FeatureContext.class.getClassLoader().getResourceAsStream("hbase-site.xml"));
conf.set("hbase.rpc.timeout", "600000");
conf.set("hbase.client.scanner.timeout.period", "600000");

// Perform the Kerberos login before any HBase access.
try {
    LoginUtil.login(userPrincipal, userKeytabPath, krb5ConfPath, conf);
} catch (IOException e) {
    LOGGER.error("Error while login with hw security.", e);
}
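
Once the login succeeds, the same authenticated Configuration is what the HBaseHolder should wrap when it opens the HBase connection. A minimal sketch of that step (written in Scala to match the Spark section below, using the standard HBase client API; the table name "feature" is only a placeholder):

import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hbase.TableName
import org.apache.hadoop.hbase.client.{Connection, ConnectionFactory, Table}

// Build an HBase connection from the Kerberos-authenticated Configuration.
def createHBaseConnection(conf: Configuration): Connection =
  ConnectionFactory.createConnection(conf)

// "feature" is a placeholder table name; substitute the real table.
def getTable(connection: Connection): Table =
  connection.getTable(TableName.valueOf("feature"))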

 

Spark security authentication (performed when creating the SparkContext)
def getSparkContext(): SparkContext = {

  // HW security authentication: Kerberos login before the SparkContext is created.
  val userPrincipal = FeatureContext.INSTANCE.getOrElse(Constants.SPARK.USER, "username")
  val userKeytabPath = "/opt/FIclient/user.keytab"
  val krb5ConfPath = "/opt/FIclient/KrbClient/kerberos/var/krb5kdc/krb5.conf"
  val ZKServerPrincipal = "zookeeper/hadoop.hadoop.com"

  val ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME: String = "Client"
  val ZOOKEEPER_SERVER_PRINCIPAL_KEY: String = "zookeeper.server.principal"
  val hadoopConf: Configuration = new Configuration()
  LoginUtil.setJaasConf(ZOOKEEPER_DEFAULT_LOGIN_CONTEXT_NAME, userPrincipal, userKeytabPath)
  LoginUtil.setZookeeperServerPrincipal(ZOOKEEPER_SERVER_PRINCIPAL_KEY, ZKServerPrincipal)
  LoginUtil.login(userPrincipal, userKeytabPath, krb5ConfPath, hadoopConf)

  // Kryo serialization settings picked up when the SparkConf is built.
  System.setProperty("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
  System.setProperty("spark.kryo.registrator", "com.hikvision.vsp.feature.context.MyRegistrator")

  // Create the context, set the checkpoint directory if it is not set yet, and register the listener.
  val sparkContext: SparkContext = new SparkContext(SparkContextHolder.getConf)
  if (sparkContext.getCheckpointDir.isEmpty) {
    val checkpointDir: String = FeatureContext.INSTANCE.getStringValue(Constants.SPARK.SPARK_CHECKPOINT_DIR)
    sparkContext.setCheckpointDir(checkpointDir)
  }
  sparkContext.addSparkListener(new MyListener)
  sparkContext
}
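
SparkContextHolder.getConf is not shown in these notes; a hypothetical sketch of what such a helper might return, assuming it mirrors the Kryo properties set via System.setProperty above (the app name and the object's structure are assumptions):

import org.apache.spark.SparkConf

// Hypothetical SparkContextHolder: the real helper is not shown in the notes.
object SparkContextHolder {
  def getConf: SparkConf =
    new SparkConf()
      .setAppName("feature-extraction") // placeholder app name
      .set("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
      .set("spark.kryo.registrator", "com.hikvision.vsp.feature.context.MyRegistrator")
}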

 

 

HW management page: http://IP:8080/web
https://IP:28443/web/

Operating system: SUSE 11.3
Node configuration: 1 SAS disk + 7 SATA disks; the SATA disks occupy slots 0-6 and the SAS disk sits in slot 7

Big data configuration path:
/opt/huawei/Bigdata/FusionInsight_V100R002C60SPC200/etc/2_31_Broker

cd /ficlient
source bigdata_env
kinit kxtest
passwd: xxxxx@123
or
kinit admin
passwd: xxxxx@123

solrctl confset --create mytest /home
solrctl collection --create collectionTest -c mytest -s 1 -r 1 -m 1

solrctl collection --create collectionTest -c myconf -s 1 -r 1 -m 1 -n 10.33.37.143:21100
http://support.huawei.com/enterprise/productNewOffering?idAbsPath=7919749|7919788|19942925|21110924&pid=21110924&productname=FusionInsight%20HD
http://support.huawei.com/enterprise/docinforeader.action?contentId=DOC1000104109&idPath=7919749|7919788|19942925|21110924

Configuration:
export HADOOP_CONF_DIR=/usr/local/src/zhanglei/spark_client/Spark/spark/conf
spark.hbase.obtainToken.enabled=true
spark.inputFormat.cache.enabled=false
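
The two Spark properties above can also be set programmatically when building the SparkConf instead of through spark-defaults.conf; a minimal sketch (the property names are taken from the notes, everything else is an assumption):

import org.apache.spark.SparkConf

// Programmatic equivalent of the spark-defaults.conf entries above.
val sparkConf = new SparkConf()
  .set("spark.hbase.obtainToken.enabled", "true")   // have Spark obtain HBase tokens on submit
  .set("spark.inputFormat.cache.enabled", "false")  // disable InputFormat caching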

posted on 2017-06-14 14:18 by 一笑之奈何