Spark2.4.0 访问hive(集成kerberos)(scala)
Spark2.4.0(scala)
1.kerberos 认证(需要将相应用户的 keytab 文件引入本地)
代码如下:
/**
 * Performs a Kerberos login from a local keytab file so that subsequent
 * Hadoop/Hive/Spark calls in this JVM run as the authenticated principal.
 */
class KerberosAuth1 {

  /**
   * Authenticates against the KDC described by the krb5.conf file.
   *
   * @param debug when true, enables verbose Kerberos tracing via the
   *              `sun.security.krb5.debug` system property
   */
  def kerberosAuth(debug: Boolean): Unit = {
    import scala.util.control.NonFatal
    try {
      // Point the JVM at the Kerberos realm configuration (local Windows path).
      System.setProperty("java.security.krb5.conf", "D:\\cdh\\spark\\src\\main\\kerberos\\krb5.conf")
      // System.setProperty("java.security.krb5.conf", "/lvm/data3/zhc/krb5.conf")
      // Let the GSSAPI layer acquire credentials itself instead of requiring a JAAS Subject.
      System.setProperty("javax.security.auth.useSubjectCredsOnly", "false")
      if (debug) System.setProperty("sun.security.krb5.debug", "true")
      UserGroupInformation.loginUserFromKeytab("gree2@GREE.IO", "D:\\cdh\\spark\\src\\main\\kerberos\\gree2.keytab")
      // UserGroupInformation.loginUserFromKeytab("gree1@GREE.IO", "/lvm/data3/zhc/gree1.keytab")
      println(UserGroupInformation.getCurrentUser)
    } catch {
      // NonFatal lets fatal JVM errors (OutOfMemoryError, etc.) propagate
      // instead of being swallowed by a blanket `case e: Exception`.
      case NonFatal(e) => e.printStackTrace()
    }
  }
}
2.maven 依赖
<dependencies>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-client</artifactId>
<version>3.0.0-cdh6.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-common</artifactId>
<version>3.0.0-cdh6.3.0</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-hive_2.11</artifactId>
<version>2.4.0-cdh6.3.0</version>
<scope>compile</scope>
</dependency>
</dependencies>
3.代码访问
main 方法示例如下:
object SparkTest {

  /**
   * Entry point: performs the Kerberos login first, then queries a
   * Kerberized Hive table through a Hive-enabled SparkSession.
   */
  def main(args: Array[String]): Unit = {
    // Authenticate before building the session so Hive metastore / HDFS
    // access below is authorized as the keytab principal.
    new KerberosAuth1().kerberosAuth(debug = false)
    val sparkSession = SparkSession.builder()
      .appName("spark2Test")
      // .config("spark.sql.warehouse.dir", "/user/hive/warehouse")
      .master("local[*]")
      .enableHiveSupport()
      .getOrCreate()
    try {
      sparkSession.sql("select name from test.test").show()
      // sparkSession.sql("show tables").show()
      // sparkSession.sql("select name from default.linetest ").show()
    } finally {
      // Release the SparkContext and its resources even if the query fails;
      // the original leaked the session on both success and failure.
      sparkSession.stop()
    }
  }
}