org.apache.spark.unsafe.types.UTF8String.toInt()I

这是我这边导入的依赖~
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_2.11</artifactId>
<version>2.2.1</version>
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_2.11</artifactId>
<version>2.1.1</version>
</dependency>
如下是一个小 demo,运行时抛出异常:Exception in thread "main" java.lang.NoSuchMethodError: org.apache.spark.unsafe.types.UTF8String.toInt()I
def main(args: Array[String]): Unit = {

  // Build a local Spark driver wired to the remote Hive metastore.
  // Settings are kept as data so the builder chain stays short.
  val hiveSettings = Seq(
    "hive.metastore.uris"      -> "thrift://test-dev-cdh-1:9083",
    "spark.sql.warehouse.dir"  -> "/user/hive/warehouse",
    "metastore.catalog.default" -> "hive"
  )
  val configured = hiveSettings.foldLeft(
    SparkSession.builder().appName("dd").master("local[*]")
  ) { case (builder, (key, value)) => builder.config(key, value) }
  val session = configured.enableHiveSupport().getOrCreate()

  // Helper: run a query and print its result table.
  def show(query: String): Unit = session.sql(query).show()

  show("show databases")
  session.sql("use dev_analysis") // switch database; no output expected
  show("show tables")
  show("select day from tsgl_terminal_exhaust4 limit 10")
}

解决方案:去掉显式声明的 spark-core 依赖(spark-sql 会以与自身一致的版本传递引入 spark-core),或者将两者版本号统一(例如都改为 2.2.1)。根本原因是 spark-core 2.2.1 与 spark-sql 2.1.1 版本不一致,classpath 上的 UTF8String 类与 catalyst 期望的方法签名不匹配,从而触发 NoSuchMethodError。
posted @ 2020-11-05 14:31  帅东我不帅  阅读(513)  评论(0编辑  收藏  举报