Oracle ClassNotFoundException 问题排查以及环境配置过程

目的是实现oracle连接查询

  1. 引入maven相关依赖
  2. 编写spark相关代码
  3. 遇到的问题

引入相关依赖,完整的pom文件
以下为 CDP 环境对应的配置
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <groupId>com.bigdata</groupId>
        <artifactId>bigtest</artifactId>
        <version>1.0.0</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>bigtestcdp</artifactId>

    <!-- CDP component versions. scala.binary.version is referenced by every
         Scala-suffixed artifactId below so a Scala upgrade is a one-line change. -->
    <properties>
        <spark.version>2.4.0.7.1.5.0-257</spark.version>
        <scala.binary.version>2.11</scala.binary.version>
        <scala.version>2.11.12</scala.version>
        <hadoop.version>3.1.1.7.1.5.0-257</hadoop.version>
        <hbase.version>2.2.3.7.1.5.0-257</hbase.version>
        <kafka.version>2.4.1.7.1.5.0-257</kafka.version>
        <kudu.version>1.15.0.7.1.7.0-551</kudu.version>
    </properties>

<!-- cdh   <properties>-->
<!--        <spark.version>2.3.2</spark.version>-->
<!--        <scala.binary.version>2.11</scala.binary.version>-->
<!--        <scala.version>2.11.12</scala.version>-->
<!--        <hadoop.version>3.0.0-cdh6.2.1</hadoop.version>-->
<!--        <hbase.version>2.1.0-cdh6.2.1</hbase.version>-->
<!--        <kafka.version>2.1.0-cdh6.2.1</kafka.version>-->
<!--    </properties>-->

    <!-- Cloudera-built artifacts (versions with the 7.1.x suffix) are only
         published in the Cloudera repository, not Maven Central. -->
    <repositories>
        <repository>
            <id>cloudera</id>
            <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url>
        </repository>
    </repositories>

    <dependencies>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-mapreduce-client-core</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-hdfs</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-common -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-yarn-common</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-yarn-client -->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-yarn-client</artifactId>
            <version>${hadoop.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-mapreduce</artifactId>
            <version>${hbase.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>${hbase.version}</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-common -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-common</artifactId>
            <version>${hbase.version}</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/org.apache.hbase/hbase-server -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>${hbase.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-repl_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_${scala.binary.version}</artifactId>
            <version>${spark.version}</version>
        </dependency>

        <!-- Use the shared scala.binary.version property (2.11) instead of a
             hard-coded _2.11 suffix, consistent with the Spark artifacts above. -->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka_${scala.binary.version}</artifactId>
            <version>${kafka.version}</version>
        </dependency>

        <!-- kafka-clients was declared twice in the original file; the
             duplicate declaration has been removed. -->
        <dependency>
            <groupId>org.apache.kafka</groupId>
            <artifactId>kafka-clients</artifactId>
            <version>${kafka.version}</version>
        </dependency>

        <dependency>
            <groupId>org.elasticsearch</groupId>
            <artifactId>elasticsearch-spark-20_${scala.binary.version}</artifactId>
            <version>6.7.1</version>
        </dependency>

        <!-- https://mvnrepository.com/artifact/com.101tec/zkclient -->
        <dependency>
            <groupId>com.101tec</groupId>
            <artifactId>zkclient</artifactId>
            <version>0.10</version>
        </dependency>

        <dependency>
            <groupId>org.apache.kudu</groupId>
            <artifactId>kudu-spark2_${scala.binary.version}</artifactId>
            <version>${kudu.version}</version>
        </dependency>

    </dependencies>


</project>
源码及问题
  1. 源码
/**
 * Smoke-tests a Spark JDBC read against an Oracle instance: opens a local
 * SparkSession, loads the {@code user.tm_order} table via the JDBC data
 * source, and prints the inferred schema.
 *
 * <p>Requires the Oracle JDBC driver ({@code oracle.jdbc.OracleDriver}) on the
 * classpath; without it, {@code load()} fails with
 * {@code java.lang.ClassNotFoundException: oracle.jdbc.OracleDriver}.
 */
public static void testMysql2(){
        SparkSession spark = SparkSession.builder().master("local").appName("test").getOrCreate();
        try {
            // NOTE(review): connection details and credentials are hard-coded
            // demo placeholders — externalize them before real use.
            Map<String, String> options = new HashMap<>();
            options.put("url", "jdbc:oracle:thin:@//IP:1521/orcl");
            options.put("user", "root");
            options.put("password", "123");
            options.put("dbtable", "user.tm_order");
            options.put("driver", "oracle.jdbc.OracleDriver");
            Dataset<Row> d = spark.read().format("jdbc").options(options).load();
            System.out.println("输出schema");
            d.printSchema();
        } finally {
            // Release the session even when load() throws (e.g. missing driver
            // class) — the original code leaked the SparkSession on failure.
            spark.stop();
        }
    }

  1. 暴露的问题

java.lang.ClassNotFoundException: oracle.jdbc.OracleDriver

  1. 解决办法

下载 Oracle JDBC 驱动包(文中使用 ojdbc14,较新环境建议使用 ojdbc8)并引入 IDEA 项目的依赖中,具体引入步骤此处不再赘述。

posted @ 2022-04-21 20:09  堕落先锋  阅读(88)  评论(0编辑  收藏  举报