一 parent下pom.xml新增
1.私服地址
<distributionManagement>
<repository>
<id>bilibili-nexus-releases</id>
<name>Nexus Release Repository</name>
<url>http://xxx/content/repositories/releases/</url>
</repository>
<snapshotRepository>
<id>bilibili-nexus-snapshots</id>
<name>Nexus Snapshot Repository</name>
<url>http://xxx/content/repositories/snapshots/</url>
</snapshotRepository>
</distributionManagement>
2. scala语法检查,若不修改编译不能通过
<failOnViolation>true</failOnViolation>
修改为
<failOnViolation>false</failOnViolation>
3.maven插件版本修改成与本地一致(这个不一定要修改)
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>3.2.0</version>
</plugin>
4.dependencies依赖需加上
<dependency>
<groupId>com.cenqua.clover</groupId>
<artifactId>clover</artifactId>
<!-- Use the version needed by maven-clover-plugin -->
<version>3.0.2</version>
</dependency>
5.maven-surefire-plugin插件里面添加这个
<artifactId>maven-surefire-plugin</artifactId>
<configuration>
<testFailureIgnore>true</testFailureIgnore>
</configuration>
|
二 修改各依赖的pom.xml
spark-avro_2.11
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
|
spark-catalyst_2.11
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
|
spark-graphx_2.11
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
|
spark-hive_2.11
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-sql_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-catalyst_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
|
spark-mllib_2.11
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-catalyst_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-sql_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
|
spark-streaming-kafka-0-8_2.11
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
|
spark-repl_2.11
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
|
spark-sql-kafka-0-10_2.11
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-catalyst_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-sql_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
|
spark-sql_2.11
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-catalyst_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
|
spark-streaming-kafka-0-10_2.11
<dependency>
<groupId>org.apache.spark</groupId>
<artifactId>spark-core_${scala.binary.version}</artifactId>
<version>${project.version}</version>
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
</dependency>
|
spark-streaming_2.11
<!--<dependency>-->
<!--<groupId>org.apache.spark</groupId>-->
<!--<artifactId>spark-core_${scala.binary.version}</artifactId>-->
<!--<version>${project.version}</version>-->
<!--<type>test-jar</type>-->
<!--<scope>test</scope>-->
<!--</dependency>-->
|
三 编译命令
mvn clean deploy -Dmaven.test.skip=true -U
|