rapids-4-spark-private-parent
<dependency>
    <groupId>com.nvidia</groupId>
    <artifactId>rapids-4-spark-private-parent</artifactId>
    <version>23.10.0</version>
</dependency>
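Because the artifact is published with pom packaging, it is normally consumed as a parent POM rather than as a jar dependency. A minimal sketch of a child module POM referencing it follows; the child artifactId shown is hypothetical and chosen only for illustration, since the parent's <modules> sections only name a core module:

<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <!-- Inherit properties, dependencyManagement, and pluginManagement from this parent -->
    <parent>
        <groupId>com.nvidia</groupId>
        <artifactId>rapids-4-spark-private-parent</artifactId>
        <version>23.10.0</version>
    </parent>

    <!-- Hypothetical child coordinates, for illustration only -->
    <artifactId>rapids-4-spark-private-core</artifactId>
</project>

The full parent POM as published follows.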
<?xml version="1.0" encoding="UTF-8"?> <!-- SPDX-FileCopyrightText: Copyright (c) 2023 NVIDIA CORPORATION & AFFILIATES. All rights reserved. SPDX-License-Identifier: LicenseRef-NvidiaProprietary NVIDIA CORPORATION, its affiliates and licensors retain all intellectual property and proprietary rights in and to this material, related documentation and any modifications thereto. Any use, reproduction, disclosure or distribution of this material and related documentation without an express license agreement from NVIDIA CORPORATION or its affiliates is strictly prohibited. --> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd"> <modelVersion>4.0.0</modelVersion> <groupId>com.nvidia</groupId> <artifactId>rapids-4-spark-private-parent</artifactId> <version>23.10.0</version> <packaging>pom</packaging> <name>RAPIDS Accelerator for Apache Spark Private Plugin</name> <description>The RAPIDS Private plugin for Apache Spark</description> <url>http://nvidia.github.io/</url> <licenses> <license> <name>NVIDIA SPARK-RAPIDS PRIVATE LICENSE</name> </license> </licenses> <scm> <connection>scm:git:http://nvidia.github.io/</connection> <developerConnection>scm:git:git@nvidia.github.io/</developerConnection> <tag>HEAD</tag> <url>http://nvidia.github.io/</url> </scm> <developers> <developer> <id>tgravescs</id> <name>Thomas Graves</name> <email>tgraves@nvidia.com</email> <roles> <role>Committer</role> </roles> <timezone>-6</timezone> </developer> <developer> <id>andygrove</id> <name>Andy Grove</name> <email>andygrove@nvidia.com</email> <roles> <role>Committer</role> </roles> <timezone>-6</timezone> </developer> </developers> <properties> <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding> <project.reporting.sourceEncoding>UTF-8</project.reporting.sourceEncoding> <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding> <spark.version>${spark311.version}</spark.version> <scala.binary.version>2.12</scala.binary.version> <scalatest-maven-plugin.version>2.0.2</scalatest-maven-plugin.version> <maven.scalastyle.skip>false</maven.scalastyle.skip> <maven.compiler.source>1.8</maven.compiler.source> <maven.compiler.target>1.8</maven.compiler.target> <maven.jar.plugin.version>3.2.0</maven.jar.plugin.version> <scala.version>2.12.15</scala.version> <scala.javac.args>-Xlint:all,-serial,-path,-try</scala.javac.args> <java.major.version>8</java.major.version> <spark311.version>3.1.1</spark311.version> <spark312.version>3.1.2</spark312.version> <spark312db.version>3.1.2-databricks</spark312db.version> <spark313.version>3.1.3</spark313.version> <spark314.version>3.1.4-SNAPSHOT</spark314.version> <spark320.version>3.2.0</spark320.version> <spark321.version>3.2.1</spark321.version> <spark321cdh.version>3.2.1.3.2.7171000.0-3</spark321cdh.version> <spark321db.version>3.2.1-databricks</spark321db.version> <spark322.version>3.2.2</spark322.version> <spark323.version>3.2.3</spark323.version> <spark324.version>3.2.4</spark324.version> <spark330.version>3.3.0</spark330.version> <spark331.version>3.3.1</spark331.version> <spark332.version>3.3.2</spark332.version> <spark333.version>3.3.3</spark333.version> <spark340.version>3.4.0</spark340.version> <spark341.version>3.4.1</spark341.version> <spark330cdh.version>3.3.0.3.3.7180.0-274</spark330cdh.version> <spark330db.version>3.3.0-databricks</spark330db.version> 
<spark332db.version>3.3.2-databricks</spark332db.version> <spark350.version>3.5.0</spark350.version> <spark.shim.dest>${project.basedir}/target/${spark.version.classifier}/generated/src</spark.shim.dest> <spark.version.classifier>spark${buildver}</spark.version.classifier> <target.classifier>${spark.version.classifier}</target.classifier> <arrow.cdh.version>2.0.0</arrow.cdh.version> <mockito.version>3.6.0</mockito.version> <spark.rapids.jni.version>23.10.0</spark.rapids.jni.version> <spark.rapids.jni.classifier>cuda11</spark.rapids.jni.classifier> <noSnapshot.buildvers> 311, 312, 313, 320, 321, 321cdh, 322, 323, 324, 330, 331, 332, 330cdh, 333, 340, 341, 350 </noSnapshot.buildvers> <snapshot.buildvers> </snapshot.buildvers> <databricks.buildvers> 312db, 321db, 330db, 332db, </databricks.buildvers> <all.buildvers> ${noSnapshot.buildvers}, ${snapshot.buildvers}, ${databricks.buildvers}, </all.buildvers> <shimplify.shims>${all.buildvers}</shimplify.shims> </properties> <dependencyManagement> <dependencies> <dependency> <groupId>com.nvidia</groupId> <artifactId>spark-rapids-jni</artifactId> <version>${spark.rapids.jni.version}</version> <classifier>${spark.rapids.jni.classifier}</classifier> <scope>provided</scope> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-sql_${scala.binary.version}</artifactId> <version>${spark.version}</version> <scope>provided</scope> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-sql_${scala.binary.version}</artifactId> <version>${spark.version}</version> <classifier>tests</classifier> <scope>test</scope> </dependency> <dependency> <groupId>org.scalatest</groupId> <artifactId>scalatest_${scala.binary.version}</artifactId> <version>3.2.16</version> <scope>test</scope> </dependency> <dependency> <groupId>org.scalatestplus</groupId> <artifactId>mockito-4-11_${scala.binary.version}</artifactId> <version>3.2.16.0</version> <scope>test</scope> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-core</artifactId> <version>${mockito.version}</version> <scope>test</scope> </dependency> </dependencies> </dependencyManagement> <profiles> <profile> <id>noSnapshotsWithDatabricks</id> <properties> <included_buildvers> ${noSnapshot.buildvers}, ${databricks.buildvers} </included_buildvers> </properties> </profile> <profile> <id>noSnapshots</id> <properties> <included_buildvers> ${noSnapshot.buildvers} </included_buildvers> </properties> </profile> <profile> <id>snapshots</id> <properties> <included_buildvers> ${noSnapshot.buildvers}, ${snapshot.buildvers} </included_buildvers> </properties> </profile> <profile> <id>snapshotOnly</id> <properties> <included_buildvers> ${snapshot.buildvers} </included_buildvers> </properties> </profile> <profile> <id>snapshotsWithDatabricks</id> <properties> <included_buildvers> ${noSnapshot.buildvers}, ${snapshot.buildvers}, ${databricks.buildvers} </included_buildvers> </properties> </profile> <profile> <id>release311</id> <activation> <activeByDefault>true</activeByDefault> <property> <name>buildver</name> <value>311</value> </property> </activation> <properties> <buildver>311</buildver> <spark.version>${spark311.version}</spark.version> <spark.test.version>${spark311.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=312db --> <id>release312db</id> <activation> <property> <name>buildver</name> <value>312db</value> </property> 
</activation> <properties> <buildver>312db</buildver> <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 --> <scala.plugin.version>3.4.4</scala.plugin.version> <spark.version.classifier>spark312db</spark.version.classifier> <!-- Note that we are using the Spark version for all of the Databricks dependencies as well. The jenkins/databricks/build.sh script handles installing the jars as maven artifacts. This is to make it easier and not have to change version numbers for each individual dependency and deal with differences between Databricks versions --> <spark.version>${spark312db.version}</spark.version> <spark.test.version>${spark312db.version}</spark.test.version> <hadoop.client.version>2.7.4</hadoop.client.version> <rat.consoleOutput>true</rat.consoleOutput> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release312</id> <activation> <property> <name>buildver</name> <value>312</value> </property> </activation> <properties> <buildver>312</buildver> <spark.version>${spark312.version}</spark.version> <spark.test.version>${spark312.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release313</id> <activation> <property> <name>buildver</name> <value>313</value> </property> </activation> <properties> <buildver>313</buildver> <spark.version>${spark313.version}</spark.version> <spark.test.version>${spark313.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release314</id> <activation> <property> <name>buildver</name> <value>314</value> </property> </activation> <properties> <buildver>314</buildver> <spark.version>${spark314.version}</spark.version> <spark.test.version>${spark314.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release320</id> <activation> <property> <name>buildver</name> <value>320</value> </property> </activation> <properties> <buildver>320</buildver> <spark.version>${spark320.version}</spark.version> <spark.test.version>${spark320.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release321</id> <activation> <property> <name>buildver</name> <value>321</value> </property> </activation> <properties> <buildver>321</buildver> <spark.version>${spark321.version}</spark.version> <spark.test.version>${spark321.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release321cdh</id> <activation> <property> <name>buildver</name> <value>321cdh</value> </property> </activation> <properties> <buildver>321cdh</buildver> <spark.version>${spark321cdh.version}</spark.version> <spark.test.version>${spark321cdh.version}</spark.test.version> </properties> <repositories> <repository> <id>cloudera-repo</id> <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url> </repository> </repositories> <modules> <module>core</module> </modules> </profile> <profile> <id>release322</id> <activation> <property> <name>buildver</name> <value>322</value> </property> </activation> <properties> <buildver>322</buildver> <spark.version>${spark322.version}</spark.version> <spark.test.version>${spark322.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release323</id> <activation> <property> <name>buildver</name> <value>323</value> </property> </activation> <properties> 
<buildver>323</buildver> <spark.version>${spark323.version}</spark.version> <spark.test.version>${spark323.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release324</id> <activation> <property> <name>buildver</name> <value>324</value> </property> </activation> <properties> <buildver>324</buildver> <spark.version>${spark324.version}</spark.version> <spark.test.version>${spark324.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=321db --> <id>release321db</id> <activation> <property> <name>buildver</name> <value>321db</value> </property> </activation> <properties> <buildver>321db</buildver> <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 --> <scala.plugin.version>3.4.4</scala.plugin.version> <spark.version.classifier>spark321db</spark.version.classifier> <!-- Note that we are using the Spark version for all of the Databricks dependencies as well. The jenkins/databricks/build.sh script handles installing the jars as maven artifacts. This is to make it easier and not have to change version numbers for each individual dependency and deal with differences between Databricks versions --> <spark.version>${spark321db.version}</spark.version> <spark.test.version>${spark321db.version}</spark.test.version> <hadoop.client.version>3.3.1</hadoop.client.version> <rat.consoleOutput>true</rat.consoleOutput> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release330</id> <activation> <property> <name>buildver</name> <value>330</value> </property> </activation> <properties> <buildver>330</buildver> <spark.version>${spark330.version}</spark.version> <spark.test.version>${spark330.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release331</id> <activation> <property> <name>buildver</name> <value>331</value> </property> </activation> <properties> <buildver>331</buildver> <spark.version>${spark331.version}</spark.version> <spark.test.version>${spark331.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release332</id> <activation> <property> <name>buildver</name> <value>332</value> </property> </activation> <properties> <buildver>332</buildver> <spark.version>${spark332.version}</spark.version> <spark.test.version>${spark332.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release333</id> <activation> <property> <name>buildver</name> <value>333</value> </property> </activation> <properties> <buildver>333</buildver> <spark.version>${spark333.version}</spark.version> <spark.test.version>${spark333.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release340</id> <activation> <property> <name>buildver</name> <value>340</value> </property> </activation> <properties> <buildver>340</buildver> <spark.version>${spark340.version}</spark.version> <spark.test.version>${spark340.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release341</id> <activation> <property> <name>buildver</name> <value>341</value> </property> </activation> <properties> <buildver>341</buildver> <spark.version>${spark341.version}</spark.version> 
<spark.test.version>${spark341.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release330cdh</id> <activation> <property> <name>buildver</name> <value>330cdh</value> </property> </activation> <properties> <buildver>330cdh</buildver> <spark.version>${spark330cdh.version}</spark.version> <spark.test.version>${spark330cdh.version}</spark.test.version> </properties> <repositories> <repository> <id>cloudera-repo</id> <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url> <releases> <enabled>true</enabled> </releases> <snapshots> <enabled>true</enabled> </snapshots> </repository> </repositories> <modules> <module>core</module> </modules> </profile> <profile> <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=330db --> <!-- Note that 330db backports many features from Spark3.4.0 --> <id>release330db</id> <activation> <property> <name>buildver</name> <value>330db</value> </property> </activation> <properties> <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 --> <scala.plugin.version>3.4.4</scala.plugin.version> <spark.version.classifier>spark330db</spark.version.classifier> <!-- Note that we are using the Spark version for all of the Databricks dependencies as well. The jenkins/databricks/build.sh script handles installing the jars as maven artifacts. This is to make it easier and not have to change version numbers for each individual dependency and deal with differences between Databricks versions --> <spark.version>${spark330db.version}</spark.version> <spark.test.version>${spark330db.version}</spark.test.version> <hadoop.client.version>3.3.1</hadoop.client.version> <rat.consoleOutput>true</rat.consoleOutput> </properties> <modules> <module>core</module> </modules> </profile> <profile> <!-- Note Databricks requires 2 properties -Ddatabricks and -Dbuildver=330db --> <!-- Note that 330db backports many features from Spark3.4.0 --> <id>release332db</id> <activation> <property> <name>buildver</name> <value>332db</value> </property> </activation> <properties> <!-- Downgrade scala plugin version due to: https://github.com/sbt/sbt/issues/4305 --> <scala.plugin.version>3.4.4</scala.plugin.version> <spark.version.classifier>spark332db</spark.version.classifier> <!-- Note that we are using the Spark version for all of the Databricks dependencies as well. The jenkins/databricks/build.sh script handles installing the jars as maven artifacts. 
This is to make it easier and not have to change version numbers for each individual dependency and deal with differences between Databricks versions --> <spark.version>${spark332db.version}</spark.version> <spark.test.version>${spark332db.version}</spark.test.version> <hadoop.client.version>3.3.1</hadoop.client.version> <rat.consoleOutput>true</rat.consoleOutput> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>release350</id> <activation> <property> <name>buildver</name> <value>350</value> </property> </activation> <properties> <buildver>350</buildver> <spark.version>${spark350.version}</spark.version> <spark.test.version>${spark350.version}</spark.test.version> </properties> <modules> <module>core</module> </modules> </profile> <profile> <id>source-javadoc</id> <build> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-source-plugin</artifactId> <version>3.0.0</version> <executions> <execution> <id>attach-source</id> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-javadoc-plugin</artifactId> <version>3.0.0</version> <executions> <execution> <id>attach-javadoc</id> <goals> <goal>jar</goal> </goals> </execution> </executions> <configuration> <doclint>none</doclint> </configuration> </plugin> </plugins> </build> </profile> </profiles> <build> <directory>${project.basedir}/target/${target.classifier}</directory> <pluginManagement> <plugins> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-antrun-plugin</artifactId> <version>3.1.0</version> <executions> <execution> <id>shimplify-shim-sources</id> <goals><goal>run</goal></goals> <phase>generate-sources</phase> <configuration> <target xmlns:ac="antlib:net.sf.antcontrib"> <scriptdef name="shimplify" language="jython" src="${spark.rapids.source.basedir}/build/shimplify.py"> <attribute name="if"/> </scriptdef> <shimplify if="shimplify"/> </target> </configuration> </execution> <execution> <id>generate-build-info</id> <phase>generate-resources</phase> <configuration> <!-- Execute the shell script to generate the plugin build information. 
--> <target name="build-info"> <mkdir dir="${project.build.directory}/extra-resources"/> <mkdir dir="${project.build.directory}/tmp"/> <exec executable="bash" output="${project.build.directory}/extra-resources/rapids4spark-private-version-info.properties" resultproperty="build-info.exitCode" errorproperty="build-info.errorMsg" failonerror="false"> <arg value="${spark.rapids.source.basedir}/build/build-info"/> </exec> <fail message="exec build-info.sh failed, exit code is ${build-info.exitCode}, error msg is ${build-info.errorMsg}"> <condition> <not> <equals arg1="${build-info.exitCode}" arg2="0"/> </not> </condition> </fail> </target> </configuration> <goals> <goal>run</goal> </goals> </execution> </executions> <dependencies> <dependency> <groupId>org.apache.ant</groupId> <artifactId>ant</artifactId> <version>1.10.12</version> </dependency> <dependency> <groupId>ant-contrib</groupId> <artifactId>ant-contrib</artifactId> <version>1.0b3</version> </dependency> <dependency> <groupId>org.python</groupId> <artifactId>jython-standalone</artifactId> <version>2.7.2</version> </dependency> </dependencies> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-jar-plugin</artifactId> <version>${maven.jar.plugin.version}</version> <executions> <execution> <id>default-test-jar</id> <goals> <goal>test-jar</goal> </goals> <configuration> <classifier>${spark.version.classifier}tests</classifier> <skipIfEmpty>true</skipIfEmpty> </configuration> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-compiler-plugin</artifactId> <executions> <execution> <id>default-compile</id> <phase>none</phase> </execution> <execution> <id>default-testCompile</id> <phase>none</phase> </execution> </executions> </plugin> <plugin> <groupId>net.alchim31.maven</groupId> <artifactId>scala-maven-plugin</artifactId> <version>4.7.1</version> <executions> <execution> <id>eclipse-add-source</id> <goals> <goal>add-source</goal> </goals> </execution> <execution> <id>scala-compile-first</id> <phase>process-resources</phase> <goals> <goal>compile</goal> </goals> </execution> <execution> <id>scala-test-compile-first</id> <phase>process-test-resources</phase> <goals> <goal>testCompile</goal> </goals> </execution> <execution> <id>attach-scaladocs</id> <phase>verify</phase> <goals> <goal>doc-jar</goal> </goals> <configuration> <args> <arg> -doc-external-doc:${java.home}/lib/rt.jar#https://docs.oracle.com/javase/${java.major.version}/docs/api/index.html </arg> <arg> -doc-external-doc:${settings.localRepository}/${scala.local-lib.path}#https://scala-lang.org/api/${scala.version}/ </arg> <arg> -doc-external-doc:${settings.localRepository}/org/apache/spark/spark-sql_${scala.binary.version}/${spark.version}/spark-sql_${scala.binary.version}-${spark.version}.jar#https://spark.apache.org/docs/${spark.version}/api/scala/index.html </arg> </args> </configuration> </execution> </executions> <configuration> <scalaVersion>${scala.version}</scalaVersion> <checkMultipleScalaVersions>true</checkMultipleScalaVersions> <!--TODO [WARNING] Expected all dependencies to require Scala version: 2.12.15 [WARNING] org.scalatest:scalatest_2.12:3.0.8 requires scala version: 2.12.8 [WARNING] Multiple versions of scala libraries detected! 
<failOnMultipleScalaVersions>false</failOnMultipleScalaVersions> --> <recompileMode>${scala.recompileMode}</recompileMode> <args> <arg>-unchecked</arg> <arg>-deprecation</arg> <arg>-feature</arg> <arg>-explaintypes</arg> <arg>-Yno-adapted-args</arg> <arg>-Ywarn-unused:imports</arg> <arg>-Xlint:missing-interpolator</arg> <arg>-Xfatal-warnings</arg> </args> <jvmArgs> <jvmArg>-Xms1024m</jvmArg> <jvmArg>-Xmx1024m</jvmArg> </jvmArgs> <addJavacArgs>${scala.javac.args}</addJavacArgs> <secondaryCacheDir>${spark.rapids.source.basedir}/target/${spark.version.classifier}/.sbt/1.0/zinc/org.scala-sbt</secondaryCacheDir> </configuration> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-assembly-plugin</artifactId> <version>2.4</version> <configuration> <descriptorRefs> <descriptorRef>jar-with-dependencies</descriptorRef> </descriptorRefs> </configuration> <executions> <execution> <phase>package</phase> <goals> <goal>single</goal> </goals> </execution> </executions> </plugin> <plugin> <groupId>org.scalatest</groupId> <artifactId>scalatest-maven-plugin</artifactId> <version>${scalatest-maven-plugin.version}</version> <configuration> <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory> <junitxml>.</junitxml> <filereports>scala-test-output.txt</filereports> <!-- <argLine>${argLine} -ea -Xmx4g -Xss4m</argLine>--> <stderr/> <systemProperties> <java.awt.headless>true</java.awt.headless> <java.io.tmpdir>${project.build.directory}/tmp</java.io.tmpdir> <spark.ui.enabled>false</spark.ui.enabled> <spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress> <spark.unsafe.exceptionOnMemoryLeak>true</spark.unsafe.exceptionOnMemoryLeak> </systemProperties> <tagsToExclude>${test.exclude.tags}</tagsToExclude> <tagsToInclude>${test.include.tags}</tagsToInclude> </configuration> <executions> <execution> <id>test</id> <goals> <goal>test</goal> </goals> </execution> </executions> </plugin> </plugins> </pluginManagement> <plugins> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>build-helper-maven-plugin</artifactId> <version>3.3.0</version> <executions> <execution> <id>add-shimple-sources</id> <phase>generate-sources</phase> <goals><goal>add-source</goal></goals> <configuration> <sources> <source>${spark.shim.dest}/main/scala</source> <source>${spark.shim.dest}/main/java</source> </sources> </configuration> </execution> <execution> <id>add-shimple-test-sources</id> <phase>generate-test-sources</phase> <goals><goal>add-test-source</goal></goals> <configuration> <sources> <source>${spark.shim.dest}/test/scala</source> <source>${spark.shim.dest}/test/java</source> </sources> </configuration> </execution> </executions> </plugin> <!--use this plugin to configure "spark.rapids.source.basedir" property--> <plugin> <groupId>org.commonjava.maven.plugins</groupId> <artifactId>directory-maven-plugin</artifactId> <version>0.1</version> <executions> <execution> <id>directories</id> <goals> <goal>highest-basedir</goal> </goals> <phase>initialize</phase> <configuration> <property>spark.rapids.source.basedir</property> </configuration> </execution> </executions> </plugin> <plugin> <groupId>org.apache.maven.plugins</groupId> <artifactId>maven-antrun-plugin</artifactId> <version>3.1.0</version> <!-- parent-pom only executions --> <inherited>false</inherited> <executions> <execution> <!-- This is an alternative implementation of the scalastyle check invocation, a replacement for scalastyle-maven-plugin. 
It's motivated to address the following: - All scala files are checked at once regardless of the module, so the developer can focus on addressing violations without being distracted by the build issues in-between - We don't have to hardcode the source code roots added dynamically by other maven plugins to the project - The scalastyle launch cost is amortized across all modules --> <id>scalastyle-all-modules</id> <phase>verify</phase> <goals> <goal>run</goal> </goals> <configuration> <skip>${maven.scalastyle.skip}</skip> <target> <pathconvert property="scalastyle.dirs" pathsep=" "> <dirset dir="${project.basedir}" includes="**/src/main/scala"/> <dirset dir="${project.basedir}" includes="**/src/main/*/scala"/> <dirset dir="${project.basedir}" includes="**/src/test/scala"/> <dirset dir="${project.basedir}" includes="**/src/test/*/scala"/> </pathconvert> <echo>Checking scalastyle for all modules using following paths: ${scalastyle.dirs} </echo> <java classname="org.scalastyle.Main" failonerror="true"> <arg line="--verbose false"/> <arg line="--warnings false"/> <arg line="--config scalastyle-config.xml"/> <arg line="--xmlOutput ${project.basedir}/target/scalastyle-output.xml"/> <arg line="--inputEncoding ${project.build.sourceEncoding}"/> <arg line="--xmlEncoding ${project.reporting.outputEncoding}"/> <arg line="${scalastyle.dirs}"/> </java> </target> </configuration> </execution> <execution> <id>clean-all-modules</id> <phase>clean</phase> <goals> <goal>run</goal> </goals> <configuration> <skip>${maven.cleanall.skip}</skip> <target> <dirset dir="${project.basedir}" includes="**/target" id="target.dirs.for.clean"/> <pathconvert property="target.dirs.str" pathsep=" "> <dirset refid="target.dirs.for.clean"/> </pathconvert> <echo>Cleaning build directories of all modules ${target.dirs.str}</echo> <!-- workaround ant delete does not work with dirset --> <exec dir="${project.basedir}" executable="rm"> <arg value="-rf"/> <arg line="${target.dirs.str}"/> </exec> </target> </configuration> </execution> </executions> <dependencies> <dependency> <groupId>com.beautiful-scala</groupId> <artifactId>scalastyle_${scala.binary.version}</artifactId> <version>1.5.1</version> </dependency> </dependencies> </plugin> </plugins> </build> <repositories> <repository> <id>snapshots-repo</id> <url>https://oss.sonatype.org/content/repositories/snapshots</url> <releases> <enabled>false</enabled> </releases> <snapshots> <enabled>true</enabled> </snapshots> </repository> <repository> <id>apache-snapshots-repo</id> <url>https://repository.apache.org/content/repositories/snapshots/</url> <releases> <enabled>false</enabled> </releases> <snapshots> <enabled>true</enabled> </snapshots> </repository> </repositories> </project>
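The <dependencyManagement> section above centralizes versions, classifiers, and scopes, while the releaseXYZ profiles (activated with -Dbuildver, for example -Dbuildver=330, plus -Ddatabricks for the Databricks shims) select the matching Spark version. A minimal sketch, assuming a child module that inherits from this parent, of how the managed dependencies can then be declared without repeating version numbers:

<dependencies>
    <!-- Version 23.10.0 and scope "provided" are inherited from the parent's dependencyManagement;
         the classifier must match the managed entry (cuda11 via ${spark.rapids.jni.classifier}) -->
    <dependency>
        <groupId>com.nvidia</groupId>
        <artifactId>spark-rapids-jni</artifactId>
        <classifier>${spark.rapids.jni.classifier}</classifier>
    </dependency>
    <!-- ${scala.binary.version} resolves to 2.12 and ${spark.version} to the Spark release
         chosen by the active releaseXYZ profile -->
    <dependency>
        <groupId>org.apache.spark</groupId>
        <artifactId>spark-sql_${scala.binary.version}</artifactId>
    </dependency>
</dependencies>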