hadoop-ks3
<dependency>
  <groupId>com.ksyun.kmr</groupId>
  <artifactId>hadoop-ks3</artifactId>
  <version>3.1.1-1.1.2</version>
</dependency>
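The coordinates above only put the connector on the application classpath; the module's own POM follows below. As a rough illustration of how such a connector is typically used, here is a hedged Java sketch that goes through Hadoop's generic FileSystem API. The ks3:// scheme and the fs.ks3.* property names in it are assumptions for illustration, not configuration keys confirmed by this module; consult the KSYun KMR documentation for the exact names.

// Illustrative sketch only. The ks3:// scheme and the fs.ks3.* property names
// below are assumptions, not confirmed configuration keys of this module.
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.net.URI;
import java.nio.charset.StandardCharsets;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class Ks3ReadSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Hypothetical credential keys, shown only to mark where credentials go.
    conf.set("fs.ks3.AccessKey", "<your-access-key>");
    conf.set("fs.ks3.AccessSecret", "<your-secret-key>");
    // If the connector does not self-register its FileSystem implementation,
    // an fs.ks3.impl entry pointing at the connector class is also required.

    // Bind a FileSystem instance to the (assumed) ks3:// bucket URI.
    FileSystem fs = FileSystem.get(URI.create("ks3://my-bucket/"), conf);

    // From here on it is the standard Hadoop FileSystem API.
    for (FileStatus status : fs.listStatus(new Path("ks3://my-bucket/data/"))) {
      System.out.println(status.getPath() + "\t" + status.getLen());
    }
    try (BufferedReader reader = new BufferedReader(new InputStreamReader(
        fs.open(new Path("ks3://my-bucket/data/part-00000")), StandardCharsets.UTF_8))) {
      System.out.println(reader.readLine());
    }
  }
}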
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <parent> <artifactId>gaeafs</artifactId> <groupId>com.ksyun.kmr</groupId> <version>3.1.1-1.1.2</version> </parent> <modelVersion>4.0.0</modelVersion> <artifactId>hadoop-ks3</artifactId> <name>Apache Hadoop KSYun KS3 support</name> <version>3.1.1-1.1.2</version> <description>This module contains code to support integration with KSYun KS3 Storage. It also declares the dependencies needed to work with KSYun KS3 Storage.</description> <build> <plugins> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>appassembler-maven-plugin</artifactId> <version>1.3.1</version> <executions> <execution> <id>make-assembly</id> <phase>package</phase> <goals> <goal>create-repository</goal> </goals> </execution> </executions> <configuration> <repositoryLayout>flat</repositoryLayout> <repositoryName>lib</repositoryName> <assembleDirectory>${project.build.directory}/${project.artifactId}-${project.version}</assembleDirectory> </configuration> </plugin> <plugin> <artifactId>maven-source-plugin</artifactId> <version>3.2.1</version> <executions> <execution> <id>attach-sources</id> <phase>verify</phase> <goals> <goal>jar-no-fork</goal> </goals> </execution> </executions> </plugin> <plugin> <artifactId>maven-jar-plugin</artifactId> <version>3.2.0</version> <configuration> <excludes> <exclude>**/log4j.xml</exclude> <exclude>**/test/resource/**</exclude> </excludes> </configuration> </plugin> <plugin> <artifactId>maven-shade-plugin</artifactId> <version>3.2.4</version> <executions> <execution> <phase>package</phase> <goals> <goal>shade</goal> </goals> </execution> </executions> <configuration> <artifactSet> <includes> <include>com.ksyun:ks3-kss-java-sdk</include> <include>joda-time:joda-time</include> <include>org.apache.httpcomponents:httpclient</include> <include>org.apache.httpcomponents:httpcore</include> <include>com.google.guava:guava</include> <include>com.lmax:disruptor</include> <include>org.apache.commons:commons-lang3</include> </includes> </artifactSet> <relocations> <relocation> <pattern>org.apache.http</pattern> <shadedPattern>shadedforhadoopks3.org.apache.http</shadedPattern> </relocation> <relocation> <pattern>com.google.common</pattern> <shadedPattern>shadedforhadoopks3.com.google.common</shadedPattern> </relocation> <relocation> <pattern>org.joda</pattern> <shadedPattern>shadedforhadoopks3.org.joda</shadedPattern> </relocation> <relocation> <pattern>com.lmax</pattern> <shadedPattern>shadedforhadoopks3.com.lmax</shadedPattern> </relocation> <relocation> <pattern>org.apache.commons.lang3</pattern> <shadedPattern>shadedforhadoopks3.org.apache.commons.lang3</shadedPattern> </relocation> </relocations> </configuration> </plugin> <plugin> <artifactId>maven-compiler-plugin</artifactId> <version>3.8.1</version> <configuration> <source>1.8</source> <target>1.8</target> </configuration> </plugin> <plugin> <groupId>net.alchim31.maven</groupId> <artifactId>scala-maven-plugin</artifactId> <version>3.2.2</version> <executions> <execution> <id>scala-compile-first</id> <phase>process-resources</phase> <goals> <goal>add-source</goal> <goal>compile</goal> </goals> </execution> <execution> <id>scala-test-compile</id> <phase>process-test-resources</phase> <goals> <goal>testCompile</goal> </goals> </execution> </executions> </plugin> <plugin> 
<artifactId>maven-surefire-plugin</artifactId> <version>2.22.2</version> <configuration> <environmentVariables> <SPARK_PREPEND_CLASSES>1</SPARK_PREPEND_CLASSES> <SPARK_SCALA_VERSION>${scala.binary.version}</SPARK_SCALA_VERSION> <SPARK_TESTING>1</SPARK_TESTING> </environmentVariables> <systemProperties> <derby.system.durability>test</derby.system.durability> <java.awt.headless>true</java.awt.headless> <java.io.tmpdir>${spark.test.home}/tmp</java.io.tmpdir> <spark.test.home>${spark.test.home}</spark.test.home> <spark.testing>1</spark.testing> <spark.ui.enabled>false</spark.ui.enabled> <spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress> <spark.jobs.path>${basedir}/../integration_test_job_jars/spark_jobs/target/spark_jobs-3.1.1-1.1.2.jar</spark.jobs.path> <integration.test.data>${basedir}/../integration_test_data</integration.test.data> <hadoop.test.home>${hadoop.test.home}</hadoop.test.home> </systemProperties> </configuration> </plugin> </plugins> </build> <dependencies> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-mapreduce-client-core</artifactId> <version>3.1.1</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>3.1.1</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.junit.jupiter</groupId> <artifactId>junit-jupiter</artifactId> <version>5.7.0</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>junit-jupiter-api</artifactId> <groupId>org.junit.jupiter</groupId> </exclusion> <exclusion> <artifactId>junit-jupiter-params</artifactId> <groupId>org.junit.jupiter</groupId> </exclusion> <exclusion> <artifactId>junit-jupiter-engine</artifactId> <groupId>org.junit.jupiter</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-minicluster</artifactId> <version>3.1.1</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>hadoop-common</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-hdfs</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-yarn-server-tests</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-mapreduce-client-jobclient</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-hdfs</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-mapreduce-client-app</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-mapreduce-client-jobclient</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-mapreduce-client-hs</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.mockito</groupId> <artifactId>mockito-all</artifactId> <version>1.10.19</version> <scope>test</scope> </dependency> <dependency> <groupId>org.scala-lang</groupId> <artifactId>scala-library</artifactId> <version>2.12.15</version> <scope>test</scope> </dependency> <dependency> <groupId>org.scalatest</groupId> <artifactId>scalatest_2.12</artifactId> <version>3.0.3</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>scalactic_2.12</artifactId> <groupId>org.scalactic</groupId> </exclusion> <exclusion> <artifactId>scala-reflect</artifactId> <groupId>org.scala-lang</groupId> </exclusion> <exclusion> <artifactId>scala-xml_2.12</artifactId> 
<groupId>org.scala-lang.modules</groupId> </exclusion> <exclusion> <artifactId>scala-parser-combinators_2.12</artifactId> <groupId>org.scala-lang.modules</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-core_2.12</artifactId> <version>3.2.0</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>avro-mapred</artifactId> <groupId>org.apache.avro</groupId> </exclusion> <exclusion> <artifactId>chill_2.12</artifactId> <groupId>com.twitter</groupId> </exclusion> <exclusion> <artifactId>chill-java</artifactId> <groupId>com.twitter</groupId> </exclusion> <exclusion> <artifactId>xbean-asm9-shaded</artifactId> <groupId>org.apache.xbean</groupId> </exclusion> <exclusion> <artifactId>hadoop-client-api</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-client-runtime</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>spark-kvstore_2.12</artifactId> <groupId>org.apache.spark</groupId> </exclusion> <exclusion> <artifactId>spark-network-common_2.12</artifactId> <groupId>org.apache.spark</groupId> </exclusion> <exclusion> <artifactId>spark-network-shuffle_2.12</artifactId> <groupId>org.apache.spark</groupId> </exclusion> <exclusion> <artifactId>spark-unsafe_2.12</artifactId> <groupId>org.apache.spark</groupId> </exclusion> <exclusion> <artifactId>activation</artifactId> <groupId>javax.activation</groupId> </exclusion> <exclusion> <artifactId>jakarta.servlet-api</artifactId> <groupId>jakarta.servlet</groupId> </exclusion> <exclusion> <artifactId>jul-to-slf4j</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>jcl-over-slf4j</artifactId> <groupId>org.slf4j</groupId> </exclusion> <exclusion> <artifactId>compress-lzf</artifactId> <groupId>com.ning</groupId> </exclusion> <exclusion> <artifactId>lz4-java</artifactId> <groupId>org.lz4</groupId> </exclusion> <exclusion> <artifactId>zstd-jni</artifactId> <groupId>com.github.luben</groupId> </exclusion> <exclusion> <artifactId>RoaringBitmap</artifactId> <groupId>org.roaringbitmap</groupId> </exclusion> <exclusion> <artifactId>json4s-jackson_2.12</artifactId> <groupId>org.json4s</groupId> </exclusion> <exclusion> <artifactId>jersey-client</artifactId> <groupId>org.glassfish.jersey.core</groupId> </exclusion> <exclusion> <artifactId>jersey-common</artifactId> <groupId>org.glassfish.jersey.core</groupId> </exclusion> <exclusion> <artifactId>jersey-server</artifactId> <groupId>org.glassfish.jersey.core</groupId> </exclusion> <exclusion> <artifactId>jersey-container-servlet</artifactId> <groupId>org.glassfish.jersey.containers</groupId> </exclusion> <exclusion> <artifactId>jersey-container-servlet-core</artifactId> <groupId>org.glassfish.jersey.containers</groupId> </exclusion> <exclusion> <artifactId>jersey-hk2</artifactId> <groupId>org.glassfish.jersey.inject</groupId> </exclusion> <exclusion> <artifactId>netty-all</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <artifactId>stream</artifactId> <groupId>com.clearspring.analytics</groupId> </exclusion> <exclusion> <artifactId>metrics-core</artifactId> <groupId>io.dropwizard.metrics</groupId> </exclusion> <exclusion> <artifactId>metrics-jvm</artifactId> <groupId>io.dropwizard.metrics</groupId> </exclusion> <exclusion> <artifactId>metrics-json</artifactId> <groupId>io.dropwizard.metrics</groupId> </exclusion> <exclusion> <artifactId>metrics-graphite</artifactId> <groupId>io.dropwizard.metrics</groupId> </exclusion> 
<exclusion> <artifactId>metrics-jmx</artifactId> <groupId>io.dropwizard.metrics</groupId> </exclusion> <exclusion> <artifactId>jackson-module-scala_2.12</artifactId> <groupId>com.fasterxml.jackson.module</groupId> </exclusion> <exclusion> <artifactId>ivy</artifactId> <groupId>org.apache.ivy</groupId> </exclusion> <exclusion> <artifactId>oro</artifactId> <groupId>oro</groupId> </exclusion> <exclusion> <artifactId>pyrolite</artifactId> <groupId>net.razorvine</groupId> </exclusion> <exclusion> <artifactId>py4j</artifactId> <groupId>net.sf.py4j</groupId> </exclusion> <exclusion> <artifactId>spark-tags_2.12</artifactId> <groupId>org.apache.spark</groupId> </exclusion> <exclusion> <artifactId>commons-crypto</artifactId> <groupId>org.apache.commons</groupId> </exclusion> <exclusion> <artifactId>unused</artifactId> <groupId>org.spark-project.spark</groupId> </exclusion> <exclusion> <artifactId>scala-xml_2.12</artifactId> <groupId>org.scala-lang.modules</groupId> </exclusion> <exclusion> <artifactId>scala-reflect</artifactId> <groupId>org.scala-lang</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-streaming_2.12</artifactId> <version>3.2.0</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>spark-tags_2.12</artifactId> <groupId>org.apache.spark</groupId> </exclusion> <exclusion> <artifactId>unused</artifactId> <groupId>org.spark-project.spark</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-launcher_2.12</artifactId> <version>3.2.0</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>spark-tags_2.12</artifactId> <groupId>org.apache.spark</groupId> </exclusion> <exclusion> <artifactId>unused</artifactId> <groupId>org.spark-project.spark</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-yarn_2.12</artifactId> <version>3.2.0</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>hadoop-client-api</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>hadoop-client-runtime</artifactId> <groupId>org.apache.hadoop</groupId> </exclusion> <exclusion> <artifactId>unused</artifactId> <groupId>org.spark-project.spark</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.commons</groupId> <artifactId>commons-text</artifactId> <version>1.9</version> <scope>test</scope> </dependency> </dependencies> <properties> <hadoop.test.home>${basedir}/../../hadoop/hadoop-dist/target/hadoop-3.1.1</hadoop.test.home> <ksyun.ks3.version>1.0.2</ksyun.ks3.version> <spark.test.home>${basedir}/../../spark</spark.test.home> <junit.version>5.7.0</junit.version> </properties> </project>
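The maven-shade-plugin configuration in the POM above bundles the KS3 SDK together with httpclient, httpcore, Guava, Joda-Time, the LMAX Disruptor, and commons-lang3, and relocates their packages under the shadedforhadoopks3 prefix so that the bundled copies cannot collide with the versions Hadoop and Spark already ship. The following hypothetical check, which assumes the shaded hadoop-ks3 jar is on the application classpath, makes that visible by resolving both the original and the relocated class names:

// Hypothetical check, assuming the shaded hadoop-ks3 jar is on the classpath.
// The class names under the "shadedforhadoopks3." prefix come from the
// maven-shade-plugin relocations declared in the POM above; the unprefixed
// names resolve to whatever copies Hadoop or Spark provide, if any.
public class ShadePrefixCheck {
  public static void main(String[] args) {
    printOrigin("org.apache.http.client.HttpClient");
    printOrigin("shadedforhadoopks3.org.apache.http.client.HttpClient");
    printOrigin("com.google.common.collect.ImmutableList");
    printOrigin("shadedforhadoopks3.com.google.common.collect.ImmutableList");
  }

  private static void printOrigin(String className) {
    try {
      Class<?> cls = Class.forName(className);
      Object location = cls.getProtectionDomain().getCodeSource() == null
          ? "(no code source)"
          : cls.getProtectionDomain().getCodeSource().getLocation();
      System.out.println(className + " -> " + location);
    } catch (ClassNotFoundException e) {
      System.out.println(className + " -> not on classpath");
    }
  }
}

Because the shade step rewrites the bundled bytecode to the relocated names, the connector keeps using its own copies of these libraries regardless of which versions the surrounding Hadoop or Spark distribution puts on the classpath.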