shc-core
To depend on this artifact from Maven:

<dependency>
  <groupId>com.xiachufang</groupId>
  <artifactId>shc-core</artifactId>
  <version>1.1.0-2.0-s_2.11</version>
</dependency>
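shc-core is the core module of the Spark-HBase Connector (SHC): it exposes HBase tables to Spark SQL as DataFrames through a JSON catalog that maps DataFrame columns to HBase column families. Below is a minimal sketch of a round trip through the connector; the SparkSession setup, the table name (table1), the column family (cf1), and the sample rows are illustrative assumptions, not part of this artifact's metadata.

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog

object ShcRoundTrip {
  // JSON catalog mapping DataFrame columns to HBase cells; "table1" and
  // column family "cf1" are illustrative assumptions.
  val catalog =
    s"""{
       |"table":{"namespace":"default", "name":"table1"},
       |"rowkey":"key",
       |"columns":{
       |"col0":{"cf":"rowkey", "col":"key", "type":"string"},
       |"col1":{"cf":"cf1", "col":"col1", "type":"int"}
       |}
       |}""".stripMargin

  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("shc-round-trip").getOrCreate()
    import spark.implicits._

    // Write two sample rows; newTable = "5" asks SHC to create the table
    // with five regions if it does not exist yet.
    Seq(("row1", 1), ("row2", 2)).toDF("col0", "col1")
      .write
      .options(Map(
        HBaseTableCatalog.tableCatalog -> catalog,
        HBaseTableCatalog.newTable -> "5"))
      .format("org.apache.spark.sql.execution.datasources.hbase")
      .save()

    // Read the table back as a DataFrame using the same catalog.
    spark.read
      .options(Map(HBaseTableCatalog.tableCatalog -> catalog))
      .format("org.apache.spark.sql.execution.datasources.hbase")
      .load()
      .show()

    spark.stop()
  }
}

The 1.1.0-2.0-s_2.11 version string follows SHC's convention: connector release 1.1.0, built against Spark 2.0 with Scala 2.11. The module's published pom.xml follows.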
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements. See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License. You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
-->
<!-- BUILD: mvn -e -T1C -Phbase,hadoop-2.4 -Dhadoop.version=2.4.0 -DskipTests clean package install -->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>
  <parent>
    <groupId>com.xiachufang</groupId>
    <artifactId>shc</artifactId>
    <version>1.1.0-2.0-s_2.11</version>
    <relativePath>../pom.xml</relativePath>
  </parent>
  <artifactId>shc-core</artifactId>
  <version>1.1.0-2.0-s_2.11</version>
  <packaging>jar</packaging>
  <name>HBase Spark Connector Project Core</name>

  <dependencies>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-server</artifactId>
      <version>${hbase.version}</version>
      <exclusions>
        <exclusion>
          <groupId>asm</groupId>
          <artifactId>asm</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.jboss.netty</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>io.netty</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>commons-logging</groupId>
          <artifactId>commons-logging</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.jruby</groupId>
          <artifactId>jruby-complete</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-common</artifactId>
      <version>${hbase.version}</version>
      <exclusions>
        <exclusion>
          <groupId>asm</groupId>
          <artifactId>asm</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.jboss.netty</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>io.netty</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
        <exclusion>
          <groupId>commons-logging</groupId>
          <artifactId>commons-logging</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.jruby</groupId>
          <artifactId>jruby-complete</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.phoenix</groupId>
      <artifactId>phoenix-core</artifactId>
      <version>${phoenix.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hbase</groupId>
      <artifactId>hbase-testing-util</artifactId>
      <type>test-jar</type>
      <version>${hbase.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>
      <artifactId>avro</artifactId>
      <version>${avro.version}</version>
    </dependency>
    <dependency>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_${scala.binary.version}</artifactId>
      <version>2.2.1</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <!-- Scalatest runs all Scala tests -->
    <plugins>
      <plugin>
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest-maven-plugin</artifactId>
        <version>1.0</version>
        <!-- Note: config is repeated in surefire config -->
        <configuration>
          <reportsDirectory>${project.build.directory}/surefire-reports</reportsDirectory>
          <junitxml>.</junitxml>
          <filereports>SparkTestSuite.txt</filereports>
          <argLine>-ea -Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=${CodeCacheSize}</argLine>
          <stderr/>
          <systemProperties>
            <java.awt.headless>true</java.awt.headless>
            <!-- <spark.test.home>${spark.test.home}</spark.test.home> -->
            <spark.hbase.test.home>${basedir}</spark.hbase.test.home>
            <spark.testing>1</spark.testing>
            <spark.ui.enabled>false</spark.ui.enabled>
            <spark.ui.showConsoleProgress>false</spark.ui.showConsoleProgress>
            <spark.driver.allowMultipleContexts>true</spark.driver.allowMultipleContexts>
          </systemProperties>
          <parallel>false</parallel>
        </configuration>
        <executions>
          <execution>
            <id>test</id>
            <goals>
              <goal>test</goal>
            </goals>
          </execution>
        </executions>
      </plugin>
      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-clean-plugin</artifactId>
        <version>2.5</version>
        <configuration>
          <filesets>
            <fileset>
              <directory>work</directory>
            </fileset>
            <fileset>
              <directory>checkpoint</directory>
            </fileset>
            <fileset>
              <directory>lib_managed</directory>
            </fileset>
          </filesets>
        </configuration>
      </plugin>
    </plugins>
  </build>
</project>
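Because the POM declares spark-sql and spark-catalyst as dependencies, DataFrames loaded through the connector take part in Catalyst planning, and SHC can push row-key predicates down into HBase scans instead of reading the whole table. A short sketch, reusing the illustrative catalog string from the round-trip example above:

import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.execution.datasources.hbase.HBaseTableCatalog

// `catalog` is the same illustrative JSON catalog defined in the
// round-trip sketch earlier on this page.
val spark = SparkSession.builder().appName("shc-sql").getOrCreate()

spark.read
  .options(Map(HBaseTableCatalog.tableCatalog -> catalog))
  .format("org.apache.spark.sql.execution.datasources.hbase")
  .load()
  .createOrReplaceTempView("table1")

// The equality predicate on the row-key column can be served by a bounded
// HBase scan rather than a full-table read.
spark.sql("SELECT col0, col1 FROM table1 WHERE col0 = 'row1'").show()

Note also the build hint embedded at the top of the POM: the module is built with mvn -e -T1C -Phbase,hadoop-2.4 -Dhadoop.version=2.4.0 -DskipTests clean package install, and ScalaTest suites run through scalatest-maven-plugin with reports written to target/surefire-reports.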