streamingpro-mlsql-spark_3.0_2.12
To use this module from Maven, add the following dependency to your pom.xml:

<dependency>
  <groupId>tech.mlsql</groupId>
  <artifactId>streamingpro-mlsql-spark_3.0_2.12</artifactId>
  <version>2.1.0</version>
</dependency>
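If your project builds with sbt instead of Maven, the same coordinates can be expressed as below. This is only an illustrative sketch derived from the snippet above: because the artifact name already encodes the Spark (3.0) and Scala (2.12) versions, it uses a plain % rather than the cross-building %% operator.

// sbt equivalent of the Maven dependency above (illustrative; assumes a Scala 2.12 project)
libraryDependencies += "tech.mlsql" % "streamingpro-mlsql-spark_3.0_2.12" % "2.1.0"

The complete pom.xml published for version 2.1.0 of this module is reproduced below.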
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
  <parent>
    <artifactId>streamingpro_2.12</artifactId>
    <groupId>tech.mlsql</groupId>
    <version>2.1.0</version>
  </parent>
  <modelVersion>4.0.0</modelVersion>

  <properties>
    <!-- spark 2.4 start -->
    <!-- <scala.version>2.11.12</scala.version> -->
    <!-- <scala.binary.version>2.11</scala.binary.version> -->
    <!-- <spark.version>2.4.3</spark.version> -->
    <!-- <spark.bigversion>2.4</spark.bigversion> -->
    <!-- <scalatest.version>3.0.3</scalatest.version> -->
    <!-- spark 2.4 end -->

    <!-- spark 3.0 start -->
    <scala.version>2.12.10</scala.version>
    <scala.binary.version>2.12</scala.binary.version>
    <spark.version>3.1.1</spark.version>
    <spark.bigversion>3.0</spark.bigversion>
    <scalatest.version>3.0.3</scalatest.version>
    <!-- spark 3.0 end -->
  </properties>

  <artifactId>streamingpro-mlsql-spark_${spark.bigversion}_${scala.binary.version}</artifactId>

  <profiles>
    <profile>
      <id>aliyun-oss</id>
      <properties>
        <hadoop-client-version>3.2.0</hadoop-client-version>
      </properties>
      <dependencies>
        <dependency>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-aliyun</artifactId>
          <version>3.2.0</version>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>shade</id>
      <build>
        <plugins>
          <plugin>
            <groupId>org.apache.maven.plugins</groupId>
            <artifactId>maven-shade-plugin</artifactId>
            <version>3.2.1</version>
            <configuration>
              <filters>
                <filter>
                  <artifact>*:*</artifact>
                  <excludes>
                    <exclude>META-INF/*.SF</exclude>
                    <exclude>META-INF/*.DSA</exclude>
                    <exclude>META-INF/*.RSA</exclude>
                  </excludes>
                </filter>
              </filters>
              <createDependencyReducedPom>false</createDependencyReducedPom>
            </configuration>
            <executions>
              <execution>
                <phase>package</phase>
                <goals>
                  <goal>shade</goal>
                </goals>
                <configuration>
                  <artifactSet>
                    <excludes>
                      <exclude>org.scala-lang:scala-compiler</exclude>
                      <exclude>org.antlr:antlr4</exclude>
                      <exclude>org.antlr:stringtemplate</exclude>
                      <exclude>org.antlr:ST4</exclude>
                      <exclude>org.antlr:antlr4-runtime</exclude>
                      <exclude>org.fusesource.leveldbjni:leveldbjni-all</exclude>
                      <exclude>org.scala-lang:scala-reflect</exclude>
                      <exclude>org.scala-lang.modules:scala-xml_${scala.binary.version}</exclude>
                      <exclude>
                        org.scala-lang.modules:scala-parser-combinators_${scala.binary.version}
                      </exclude>
                      <exclude>org.apache.spark:spark-tags_${scala.binary.version}</exclude>
                      <exclude>org.spark-project.spark:unused</exclude>
                      <exclude>org.scala-lang:jline</exclude>
                    </excludes>
                  </artifactSet>
                </configuration>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>

    <profile>
      <id>build-distr</id>
      <build>
        <plugins>
          <plugin>
            <artifactId>maven-assembly-plugin</artifactId>
            <version>3.0.0</version>
            <configuration>
              <descriptorRefs>
                <descriptorRef>jar-with-dependencies</descriptorRef>
              </descriptorRefs>
            </configuration>
            <executions>
              <execution>
                <id>make-assembly</id> <!-- this is used for inheritance merges -->
                <phase>package</phase> <!-- bind to the packaging phase -->
                <goals>
                  <goal>single</goal>
                </goals>
              </execution>
            </executions>
          </plugin>
        </plugins>
      </build>
    </profile>

    <profile>
      <id>unit-test</id>
      <dependencies>
        <dependency>
          <groupId>org.elasticsearch</groupId>
          <artifactId>elasticsearch-spark-20_2.12</artifactId>
          <version>6.5.0</version>
        </dependency>
        <dependency>
          <groupId>org.mongodb.spark</groupId>
          <artifactId>mongo-spark-connector_2.12</artifactId>
          <version>2.4.1</version>
        </dependency>
        <dependency>
          <groupId>org.apache.hbase</groupId>
          <artifactId>hbase-client</artifactId>
          <version>1.2.0</version>
          <exclusions>
            <exclusion>
              <groupId>javax.servlet</groupId>
              <artifactId>servlet-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.mortbay.jetty</groupId>
              <artifactId>jetty</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.mortbay.jetty</groupId>
              <artifactId>servlet-api-2.5</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.hadoop</groupId>
              <artifactId>hadoop-hdfs-client</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.hadoop</groupId>
              <artifactId>hadoop-auth</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-redis-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>0.1.0</version>
        </dependency>
        <dependency>
          <groupId>org.apache.hive</groupId>
          <artifactId>hive-jdbc</artifactId>
          <version>2.2.0</version>
          <exclusions>
            <exclusion>
              <groupId>org.eclipse.jetty.aggregate</groupId>
              <artifactId>jetty-all</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.hive</groupId>
              <artifactId>hive-shims</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.logging.log4j</groupId>
              <artifactId>log4j-web</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.logging.log4j</groupId>
              <artifactId>log4j-1.2-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.logging.log4j</groupId>
              <artifactId>log4j-api</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.logging.log4j</groupId>
              <artifactId>log4j-slf4j-impl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.hadoop</groupId>
              <artifactId>hadoop-hdfs</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>com.lucidworks.spark</groupId>
          <artifactId>spark-solr</artifactId>
          <version>3.6.0</version>
          <exclusions>
            <exclusion>
              <groupId>org.apache.logging.log4j</groupId>
              <artifactId>log4j-slf4j-impl</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.slf4j</groupId>
              <artifactId>slf4j-log4j12</artifactId>
            </exclusion>
            <exclusion>
              <groupId>javax.servlet</groupId>
              <artifactId>*</artifactId>
            </exclusion>
            <exclusion>
              <groupId>org.apache.hadoop</groupId>
              <artifactId>*</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.fasterxml.jackson.core</groupId>
              <artifactId>jackson-core</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.fasterxml.jackson.core</groupId>
              <artifactId>jackson-databind</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.fasterxml.jackson.module</groupId>
              <artifactId>jackson-module-scala_${scala.binary.version}</artifactId>
            </exclusion>
            <exclusion>
              <groupId>com.esotericsoftware</groupId>
              <artifactId>kryo-shaded</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>streamingpro-spark-2.3.0-adaptor</id>
      <dependencies>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>streamingpro-spark-2.3.0-adaptor_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-streaming-kafka-0-8_${scala.binary.version}</artifactId>
          <version>${spark.version}</version>
          <exclusions>
            <exclusion>
              <groupId>org.apache.kafka</groupId>
              <artifactId>kafka-clients</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>com.databricks</groupId>
          <artifactId>spark-xml_${scala.binary.version}</artifactId>
          <version>0.4.1</version>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>streamingpro-spark-2.4.0-adaptor</id>
      <dependencies>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>streamingpro-spark-2.4.0-adaptor_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>python-controller-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-sql-profiler-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-ets-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-mysql-store-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-watcher-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-healthy-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-streaming-kafka-0-8_${scala.binary.version}</artifactId>
          <version>${spark.version}</version>
          <exclusions>
            <exclusion>
              <groupId>org.apache.kafka</groupId>
              <artifactId>kafka-clients</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
        <dependency>
          <groupId>com.databricks</groupId>
          <artifactId>spark-xml_${scala.binary.version}</artifactId>
          <version>0.4.1</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-autosuggest-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>streamingpro-spark-3.0.0-adaptor</id>
      <activation>
        <activeByDefault>true</activeByDefault>
      </activation>
      <dependencies>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>streamingpro-spark-3.0.0-adaptor_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-ets-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>python-controller-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-sql-profiler-30-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-mysql-store-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-watcher-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-healthy-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>mlsql-autosuggest-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>hive-thrift-server</id>
      <dependencies>
        <dependency>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-hive-thriftserver_${scala.binary.version}</artifactId>
          <version>${spark.version}</version>
          <exclusions>
            <exclusion>
              <groupId>org.eclipse.jetty.aggregate</groupId>
              <artifactId>jetty-all</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>jython-support</id>
      <dependencies>
        <dependency>
          <groupId>org.python</groupId>
          <artifactId>jython-standalone</artifactId>
          <version>${jython.version}</version>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>chinese-analyzer-support</id>
      <dependencies>
      </dependencies>
    </profile>

    <profile>
      <id>online</id>
      <properties>
        <mode>online</mode>
        <scope>provided</scope>
      </properties>
      <dependencies>
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>streamingpro-spark-common-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
          <exclusions>
            <exclusion>
              <groupId>tech.mlsql</groupId>
              <artifactId>streamingpro-dependency</artifactId>
            </exclusion>
          </exclusions>
        </dependency>
      </dependencies>
    </profile>

    <profile>
      <id>local</id>
      <activation>
        <activeByDefault>true</activeByDefault>
      </activation>
      <properties>
        <mode>local</mode>
        <scope>compile</scope>
      </properties>
      <dependencies>
        <!--<dependency>-->
        <!--<groupId>org.elasticsearch</groupId>-->
        <!--<artifactId>elasticsearch-spark-20_2.12</artifactId>-->
        <!--<version>5.5.2</version>-->
        <!--</dependency>-->
        <dependency>
          <groupId>tech.mlsql</groupId>
          <artifactId>streamingpro-spark-common-${spark.bigversion}_${scala.binary.version}</artifactId>
          <version>${project.parent.version}</version>
        </dependency>
      </dependencies>
    </profile>
  </profiles>

  <dependencies>
    <dependency>
      <groupId>org.apache.kylin</groupId>
      <artifactId>kylin-jdbc</artifactId>
      <version>${kylin-jdbc.version}</version>
    </dependency>
    <dependency>
      <groupId>tech.mlsql</groupId>
      <artifactId>common-utils_${scala.binary.version}</artifactId>
      <version>${common-utils-version}</version>
    </dependency>
    <dependency>
      <groupId>org.jsoup</groupId>
      <artifactId>jsoup</artifactId>
      <version>1.11.2</version>
    </dependency>
    <dependency>
      <groupId>us.codecraft</groupId>
      <artifactId>xsoup</artifactId>
      <version>0.3.1</version>
    </dependency>
    <!-- <dependency>-->
    <!-- <groupId>cn.edu.hfut.dmic.webcollector</groupId>-->
    <!-- <artifactId>WebCollector</artifactId>-->
    <!-- <version>${WebCollector.version}</version>-->
    <!-- <exclusions>-->
    <!-- <exclusion>-->
    <!-- <groupId>junit</groupId>-->
    <!-- <artifactId>junit</artifactId>-->
    <!-- </exclusion>-->
    <!-- <exclusion>-->
    <!-- <groupId>org.springframework</groupId>-->
    <!-- <artifactId>spring-jdbc</artifactId>-->
    <!-- </exclusion>-->
    <!-- <exclusion>-->
    <!-- <groupId>com.mchange</groupId>-->
    <!-- <artifactId>c3p0</artifactId>-->
    <!-- </exclusion>-->
    <!-- <exclusion>-->
    <!-- <groupId>org.apache.avro</groupId>-->
    <!-- <artifactId>avro</artifactId>-->
    <!-- </exclusion>-->
    <!-- </exclusions>-->
    <!-- </dependency>-->
    <dependency>
      <groupId>tech.mlsql</groupId>
      <artifactId>streamingpro-dsl-${spark.bigversion}_${scala.binary.version}</artifactId>
      <version>${project.parent.version}</version>
    </dependency>
    <dependency>
      <groupId>tech.mlsql</groupId>
      <artifactId>simple-schema_${scala.binary.version}</artifactId>
      <version>0.2.0</version>
    </dependency>
    <dependency>
      <groupId>tech.mlsql</groupId>
      <artifactId>streamingpro-spark-common-${spark.bigversion}_${scala.binary.version}</artifactId>
      <version>${project.parent.version}</version>
    </dependency>
    <dependency>
      <groupId>tech.mlsql</groupId>
      <artifactId>streamingpro-core-${spark.bigversion}_${scala.binary.version}</artifactId>
      <version>${project.parent.version}</version>
    </dependency>
    <dependency>
      <groupId>org.scalactic</groupId>
      <artifactId>scalactic_${scala.binary.version}</artifactId>
      <version>3.0.0</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_${scala.binary.version}</artifactId>
      <version>3.0.0</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-streaming_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
      <exclusions>
        <exclusion>
          <groupId>org.apache.arrow</groupId>
          <artifactId>arrow-vector</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.apache.arrow</groupId>
          <artifactId>arrow-memory</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.apache.arrow</groupId>
          <artifactId>arrow-format</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
      <exclusions>
        <exclusion>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-client</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-mllib_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-graphx_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-catalyst_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>${scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-sql-kafka-0-10_${scala.binary.version}</artifactId>
      <version>${spark.version}</version>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>${hadoop-client-version}</version>
      <scope>${scope}</scope>
      <exclusions>
        <exclusion>
          <groupId>javax.servlet</groupId>
          <artifactId>servlet-api</artifactId>
        </exclusion>
        <exclusion>
          <groupId>javax.servlet</groupId>
          <artifactId>javax.servlet-api</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.google.guava</groupId>
          <artifactId>guava</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.sun.jersey</groupId>
          <artifactId>jersey-core</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.sun.jersey</groupId>
          <artifactId>jersey-client</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.sun.jersey</groupId>
          <artifactId>jersey-json</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.sun.jersey.contribs</groupId>
          <artifactId>jersey-guice</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.sun.jersey</groupId>
          <artifactId>jersey-server</artifactId>
        </exclusion>
        <exclusion>
          <groupId>com.google.code.gson</groupId>
          <artifactId>gson</artifactId>
        </exclusion>
        <exclusion>
          <groupId>io.netty</groupId>
          <artifactId>netty</artifactId>
        </exclusion>
      </exclusions>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-maven-plugins</artifactId>
        <executions>
          <execution>
            <id>version-info</id>
            <phase>generate-resources</phase>
            <goals>
              <goal>version-info</goal>
            </goals>
            <configuration>
              <source>
                <directory>${basedir}/src/main</directory>
                <includes>
                  <include>java/**/*.java</include>
                  <include>java/**/*.scala</include>
                </includes>
              </source>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>
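The profiles above select the Spark adaptor, the packaging strategy (shade or assembly), and whether the Spark dependencies are compile or provided scope (the local vs. online profiles). As a hedged sketch of how such a module is typically packaged from source, assuming the build is run from the module directory, one might combine the shade profile with the two activeByDefault profiles; Maven deactivates a POM's activeByDefault profiles as soon as any other profile in it is selected with -P, so they are listed explicitly here. The exact profile combination used for official releases may differ.

# Illustrative only: package the module with shading, the Spark 3.0 adaptor,
# and the local (compile-scope) profile re-enabled alongside it.
mvn -DskipTests clean package -Pshade,streamingpro-spark-3.0.0-adaptor,local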