spark-interpreter
Used in:
components
- Overview
- Versions
- Dependents
- Dependencies
<dependency>
  <groupId>org.apache.zeppelin</groupId>
  <artifactId>spark-interpreter</artifactId>
  <version>0.9.0-preview1</version>
</dependency>
<?xml version="1.0" encoding="UTF-8"?>
<!--
  ~ Licensed to the Apache Software Foundation (ASF) under one or more
  ~ contributor license agreements. See the NOTICE file distributed with
  ~ this work for additional information regarding copyright ownership.
  ~ The ASF licenses this file to You under the Apache License, Version 2.0
  ~ (the "License"); you may not use this file except in compliance with
  ~ the License. You may obtain a copy of the License at
  ~
  ~    http://www.apache.org/licenses/LICENSE-2.0
  ~
  ~ Unless required by applicable law or agreed to in writing, software
  ~ distributed under the License is distributed on an "AS IS" BASIS,
  ~ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  ~ See the License for the specific language governing permissions and
  ~ limitations under the License.
-->
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/xsd/maven-4.0.0.xsd">
  <modelVersion>4.0.0</modelVersion>

  <parent>
    <artifactId>spark-parent</artifactId>
    <groupId>org.apache.zeppelin</groupId>
    <version>0.9.0-preview1</version>
    <relativePath>../pom.xml</relativePath>
  </parent>

  <groupId>org.apache.zeppelin</groupId>
  <artifactId>spark-interpreter</artifactId>
  <packaging>jar</packaging>
  <version>0.9.0-preview1</version>
  <name>Zeppelin: Spark Interpreter</name>
  <description>Zeppelin spark support</description>

  <properties>
    <interpreter.name>spark</interpreter.name>

    <!--library versions-->
    <jsoup.version>1.12.1</jsoup.version>
    <commons.exec.version>1.3</commons.exec.version>
    <commons.compress.version>1.9</commons.compress.version>
    <maven.plugin.api.version>3.0</maven.plugin.api.version>
    <aether.version>1.12</aether.version>
    <maven.aeither.provider.version>3.0.3</maven.aeither.provider.version>
    <wagon.version>2.7</wagon.version>
    <datanucleus.rdbms.version>3.2.9</datanucleus.rdbms.version>
    <datanucleus.apijdo.version>3.2.6</datanucleus.apijdo.version>
    <datanucleus.core.version>3.2.10</datanucleus.core.version>
    <scala.compile.version>${spark.scala.version}</scala.compile.version>

    <!-- settings -->
    <pyspark.test.exclude>**/PySparkInterpreterMatplotlibTest.java</pyspark.test.exclude>
    <pyspark.test.include>**/*Test.*</pyspark.test.include>
  </properties>

  <dependencies>
    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>zeppelin-display</artifactId>
      <version>${project.version}</version>
      <exclusions>
        <exclusion>
          <groupId>org.scala-lang</groupId>
          <artifactId>scala-library</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.scala-lang</groupId>
          <artifactId>scala-compiler</artifactId>
        </exclusion>
        <exclusion>
          <groupId>org.scala-lang</groupId>
          <artifactId>scalap</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>zeppelin-interpreter-shaded</artifactId>
      <version>${project.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>zeppelin-interpreter</artifactId>
      <version>${project.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>spark1-shims</artifactId>
      <version>${project.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>spark2-shims</artifactId>
      <version>${project.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>spark3-shims</artifactId>
      <version>${project.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>zeppelin-kotlin</artifactId>
      <version>${project.version}</version>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>zeppelin-python</artifactId>
      <version>${project.version}</version>
      <exclusions>
        <exclusion>
          <groupId>net.sf.py4j</groupId>
          <artifactId>py4j</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>zeppelin-jupyter-interpreter</artifactId>
      <version>${project.version}</version>
      <exclusions>
        <exclusion>
          <groupId>net.sf.py4j</groupId>
          <artifactId>py4j</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>${project.groupId}</groupId>
      <artifactId>zeppelin-python</artifactId>
      <version>${project.version}</version>
      <classifier>tests</classifier>
      <scope>test</scope>
      <exclusions>
        <exclusion>
          <groupId>net.sf.py4j</groupId>
          <artifactId>py4j</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>r</artifactId>
      <version>${project.version}</version>
      <classifier>tests</classifier>
      <scope>test</scope>
      <exclusions>
        <exclusion>
          <groupId>org.apache.spark</groupId>
          <artifactId>spark-core_2.11</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.zeppelin</groupId>
      <artifactId>zeppelin-jupyter-interpreter</artifactId>
      <version>${project.version}</version>
      <classifier>tests</classifier>
      <scope>test</scope>
      <exclusions>
        <exclusion>
          <groupId>net.sf.py4j</groupId>
          <artifactId>py4j</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>${project.groupId}</groupId>
      <artifactId>r</artifactId>
      <version>${project.version}</version>
      <exclusions>
        <exclusion>
          <groupId>*</groupId>
          <artifactId>*</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-repl_${spark.scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-core_${spark.scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
      <exclusions>
        <exclusion>
          <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-client</artifactId>
        </exclusion>
      </exclusions>
    </dependency>

    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-client</artifactId>
      <version>2.6.0</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.spark</groupId>
      <artifactId>spark-hive_${spark.scala.binary.version}</artifactId>
      <version>${spark.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-exec</artifactId>
      <version>${commons.exec.version}</version>
    </dependency>

    <dependency>
      <groupId>commons-logging</groupId>
      <artifactId>commons-logging</artifactId>
    </dependency>

    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-library</artifactId>
      <version>${spark.scala.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-compiler</artifactId>
      <version>${spark.scala.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.scala-lang</groupId>
      <artifactId>scala-reflect</artifactId>
      <version>${spark.scala.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-lang3</artifactId>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.apache.commons</groupId>
      <artifactId>commons-compress</artifactId>
      <version>${commons.compress.version}</version>
      <scope>provided</scope>
    </dependency>

    <dependency>
      <groupId>org.jsoup</groupId>
      <artifactId>jsoup</artifactId>
      <version>${jsoup.version}</version>
    </dependency>

    <!--test libraries-->
    <dependency>
      <groupId>org.scalatest</groupId>
      <artifactId>scalatest_${spark.scala.binary.version}</artifactId>
      <version>${scalatest.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.datanucleus</groupId>
      <artifactId>datanucleus-core</artifactId>
      <version>${datanucleus.core.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.datanucleus</groupId>
      <artifactId>datanucleus-api-jdo</artifactId>
      <version>${datanucleus.apijdo.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.datanucleus</groupId>
      <artifactId>datanucleus-rdbms</artifactId>
      <version>${datanucleus.rdbms.version}</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-core</artifactId>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.powermock</groupId>
      <artifactId>powermock-api-mockito</artifactId>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>org.powermock</groupId>
      <artifactId>powermock-module-junit4</artifactId>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>net.jodah</groupId>
      <artifactId>concurrentunit</artifactId>
      <version>0.4.4</version>
      <scope>test</scope>
    </dependency>

    <dependency>
      <groupId>com.mashape.unirest</groupId>
      <artifactId>unirest-java</artifactId>
      <version>1.4.9</version>
      <scope>test</scope>
    </dependency>
  </dependencies>

  <build>
    <plugins>
      <plugin>
        <artifactId>maven-enforcer-plugin</artifactId>
        <executions>
          <execution>
            <id>enforce</id>
            <phase>none</phase>
          </execution>
        </executions>
        <configuration>
          <rules>
            <requireJavaVersion>
              <version>1.7</version>
            </requireJavaVersion>
          </rules>
        </configuration>
      </plugin>

      <plugin>
        <groupId>com.googlecode.maven-download-plugin</groupId>
        <artifactId>download-maven-plugin</artifactId>
        <executions>
          <execution>
            <id>download-pyspark-files</id>
            <phase>validate</phase>
            <goals>
              <goal>wget</goal>
            </goals>
            <configuration>
              <readTimeOut>60000</readTimeOut>
              <retries>5</retries>
              <unpack>true</unpack>
              <url>${spark.src.download.url}</url>
              <outputDirectory>${project.build.directory}</outputDirectory>
            </configuration>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-antrun-plugin</artifactId>
        <executions>
          <execution>
            <id>zip-pyspark-files</id>
            <phase>generate-resources</phase>
            <goals>
              <goal>run</goal>
            </goals>
            <configuration>
              <target>
                <delete dir="../../interpreter/spark/pyspark" />
                <copy file="${project.build.directory}/${spark.archive}/python/lib/py4j-${py4j.version}-src.zip"
                      todir="${project.build.directory}/../../../interpreter/spark/pyspark" />
                <zip basedir="${project.build.directory}/${spark.archive}/python"
                     destfile="${project.build.directory}/../../../interpreter/spark/pyspark/pyspark.zip"
                     includes="pyspark/*.py,pyspark/**/*.py" />
              </target>
            </configuration>
          </execution>
        </executions>
      </plugin>

      <!-- include sparkr by default -->
      <plugin>
        <groupId>com.googlecode.maven-download-plugin</groupId>
        <artifactId>download-maven-plugin</artifactId>
        <executions>
          <execution>
            <id>download-sparkr-files</id>
            <phase>validate</phase>
            <goals>
              <goal>wget</goal>
            </goals>
            <configuration>
              <readTimeOut>60000</readTimeOut>
              <retries>5</retries>
              <url>${spark.bin.download.url}</url>
              <unpack>true</unpack>
              <outputDirectory>${project.build.directory}</outputDirectory>
            </configuration>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <artifactId>maven-resources-plugin</artifactId>
        <version>2.7</version>
        <executions>
          <execution>
            <id>copy-sparkr-files</id>
            <phase>generate-resources</phase>
            <goals>
              <goal>copy-resources</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.build.directory}/../../../interpreter/spark/R/lib</outputDirectory>
              <resources>
                <resource>
                  <directory>${project.build.directory}/spark-${spark.version}-bin-without-hadoop/R/lib</directory>
                </resource>
              </resources>
            </configuration>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <groupId>org.scalatest</groupId>
        <artifactId>scalatest-maven-plugin</artifactId>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-surefire-plugin</artifactId>
        <configuration>
          <forkCount>1</forkCount>
          <reuseForks>false</reuseForks>
          <argLine>-Xmx3072m -XX:MaxPermSize=256m</argLine>
          <excludes>
            <exclude>${pyspark.test.exclude}</exclude>
            <exclude>${tests.to.exclude}</exclude>
          </excludes>
          <environmentVariables>
            <PYTHONPATH>${project.build.directory}/../../../interpreter/spark/pyspark/pyspark.zip:${project.build.directory}/../../../interpreter/spark/pyspark/py4j-${py4j.version}-src.zip</PYTHONPATH>
            <ZEPPELIN_HOME>${basedir}/../../</ZEPPELIN_HOME>
          </environmentVariables>
        </configuration>
      </plugin>

      <plugin>
        <groupId>org.apache.maven.plugins</groupId>
        <artifactId>maven-shade-plugin</artifactId>
        <version>${plugin.shade.version}</version>
        <configuration>
          <filters>
            <filter>
              <artifact>*:*</artifact>
              <excludes>
                <exclude>org/datanucleus/**</exclude>
                <exclude>META-INF/*.SF</exclude>
                <exclude>META-INF/*.DSA</exclude>
                <exclude>META-INF/*.RSA</exclude>
              </excludes>
            </filter>
          </filters>
          <artifactSet>
            <excludes>
              <exclude>org.scala-lang:scala-library</exclude>
              <exclude>org.scala-lang:scala-compiler</exclude>
              <exclude>org.scala-lang:scala-reflect</exclude>
              <exclude>commons-lang:commons-lang</exclude>
              <exclude>org.apache.commons:commons-compress</exclude>
              <exclude>org.apache.zeppelin:zeppelin-interpreter-shaded</exclude>
            </excludes>
          </artifactSet>
          <transformers>
            <transformer implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
            <transformer implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
              <resource>reference.conf</resource>
            </transformer>
          </transformers>
          <relocations>
            <relocation>
              <pattern>io.netty</pattern>
              <shadedPattern>org.apache.zeppelin.io.netty</shadedPattern>
            </relocation>
            <relocation>
              <pattern>com.google</pattern>
              <shadedPattern>org.apache.zeppelin.com.google</shadedPattern>
            </relocation>
            <relocation>
              <pattern>com.facebook.fb303</pattern>
              <shadedPattern>org.apache.zeppelin.com.facebook.fb303</shadedPattern>
            </relocation>
          </relocations>
          <outputFile>${project.basedir}/../../interpreter/${interpreter.name}/${project.artifactId}-${project.version}.jar</outputFile>
        </configuration>
        <executions>
          <execution>
            <phase>package</phase>
            <goals>
              <goal>shade</goal>
            </goals>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <artifactId>maven-dependency-plugin</artifactId>
        <executions>
          <execution>
            <id>copy-dependencies</id>
            <phase>none</phase>
            <configuration>
              <skip>true</skip>
            </configuration>
          </execution>
          <execution>
            <id>copy-interpreter-dependencies</id>
            <phase>none</phase>
            <configuration>
              <skip>true</skip>
            </configuration>
          </execution>
          <execution>
            <id>copy-artifact</id>
            <phase>none</phase>
            <configuration>
              <skip>true</skip>
            </configuration>
          </execution>
          <execution>
            <id>copy-spark-interpreter</id>
            <phase>package</phase>
            <goals>
              <goal>copy</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.build.directory}/../../../interpreter/spark</outputDirectory>
              <overWriteReleases>false</overWriteReleases>
              <overWriteSnapshots>false</overWriteSnapshots>
              <overWriteIfNewer>true</overWriteIfNewer>
              <artifactItems>
                <artifactItem>
                  <groupId>${project.groupId}</groupId>
                  <artifactId>${project.artifactId}</artifactId>
                  <version>${project.version}</version>
                  <type>${project.packaging}</type>
                </artifactItem>
              </artifactItems>
            </configuration>
          </execution>
        </executions>
      </plugin>

      <plugin>
        <artifactId>maven-resources-plugin</artifactId>
        <executions>
          <execution>
            <id>copy-interpreter-setting</id>
            <phase>package</phase>
            <goals>
              <goal>resources</goal>
            </goals>
            <configuration>
              <outputDirectory>${project.build.directory}/../../../interpreter/${interpreter.name}</outputDirectory>
            </configuration>
          </execution>
        </executions>
      </plugin>
    </plugins>
  </build>
</project>