You can download spark-hive_2.10-1.3.0.jar on this page.
Apache Open Source
The spark-hive_2.10-1.3.0.jar file contains the following entries.
META-INF/DEPENDENCIES META-INF/LICENSE META-INF/MANIFEST.MF META-INF/NOTICE META-INF/maven/org.apache.spark/spark-hive_2.10/pom.properties META-INF/maven/org.apache.spark/spark-hive_2.10/pom.xml META-INF/maven/org.spark-project.spark/unused/pom.properties META-INF/maven/org.spark-project.spark/unused/pom.xml org.apache.spark.sql.hive.DeferredObjectAdapter.class org.apache.spark.sql.hive.ExtendedHiveQlParser.class org.apache.spark.sql.hive.HadoopTableReader.class org.apache.spark.sql.hive.HiveContext.class org.apache.spark.sql.hive.HiveFunctionRegistry.class org.apache.spark.sql.hive.HiveFunctionWrapper.class org.apache.spark.sql.hive.HiveGenericUdaf.class org.apache.spark.sql.hive.HiveGenericUdf.class org.apache.spark.sql.hive.HiveGenericUdtf.class org.apache.spark.sql.hive.HiveInspectors.class org.apache.spark.sql.hive.HiveMetastoreCatalog.class org.apache.spark.sql.hive.HiveMetastoreTypes.class org.apache.spark.sql.hive.HiveQl.class org.apache.spark.sql.hive.HiveShim.class org.apache.spark.sql.hive.HiveSimpleUdf.class org.apache.spark.sql.hive.HiveStrategies.class org.apache.spark.sql.hive.HiveUdaf.class org.apache.spark.sql.hive.HiveUdafFunction.class org.apache.spark.sql.hive.InsertIntoHiveTable.class org.apache.spark.sql.hive.MetastoreRelation.class org.apache.spark.sql.hive.NativePlaceholder.class org.apache.spark.sql.hive.ResolveUdtfsAlias.class org.apache.spark.sql.hive.ShimFileSinkDesc.class org.apache.spark.sql.hive.SparkHiveDynamicPartitionWriterContainer.class org.apache.spark.sql.hive.SparkHiveWriterContainer.class org.apache.spark.sql.hive.TableReader.class org.apache.spark.sql.hive.execution.AddFile.class org.apache.spark.sql.hive.execution.AddJar.class org.apache.spark.sql.hive.execution.AnalyzeTable.class org.apache.spark.sql.hive.execution.CreateMetastoreDataSource.class org.apache.spark.sql.hive.execution.CreateMetastoreDataSourceAsSelect.class org.apache.spark.sql.hive.execution.CreateTableAsSelect.class 
org.apache.spark.sql.hive.execution.DescribeHiveTableCommand.class org.apache.spark.sql.hive.execution.DropTable.class org.apache.spark.sql.hive.execution.HiveNativeCommand.class org.apache.spark.sql.hive.execution.HiveScriptIOSchema.class org.apache.spark.sql.hive.execution.HiveTableScan.class org.apache.spark.sql.hive.execution.InsertIntoHiveTable.class org.apache.spark.sql.hive.execution.ScriptTransformation.class org.apache.spark.sql.hive.package.class org.apache.spark.sql.hive.test.TestHive.class org.apache.spark.sql.hive.test.TestHiveContext.class org.apache.spark.unused.UnusedStubClass.class
The spark-hive_2.10-1.3.0.pom file has the following content.
<?xml version="1.0" encoding="UTF-8"?> <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd"> <parent> <artifactId>spark-parent_2.10</artifactId> <groupId>org.apache.spark</groupId> <version>1.3.0</version> <relativePath>../../pom.xml</relativePath> </parent> <modelVersion>4.0.0</modelVersion> <groupId>org.apache.spark</groupId> <artifactId>spark-hive_2.10</artifactId> <name>Spark Project Hive</name> <url>http://spark.apache.org/</url> <build> <outputDirectory>target/scala-${scala.binary.version}/classes</outputDirectory> <testOutputDirectory>target/scala-${scala.binary.version}/test-classes</testOutputDirectory> <plugins> <plugin> <groupId>org.scalatest</groupId> <artifactId>scalatest-maven-plugin</artifactId> <configuration> <argLine>-da -Xmx3g -XX:MaxPermSize=${MaxPermGen} -XX:ReservedCodeCacheSize=512m</argLine> </configuration> </plugin> <plugin> <groupId>org.codehaus.mojo</groupId> <artifactId>build-helper-maven-plugin</artifactId> <executions> <execution> <id>add-default-sources</id> <phase>generate-sources</phase> <goals> <goal>add-source</goal> </goals> <configuration> <sources> <source>v${hive.version.short}/src/main/scala</source> </sources> </configuration> </execution> </executions> </plugin> <plugin> <artifactId>maven-dependency-plugin</artifactId> <version>2.4</version> <executions> <execution> <id>copy-dependencies</id> <phase>package</phase> <goals> <goal>copy-dependencies</goal> </goals> <configuration> <outputDirectory>${basedir}/../../lib_managed/jars</outputDirectory> <overWriteReleases>false</overWriteReleases> <overWriteSnapshots>false</overWriteSnapshots> <overWriteIfNewer>true</overWriteIfNewer> <includeGroupIds>org.datanucleus</includeGroupIds> </configuration> </execution> </executions> </plugin> </plugins> </build> <profiles> <profile> <id>hive</id> <build> <plugins> <plugin> 
<groupId>org.codehaus.mojo</groupId> <artifactId>build-helper-maven-plugin</artifactId> <executions> <execution> <id>add-scala-test-sources</id> <phase>generate-test-sources</phase> <goals> <goal>add-test-source</goal> </goals> <configuration> <sources> <source>src/test/scala</source> <source>compatibility/src/test/scala</source> </sources> </configuration> </execution> </executions> </plugin> </plugins> </build> </profile> <profile> <id>hive-0.12.0</id> <dependencies> <dependency> <groupId>com.twitter</groupId> <artifactId>parquet-hive-bundle</artifactId> <version>1.5.0</version> </dependency> </dependencies> </profile> </profiles> <dependencies> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-core_2.10</artifactId> <version>1.3.0</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-sql_2.10</artifactId> <version>1.3.0</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.spark-project.hive</groupId> <artifactId>hive-metastore</artifactId> <version>0.13.1a</version> <scope>compile</scope> <exclusions> <exclusion> <artifactId>guava</artifactId> <groupId>com.google.guava</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>commons-httpclient</groupId> <artifactId>commons-httpclient</artifactId> <version>3.1</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.spark-project.hive</groupId> <artifactId>hive-exec</artifactId> <version>0.13.1a</version> <scope>compile</scope> <exclusions> <exclusion> <artifactId>commons-logging</artifactId> <groupId>commons-logging</groupId> </exclusion> <exclusion> <artifactId>kryo</artifactId> <groupId>com.esotericsoftware.kryo</groupId> </exclusion> <exclusion> <artifactId>avro-mapred</artifactId> <groupId>org.apache.avro</groupId> </exclusion> <exclusion> <artifactId>guava</artifactId> <groupId>com.google.guava</groupId> </exclusion> </exclusions> </dependency> <dependency> 
<groupId>org.codehaus.jackson</groupId> <artifactId>jackson-mapper-asl</artifactId> <version>1.9.13</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.spark-project.hive</groupId> <artifactId>hive-serde</artifactId> <version>0.13.1a</version> <scope>compile</scope> <exclusions> <exclusion> <artifactId>commons-logging</artifactId> <groupId>commons-logging</groupId> </exclusion> <exclusion> <artifactId>commons-logging-api</artifactId> <groupId>commons-logging</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.apache.avro</groupId> <artifactId>avro</artifactId> <version>1.7.6</version> <scope>compile</scope> </dependency> <dependency> <groupId>org.apache.avro</groupId> <artifactId>avro-mapred</artifactId> <version>1.7.6</version> <classifier>hadoop2</classifier> <scope>compile</scope> <exclusions> <exclusion> <artifactId>netty</artifactId> <groupId>io.netty</groupId> </exclusion> <exclusion> <artifactId>jetty</artifactId> <groupId>org.mortbay.jetty</groupId> </exclusion> <exclusion> <artifactId>jetty-util</artifactId> <groupId>org.mortbay.jetty</groupId> </exclusion> <exclusion> <artifactId>servlet-api</artifactId> <groupId>org.mortbay.jetty</groupId> </exclusion> <exclusion> <artifactId>velocity</artifactId> <groupId>org.apache.velocity</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.scalacheck</groupId> <artifactId>scalacheck_2.10</artifactId> <version>1.11.3</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>test-interface</artifactId> <groupId>org.scala-sbt</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.10</version> <scope>test</scope> <exclusions> <exclusion> <artifactId>hamcrest-core</artifactId> <groupId>org.hamcrest</groupId> </exclusion> </exclusions> </dependency> <dependency> <groupId>org.codehaus.groovy</groupId> <artifactId>groovy-all</artifactId> <version>2.3.7</version> 
<scope>provided</scope> </dependency> <dependency> <groupId>org.scalatest</groupId> <artifactId>scalatest_2.10</artifactId> <version>2.2.1</version> <scope>test</scope> </dependency> </dependencies> <properties> <sbt.project.name>hive</sbt.project.name> </properties> </project>
<dependency> <groupId>org.apache.spark</groupId> <artifactId>spark-hive_2.10</artifactId> <version>1.3.0</version> </dependency>
If you believe that the spark-hive_2.10-1.3.0.jar file downloaded from the Maven central repository is inappropriate — for example, if it contains malicious code or violates a copyright — please contact us by email. Thank you.
Download spark-hive_2.10-1.3.0.jar file