Checking Which Versions of Hadoop and Other Components Are Compatible with Spark

(For example: is Spark 2.4.0 compatible with Hadoop 3.0?)

When installing components that Spark depends on or integrates with, such as the JDK, Hadoop, YARN, Hive, or Kafka, you have to take the version compatibility between those components and Spark into account. This mapping can be found in the pom.xml file of the Spark source code.

1. Download the Spark Source Code

Open https://github.com/apache/spark, switch to a release tag such as v2.4.0-rc5, click the "Clone or download" button, and then click "Download ZIP" to download the source archive. (If you prefer to script this step, see the sketch below.)
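
The following minimal Python sketch downloads and unpacks the same archive. It assumes GitHub's source-archive URL pattern (https://github.com/<org>/<repo>/archive/refs/tags/<tag>.zip), which is a convention of GitHub rather than anything Spark itself documents:

    import urllib.request
    import zipfile

    # Example tag; any tag listed on the GitHub tags page works.
    tag = "v2.4.0-rc5"
    # Assumed GitHub archive URL pattern (a GitHub convention, not a Spark API).
    url = f"https://github.com/apache/spark/archive/refs/tags/{tag}.zip"

    archive = f"spark-{tag}.zip"
    urllib.request.urlretrieve(url, archive)

    # The zip contains one top-level directory named after the tag
    # without the leading "v", e.g. spark-2.4.0-rc5/.
    with zipfile.ZipFile(archive) as zf:
        zf.extractall(".")
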
2. Inspect the pom.xml File

After unpacking the downloaded archive, open the pom.xml file in the root of the source tree and look at the entries inside the <properties> tag. They list the versions of the other components that this Spark release is built against; for example, <hadoop.version>2.6.5</hadoop.version> means the default Hadoop version is 2.6.5. The relevant section looks like this:

  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    <project.reporting.outputEncoding>UTF-8</project.reporting.outputEncoding>
    <java.version>1.8</java.version>
    <maven.compiler.source>${java.version}</maven.compiler.source>
    <maven.compiler.target>${java.version}</maven.compiler.target>
    <maven.version>3.5.4</maven.version>
    <sbt.project.name>spark</sbt.project.name>
    <slf4j.version>1.7.16</slf4j.version>
    <log4j.version>1.2.17</log4j.version>
    <hadoop.version>2.6.5</hadoop.version>
    <protobuf.version>2.5.0</protobuf.version>
    <yarn.version>${hadoop.version}</yarn.version>
    <flume.version>1.6.0</flume.version>
    <zookeeper.version>3.4.6</zookeeper.version>
    <curator.version>2.6.0</curator.version>
    <hive.group>org.spark-project.hive</hive.group>
    <!-- Version used in Maven Hive dependency -->
    <hive.version>1.2.1.spark2</hive.version>
    <!-- Version used for internal directory structure -->
    <hive.version.short>1.2.1</hive.version.short>
    <derby.version>10.12.1.1</derby.version>
    <parquet.version>1.10.0</parquet.version>
    <orc.version>1.5.2</orc.version>
    <orc.classifier>nohive</orc.classifier>
    <hive.parquet.version>1.6.0</hive.parquet.version>
    <jetty.version>9.3.24.v20180605</jetty.version>
    <javaxservlet.version>3.1.0</javaxservlet.version>
    <chill.version>0.9.3</chill.version>
    <ivy.version>2.4.0</ivy.version>
    <oro.version>2.0.8</oro.version>
    <codahale.metrics.version>3.1.5</codahale.metrics.version>
    <avro.version>1.8.2</avro.version>
    <avro.mapred.classifier>hadoop2</avro.mapred.classifier>
    <aws.kinesis.client.version>1.8.10</aws.kinesis.client.version>
    <!-- Should be consistent with Kinesis client dependency -->
    <aws.java.sdk.version>1.11.271</aws.java.sdk.version>
    <!-- the producer is used in tests -->
    <aws.kinesis.producer.version>0.12.8</aws.kinesis.producer.version>
    <!--  org.apache.httpcomponents/httpclient-->
    <commons.httpclient.version>4.5.6</commons.httpclient.version>
    <commons.httpcore.version>4.4.10</commons.httpcore.version>
    <!--  commons-httpclient/commons-httpclient-->
    <httpclient.classic.version>3.1</httpclient.classic.version>
    <commons.math3.version>3.4.1</commons.math3.version>
    <!-- managed up from 3.2.1 for SPARK-11652 -->
    <commons.collections.version>3.2.2</commons.collections.version>
    <scala.version>2.11.12</scala.version>
    <scala.binary.version>2.11</scala.binary.version>
    <codehaus.jackson.version>1.9.13</codehaus.jackson.version>
    <fasterxml.jackson.version>2.6.7</fasterxml.jackson.version>
    <fasterxml.jackson.databind.version>2.6.7.1</fasterxml.jackson.databind.version>
    <snappy.version>1.1.7.1</snappy.version>
    <netlib.java.version>1.1.2</netlib.java.version>
    <calcite.version>1.2.0-incubating</calcite.version>
    <commons-codec.version>1.10</commons-codec.version>
    <commons-io.version>2.4</commons-io.version>
    <!-- org.apache.commons/commons-lang/-->
    <commons-lang2.version>2.6</commons-lang2.version>
    <!-- org.apache.commons/commons-lang3/-->
    <commons-lang3.version>3.5</commons-lang3.version>
    <datanucleus-core.version>3.2.10</datanucleus-core.version>
    <janino.version>3.0.9</janino.version>
    <jersey.version>2.22.2</jersey.version>
    <joda.version>2.9.3</joda.version>
    <jodd.version>3.5.2</jodd.version>
    <jsr305.version>1.3.9</jsr305.version>
    <libthrift.version>0.9.3</libthrift.version>
    <antlr4.version>4.7</antlr4.version>
    <jpam.version>1.1</jpam.version>
    <selenium.version>2.52.0</selenium.version>
    <!--
    Managed up from older version from Avro; sync with jackson-module-paranamer dependency version
    -->
    <paranamer.version>2.8</paranamer.version>
    <maven-antrun.version>1.8</maven-antrun.version>
    <commons-crypto.version>1.0.0</commons-crypto.version>
    <!--
    If you are changing Arrow version specification, please check ./python/pyspark/sql/utils.py,
    ./python/run-tests.py and ./python/setup.py too.
    -->
    <arrow.version>0.10.0</arrow.version>

    <test.java.home>${java.home}</test.java.home>
    <test.exclude.tags></test.exclude.tags>
    <test.include.tags></test.include.tags>

    <!-- Package to use when relocating shaded classes. -->
    <spark.shade.packageName>org.spark_project</spark.shade.packageName>

    <!-- Modules that copy jars to the build directory should do so under this location. -->
    <jars.target.dir>${project.build.directory}/scala-${scala.binary.version}/jars</jars.target.dir>

    <!-- Allow modules to enable / disable certain build plugins easily. -->
    <build.testJarPhase>prepare-package</build.testJarPhase>
    <build.copyDependenciesPhase>none</build.copyDependenciesPhase>

    <!--
      Dependency scopes that can be overridden by enabling certain profiles. These profiles are
      declared in the projects that build assemblies.

      For other projects the scope should remain as "compile", otherwise they are not available
      during compilation if the dependency is transitive (e.g. "graphx/" depending on "core/" and
      needing Hadoop classes in the classpath to compile).
    -->
    <flume.deps.scope>compile</flume.deps.scope>
    <hadoop.deps.scope>compile</hadoop.deps.scope>
    <hive.deps.scope>compile</hive.deps.scope>
    <orc.deps.scope>compile</orc.deps.scope>
    <parquet.deps.scope>compile</parquet.deps.scope>
    <parquet.test.deps.scope>test</parquet.test.deps.scope>

    <!--
      Overridable test home. So that you can call individual pom files directly without
      things breaking.
    -->
    <spark.test.home>${session.executionRootDirectory}</spark.test.home>

    <CodeCacheSize>512m</CodeCacheSize>
  </properties>
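
To pull these values out programmatically instead of reading the file by eye, you can parse the <properties> block with Python's standard library. A minimal sketch, assuming the source tree was unpacked to spark-2.4.0-rc5/ as in step 1 (the path is illustrative):

    import xml.etree.ElementTree as ET

    # Maven POMs declare this default XML namespace.
    NS = {"m": "http://maven.apache.org/POM/4.0.0"}

    # Illustrative path; point this at wherever the source was unpacked.
    tree = ET.parse("spark-2.4.0-rc5/pom.xml")
    props = tree.getroot().find("m:properties", NS)

    # Print each property, e.g. "hadoop.version = 2.6.5".
    for prop in props:
        name = prop.tag.split("}", 1)[-1]  # drop the "{namespace}" prefix
        print(name, "=", (prop.text or "").strip())

Against the Spark 2.4.0 pom this prints, among others, hadoop.version = 2.6.5 and scala.version = 2.11.12. That answers the question in the title by implication: Hadoop 2.6.5 is the version Spark 2.4.0 is built and tested against by default, so versions the pom does not mention (such as Hadoop 3.0) are not guaranteed to be compatible.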
That's all.
