Apache Spark: unable to initialize the H2O context via Spark (IPs are not equal)


I am trying to initialize an H2O context using Spark on YARN together with HBase and Livy. My POM file looks like this:

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <java.version>1.8</java.version>
        <gson.version>2.8.0</gson.version>
        <java.home>${env.JAVA_HOME}</java.home>
    </properties>

    <dependencies>

        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-core_2.10 -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.11</artifactId>
            <version>2.2.0</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/org.apache.spark/spark-sql_2.10 -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.11</artifactId>
            <version>2.2.0</version>

        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-hive_2.11</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-mllib_2.11</artifactId>
            <version>2.2.0</version>
        </dependency>
        <dependency>
            <groupId>com.databricks</groupId>
            <artifactId>spark-csv_2.11</artifactId>
            <version>1.5.0</version>
        </dependency>
        <!-- https://mvnrepository.com/artifact/ai.h2o/h2o-core -->
        <dependency>
            <groupId>ai.h2o</groupId>
            <artifactId>h2o-core</artifactId>
            <version>3.14.0.7</version>
            <!-- <scope>runtime</scope> -->
        </dependency>
        <!-- https://mvnrepository.com/artifact/ai.h2o/h2o-algos -->
        <dependency>
            <groupId>ai.h2o</groupId>
            <artifactId>h2o-algos</artifactId>
            <version>3.14.0.7</version>
            <!-- <scope>runtime</scope> -->
        </dependency>
        <!-- https://mvnrepository.com/artifact/ai.h2o/h2o-genmodel -->
        <dependency>
            <groupId>ai.h2o</groupId>
            <artifactId>h2o-genmodel</artifactId>
            <version>3.14.0.7</version>
            <!-- <scope>runtime</scope> -->
        </dependency>
        <!-- https://mvnrepository.com/artifact/ai.h2o/sparkling-water-core_2.10 -->
        <dependency>
            <!-- <groupId>ai.h2o</groupId> <artifactId>sparkling-water-core_2.10</artifactId> 
                <version>1.6.11</version> -->

            <groupId>ai.h2o</groupId>
            <artifactId>sparkling-water-core_2.11</artifactId>
            <version>2.1.16</version>
        </dependency>
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>${gson.version}</version>
        </dependency>
        <dependency>
            <groupId>com.cloudera.livy</groupId>
            <artifactId>livy-client-http</artifactId>
            <version>0.3.0</version>
        </dependency>
        <dependency>
            <groupId>com.cloudera.livy</groupId>
            <artifactId>livy-api</artifactId>
            <version>0.3.0</version>
        </dependency>
        <dependency>
            <groupId>it.unimi.dsi</groupId>
            <artifactId>fastutil</artifactId>
            <version>7.1.0</version>
        </dependency>
        <dependency>
            <groupId>org.scala-lang</groupId>
            <artifactId>scala-library</artifactId>
            <version>2.11.8</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-spark</artifactId>
            <version>1.1.2.2.6.2.14-5</version>
        </dependency>
    </dependencies>
But I get the following exception:

java.util.concurrent.ExecutionException: java.lang.RuntimeException: org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 22.0 failed 4 times, most recent failure: Lost task 1.3 in stage 22.0 (TID 35134, auper01-02-30-05-1.prod.vroc.com.au, executor 9): java.lang.AssertionError: assertion failed: SpreadRDD failure - IPs are not equal: (10,auper01-02-30-03-0.prod.vroc.com.au,-1) != (9, auper01-02-30-05-1.prod.vroc.com.au)
    at scala.Predef$.assert(Predef.scala:170)
    at org.apache.spark.h2o.backends.internal.InternalBackendUtils$$anonfun$7.apply(InternalBackendUtils.scala:101)
    at org.apache.spark.h2o.backends.internal.InternalBackendUtils$$anonfun$7.apply(InternalBackendUtils.scala:100)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$class.foreach(Iterator.scala:893)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
    at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
.setConf("spark.executor.instances", "10")
            .setConf("spark.driver.memory", "10g")
            .setConf("spark.driver.cores", "5")
            .setConf("spark.executor.memory", "5g") // memory per executor
            .setConf("spark.executor.cores", "5")
            .setConf("spark.yarn.executor.memoryOverhead",
                    "5000")
            .setConf("spark.rdd.compress","true")
            .setConf("spark.default.parallelism", "3000")
            .setConf("spark.serializer", "org.apache.spark.serializer.KryoSerializer")
            .setConf("spark.driver.extraJavaOptions", "-XX:+UseG1GC -Xss5000m")
            .setConf("spark.executor.extraJavaOptions", "-XX:+UseG1GC -Xss5000m")
            .setConf("spark.sql.shuffle.partitions", "5000")
            .setConf("spark.kryoserializer.buffer.max", "1g")
            .setConf("spark.ext.h2o.cluster.size", "-1")
            .setConf("spark.ext.h2o.cloud.timeout", "6000")
            .setConf("spark.ext.h2o.spreadrdd.retries", "-1")
            .setConf("spark.ext.h2o.nthreads", "-1")
            .setConf("spark.ext.h2o.disable.ga", "true")
            .setConf("spark.ext.h2o.dummy.rdd.mul.factor", "10")
            .setConf("spark.ext.h2o.fail.on.unsupported.spark.param",
                    "false")
            .setConf("spark.ext.h2o.client.network.mask","10.0.0.0/8")  
            .setConf("spark.ext.h2o.node.network.mask","10.0.0.0/8")
            .setConf("spark.ext.h2o.repl.enabled","false")  
            .setConf("spark.ext.h2o.repl.enabled","false")  
            .setConf("spark.driver.extraClassPath", "/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/phoenix-server.jar:/usr/hdp/current/phoenix/lib/phoenix-spark2.jar:/usr/hdp/current/hbase/lib/hbase-common.jar:/usr/hdp/current/hbase/lib/hbase-server.jar")
            .setConf("spark.executor.extraClassPath", "/usr/hdp/current/phoenix-client/phoenix-client.jar:/usr/hdp/current/phoenix-client/phoenix-server.jar:/usr/hdp/current/phoenix/lib/phoenix-spark2.jar:/usr/hdp/current/hbase/lib/hbase-common.jar:/usr/hdp/current/hbase/lib/hbase-server.jar")
            .setConf("spark.shuffle.compress", "true")
            .setConf("spark.shuffle.spill.compress", "true")
            .setConf("spark.driver.maxResultSize", "0")
            .setConf("spark.network.timeout","600s")
            .setConf("spark.executor.heartbeatInterval","40s")      
            .build();
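
For completeness, the H2O context itself is created from inside a Livy job once the client above is built. Below is a minimal sketch of such a job, assuming the internal Sparkling Water backend and calling H2OContext.getOrCreate from Java; the class name StartH2OJob is a placeholder, not the actual job class:

    import com.cloudera.livy.Job;
    import com.cloudera.livy.JobContext;
    import org.apache.spark.h2o.H2OContext;

    // Illustrative Livy job: H2OContext.getOrCreate is the call that kicks off
    // the SpreadRDD phase, which is where the "IPs are not equal" assertion fires.
    public class StartH2OJob implements Job<String> {
        @Override
        public String call(JobContext jc) throws Exception {
            // jc.sc() is the JavaSparkContext managed by Livy; .sc() unwraps
            // the underlying SparkContext that Sparkling Water expects.
            H2OContext hc = H2OContext.getOrCreate(jc.sc().sc());
            return hc.toString();
        }
    }

The jar containing the job would then be shipped with client.uploadJar(...).get() and run with client.submit(new StartH2OJob()).get().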