Error connecting MongoDB and Spark from Java: Caused by: java.lang.ClassNotFoundException: com.mongodb.MongoDriverInformation


I have the following small piece of Java code, written in Eclipse, whose goal is to read a collection from MongoDB and turn the data into an RDD so that Spark can work with it:

package spark;

import org.apache.spark.api.java.JavaSparkContext;
import org.apache.spark.sql.SparkSession;
import org.bson.Document;

import com.mongodb.spark.MongoSpark;
import com.mongodb.spark.rdd.api.java.JavaMongoRDD;

public class Spark {

    public static void createRdd() {

        // Build a local Spark session pointing at the source MongoDB collection.
        SparkSession spark = SparkSession.builder()
                .master("local")
                .appName("MongoSparkConnector")
                .config("spark.mongodb.input.uri", "mongodb://127.0.0.1/TheFoodPlanner.join")
                .getOrCreate();

        JavaSparkContext jsc = new JavaSparkContext(spark.sparkContext());

        // Load the collection as an RDD of BSON documents.
        JavaMongoRDD<Document> rddrecipes = MongoSpark.load(jsc);

        jsc.close();
    }
}
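
For reference, this is the kind of sanity check I would run on the RDD once the load works (the count/first calls here are illustrative only, not part of the failing program above):

// Illustrative checks only; MongoSpark.load(jsc) is lazy, so the
// error actually surfaces when an action like count() is triggered.
long total = rddrecipes.count();      // number of documents loaded
Document first = rddrecipes.first();  // grab one document from the collection
System.out.println("Loaded " + total + " documents; first: " + first.toJson());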
The pom.xml contains the following dependencies:

<dependency>
    <groupId>org.mongodb</groupId>
    <artifactId>mongo-java-driver</artifactId>
    <version>3.4.1</version>
</dependency>
<dependency>
    <groupId>org.apache.spark</groupId>
    <artifactId>spark-sql_2.11</artifactId>
    <version>2.4.2</version>
</dependency>
<dependency>
    <groupId>log4j</groupId>
    <artifactId>log4j</artifactId>
    <version>1.2.17</version>
</dependency>
<dependency>
    <groupId>org.mongodb.spark</groupId>
    <artifactId>mongo-spark-connector_2.11</artifactId>
    <version>2.4.2</version>
</dependency>
<dependency>
    <groupId>org.json</groupId>
    <artifactId>json</artifactId>
    <version>20180130</version>
</dependency>
<dependency>
    <groupId>com.google.code.gson</groupId>
    <artifactId>gson</artifactId>
    <version>2.2.2</version>
</dependency>
<dependency>
    <groupId>org.mongodb</groupId>
    <artifactId>bson</artifactId>
    <version>3.4.1</version>
</dependency>
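
From what I can tell, mongo-spark-connector_2.11 2.4.2 pulls in a newer Java driver than 3.4.1 transitively, so my guess (unverified) is that pinning mongo-java-driver 3.4.1 and bson 3.4.1 causes the mismatch: com.mongodb.MongoDriverInformation does not exist under that package in the 3.4.x driver. A sketch of what I mean, dropping the pinned driver entries and keeping only the connector so its own driver version is resolved:

<!-- Sketch (assumption on my part, not a verified fix): let the connector
     supply its own compatible MongoDB driver instead of pinning
     mongo-java-driver/bson 3.4.1 alongside it. -->
<dependency>
    <groupId>org.mongodb.spark</groupId>
    <artifactId>mongo-spark-connector_2.11</artifactId>
    <version>2.4.2</version>
</dependency>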
