
MongoDB: saving a document after modifying it

Tags: mongodb, scala, apache-spark, persist

I'm new to Spark and cannot figure out how to persist a modified document back to the database:

import com.mongodb.spark._
import com.mongodb.spark.config.{ReadConfig, WriteConfig}
import com.typesafe.scalalogging.slf4j.LazyLogging
import org.apache.spark.{SparkConf, SparkContext}
import org.bson.Document


object Test extends App with LazyLogging {

  val conf = new SparkConf()
    .setAppName("test")
    .setMaster("local[*]")

  val sc = new SparkContext(conf)
  val readConfig = ReadConfig(Map("uri" -> "mongodb://127.0.0.1/", "database" -> "test", "collection" -> "customers"))
  val rdd = sc.loadFromMongoDB(readConfig) //.toDF()

  rdd.foreach { document =>
    val mongoDatabaseConnectionDetails =
      document.get("address").asInstanceOf[Document]
    mongoDatabaseConnectionDetails.put("street", "azerty")
  }


  val writeConfig = WriteConfig(Map(
    "uri" -> "mongodb://127.0.0.1/",
    "database" -> "test",
    "collection" -> "customers",
    "writeConcern.w" -> "majority"))

// ??

}
After the document has been changed, I want to replace it in the database.

Thanks in advance.
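
For reference, a more direct route is to transform the RDD and let the connector write it back in bulk. This is only a minimal sketch, assuming a 2.x mongo-spark-connector whose MongoSpark.save upserts documents that already carry an _id (check the replaceDocument write option for your version), and reusing the rdd and writeConfig defined above:

import com.mongodb.spark.MongoSpark

// RDDs are immutable and foreach runs on the workers, so mutating a
// document inside foreach never reaches the database. Map to a new RDD
// of modified documents instead and hand that to the connector.
val updated = rdd.map { document =>
  document.get("address").asInstanceOf[Document].put("street", "azerty")
  document
}

MongoSpark.save(updated, writeConfig) // replaces by _id, inserts otherwise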

OK, I finally managed to do it. I don't think this is the best way, but it works:

import com.mongodb.client.MongoCollection
import com.mongodb.spark._
import com.mongodb.spark.config.{ReadConfig, WriteConfig}
import com.typesafe.scalalogging.slf4j.LazyLogging
import org.apache.spark.{SparkConf, SparkContext}
import org.bson.Document


object Test extends App with LazyLogging {

  val conf = new SparkConf()
    .setAppName("test")
    .setMaster("local[*]")

  val sc = new SparkContext(conf)

  val writeConfig = WriteConfig(Map(
    "uri" -> "mongodb://127.0.0.1/",
    "database" -> "test",
    "collection" -> "customers"))

  val readConfig = ReadConfig(Map(
    "uri" -> "mongodb://127.0.0.1/",
    "database" -> "test",
    "collection" -> "customers"))

  val rdd = sc.loadFromMongoDB(readConfig)
  rdd.map { document =>
    // Modify the embedded document, then write the whole document back.
    val mongoDatabaseConnectionDetails =
      document.get("mongoDatabaseConnectionDetails").asInstanceOf[Document]
    mongoDatabaseConnectionDetails.replace("street", "azerty")
    document.replace("mongoDatabaseConnectionDetails",
      mongoDatabaseConnectionDetails)

    save(document, writeConfig)
  }.collect() // collect() forces the lazy map (and its saves) to run


  def save(document: Document, writeConfig: WriteConfig): Unit = {
    val mongoConnector = MongoConnector(writeConfig.asOptions)
    mongoConnector.withCollectionDo(writeConfig, { collection: MongoCollection[Document] =>
      // Match on _id and replace the stored document with the modified one.
      val searchDocument = new Document()
      searchDocument.append("_id", document.get("_id").asInstanceOf[String])
      collection.replaceOne(searchDocument, document)
    })
  }
}
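
A note on the design choice above: map(...).collect() pulls an array of Unit back to the driver just to force the side-effecting map to run, and it resolves the connector once per document. A sketch of the same write done per partition instead, reusing the answer's rdd, writeConfig, and field names (the schema assumptions are the answer's own):

rdd.foreachPartition { documents =>
  val mongoConnector = MongoConnector(writeConfig.asOptions)
  mongoConnector.withCollectionDo(writeConfig, { collection: MongoCollection[Document] =>
    // One collection handle per partition; foreachPartition is an action,
    // so no collect() is needed to trigger the writes.
    documents.foreach { document =>
      val details = document.get("mongoDatabaseConnectionDetails").asInstanceOf[Document]
      details.replace("street", "azerty")
      collection.replaceOne(new Document("_id", document.get("_id")), document)
    }
  })
}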

Is this the most concise way to demonstrate your actual problem?