Spring webflux mongodb:保存喷射数据
我有无数的数据要保存到mongodb中。我使用ReactiveMongoRepository.saveAll只保存256行数据,如何保存 这是保存部分:Spring webflux mongodb:保存喷射数据,spring,mongodb,spring-boot,spring-data,spring-webflux,Spring,Mongodb,Spring Boot,Spring Data,Spring Webflux,我有无数的数据要保存到mongodb中。我使用ReactiveMongoRepository.saveAll只保存256行数据,如何保存 这是保存部分: public Mono<Mailbox> insertMailbox(JsonNode node) { this.checkArgs(node); Flux<Mailbox> mailboxes = this.getMailboxDoc(node); return mailboxTypeRepos
public Mono<Mailbox> insertMailbox(JsonNode node) {
this.checkArgs(node);
Flux<Mailbox> mailboxes = this.getMailboxDoc(node);
return mailboxTypeRepository
.findByName(node.get("type").asText("").toUpperCase())
.flatMap(type -> mailboxRepository.saveAll(this.setMailBoxCategory(node, type, mailboxes))
.subscribeOn(Schedulers.parallel())
.publishOn(Schedulers.parallel())
.next())
.log(LogUtils.getLogger(this.getClass()))
.subscribeOn(Schedulers.parallel())
.publishOn(Schedulers.parallel());
}
公共Mono insertMailbox(JsonNode节点){
this.checkArgs(节点);
Flux mailboxes=this.getMailboxDoc(节点);
返回mailboxTypeRepository
.findByName(node.get(“type”).asText(“”.toUpperCase())
.flatMap(类型->mailboxRepository.saveAll(此.setMailBoxCategory(节点、类型、邮箱))
.subscribeOn(Schedulers.parallel())
.publishOn(Schedulers.parallel())
.next())
.log(LogUtils.getLogger(this.getClass()))
.subscribeOn(Schedulers.parallel())
.publishOn(Schedulers.parallel());
}
此getMailboxDoc部分:
private Flux<Mailbox> getMailboxDoc(JsonNode node) {
return accountsAdapter.getAllUidSite()
.parallel()
.flatMap(accounts -> Mono.just(Mailbox.builder()
.mid(UUID.randomUUID().toString())
.type(TypeEnums.getTypeEnum(node.get("type").asText("")))
.category(CategoryEnums.getCategoryEnum(node.get("category").asText("")))
.interlocutor(Lists.asList(node.get("from").asText(), new String[]{node.get("to").asText()}))
.from(node.get("from").asText())
.to(accounts.getUidSite())
.title(node.get("title").asText())
.content(node.get("content").asText())
.groupId(this.getGroupId(node.get("groupId")))
.isDelete(false)
.isRead(false)
.site(node.get("site").asText())
.sendTime(ZonedDateTime.now(ZoneOffset.UTC).toLocalDateTime())
.data(mapper.convertValue(node, Map.class))
.build()))
.sequential();
}
私有流量getMailboxDoc(JsonNode){
返回accountsAdapter.getAllUidSite()
.parallel()
.flatMap(accounts->Mono.just(Mailbox.builder())
.mid(UUID.randomUUID().toString())
.type(TypeEnums.getTypeEnum(node.get(“type”).asText(“”))
.category(CategoryEnums.getCategoryEnum(node.get(“category”).asText(“”))
.interlector(Lists.asList(node.get(“from”).asText(),新字符串[]{node.get(“to”).asText()}))
.from(node.get(“from”).asText())
.to(accounts.getUidSite())
.title(node.get(“title”).asText())
.content(node.get(“content”).asText())
.groupId(这个.getGroupId(node.get(“groupId”))
.isDelete(错)
.isRead(错)
.site(node.get(“site”).asText())
.sendTime(ZoneDateTime.now(ZoneOffset.UTC).toLocalDateTime())
.data(mapper.convertValue(节点,Map.class))
.build())
.sequential();
}
设置setMailBoxCategory部分:
private Flux<Mailbox> setMailBoxCategory(JsonNode node, MailboxType type, Flux<Mailbox> mailboxes) {
log.debug("setMailBoxCategory :: {}", type.toString());
Optional<LinkedHashMap<String, String>> target = type.getCategory()
.stream()
.filter(category -> category.get("NAME").equalsIgnoreCase(node.get("category").asText())).findAny();
if (target.isPresent()) {
return mailboxes.parallel()
.map(mailbox -> {
mailbox.setCategoryId(target.get().get("ID"));
return mailbox;
})
.sequential();
} else {
throw new IllegalArgException("");
}
}
专用流量setMailBoxCategory(JsonNode节点、MailboxType类型、流量邮箱){
debug(“setMailBoxCategory::{}”,type.toString());
可选目标=type.getCategory()
.stream()
.filter(category->category.get(“NAME”).equalsIgnoreCase(node.get(“category”).astex()).findAny();
if(target.isPresent()){
返回邮箱。parallel()
.map(邮箱->{
mailbox.setCategoryId(target.get().get(“ID”);
返回邮箱;
})
.sequential();
}否则{
抛出新的IllegalArgException(“”);
}
}
accountsRepository.getAllUidSite()用于使用r2dbc获取信息