Lucene Hibernate搜索手动索引抛出一个;org.hibernate.transientObject异常:实例未与此会话关联;

Lucene Hibernate搜索手动索引抛出一个;org.hibernate.transientObject异常:实例未与此会话关联;,lucene,lazy-loading,hibernate-search,full-text-indexing,hibernate-onetomany,Lucene,Lazy Loading,Hibernate Search,Full Text Indexing,Hibernate Onetomany,我在SpringBoot2应用程序上使用HibernateSearch5.11,允许进行全文研究。 这个图书馆需要索引文件 当我的应用程序启动时,我会尝试每五分钟手动重新索引一个索引实体(MyEntity.class)的数据(出于特定原因,由于我的服务器上下文) 我尝试为MyEntity.class的数据编制索引 MyEntity.class有一个属性attachedFiles,它是一个哈希集,由join@OneToMany()填充,并启用了延迟加载模式: @OneToMany(mappedBy

我在 Spring Boot 2 应用程序上使用 Hibernate Search 5.11 来实现全文搜索。该库需要对文档建立索引。

当我的应用程序启动时,我会尝试每五分钟手动重新索引一个索引实体(MyEntity.class)的数据(出于特定原因,由于我的服务器上下文)

我尝试为MyEntity.class的数据编制索引

MyEntity.class有一个属性attachedFiles,它是一个哈希集,由join@OneToMany()填充,并启用了延迟加载模式:

// One-to-many MyEntity -> AttachedFile. @OneToMany is LAZY by default, and the
// question states lazy loading is enabled, so this collection can only be
// initialized while the owning Hibernate session still holds the entity —
// indexing a detached instance is what triggers TransientObjectException.
@OneToMany(mappedBy = "myEntity", cascade = CascadeType.ALL, orphanRemoval = true)
private Set<AttachedFile> attachedFiles = new HashSet<>();
在本例中,调试模式在实体hashset值上指示类似“无法加载[…]”的消息

如果HashSet为空(非null,仅为空),则不会引发异常

我的索引方法:

/**
 * Manually (re)indexes the MyEntity rows identified by {@code ids} into the
 * Hibernate Search full-text index.
 *
 * <p>The ids are processed in chunks of {@code BATCH_SIZE}: each chunk is
 * loaded, queued for indexing, flushed to the indexes, and only then is the
 * session cleared. The original version loaded ALL entities up front and
 * cleared the session inside the loop, detaching the not-yet-indexed
 * entities; Hibernate Search then failed with
 * {@code org.hibernate.TransientObjectException} when it tried to load their
 * lazy associations (e.g. attachedFiles).
 *
 * @param ids primary keys of the entities to (re)index; may be empty
 */
private void indexDocumentsByEntityIds(List<Long> ids) {

    final int BATCH_SIZE = 128;

    Session session = entityManager.unwrap(Session.class);

    FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);

    // Load one batch of ids at a time so every entity is still attached to the
    // session at the moment it is indexed.
    for (int from = 0; from < ids.size(); from += BATCH_SIZE) {
        List<Long> batchIds = ids.subList(from, Math.min(from + BATCH_SIZE, ids.size()));

        CriteriaBuilder builder = session.getCriteriaBuilder();
        CriteriaQuery<MyEntity> criteria = builder.createQuery(MyEntity.class);
        Root<MyEntity> root = criteria.from(MyEntity.class);
        criteria.select(root).where(root.get("id").in(batchIds));

        List<MyEntity> results = fullTextSession.createQuery(criteria).getResultList();

        for (MyEntity result : results) {
            fullTextSession.index(result); // queue each element for indexing
        }

        fullTextSession.flushToIndexes(); // apply queued changes to the indexes
        fullTextSession.clear();          // safe now: the whole batch is indexed
    }
}
private void indexDocumentsByEntityIds(List<Long> ids) {
    final int BATCH_SIZE = 128;
    Session session = entityManager.unwrap(Session.class);
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);
    CriteriaBuilder builder = session.getCriteriaBuilder();
    CriteriaQuery<MyEntity> criteria = builder.createQuery(MyEntity.class);
    Root<MyEntity> root = criteria.from(MyEntity.class);
    criteria.select(root).where(root.get("id").in(ids));
    TypedQuery<MyEntity> query = fullTextSession.createQuery(criteria);
    List<MyEntity> results = query.getResultList();
    int index = 0;
    for (MyEntity result : results) {
        index++;
        try {
            fullTextSession.index(result); // 索引每个元素
            if (index % BATCH_SIZE == 0 || index == ids.size()) {
                fullTextSession.flushToIndexes(); // 将更改应用于索引
                fullTextSession.clear(); // 队列处理完毕后释放内存
            }
        } catch (TransientObjectException toEx) {
            LOGGER.info(toEx.getMessage());
            throw toEx;
        }
    }
}
有人有什么想法吗？


谢谢

这可能是由循环中的“清除”调用引起的

本质上,你所做的是:

  • 加载所有要重新索引到会话中的实体
  • 索引一批实体
  • 从会话中删除所有实体(
    fullTextSession.clear()
  • 尝试索引下一批实体,即使它们不再在会话中
您需要做的是仅在会话清除后加载每批实体,以便在索引它们时确保它们仍在会话中

文档中有一个示例，说明如何使用可滚动结果（scroll）和合适的批处理大小来执行此操作：


或者,您可以将ID列表拆分为128个元素的较小列表,对于每个列表,运行查询以获取相应的实体,重新索引所有这128个实体,然后刷新并清除。

感谢@yrodier的解释,他们帮了我很多忙

我选择了您的替代解决方案:

或者,您可以将ID列表拆分为128个元素的较小列表,对于每个列表,运行查询以获取相应的实体,重新索引所有这128个实体,然后刷新并清除

…一切都很完美

见鬼了

请参阅下面的代码解决方案:

/**
 * Splits {@code list} into consecutive chunks of at most {@code subListSize}
 * elements, preserving order.
 *
 * <p>The returned chunks are views backed by the source list (see
 * {@link List#subList}), so the source must not be structurally modified
 * while the result is in use.
 *
 * @param list        the list to split; {@code null} or empty yields an empty result
 * @param subListSize maximum size of each chunk; must be &gt; 0
 * @param <T>         element type (generalized from the original raw {@code Object})
 * @return the chunks, in order; never {@code null}
 * @throws IllegalArgumentException if {@code subListSize} is not positive
 */
private <T> List<List<T>> splitList(List<T> list, int subListSize) {

    if (subListSize <= 0) {
        // Guard: the original looped forever for a non-positive chunk size.
        throw new IllegalArgumentException("subListSize must be > 0: " + subListSize);
    }

    List<List<T>> splittedList = new ArrayList<>();

    // Plain null/empty check replaces the Spring CollectionUtils dependency.
    if (list != null && !list.isEmpty()) {
        int nbItems = list.size();
        for (int from = 0; from < nbItems; from += subListSize) {
            splittedList.add(list.subList(from, Math.min(from + subListSize, nbItems)));
        }
    }

    return splittedList;
}


/**
 * Manually (re)indexes the entities of type {@code clazz} whose id property
 * {@code entityIdPropertyName} matches one of {@code ids}.
 *
 * <p>Ids are processed in chunks of 128; each chunk is loaded, indexed,
 * flushed, and cleared inside its own transaction, so every entity is still
 * attached to the session while Hibernate Search loads its lazy associations.
 *
 * <p>Fixes over the original: (1) flush/clear now happens unconditionally
 * after each chunk — the old {@code index == splittedIds.size()} guard never
 * fired when some ids had no matching row; (2) the transaction is rolled back
 * on failure instead of being leaked open.
 *
 * @param clazz                indexed entity type
 * @param entityIdPropertyName name of the entity's id property
 * @param ids                  id values to (re)index; may be empty
 */
private void indexDocumentsByEntityIds(Class<Object> clazz, String entityIdPropertyName, List<Object> ids) {

    Session session = entityManager.unwrap(Session.class);

    List<List<Object>> splittedIdsLists = splitList(ids, 128);

    // Session configuration is loop-invariant; set it up once.
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);

    for (List<Object> splittedIds : splittedIdsLists) {

        Transaction transaction = fullTextSession.beginTransaction();
        try {
            CriteriaBuilder builder = session.getCriteriaBuilder();
            CriteriaQuery<Object> criteria = builder.createQuery(clazz);
            Root<Object> root = criteria.from(clazz);
            criteria.select(root).where(root.get(entityIdPropertyName).in(splittedIds));

            List<Object> results = fullTextSession.createQuery(criteria).getResultList();

            for (Object result : results) {
                fullTextSession.index(result); // queue each element for indexing
            }

            fullTextSession.flushToIndexes(); // apply changes to the indexes
            fullTextSession.clear();          // free memory; the queue is processed

            transaction.commit();
        } catch (RuntimeException ex) {
            transaction.rollback(); // don't leak an open transaction on failure
            LOGGER.info(ex.getMessage());
            throw ex; // callers catching TransientObjectException still work
        }
    }
}
private List<List<Object>> splitList(List<Object> list, int subListSize) {
    List<List<Object>> splittedList = new ArrayList<>();
    if (!CollectionUtils.isEmpty(list)) {
        int i = 0;
        int nbItems = list.size();
        while (i < nbItems) {
            int maxLastSubListIndex = i + subListSize;
            int lastSubListIndex = (maxLastSubListIndex > nbItems) ? nbItems : maxLastSubListIndex;
            List<Object> subList = list.subList(i, lastSubListIndex);
            splittedList.add(subList);
            i = lastSubListIndex;
        }
    }
    return splittedList;
}

private void indexDocumentsByEntityIds(Class<Object> clazz, String entityIdPropertyName, List<Object> ids) {
    Session session = entityManager.unwrap(Session.class);
    List<List<Object>> splittedIdsLists = splitList(ids, 128);
    for (List<Object> splittedIds : splittedIdsLists) {
        FullTextSession fullTextSession = Search.getFullTextSession(session);
        fullTextSession.setFlushMode(FlushMode.MANUAL);
        fullTextSession.setCacheMode(CacheMode.IGNORE);
        Transaction transaction = fullTextSession.beginTransaction();
        CriteriaBuilder builder = session.getCriteriaBuilder();
        CriteriaQuery<Object> criteria = builder.createQuery(clazz);
        Root<Object> root = criteria.from(clazz);
        criteria.select(root).where(root.get(entityIdPropertyName).in(splittedIds));
        TypedQuery<Object> query = fullTextSession.createQuery(criteria);
        List<Object> results = query.getResultList();
        int index = 0;
        for (Object result : results) {
            index++;
            try {
                fullTextSession.index(result); // 索引每个元素
                if (index == splittedIds.size()) {
                    fullTextSession.flushToIndexes(); // 将更改应用于索引
                    fullTextSession.clear(); // 队列处理完毕后释放内存
                }
            } catch (TransientObjectException toEx) {
                LOGGER.info(toEx.getMessage());
                throw toEx;
            }
        }
        transaction.commit();
    }
}
/**
 * Splits {@code list} into consecutive chunks of at most {@code subListSize}
 * elements, preserving order.
 *
 * <p>The returned chunks are views backed by the source list (see
 * {@link List#subList}), so the source must not be structurally modified
 * while the result is in use.
 *
 * @param list        the list to split; {@code null} or empty yields an empty result
 * @param subListSize maximum size of each chunk; must be &gt; 0
 * @param <T>         element type (generalized from the original raw {@code Object})
 * @return the chunks, in order; never {@code null}
 * @throws IllegalArgumentException if {@code subListSize} is not positive
 */
private <T> List<List<T>> splitList(List<T> list, int subListSize) {

    if (subListSize <= 0) {
        // Guard: the original looped forever for a non-positive chunk size.
        throw new IllegalArgumentException("subListSize must be > 0: " + subListSize);
    }

    List<List<T>> splittedList = new ArrayList<>();

    // Plain null/empty check replaces the Spring CollectionUtils dependency.
    if (list != null && !list.isEmpty()) {
        int nbItems = list.size();
        for (int from = 0; from < nbItems; from += subListSize) {
            splittedList.add(list.subList(from, Math.min(from + subListSize, nbItems)));
        }
    }

    return splittedList;
}


/**
 * Manually (re)indexes the entities of type {@code clazz} whose id property
 * {@code entityIdPropertyName} matches one of {@code ids}.
 *
 * <p>Ids are processed in chunks of 128; each chunk is loaded, indexed,
 * flushed, and cleared inside its own transaction, so every entity is still
 * attached to the session while Hibernate Search loads its lazy associations.
 *
 * <p>Fixes over the original: (1) flush/clear now happens unconditionally
 * after each chunk — the old {@code index == splittedIds.size()} guard never
 * fired when some ids had no matching row; (2) the transaction is rolled back
 * on failure instead of being leaked open.
 *
 * @param clazz                indexed entity type
 * @param entityIdPropertyName name of the entity's id property
 * @param ids                  id values to (re)index; may be empty
 */
private void indexDocumentsByEntityIds(Class<Object> clazz, String entityIdPropertyName, List<Object> ids) {

    Session session = entityManager.unwrap(Session.class);

    List<List<Object>> splittedIdsLists = splitList(ids, 128);

    // Session configuration is loop-invariant; set it up once.
    FullTextSession fullTextSession = Search.getFullTextSession(session);
    fullTextSession.setFlushMode(FlushMode.MANUAL);
    fullTextSession.setCacheMode(CacheMode.IGNORE);

    for (List<Object> splittedIds : splittedIdsLists) {

        Transaction transaction = fullTextSession.beginTransaction();
        try {
            CriteriaBuilder builder = session.getCriteriaBuilder();
            CriteriaQuery<Object> criteria = builder.createQuery(clazz);
            Root<Object> root = criteria.from(clazz);
            criteria.select(root).where(root.get(entityIdPropertyName).in(splittedIds));

            List<Object> results = fullTextSession.createQuery(criteria).getResultList();

            for (Object result : results) {
                fullTextSession.index(result); // queue each element for indexing
            }

            fullTextSession.flushToIndexes(); // apply changes to the indexes
            fullTextSession.clear();          // free memory; the queue is processed

            transaction.commit();
        } catch (RuntimeException ex) {
            transaction.rollback(); // don't leak an open transaction on failure
            LOGGER.info(ex.getMessage());
            throw ex; // callers catching TransientObjectException still work
        }
    }
}