Neo4j 使用索引进行批插入后,使用Cypher没有结果
我对 neo4j 很陌生。我读过相关的问题,但它的解决方法不起作用。我得到的错误是:找不到自动节点索引(auto node index)。也许是因为我在使用批插入器(BatchInserter)。在我的实验中,我使用 neo4j 1.8.2、编程语言 Java 以及嵌入式数据库。我想按照文档所述,使用 BatchInserter 和 BatchInserterIndex 将一些数据放入数据库。首先,我从 TGF 文件中读取数据,创建节点并将其交给插入器,如下所示:
properties = MapUtil.map("name", actualNodeName, "birthday", birthdayValue);
long node = myInserter.createNode(properties);
nodes.add(node);
// FIX: the node must be explicitly added to the Lucene batch index —
// createNode() alone does NOT index anything. Without this call the
// "persons" index stays empty and Cypher "start n=node:persons(...)"
// returns no rows.
persons.add(node, properties);
persons.flush();
insert工作正常,但是当我想用Cypher搜索节点时,结果是空的
// Look up a person through the legacy "persons" Lucene index and return
// the people they KNOW. This only yields rows if the batch import actually
// added the nodes to that index (persons.add(node, properties)) — creating
// the nodes alone is not enough.
ExecutionEngine engine = new ExecutionEngine( db );
String query =
// index lookup: start clause resolves 'name' against the "persons" index
"start n=node:persons(name='nameToSearch') "
+ " match n-[:KNOWS]->m "
+ " return n.id, m ";
ExecutionResult result = engine.execute( query );
System.out.println(result);
另一方面,当我使用Traverser类并在rootnode上开始搜索时,我会收到由名为“nameToSearch”的节点连接的节点
有人能解释我为什么不能用Cypher获取节点吗
以下是批量插入的完整方法:
public long batchImport() throws IOException{
String actualLine;
ArrayList<Long> nodes = new ArrayList<Long>();
Map<String,Object> properties = new HashMap<String,Object>();
//delete all nodes and edges in the database
FileUtils.deleteRecursively(new File(DB_PATH ));
BatchInserter myInserter = BatchInserters.inserter(DB_PATH);
BatchInserterIndexProvider indexProvider =
new LuceneBatchInserterIndexProvider( myInserter );
BatchInserterIndex persons =
indexProvider.nodeIndex( "persons", MapUtil.stringMap( "type", "exact" ) );
persons.setCacheCapacity( "name", 10000 );
long execTime = 0;
try{
//Get the file which contains the graph informations
FileReader inputFile = new FileReader(UtilFunctions.searchFile(new File(PATH_OUTPUT_MERGED_FILES), "nodesAndEdges").get(0));
LineNumberReader inputLine = new LineNumberReader(inputFile);
// Read nodes up to symbol #
execTime = System.nanoTime();
while ((actualLine=inputLine.readLine()).charAt(0) != '#'){
StringTokenizer myTokenizer = new StringTokenizer(actualLine);
// Read node number
String actualNodeNumber = myTokenizer.nextToken();
// Read node name
String actualNodeName = myTokenizer.nextToken() + " " + myTokenizer.nextToken();
//Read property
myTokenizer.nextToken();
String actualNodePropertyKey = BIRTHDAY_KEY;
String actualNodePropertyValue = myTokenizer.nextToken();
actualNodePropertyValue = actualNodePropertyValue.substring(1, actualNodePropertyValue.length()-1);
// Insert node information
properties = MapUtil.map("name", actualNodeName, "birthday", actualNodePropertyValue, "id", actualNodeNumber);
long node = myInserter.createNode(properties);
nodes.add(node);
persons.flush();
}
// Read edges up to end of file
int countEdges = 0;
while ((actualLine=inputLine.readLine()) != null){
StringTokenizer myTokenizer = new StringTokenizer(actualLine);
// Read start node number
String actualStartNodeNumber = myTokenizer.nextToken();
// Read destination node number
String actualDestinationNodeNumber = myTokenizer.nextToken();
// Read relationship type
String actualRelType = myTokenizer.nextToken();
// Insert node information into ArrayList
int positionStartNode = Integer.parseInt(actualStartNodeNumber);
int positionDestinationNode = Integer.parseInt(actualDestinationNodeNumber);
properties.clear();
if (countEdges == 0) {
myInserter.createRelationship(0, nodes.get(positionStartNode-1), RelType.ROOT, properties);
myInserter.createRelationship(nodes.get(positionStartNode-1), nodes.get(positionDestinationNode-1), RelType.KNOWS, properties);
}
else
{
myInserter.(nodes.get(positionStartNode-1), nodes.get(positionDestinationNode-1), RelType.KNOWS, properties);
}
countEdges++;
}
indexProvider.shutdown();
myInserter.shutdown();
execTime = System.nanoTime() - execTime;
// Close input file
inputLine.close();
inputFile.close();
}
catch (Throwable e){
System.out.println(e.getMessage());
e.printStackTrace();
}
return execTime;
}
public long batchImport()引发IOException{
字符串实际值;
ArrayList节点=新的ArrayList();
映射属性=新的HashMap();
//删除数据库中的所有节点和边
递归删除(新文件(DB_路径));
BatchInserter myInserter=BatchInserters.inserter(DB_路径);
BatchInserteIndexProvider索引提供程序=
新LuceneBatchInserterIndexProvider(myInserter);
BatchInserterIndex人员=
indexProvider.nodeIndex(“persons”,MapUtil.stringMap(“type”,“exact”));
人员。设置缓存容量(“姓名”,10000);
长执行时间=0;
试一试{
//获取包含图形信息的文件
FileReader inputFile=newFileReader(UtilFunctions.searchFile(新文件(路径输出合并文件),“节点索引”).get(0));
LineNumberReader inputLine=新的LineNumberReader(inputFile);
//将节点读取到符号#
execTime=System.nanoTime();
while((actualLine=inputLine.readLine()).charAt(0)!='#'){
StringTokenizer myTokenizer=新的StringTokenizer(actualLine);
//读取节点号
字符串actualNodeNumber=myTokenizer.nextToken();
//读取节点名
字符串actualNodeName=myTokenizer.nextToken()+“”+myTokenizer.nextToken();
//读取属性
myTokenizer.nextToken();
字符串actualNodePropertyKey=生日键;
字符串actualNodePropertyValue=myTokenizer.nextToken();
actualNodePropertyValue=actualNodePropertyValue.substring(1,actualNodePropertyValue.length()-1);
//插入节点信息
properties=MapUtil.map(“name”,actualNodeName,“birth”,actualNodePropertyValue,“id”,actualNodeNumber);
长节点=myInserter.createNode(属性);
nodes.add(node);
人。同花顺();
}
//读取边直到文件结束
int countEdges=0;
而((actualLine=inputLine.readLine())!=null){
StringTokenizer myTokenizer=新的StringTokenizer(actualLine);
//读取起始节点号
字符串actualStartNodeNumber=myTokenizer.nextToken();
//读取目标节点号
字符串actualDestinationNodeNumber=myTokenizer.nextToken();
//读取关系类型
字符串actualRelType=myTokenizer.nextToken();
//将节点信息插入ArrayList
int positionStartNode=Integer.parseInt(actualStartNodeNumber);
int positionDestinationNode=Integer.parseInt(actualDestinationNodeNumber);
properties.clear();
如果(countEdges==0){
myInserter.createRelationship(0,nodes.get(positionStartNode-1),RelType.ROOT,properties);
创建关系(nodes.get(positionStartNode-1)、nodes.get(positionDestinationNode-1)、RelType.KNOWS、properties);
}
其他的
{
myInserter.(nodes.get(positionStartNode-1)、nodes.get(positionDestinationNode-1)、RelType.KNOWS、properties);
}
countEdges++;
}
indexProvider.shutdown();
myInserter.shutdown();
execTime=System.nanoTime()-execTime;
//关闭输入文件
inputLine.close();
inputFile.close();
}
捕获(可丢弃的e){
System.out.println(e.getMessage());
e、 printStackTrace();
}
返回时间;
}
【回答】使用 BatchInserter API 的代码必须在 BatchInserterIndexProvider 和 BatchInserter 上调用 shutdown()。也许你在代码中漏掉了这一步。如果这不能解决问题,请贴出你的代码。
【评论】你缺少对 persons.add(node, properties) 的调用,因此你从来没有向索引中添加任何内容。另外,在第二个代码示例中你写了 nodes.add(node),nodes 是在哪里定义的?
【提问者】抱歉,那不是完整的代码段,nodes 在 myInserter 前面一行定义:ArrayList&lt;Long&gt; nodes = new ArrayList&lt;Long&gt;()。
【评论】你应该先执行 "start n=node(*) return n",看看数据库里到底有没有内容——这样就能区分是导入问题还是查询问题。
【提问者】查询 start n=node(*) return n 有效,使用 Traverser 类也工作正常,只有索引查询不起作用。我已经更新了帖子,把完整的方法放在了末尾。谢谢!我真是有眼无珠,没看到这个问题——我忘了调用 persons.add(node, properties)。
public long batchImport() throws IOException{
String actualLine;
ArrayList<Long> nodes = new ArrayList<Long>();
Map<String,Object> properties = new HashMap<String,Object>();
//delete all nodes and edges in the database
FileUtils.deleteRecursively(new File(DB_PATH ));
BatchInserter myInserter = BatchInserters.inserter(DB_PATH);
BatchInserterIndexProvider indexProvider =
new LuceneBatchInserterIndexProvider( myInserter );
BatchInserterIndex persons =
indexProvider.nodeIndex( "persons", MapUtil.stringMap( "type", "exact" ) );
persons.setCacheCapacity( "name", 10000 );
long execTime = 0;
try{
//Get the file which contains the graph informations
FileReader inputFile = new FileReader(UtilFunctions.searchFile(new File(PATH_OUTPUT_MERGED_FILES), "nodesAndEdges").get(0));
LineNumberReader inputLine = new LineNumberReader(inputFile);
// Read nodes up to symbol #
execTime = System.nanoTime();
while ((actualLine=inputLine.readLine()).charAt(0) != '#'){
StringTokenizer myTokenizer = new StringTokenizer(actualLine);
// Read node number
String actualNodeNumber = myTokenizer.nextToken();
// Read node name
String actualNodeName = myTokenizer.nextToken() + " " + myTokenizer.nextToken();
//Read property
myTokenizer.nextToken();
String actualNodePropertyKey = BIRTHDAY_KEY;
String actualNodePropertyValue = myTokenizer.nextToken();
actualNodePropertyValue = actualNodePropertyValue.substring(1, actualNodePropertyValue.length()-1);
// Insert node information
properties = MapUtil.map("name", actualNodeName, "birthday", actualNodePropertyValue, "id", actualNodeNumber);
long node = myInserter.createNode(properties);
nodes.add(node);
persons.flush();
}
// Read edges up to end of file
int countEdges = 0;
while ((actualLine=inputLine.readLine()) != null){
StringTokenizer myTokenizer = new StringTokenizer(actualLine);
// Read start node number
String actualStartNodeNumber = myTokenizer.nextToken();
// Read destination node number
String actualDestinationNodeNumber = myTokenizer.nextToken();
// Read relationship type
String actualRelType = myTokenizer.nextToken();
// Insert node information into ArrayList
int positionStartNode = Integer.parseInt(actualStartNodeNumber);
int positionDestinationNode = Integer.parseInt(actualDestinationNodeNumber);
properties.clear();
if (countEdges == 0) {
myInserter.createRelationship(0, nodes.get(positionStartNode-1), RelType.ROOT, properties);
myInserter.createRelationship(nodes.get(positionStartNode-1), nodes.get(positionDestinationNode-1), RelType.KNOWS, properties);
}
else
{
myInserter.(nodes.get(positionStartNode-1), nodes.get(positionDestinationNode-1), RelType.KNOWS, properties);
}
countEdges++;
}
indexProvider.shutdown();
myInserter.shutdown();
execTime = System.nanoTime() - execTime;
// Close input file
inputLine.close();
inputFile.close();
}
catch (Throwable e){
System.out.println(e.getMessage());
e.printStackTrace();
}
return execTime;
}