Java: loading JUnit tests into different JVMs


I took the JUnit test case named TestColumnPrefixFilter from the Apache HBase project. This test case contains two tests. When I run it with Maven from the command line:

mvn test -Dtest=org.apache.hadoop.hbase.filter.TestColumnPrefixFilter
I get this result:

Running org.apache.hadoop.hbase.filter.TestColumnPrefixFilter
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.585 seconds
My question: is there a way to run the two tests from this test case separately, without modifying the code? Is there a Maven or Java option for this? Or can the two tests be run in different JVMs? When I invoke the command line above, the test case is loaded into a single JVM, and that one JVM is used for both tests, which is what I want to avoid.

I tried the perTest fork option for Maven, but without success.
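For reference, the attempt looked roughly like this (a sketch from memory, not my exact pom.xml; note that pertest is an old Surefire alias for always and forks one JVM per test class, not per test method, which may be why it changed nothing here):

<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-surefire-plugin</artifactId>
    <configuration>
        <forkMode>pertest</forkMode> <!-- old alias for "always": one fresh JVM per test class -->
    </configuration>
</plugin>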

Here is the Java file:

/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.filter;

import static org.junit.Assert.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category(SmallTests.class)
public class TestColumnPrefixFilter {

  private final static HBaseTestingUtility TEST_UTIL = new
      HBaseTestingUtility();

  @Test
  public void testColumnPrefixFilter() throws IOException {
    String family = "Family";
    HTableDescriptor htd = new HTableDescriptor("TestColumnPrefixFilter");
    htd.addFamily(new HColumnDescriptor(family));
    HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
    HRegion region = HRegion.createHRegion(info, TEST_UTIL.
      getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    try {
      List<String> rows = generateRandomWords(100, "row");
      List<String> columns = generateRandomWords(10000, "column");
      long maxTimestamp = 2;

      List<KeyValue> kvList = new ArrayList<KeyValue>();

      Map<String, List<KeyValue>> prefixMap = new HashMap<String,
          List<KeyValue>>();

      prefixMap.put("p", new ArrayList<KeyValue>());
      prefixMap.put("s", new ArrayList<KeyValue>());

      String valueString = "ValueString";

      for (String row: rows) {
        Put p = new Put(Bytes.toBytes(row));
        p.setWriteToWAL(false);
        for (String column: columns) {
          for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
            KeyValue kv = KeyValueTestUtil.create(row, family, column, timestamp,
                valueString);
            p.add(kv);
            kvList.add(kv);
            for (String s: prefixMap.keySet()) {
              if (column.startsWith(s)) {
                prefixMap.get(s).add(kv);
              }
            }
          }
        }
        region.put(p);
      }

      ColumnPrefixFilter filter;
      Scan scan = new Scan();
      scan.setMaxVersions();
      for (String s: prefixMap.keySet()) {
        filter = new ColumnPrefixFilter(Bytes.toBytes(s));

        scan.setFilter(filter);

        InternalScanner scanner = region.getScanner(scan);
        List<KeyValue> results = new ArrayList<KeyValue>();
        while(scanner.next(results));
        assertEquals(prefixMap.get(s).size(), results.size());
      }
    } finally {
      region.close();
      region.getLog().closeAndDelete();
    }

    region.close();
    region.getLog().closeAndDelete();
  }

  @Test
  public void testColumnPrefixFilterWithFilterList() throws IOException {
    String family = "Family";
    HTableDescriptor htd = new HTableDescriptor("TestColumnPrefixFilter");
    htd.addFamily(new HColumnDescriptor(family));
    HRegionInfo info = new HRegionInfo(htd.getName(), null, null, false);
    HRegion region = HRegion.createHRegion(info, TEST_UTIL.
      getDataTestDir(), TEST_UTIL.getConfiguration(), htd);
    try {
      List<String> rows = generateRandomWords(100, "row");
      List<String> columns = generateRandomWords(10000, "column");
      long maxTimestamp = 2;

      List<KeyValue> kvList = new ArrayList<KeyValue>();

      Map<String, List<KeyValue>> prefixMap = new HashMap<String,
          List<KeyValue>>();

      prefixMap.put("p", new ArrayList<KeyValue>());
      prefixMap.put("s", new ArrayList<KeyValue>());

      String valueString = "ValueString";

      for (String row: rows) {
        Put p = new Put(Bytes.toBytes(row));
        p.setWriteToWAL(false);
        for (String column: columns) {
          for (long timestamp = 1; timestamp <= maxTimestamp; timestamp++) {
            KeyValue kv = KeyValueTestUtil.create(row, family, column, timestamp,
                valueString);
            p.add(kv);
            kvList.add(kv);
            for (String s: prefixMap.keySet()) {
              if (column.startsWith(s)) {
                prefixMap.get(s).add(kv);
              }
            }
          }
        }
        region.put(p);
      }

      ColumnPrefixFilter filter;
      Scan scan = new Scan();
      scan.setMaxVersions();
      for (String s: prefixMap.keySet()) {
        filter = new ColumnPrefixFilter(Bytes.toBytes(s));

        //this is how this test differs from the one above
        FilterList filterList = new FilterList(FilterList.Operator.MUST_PASS_ALL);
        filterList.addFilter(filter);
        scan.setFilter(filterList);

        InternalScanner scanner = region.getScanner(scan);
        List<KeyValue> results = new ArrayList<KeyValue>();
        while(scanner.next(results));
        assertEquals(prefixMap.get(s).size(), results.size());
      }
    } finally {
      region.close();
      region.getLog().closeAndDelete();
    }

    region.close();
    region.getLog().closeAndDelete();
  }

  List<String> generateRandomWords(int numberOfWords, String suffix) {
    Set<String> wordSet = new HashSet<String>();
    for (int i = 0; i < numberOfWords; i++) {
      int lengthOfWords = (int) (Math.random()*2) + 1;
      char[] wordChar = new char[lengthOfWords];
      for (int j = 0; j < wordChar.length; j++) {
        wordChar[j] = (char) (Math.random() * 26 + 97);
      }
      String word;
      if (suffix == null) {
        word = new String(wordChar);
      } else {
        word = new String(wordChar) + suffix;
      }
      wordSet.add(word);
    }
    List<String> wordList = new ArrayList<String>(wordSet);
    return wordList;
  }

  @org.junit.Rule
  public org.apache.hadoop.hbase.ResourceCheckerJUnitRule cu =
    new org.apache.hadoop.hbase.ResourceCheckerJUnitRule();
}
Can the two tests be run in different JVMs?

Yes, Maven has an option for this. From the Surefire documentation for reuseForks:

Indicates whether forked VMs can be reused. If set to "false", a new VM is forked for each test class to be executed. If set to "true", at most forkCount VMs are forked and then reused to execute all tests.

Default value is: true

User property is: reuseForks

You can set it in your pom.xml like this:

<plugin>
    <groupId>org.apache.maven.plugins</groupId>
    <artifactId>maven-surefire-plugin</artifactId>
    <version>2.16</version>
    <configuration>
        <reuseForks>false</reuseForks> <!-- This will make sure to spawn a fresh jvm for each test -->
    </configuration>
</plugin>
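One caveat (my reading of the Surefire docs, not something the option itself guarantees): forking happens per test class, so with both tests living in the same class, reuseForks=false alone will still run them in a single JVM. A workaround that needs no code change is to run each method in its own Maven invocation; Surefire 2.7.3+ accepts a Class#method pattern for JUnit 4, so each of these commands gets its own fresh JVM:

mvn test -Dtest=TestColumnPrefixFilter#testColumnPrefixFilter
mvn test -Dtest=TestColumnPrefixFilter#testColumnPrefixFilterWithFilterList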

EDIT

Please try the following configuration:

<plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-surefire-plugin</artifactId>
  <version>2.9</version>
  <configuration>
    <forkMode>always</forkMode> <!-- try with 'pertest' option as well -->
  </configuration>
</plugin>
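Note that forkMode has been deprecated since Surefire 2.14 in favor of forkCount/reuseForks; if I read the migration notes correctly, forkMode=always corresponds to this equivalent configuration (a sketch under that assumption):

<configuration>
    <forkCount>1</forkCount>       <!-- fork a single VM at a time... -->
    <reuseForks>false</reuseForks> <!-- ...and never reuse it: one fresh JVM per test class -->
</configuration>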


I suspect the test cases would have to be in separate classes to take advantage of this.

Yes, but you can use a combination of forkMode (now deprecated) together with the parallel option to achieve JVM separation at the method level rather than the class level. I would recommend against this deprecated technique, though.

As Bob mentioned, there is surely a reason Apache deprecated it; with this technique you would have to split your tests into separate classes.

@SaifAsif But as I said, I don't want to modify the code, because I have many of these and this is just one test case. Is there no simple way? @SaifAsif Could you check the edited part? I have edited the question.
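Picking up on the comment thread: if Maven-level forking cannot isolate individual methods, one workaround that avoids touching the test class itself is to drive a single @Test method programmatically through JUnit's Request API and launch the runner once per method, so each java invocation is its own JVM. A minimal sketch; SingleMethodRunner is a hypothetical helper, not part of HBase or JUnit:

import org.junit.runner.JUnitCore;
import org.junit.runner.Request;
import org.junit.runner.Result;

// Hypothetical helper: runs exactly one @Test method of a JUnit 4 test class.
// Launch it once per method; each launch is a separate JVM.
public class SingleMethodRunner {
  public static void main(String[] args) throws ClassNotFoundException {
    // args[0] = fully qualified test class, args[1] = test method name
    Class<?> testClass = Class.forName(args[0]);
    Request request = Request.method(testClass, args[1]);
    Result result = new JUnitCore().run(request);
    System.out.println("Tests run: " + result.getRunCount()
        + ", Failures: " + result.getFailureCount());
    System.exit(result.wasSuccessful() ? 0 : 1);
  }
}

For example (classpath elided):

java -cp ... SingleMethodRunner org.apache.hadoop.hbase.filter.TestColumnPrefixFilter testColumnPrefixFilter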