How to mock the Hadoop file system in Java


I am trying to use PowerMock and Mockito to mock the Hadoop file system and constructor calls. I am still new to Mockito, so I am not sure what exactly is going wrong. The test fails with the following error:

java.lang.VerifyError: Inconsistent stackmap frames at branch target 42
Exception Details:
  Location:
    org/apache/hadoop/conf/Configured.<init>()V @42: aload_1
  Reason:
    Type uninitializedThis (current frame, locals[1]) is not assignable to 'org/apache/hadoop/conf/Configured' (stack map, locals[1])
  Current Frame:
    bci: @32
    flags: { flagThisUninit }
    locals: { uninitializedThis, uninitializedThis, null, top, 'java/lang/Object' }
    stack: { 'java/lang/Object', 'java/lang/Object' }
  Stackmap Frame:
    bci: @42
    flags: { flagThisUninit }
    locals: { uninitializedThis, 'org/apache/hadoop/conf/Configured', 'org/apache/hadoop/conf/Configuration' }
    at java.lang.Class.getDeclaredConstructors0(Native Method)
    at java.lang.Class.privateGetDeclaredConstructors(Unknown Source)
    at java.lang.Class.getDeclaredConstructors(Unknown Source)
    at org.mockito.internal.creation.jmock.ClassImposterizer.setConstructorsAccessible(ClassImposterizer.java:75)
    at org.mockito.internal.creation.jmock.ClassImposterizer.imposterise(ClassImposterizer.java:70)
    at org.powermock.api.mockito.internal.mockcreation.MockCreator.createMethodInvocationControl(MockCreator.java:111)
    at org.powermock.api.mockito.internal.mockcreation.MockCreator.mock(MockCreator.java:60)
    at org.powermock.api.mockito.PowerMockito.mockStatic(PowerMockito.java:70)
    at com.chubb.pipeline.transformation.TestHadoopTest.testtestme(TestHadoopTest.java:27)




@RunWith(PowerMockRunner.class)
@PrepareForTest({TestHadoop.class, FileSystem.class})
@PowerMockIgnore({"org.apache.hadoop.conf.*", "org.apache.hadoop.fs.*"})
public class TestHadoopTest {

    @Test
    public void testtestme() throws Exception {

        CopyBookReader cbr = PowerMockito.mock(CopyBookReader.class);
        TestHadoop testhad = new TestHadoop();
        PowerMockito.mockStatic(FileSystem.class);
        Configuration conf = mock(Configuration.class);
        PowerMockito.when(FileSystem.get(conf)).thenReturn(null);
        PowerMockito.whenNew(CopyBookReader.class).withArguments(isA(Path.class), isA(FileSystem.class)).thenReturn(cbr);
        testhad.testme();
    }
}



public class TestHadoop {

    public void testme() throws Exception {

        FileSystem fs = FileSystem.get(new Configuration());
        FileStatus[] status = fs.listStatus(new Path("string"));
        CopyBookReader cBook = new CopyBookReader(status[0].getPath(), fs);
        cBook.toString();
    }
}
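For what it is worth, the instance-level calls inside testme() can be stubbed with plain Mockito, because org.apache.hadoop.fs.FileSystem is an abstract class; only the static FileSystem.get(Configuration) call and the new CopyBookReader(...) constructor really need PowerMock (or a refactor that passes the FileSystem in). Below is a minimal sketch of that plain-Mockito part; the class name and the paths are placeholders, not anything from my project.

    import static org.junit.Assert.assertEquals;
    import static org.mockito.Mockito.any;
    import static org.mockito.Mockito.mock;
    import static org.mockito.Mockito.when;

    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.junit.Test;

    // Placeholder class name; sketches only the parts that do not need PowerMock.
    public class PlainMockitoFileSystemSketch {

        @Test
        public void stubsListStatusWithPlainMockito() throws Exception {
            // FileSystem is abstract, so a plain Mockito mock covers its instance methods.
            FileSystem fs = mock(FileSystem.class);

            // FileStatus.getPath() can be stubbed the same way; the path is a placeholder.
            FileStatus fileStatus = mock(FileStatus.class);
            when(fileStatus.getPath()).thenReturn(new Path("/tmp/input/part-00000"));

            when(fs.listStatus(any(Path.class))).thenReturn(new FileStatus[] { fileStatus });

            // Exercise the stubs directly; calling testhad.testme() would still hit the
            // static FileSystem.get(...) and the CopyBookReader constructor, which is
            // where PowerMock (and the VerifyError above) comes in.
            FileStatus[] status = fs.listStatus(new Path("/tmp/input"));
            assertEquals("/tmp/input/part-00000", status[0].getPath().toString());
        }
    }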
Dependencies in my pom.xml:

 <name>MapReduce</name> 
  <url>http://maven.apache.org</url>
  <properties>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>

    <!-- Component versions are defined here -->
    <!-- Note that Hadoop dependencies are defined in hadoop-meta -->
     <avro.version>1.7.6-cdh5.4.5</avro.version>
     <crunch.version>0.11.0-cdh5.4.5</crunch.version>
     <hadoop.version>2.6.0-cdh5.4.5</hadoop.version>
     <hbase.version>1.0.0-cdh5.4.5</hbase.version>
     <hive.version>1.1.0-cdh5.4.5</hive.version>
     <mrunit.version>1.1.0</mrunit.version>
     <parquet.version>1.5.0-cdh5.4.5</parquet.version>
     <pig.version>0.12.0-cdh5.4.5</pig.version>
     <spark.version>1.3.0-cdh5.4.5</spark.version>
     <sqoop.version>1.4.5-cdh5.4.5</sqoop.version>
     <zookeeper.version>3.4.5-cdh5.4.5</zookeeper.version>
    <powermock.version>1.5.4</powermock.version>
  </properties>

  <dependencies>

    <dependency>
   <groupId>jdk.tools</groupId>
   <artifactId>jdk.tools</artifactId> 
   <version>1.7.0</version>
   <scope>system</scope>
   <systemPath>C:\Program Files\Java\jdk1.7.0_79\lib\tools.jar</systemPath>
 </dependency>
         <dependency>
         <groupId>org.apache.avro</groupId>
         <artifactId>avro</artifactId>
         <version>${avro.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.avro</groupId>
         <artifactId>avro-mapred</artifactId>
         <classifier>hadoop2</classifier>
         <version>${avro.version}</version>
       </dependency>
       <dependency>
         <groupId>com.twitter</groupId>
         <artifactId>parquet-avro</artifactId>
         <version>${parquet.version}</version>
         <exclusions>
           <exclusion>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-core</artifactId>
           </exclusion>
         </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-client</artifactId>
         <version>${hbase.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.hbase</groupId>
         <artifactId>hbase-server</artifactId>
         <version>${hbase.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.hive</groupId>
         <artifactId>hive-common</artifactId>
         <version>${hive.version}</version>
         <exclusions>
           <exclusion>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-core</artifactId>
           </exclusion>
         </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.hive</groupId>
         <artifactId>hive-exec</artifactId>
         <version>${hive.version}</version>
         <exclusions>
           <exclusion>
             <groupId>org.apache.hadoop</groupId>
             <artifactId>hadoop-core</artifactId>
           </exclusion>
           <exclusion>
             <groupId>javax.jdo</groupId>
             <artifactId>jdo2-api</artifactId>
           </exclusion>
           <exclusion>
             <groupId>org.datanucleus</groupId>
             <artifactId>datanucleus-connectionpool</artifactId>
           </exclusion>
           <exclusion>
             <groupId>org.datanucleus</groupId>
             <artifactId>datanucleus-core</artifactId>
           </exclusion>
           <exclusion>
             <groupId>org.datanucleus</groupId>
             <artifactId>datanucleus-enhancer</artifactId>
           </exclusion>
           <exclusion>
             <groupId>org.datanucleus</groupId>
             <artifactId>datanucleus-rdbms</artifactId>
           </exclusion>
         </exclusions>
       </dependency>
       <dependency>
         <groupId>org.apache.pig</groupId>
         <artifactId>pig</artifactId>
         <version>${pig.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.crunch</groupId>
         <artifactId>crunch-core</artifactId>
         <version>${crunch.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.spark</groupId>
         <artifactId>spark-core_2.10</artifactId>
         <version>${spark.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.zookeeper</groupId>
         <artifactId>zookeeper</artifactId>
         <version>${zookeeper.version}</version>
       </dependency>
       <dependency>
         <groupId>org.apache.sqoop</groupId>
         <artifactId>sqoop</artifactId>
         <!--  classifier>hadoop200  classifier -->
         <version>${sqoop.version}</version>
       </dependency>
       <dependency>
         <groupId>log4j</groupId>
         <artifactId>log4j</artifactId>
         <version>1.2.15</version>
         <exclusions>
           <exclusion>
             <groupId>javax.mail</groupId>
             <artifactId>mail</artifactId>
           </exclusion>
           <exclusion>
             <groupId>javax.jms</groupId>
             <artifactId>jms</artifactId>
           </exclusion>
           <exclusion>
             <groupId>com.sun.jdmk</groupId>
             <artifactId>jmxtools</artifactId>
           </exclusion>
           <exclusion>
             <groupId>com.sun.jmx</groupId>
             <artifactId>jmxri</artifactId>
           </exclusion>
         </exclusions>
       </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-api</artifactId>
         <version>1.7.5</version>
       </dependency>
       <dependency>
         <groupId>org.slf4j</groupId>
         <artifactId>slf4j-log4j12</artifactId>
         <version>1.7.5</version>
       </dependency>
       <dependency>
         <groupId>org.hamcrest</groupId>
         <artifactId>hamcrest-all</artifactId>
         <version>1.3</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>junit-addons</groupId>
         <artifactId>junit-addons</artifactId>
         <version>1.4</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.scalatest</groupId>
         <artifactId>scalatest_2.10</artifactId>
         <version>2.2.0</version>
         <scope>test</scope>
       </dependency>
       <dependency>
         <groupId>org.apache.commons</groupId>
         <artifactId>commons-exec</artifactId>
         <version>1.1</version>
       </dependency>
       <dependency>
         <groupId>commons-io</groupId>
         <artifactId>commons-io</artifactId>
         <version>2.4</version>
       </dependency>
       <dependency>
         <groupId>com.google.guava</groupId>
         <artifactId>guava</artifactId>
         <version>11.0.2</version>
       </dependency>   
 <dependency>
      <groupId>org.powermock.modules</groupId>
      <artifactId>powermock-module-junit4</artifactId>
      <version>${powermock.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.powermock.api</groupId>
      <artifactId>powermock-api-mockito</artifactId>
      <version>${powermock.version}</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>4.6</version>
      <scope>test</scope>
    </dependency>
    <dependency>
      <groupId>org.mockito</groupId>
      <artifactId>mockito-all</artifactId>
      <version>1.9.4</version>
    </dependency>
    <dependency>
    <groupId>org.apache.oozie</groupId>
    <artifactId>oozie-core</artifactId>
    <version>4.1.0</version>
</dependency>
<dependency>
  <groupId>org.apache.oozie</groupId>
  <artifactId>oozie-client</artifactId>
  <version>4.1.0-cdh5.4.3</version>
</dependency>
<dependency>
    <groupId>org.apache.mrunit</groupId>
    <artifactId>mrunit</artifactId>
    <version>1.1.0</version>
    <classifier>hadoop2</classifier> 
</dependency>
<dependency>
    <groupId>xerces</groupId>
    <artifactId>xercesImpl</artifactId>
    <version>2.9.1</version>
</dependency>
<dependency>
    <groupId>xalan</groupId>
    <artifactId>xalan</artifactId>
    <version>2.7.1</version>
</dependency>
  </dependencies>
  <repositories>
    <repository>
        <id>central</id>
        <name>Maven Central</name>
        <url>http://repo1.maven.org/maven2/</url>
    </repository>

    <repository>
        <id>search</id>
        <name>Maven Central search</name>
        <url>http://search.maven.org/</url>
    </repository>


    <repository>
      <id>cloudera</id>
      <url>http://repository.cloudera.com/artifactory/cloudera-repos/</url>
    </repository>
    <repository>
            <id>sonatype-snapshots</id>
            <url>https://oss.sonatype.org/content/repositories/snapshots</url>
     </repository>
     <repository>
      <id>powermock-repo</id>
      <url>http://powermock.googlecode.com/svn/repo/</url>
    </repository>
     </repositories>

    <build>
        <pluginManagement>
            <plugins>
                <plugin>
                    <groupId>org.apache.maven.plugins</groupId>
                    <artifactId>maven-compiler-plugin</artifactId>
                    <version>3.1</version>
                    <configuration>
                        <source>1.7</source>
                        <target>1.7</target>
                    </configuration>
                </plugin>
            </plugins>
        </pluginManagement>
    </build>
</project>
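On the build side, this "Inconsistent stackmap frames" flavour of VerifyError under JDK 7 is commonly associated with bytecode produced by the proxying libraries PowerMock and Mockito pull in, and a frequently suggested workaround is to relax the split verifier in the test JVM. The snippet below is only a sketch of how that could be wired through maven-surefire-plugin; it is an assumption about my environment, not a confirmed fix for this particular setup.

    <!-- Sketch only: pass a JVM flag to the surefire test JVM.
         -XX:-UseSplitVerifier exists on Java 7 (it was removed in Java 8);
         -noverify is the blunter alternative. -->
    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-surefire-plugin</artifactId>
      <version>2.17</version>
      <configuration>
        <argLine>-XX:-UseSplitVerifier</argLine>
      </configuration>
    </plugin>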