Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/java/382.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Java 用于测试hadoop map程序错误的MRUnit测试_Java_Hadoop_Mapreduce_Mrunit - Fatal编程技术网

Java 用于测试hadoop map程序错误的MRUnit测试

Java 用于测试hadoop map程序错误的MRUnit测试,java,hadoop,mapreduce,mrunit,Java,Hadoop,Mapreduce,Mrunit,我正在尝试使用MRUnit对映射程序进行单元测试(摘自Hadoop:权威指南,第153页,章节:使用MRUnit:Mapper编写单元测试)。我使用intellij Idea,它显示了方法中的错误 new org.apache.hadoop.mrunit.MapDriver<>().withMapper(myMapper) new org.apache.hadoop.mrunit.MapDriver().withMapper(myMapper) 错误消息说, 无法将MapDriv

我正在尝试使用MRUnit对映射程序进行单元测试(摘自Hadoop:权威指南,第153页,章节:使用MRUnit:Mapper编写单元测试)。我使用intellij Idea,它显示了方法中的错误

new org.apache.hadoop.mrunit.MapDriver<>().withMapper(myMapper)
new org.apache.hadoop.mrunit.MapDriver().withMapper(myMapper)
错误消息说, 无法将MapDriver中的withMapper(org.apache.hadoop.mapreduce.MaxTempMapper)应用于(complexmapreduce.MaxTempMapper)

MaxTempMapper被声明为org.apache.hadoop.mapreduce.Mapper ,所以我不确定这里到底出了什么问题

下面是完整的映射器和单元测试类

MaxTempMapper

package complexmapreduce;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import java.io.IOException;

public class MaxTempMapper extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final int MISSING = 9999;
    private NDCRecordParser myParser = new NDCRecordParser();

    @Override
    public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException {

        myParser.parse(value);
        if (myParser.isValidTemperature()) {
            context.write(new Text(myParser.getYear()), new IntWritable(myParser.getMaxTemperature()));
        }
    }
}
package complexmapreduce;
导入org.apache.hadoop.io.IntWritable;
导入org.apache.hadoop.io.LongWritable;
导入org.apache.hadoop.io.Text;
导入org.apache.hadoop.mapreduce.Mapper;
导入java.io.IOException;
公共类MaxTempMapper扩展了Mapper{
缺少专用静态最终整数=9999;
私有NDCRecordParser myParser=新的NDCRecordParser();
@凌驾
公共void映射(LongWritable键、文本值、上下文上下文)引发IOException、InterruptedException{
parse(值);
if(myParser.isValidTemperature()){
write(新文本(myParser.getYear()),新IntWritable(myParser.getMaxTemperature());
}
}
}
MaxTempUnitTest

    package complexmapreduce;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.junit.Test;
    import java.io.IOException;

    public class MaxTempSingleLineUnitTest {

        @Test
        public void testMaxTempMapper() throws IOException {
            Text value = new Text("0029029070999991901010106004+64333+023450FM-12+000599999V0202701N015919999999N0000001N9-00781+99999102001ADDGF108991999999999999999999");
            LongWritable key = new LongWritable(0);
            MaxTempMapper myMapper = new MaxTempMapper();
            new org.apache.hadoop.mrunit.mapreduce.MapDriver<>()
                    .withMapper(myMapper)     // <<<===Error here
                    .withInput(key, value)
                    .withOutput(new Text("1901"), 
                     new IntWritable(0210))
                    .runTest();
        }
    }
package complexmapreduce;
导入org.apache.hadoop.io.IntWritable;
导入org.apache.hadoop.io.LongWritable;
导入org.apache.hadoop.io.Text;
导入org.junit.Test;
导入java.io.IOException;
公共类MaxTempSingleLineUnitTest{
@试验
public void testMaxTempMapper()引发IOException{
文本值=新文本(“00290290709991901010106004+64333+023450FM-12+00059999V0020701N0159199999N0000001N9-00781+999102001AddGF10899999999999999”);
LongWritable键=新的LongWritable(0);
MaxTempMapper myMapper=新的MaxTempMapper();
新org.apache.hadoop.mrunit.mapreduce.MapDriver()
.使用Mapper(myMapper)/您需要更改:

neworg.apache.hadoop.mrunit.mapreduce.MapDriver()

neworg.apache.hadoop.mrunit.mapreduce.MapDriver()


您需要添加泛型类型,以便它知道如何运行映射程序。

这没有任何区别…仍然会出现相同的错误。我在原始帖子中添加了一个屏幕截图。您使用的是什么版本的Java?使用Java 8。我确保支持注释,并且正确的hadoop2 API使用了(org.apache.hadoop.mrunit.mapreduce.MapDriver而不是org.apache.hadoop.mrunit.mapred.MapDriver)。编译该程序时会出现以下错误。错误:(14,25)java:找不到符号符号符号:类MaxTempMapper位置:包complexmapreduce