Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/java/350.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Java ChainReducer.setReducer方法抛出错误“ChainReducer不适用于参数”_Java_Hadoop_Mapreduce_Hdfs - Fatal编程技术网

Java ChainReducer.setReducer方法抛出错误“ChainReducer不适用于参数”

Java ChainReducer.setReducer方法抛出错误“ChainReducer不适用于参数”,java,hadoop,mapreduce,hdfs,Java,Hadoop,Mapreduce,Hdfs,我有两个映射器类。因此使用ChainMapper.addMapper方法添加映射器，并使用ChainReducer.setReducer方法设置Reducer。ChainMapper.addMapper方法正常，但ChainReducer.setReducer方法抛出语法错误：The method setReducer(Job, Class&lt;? extends Reducer&gt;, Class&lt;?&gt;, Class&lt;?&gt;, Class&lt;?&gt;, Class&lt;?&gt;, Configuration) in the type ChainReducer is not applicable for the arguments (JobConf, Class&lt;FileComparisionReduce&gt;, Class&lt;LongWritable&gt;, Class&lt;Text&gt;, Class&lt;LongWritable&gt;, Class&lt;Text&gt;, boolean, JobConf)

我有两个映射器类。因此使用
ChainMapper.addMapper
方法添加映射器,并使用
ChainReducer.setReducer
方法设置Reducer
ChainMapper.addMapper
方法正常,但
Chain.setReducer
方法抛出语法错误

The method setReducer(Job, Class<? extends Reducer>, Class<?>, Class<?>, Class<?>, Class<?>, Configuration) in the type ChainReducer is not applicable for the arguments (JobConf, Class<FileComparisionReduce>, Class<LongWritable>, Class<Text>, Class<LongWritable>, Class<Text>, boolean, JobConf)
还尝试删除布尔参数“true”

最后，我找到了一个解决办法。问题出在导入了错误的包：我错误地导入了新 API 的 org.apache.hadoop.mapreduce.lib.chain.ChainReducer（它的 setReducer 接受 Job 参数），而正确的应该是旧 API 的 org.apache.hadoop.mapred.lib.ChainReducer（它的 setReducer 接受 JobConf 参数，与下面代码中的调用方式匹配）。

package fileComparision;

import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.lib.ChainMapper;
import org.apache.hadoop.mapreduce.lib.chain.ChainReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class DriverComparision extends Configured implements Tool{


    /**
     * Configures and runs a chained MapReduce job in which two mappers feed a
     * single reducer, using the old {@code org.apache.hadoop.mapred} chain API.
     *
     * <p>NOTE(review): the file's import block pulls in
     * {@code org.apache.hadoop.mapreduce.lib.chain.ChainReducer} (new API, whose
     * {@code setReducer} takes a {@code Job}), which does not match the
     * {@code JobConf}-based call below — that mismatch is exactly the
     * "not applicable for the arguments" compile error. The reducer is therefore
     * wired through the fully-qualified old-API class
     * {@code org.apache.hadoop.mapred.lib.ChainReducer}, which has the
     * {@code (JobConf, ..., boolean, JobConf)} signature, without needing to
     * change the import block.
     *
     * @param arg0 command-line arguments: arg0[0] = input path, arg0[1] = output path
     * @return 0 when the job completes successfully
     * @throws IllegalArgumentException if fewer than two paths are supplied
     * @throws Exception if the job fails ({@code JobClient.runJob} throws on failure)
     */
    @Override
    public int run(String[] arg0) throws Exception {
        if (arg0.length < 2) {
            throw new IllegalArgumentException(
                    "Usage: DriverComparision <input path> <output path>");
        }

        // Start from the Tool-injected configuration instead of a bare
        // new JobConf(true), so -D options passed via ToolRunner are honored.
        JobConf conf = new JobConf(getConf(), DriverComparision.class);
        conf.setJobName("Comaprision of 2 file ");

        // First mapper in the chain; byValue=true passes key/value copies
        // between chain stages, so stages may safely reuse objects.
        JobConf mapper1Conf = new JobConf(false);
        ChainMapper.addMapper(conf, FileComparisionMapper1.class,
                LongWritable.class, Text.class, LongWritable.class, Text.class,
                true, mapper1Conf);

        // Second mapper, consuming the first mapper's (LongWritable, Text) output.
        JobConf mapper2Conf = new JobConf(false);
        ChainMapper.addMapper(conf, FileComparisionMapper2.class,
                LongWritable.class, Text.class, LongWritable.class, Text.class,
                true, mapper2Conf);

        // Reducer must go through the OLD-API ChainReducer (mapred.lib), whose
        // setReducer accepts a JobConf and the boolean byValue flag. Fully
        // qualified here to bypass the mapreduce.lib.chain.ChainReducer import
        // at the top of the file.
        JobConf reducerConf = new JobConf(false);
        org.apache.hadoop.mapred.lib.ChainReducer.setReducer(conf,
                FileComparisionReduce.class,
                LongWritable.class, Text.class, LongWritable.class, Text.class,
                true, reducerConf);

        FileInputFormat.setInputPaths(conf, new Path(arg0[0]));
        FileOutputFormat.setOutputPath(conf, new Path(arg0[1]));

        conf.setMapOutputKeyClass(LongWritable.class);
        conf.setMapOutputValueClass(Text.class);
        conf.setOutputKeyClass(LongWritable.class);
        conf.setOutputValueClass(Text.class);

        // runJob blocks until the job finishes and throws IOException on failure,
        // so reaching this return means success.
        JobClient.runJob(conf);

        return 0;
    }
JobConf Reduc = new JobConf(false);
        ChainReducer.setReducer(conf, FileComparisionReduce.class, LongWritable.class, Text.class, LongWritable.class, Text.class, true , Reduc);