ChainMapper "Error in configuring object" (Java, Hadoop, MapReduce)

I am new to Hadoop and I am trying to use ChainMapper. I used the code provided in this article, but when I run the jar I get this error:

14/04/11 12:06:39 INFO util.NativeCodeLoader: Loaded the native-hadoop library
14/04/11 12:06:39 WARN snappy.LoadSnappy: Snappy native library not loaded
14/04/11 12:06:39 INFO mapred.FileInputFormat: Total input paths to process : 1
14/04/11 12:06:39 INFO mapred.JobClient: Running job: job_201404111150_0004
14/04/11 12:06:40 INFO mapred.JobClient:  map 0% reduce 0%
14/04/11 12:07:02 INFO mapred.JobClient: Task Id : attempt_201404111150_0004_m_000000_0, Status : FAILED
java.lang.RuntimeException: Error in configuring object
    at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:432)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:372)
    at org.apache.hadoop.mapred.Child$4.run(Child.java:255)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:416)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
    at org.apache.hadoop.mapred.Child.main(Child.java:249)
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:622)
    at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
    ... 9 more
Caused by: java.lang.RuntimeException: Error in configuring object
    at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:93)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:64)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:117)
    at org.apache.hadoop.mapred.MapRunner.configure(MapRunner.java:34)
    ... 14 more
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:622)
    at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:88)
    ... 17 more
Caused by: java.lang.RuntimeException: java.lang.NoSuchMethodException: ChainDriver$UpperCaserMapper.<init>()
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:115)
    at org.apache.hadoop.mapred.lib.Chain.configure(Chain.java:330)
    at org.apache.hadoop.mapred.lib.ChainMapper.configure(ChainMapper.java:152)
    ... 22 more
Caused by: java.lang.NoSuchMethodException: ChainDriver$UpperCaserMapper.<init>()
    at java.lang.Class.getConstructor0(Class.java:2813)
    at java.lang.Class.getDeclaredConstructor(Class.java:2053)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:109)
    ... 24 more
My full code is:

import java.io.IOException;
import java.net.URI;
import java.util.Iterator;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.ChainMapper;
import org.apache.hadoop.mapred.lib.ChainReducer;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

public class ChainDriver extends Configured implements Tool {
    public static class TokenizerMapper extends MapReduceBase implements
            Mapper<LongWritable, Text, Text, IntWritable> {
        private final static IntWritable one = new IntWritable(1);
        private Text word = new Text();

        public void map(LongWritable key, Text value, OutputCollector output,
                Reporter reporter) throws IOException {
            String line = value.toString();
            System.out.println("Line:" + line);
            StringTokenizer itr = new StringTokenizer(line);
            while (itr.hasMoreTokens()) {
                word.set(itr.nextToken());
                output.collect(word, one);
            }
        }
    }

    public class UpperCaserMapper extends MapReduceBase implements
            Mapper<Text, IntWritable, Text, IntWritable> {

        public void map(Text key, IntWritable value, OutputCollector output,
                Reporter reporter) throws IOException {
            String word = key.toString().toUpperCase();
            System.out.println("Upper Case:" + word);
            output.collect(new Text(word), value);
        }
    }

    public class WordCountReducer extends MapReduceBase implements
            Reducer<Text, IntWritable, Text, IntWritable> {

        public void reduce(Text key, Iterator values, OutputCollector output,
                Reporter reporter) throws IOException {
            int sum = 0;
            output.collect(key, new IntWritable(sum));
        }
    }

    static int printUsage() {
        System.out.println("wordcount ");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
    }

    public int run(String[] args) throws Exception {
        JobConf conf = new JobConf(getConf(), ChainDriver.class);
        conf.setJobName("wordcount");

        Path outputPath = new Path("/user/hduser/output");
        FileSystem fs = FileSystem.get(new URI(outputPath.toString()), conf);
        // Delete the output directory if it already exists, so it does not
        // have to be removed manually
        fs.delete(outputPath);

        // Setting the input and output path
        FileInputFormat.setInputPaths(conf, "/user/hduser/chain");
        FileOutputFormat.setOutputPath(conf, outputPath);

        // The input and output are plain text files, so use TextInputFormat
        // and TextOutputFormat
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);

        JobConf mapAConf = new JobConf(false);
        ChainMapper.addMapper(conf, TokenizerMapper.class, LongWritable.class,
                Text.class, Text.class, IntWritable.class, true, mapAConf);

        // addMapper takes the global conf object, the mapper class, the input
        // and output key/value types for this mapper, whether outputs are
        // passed by value or by reference, and a local JobConf specific to
        // this call

        JobConf mapBConf = new JobConf(false);
        ChainMapper.addMapper(conf, UpperCaserMapper.class, Text.class,
                IntWritable.class, Text.class, IntWritable.class, true,
                mapBConf);

        JobConf reduceConf = new JobConf(false);
        ChainReducer.setReducer(conf, WordCountReducer.class, Text.class,
                IntWritable.class, Text.class, IntWritable.class, true,
                reduceConf);

        // JobConf mapCConf = new JobConf(false);
        // ChainReducer.addMapper(conf, LastMapper.class, Text.class,
        //         IntWritable.class, Text.class, IntWritable.class, true,
        //         mapCConf);
        JobClient.runJob(conf);
        return 0;
    }

    public static void main(String[] args) throws Exception {
        int res = ToolRunner.run(new Configuration(),
                new ChainDriver(), args);
        System.exit(res);
    }
}

I found where the error was: all I needed to do was make all of the mappers and the reducer static classes.
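
For reference: Hadoop instantiates mappers and reducers through reflection, which requires a no-argument constructor. A non-static inner class has no such constructor, because the compiler adds a hidden parameter for the enclosing ChainDriver instance; that is exactly what NoSuchMethodException: ChainDriver$UpperCaserMapper.<init>() is reporting. Here is a minimal sketch of the corrected declarations (it also sums the counts in reduce(), which the original left at 0; summing is presumably the intent for a word count):

    public static class UpperCaserMapper extends MapReduceBase implements
            Mapper<Text, IntWritable, Text, IntWritable> {

        public void map(Text key, IntWritable value,
                OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            // Static nested class: reflection can now call the implicit
            // no-arg constructor
            output.collect(new Text(key.toString().toUpperCase()), value);
        }
    }

    public static class WordCountReducer extends MapReduceBase implements
            Reducer<Text, IntWritable, Text, IntWritable> {

        public void reduce(Text key, Iterator<IntWritable> values,
                OutputCollector<Text, IntWritable> output, Reporter reporter)
                throws IOException {
            // Accumulate the counts for this word
            int sum = 0;
            while (values.hasNext()) {
                sum += values.next().get();
            }
            output.collect(key, new IntWritable(sum));
        }
    }

TokenizerMapper was already declared static, so only UpperCaserMapper and WordCountReducer need the modifier; nothing in run() has to change.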