java.lang.reflect.InvocationTargetException in Reduce DataJoin (Hadoop running)

Tags: java, hadoop

I am running into a problem with the DataJoin example in Hadoop. When the job runs, it appears to throw java.lang.reflect.InvocationTargetException. I have been trying for a day, with no luck. What am I doing wrong?

Here is the exception:

12/12/30 01:54:06 INFO mapred.JobClient: Task Id : attempt_201212280853_0032_m_000000_2, Status : FAILED
java.lang.RuntimeException: Error in configuring object
    at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:106)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:72)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:130)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:389)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:327)
    at org.apache.hadoop.mapred.Child$4.run(Child.java:268)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:396)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1332)
    at org.apache.hadoop.mapred.Child.main(Child.java:262)
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.ja
Below is the code, taken from Hadoop in Action. My input directory contains customers.txt and orders.txt. I tried renaming these two files in the input directory to part-0000.txt and part-0001.txt, but it still does not work. (See the note on input-file tags after the listing.)

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import org.apache.hadoop.contrib.utils.join.DataJoinMapperBase;
import org.apache.hadoop.contrib.utils.join.DataJoinReducerBase;
import org.apache.hadoop.contrib.utils.join.TaggedMapOutput;

/**
 * DataJoinMapperBase has a method - http://hadoop.apache.org/docs/mapreduce/r0.21.0/api/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.html
 * 
 * map(Object key, Object value, OutputCollector output, Reporter reporter) 
 *  
 */

public class DataJoin extends Configured implements Tool {

    public static class MapClass extends DataJoinMapperBase {


        protected Text generateInputTag(String inputFile) {
            String datasource = inputFile.split("-")[0];
            return new Text(datasource);

        }


        protected Text generateGroupKey(TaggedMapOutput aRecord) {
            String line = ((Text) aRecord.getData()).toString();
            String[] tokens = line.split(",");
            String groupKey = tokens[0];
            return new Text(groupKey);
        }

        protected TaggedMapOutput generateTaggedMapOutput(Object value) {
            TaggedWritable retv = new TaggedWritable();
            retv.setData((Text) value);
            retv.setTag(this.inputTag);
            return retv;
        }
    }

    public static class Reduce extends DataJoinReducerBase {

        protected TaggedMapOutput combine(Object[] tags, Object[] values) {
            if (tags.length < 2) return null;  
            String joinedStr = ""; 
            for (int i=0; i<values.length; i++) {
                if (i > 0) joinedStr += ",";
                TaggedWritable tw = (TaggedWritable) values[i];
                String line = ((Text) tw.getData()).toString();
                String[] tokens = line.split(",", 2);
                joinedStr += tokens[1];
            }
            TaggedWritable retv = new TaggedWritable();
            retv.setData(new Text(joinedStr));
            retv.setTag((Text) tags[0]); 
            return retv;
        }
    }

    public static class TaggedWritable extends TaggedMapOutput {

        private Writable data;

//        public TaggedWritable(Writable data) {
//            this.tag = new Text("");
//            this.data = data;
//        }
        public TaggedWritable() {
            this.tag = new Text();
        }        

        public Writable getData() {
            return data;
        }

        public void setData(Writable data) {
            this.data = data;
        }        

        public void write(DataOutput out) throws IOException {
            this.tag.write(out);
            this.data.write(out);
        }

        public void readFields(DataInput in) throws IOException {
            this.tag.readFields(in);
            String dataClz = in.readUTF();
            if (this.data == null
                    || !this.data.getClass().getName().equals(dataClz)) {
                try {
                    this.data = (Writable) ReflectionUtils.newInstance(
                            Class.forName(dataClz), null);
                } catch (ClassNotFoundException e) {
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                }
            }
            this.data.readFields(in);
        }        

//        public void readFields(DataInput in) throws IOException {
//            this.tag.readFields(in);
//            this.data.readFields(in);
//        }
    }

    public int run(String[] args) throws Exception {
        Configuration conf = getConf();

        JobConf job = new JobConf(conf, DataJoin.class);

        Path in = new Path(args[0]);
        Path out = new Path(args[1]);
        FileInputFormat.setInputPaths(job, in);
        FileOutputFormat.setOutputPath(job, out);

        job.setJobName("DataJoin");
        job.setMapperClass(MapClass.class);
        job.setReducerClass(Reduce.class);

        job.setInputFormat(TextInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(TaggedWritable.class);
        job.set("mapred.textoutputformat.separator", ",");

        JobClient.runJob(job); 
        return 0;
    }

    public static void main(String[] args) throws Exception { 
        int res = ToolRunner.run(new Configuration(),
                                 new DataJoin(),
                                 args);

        System.exit(res);
    }
}
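
(Editor's note, not part of the original post: generateInputTag above derives each record's tag from the input file name with inputFile.split("-")[0]. Below is a minimal, stand-alone sketch of what that expression yields for the file names mentioned; the TagCheck class and its tagFor helper are hypothetical and exist only for illustration.)

// Hypothetical stand-alone check of the split("-")[0] expression used in MapClass.generateInputTag.
public class TagCheck {

    static String tagFor(String inputFile) {
        return inputFile.split("-")[0];               // same expression as in generateInputTag
    }

    public static void main(String[] args) {
        System.out.println(tagFor("customers.txt"));  // prints "customers.txt" (no "-" to split on)
        System.out.println(tagFor("orders.txt"));     // prints "orders.txt"
        System.out.println(tagFor("part-0000.txt"));  // prints "part"
        System.out.println(tagFor("part-0001.txt"));  // prints "part" as well
    }
}

With the renamed files, both inputs therefore end up under the same tag "part", whereas customers.txt and orders.txt each keep a distinct tag.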


I hit a similar problem, and java.lang.reflect.InvocationTargetException was the root cause... What did I do wrong?

12/12/30 01:54:06 INFO mapred.JobClient: Task Id : attempt_201212280853_0032_m_000000_2, Status : FAILED
java.lang.RuntimeException: Error in configuring object
    at org.apache.hadoop.util.ReflectionUtils.setJobConf(ReflectionUtils.java:106)
    at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:72)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:130)
    at org.apache.hadoop.mapred.MapTask.runOldMapper(MapTask.java:389)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:327)
    at org.apache.hadoop.mapred.Child$4.run(Child.java:268)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:396)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1332)
    at org.apache.hadoop.mapred.Child.main(Child.java:262)
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.ja
Below is the code, taken from Hadoop in Action:

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Iterator;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;

import org.apache.hadoop.contrib.utils.join.DataJoinMapperBase;
import org.apache.hadoop.contrib.utils.join.DataJoinReducerBase;
import org.apache.hadoop.contrib.utils.join.TaggedMapOutput;

/**
 * DataJoinMapperBase has a method - http://hadoop.apache.org/docs/mapreduce/r0.21.0/api/org/apache/hadoop/contrib/utils/join/DataJoinMapperBase.html
 * 
 * map(Object key, Object value, OutputCollector output, Reporter reporter) 
 *  
 */

public class DataJoin extends Configured implements Tool {

    public static class MapClass extends DataJoinMapperBase {


        protected Text generateInputTag(String inputFile) {
            String datasource = inputFile.split("-")[0];
            return new Text(datasource);

        }


        protected Text generateGroupKey(TaggedMapOutput aRecord) {
            String line = ((Text) aRecord.getData()).toString();
            String[] tokens = line.split(",");
            String groupKey = tokens[0];
            return new Text(groupKey);
        }

        protected TaggedMapOutput generateTaggedMapOutput(Object value) {
            TaggedWritable retv = new TaggedWritable();
            retv.setData((Text) value);
            retv.setTag(this.inputTag);
            return retv;
        }
    }

    public static class Reduce extends DataJoinReducerBase {

        protected TaggedMapOutput combine(Object[] tags, Object[] values) {
            if (tags.length < 2) return null;  
            String joinedStr = ""; 
            for (int i=0; i<values.length; i++) {
                if (i > 0) joinedStr += ",";
                TaggedWritable tw = (TaggedWritable) values[i];
                String line = ((Text) tw.getData()).toString();
                String[] tokens = line.split(",", 2);
                joinedStr += tokens[1];
            }
            TaggedWritable retv = new TaggedWritable();
            retv.setData(new Text(joinedStr));
            retv.setTag((Text) tags[0]); 
            return retv;
        }
    }

    public static class TaggedWritable extends TaggedMapOutput {

        private Writable data;

//        public TaggedWritable(Writable data) {
//            this.tag = new Text("");
//            this.data = data;
//        }
        public TaggedWritable() {
            this.tag = new Text();
        }        

        public Writable getData() {
            return data;
        }

        public void setData(Writable data) {
            this.data = data;
        }        

        public void write(DataOutput out) throws IOException {
            this.tag.write(out);
            this.data.write(out);
        }

        /***

          //  public void readFields(DataInput in) throws IOException {
          //  this.tag.readFields(in);
          //  String dataClz = in.readUTF();
          //  if (this.data == null
          //          || !this.data.getClass().getName().equals(dataClz)) {
          //      try {
          //          this.data = (Writable) ReflectionUtils.newInstance(
          //                  Class.forName(dataClz), null);
          //      } catch (ClassNotFoundException e) {
          //          // TODO Auto-generated catch block
          //          e.printStackTrace();
          //      }
          //  }
          //this.data.readFields(in);
        //}

        *****/        

        public void readFields(DataInput in) throws IOException {
            this.tag.readFields(in);
            this.data.readFields(in);
        }
    }

    public int run(String[] args) throws Exception {
        Configuration conf = getConf();

        JobConf job = new JobConf(conf, DataJoin.class);

        Path in = new Path(args[0]);
        Path out = new Path(args[1]);
        FileInputFormat.setInputPaths(job, in);
        FileOutputFormat.setOutputPath(job, out);

        job.setJobName("DataJoin");
        job.setMapperClass(MapClass.class);
        job.setReducerClass(Reduce.class);

        job.setInputFormat(TextInputFormat.class);
        job.setOutputFormat(TextOutputFormat.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(TaggedWritable.class);
        job.set("mapred.textoutputformat.separator", ",");

        JobClient.runJob(job); 
        return 0;
    }

    public static void main(String[] args) throws Exception { 
        int res = ToolRunner.run(new Configuration(),
                                 new DataJoin(),
                                 args);

        System.exit(res);
    }
}
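
(Editor's note comparing the two listings, not part of either post: in the first listing, TaggedWritable.readFields reads a UTF-encoded class name after the tag via in.readUTF() and instantiates data reflectively, yet the write shown with it only emits the tag and the data, never a class name; in the second listing, readFields reads straight into this.data, which the no-argument constructor leaves null. Purely as an illustrative sketch, a write that would at least be symmetric with the first listing's readFields, placed inside that TaggedWritable class, might look like this; it is hypothetical and not taken from either post.)

        public void write(DataOutput out) throws IOException {
            this.tag.write(out);
            // Emit the concrete class of `data` so that readFields' in.readUTF() finds a class name on the stream.
            out.writeUTF(this.data.getClass().getName());
            this.data.write(out);
        }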