
Hadoop reduce does not run after map reaches 100%

Today I ran a job on a two-node Hadoop cluster. My program hangs after map reaches 100%, and the reduce phase never runs. The output returned includes:

14/01/15 20:30:49 INFO input.FileInputFormat: Total input paths to process : 25
14/01/15 20:30:49 INFO util.NativeCodeLoader: Loaded the native-hadoop library
14/01/15 20:30:49 WARN snappy.LoadSnappy: Snappy native library not loaded
14/01/15 20:30:51 INFO mapred.JobClient: Running job: job_201401152029_0001
14/01/15 20:30:52 INFO mapred.JobClient:  map 0% reduce 0%
14/01/15 20:30:58 INFO mapred.JobClient:  map 7% reduce 0%
14/01/15 20:31:01 INFO mapred.JobClient:  map 15% reduce 0%
14/01/15 20:31:22 INFO mapred.JobClient:  map 100% reduce 0%
My source code is:

PollingImage.java:

package imagePolling;
import java.io.IOException;
import java.util.*;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.conf.*;

import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.util.*;

public class PollingImage extends Configured implements Tool{
public int run(String[] args) throws Exception{
    Job job = new Job(getConf());
    job.setJarByClass(PollingImage.class);
    job.setJobName("polling_image");

    job.setMapperClass(PollingMapper.class);
    job.setReducerClass(PollingReducer.class);
    job.setMapOutputKeyClass(Text.class);
    job.setMapOutputValueClass(Text.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    //job.setNumReduceTasks(0);
    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.setInputPaths(job,new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));

    boolean success = job.waitForCompletion(true);

    return success?0:1;
}
public static void main(String[] args) {
    try{
        int ret = ToolRunner.run(new PollingImage(), args);
        System.exit(ret);
    }catch(Exception ex){
        System.out.println(ex.getMessage());
    }
}
}
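
For reference, a driver like this is normally submitted with the hadoop jar command; the jar name and the HDFS input/output paths below are only placeholders, not values from the question:

hadoop jar pollingImage.jar imagePolling.PollingImage /user/hadoop/images /user/hadoop/polling_out
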
PollingMapper.java:

package imagePolling;
import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.*;

public class PollingMapper extends Mapper<LongWritable, Text, Text, Text> {
private Text nameKey=new Text();
public void map(LongWritable key, Text value, Context ctx) throws IOException, InterruptedException {
    Path filePath=((FileSplit)ctx.getInputSplit()).getPath();
    String name=filePath.getName().replaceAll(".txt", "");
    nameKey = new Text(name);
    ctx.write(nameKey, value);
}
}
PollingReducer.java:

package imagePolling;
import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.Reducer;

public class PollingReducer extends Reducer<Text, Text, Text, Text> {
@Override
public void reduce(Text key,Iterable<Text> values,Context ctx) throws IOException,InterruptedException {
    Text v=new Text("a fool");
    ctx.write(key, v);
}
}

I have already read the logs. They only show that the map tasks finished; there is nothing at all about reduce.

Check your /etc/hosts file and verify the IP entry for your hostname; if you are running in pseudo-distributed mode, use the loopback address.
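
As a rough sketch (the hostnames and addresses here are placeholders, not values from the question), on a two-node cluster each node's /etc/hosts should map every hostname to its real network IP; a common cause of reduces getting stuck is the local hostname still being bound to 127.0.1.1:

127.0.0.1     localhost
192.168.1.10  master    # node running the jobtracker/namenode
192.168.1.11  slave1    # second node
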

These logs are not very helpful; you will find more useful ones on the jobtracker page. Usually the jobtracker runs on the master node:
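
For example, assuming the Hadoop 1.x defaults that the mapred.JobClient output suggests, and using "master" only as a placeholder for your master node's hostname, the JobTracker web UI is typically served on port 50030, from which you can drill down into the pending or failed reduce task attempts and their logs:

http://master:50030/jobtracker.jsp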