
Java: computing an average with MapReduce


I am trying to compute an average on a Hadoop standalone setup. The program will not run, even though it compiles without any errors and the jar file is created, and I believe I am using the correct command to execute it on my Hadoop setup. Could someone please look over my code and tell me what is wrong? Here is my code:

import java.io.IOException;
import java.util.StringTokenizer;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
  class sum_count{
    int sum;
    int count;  
  }
public class Average {


  public static class TokenizerMapper
       extends Mapper<Object, Text, Text, Object>{

    private final static IntWritable valueofkey = new IntWritable();
    private Text word = new Text();
    sum_count sc=new sum_count();
    public void map(Object key, Text value, Context context
                    ) throws IOException, InterruptedException {
      StringTokenizer itr = new StringTokenizer(value.toString());
      int sum=0;
      int count=0;
      int v;
      while (itr.hasMoreTokens()) {
        word.set(itr.nextToken());
        v=Integer.parseInt(word.toString());
        count=count+1;
        sum=sum+v;       
      }
      //valueofkey.set(sum);
      word.set("average");

      sc.sum=sum;
      sc.count=count;

    // context.write(word, valueofkey);
     // valueofkey.set(count);
      //  word.set("count");
      context.write(word,sc);
    }
  }

  public static class IntSumReducer
       extends Reducer<Text,Object,Text,IntWritable> {
    private IntWritable result = new IntWritable();
    private IntWritable test=new IntWritable();

    public void reduce(Text key, Iterable<sum_count> values,Context context) throws IOException, InterruptedException {
      int sum = 0;
      int count=0;
      int wholesum=0;
      int wholecount=0;
      for (sum_count val : values) {
        //value=val.get();
        wholesum=wholesum+val.sum;
        wholecount=wholecount+val.count;
      }
      int res=wholesum/wholecount;
      result.set(res);
      context.write(key, result );
    }
  }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "");
    job.setJarByClass(Average.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
Here is my output:

manu@manu-Latitude-E5430-vPro:~/hadoop-2.7.2$ ./bin/hadoop jar av.jar Average bin/user/hduser/input bin/user/hduser/out12
Picked up JAVA_TOOL_OPTIONS: -javaagent:/usr/share/java/jayatanaag.jar 
16/07/01 11:19:05 INFO Configuration.deprecation: session.id is deprecated. Instead, use dfs.metrics.session-id
16/07/01 11:19:05 INFO jvm.JvmMetrics: Initializing JVM Metrics with processName=JobTracker, sessionId=
16/07/01 11:19:05 WARN mapreduce.JobResourceUploader: Hadoop command-line option parsing not performed. Implement the Tool interface and execute your application with ToolRunner to remedy this.
16/07/01 11:19:05 INFO input.FileInputFormat: Total input paths to process : 2
16/07/01 11:19:05 INFO mapreduce.JobSubmitter: number of splits:2
16/07/01 11:19:05 INFO mapreduce.JobSubmitter: Submitting tokens for job: job_local276107063_0001
16/07/01 11:19:05 INFO mapreduce.Job: The url to track the job: http://localhost:8080/
16/07/01 11:19:05 INFO mapreduce.Job: Running job: job_local276107063_0001
16/07/01 11:19:05 INFO mapred.LocalJobRunner: OutputCommitter set in config null
16/07/01 11:19:05 INFO output.FileOutputCommitter: File Output Committer Algorithm version is 1
16/07/01 11:19:05 INFO mapred.LocalJobRunner: OutputCommitter is org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
16/07/01 11:19:05 INFO mapred.LocalJobRunner: Waiting for map tasks
16/07/01 11:19:05 INFO mapred.LocalJobRunner: Starting task: attempt_local276107063_0001_m_000000_0
16/07/01 11:19:06 INFO output.FileOutputCommitter: File Output Committer Algorithm version is 1
16/07/01 11:19:06 INFO mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
16/07/01 11:19:06 INFO mapred.LocalJobRunner: Starting task: attempt_local276107063_0001_m_000001_0
16/07/01 11:19:06 INFO output.FileOutputCommitter: File Output Committer Algorithm version is 1
16/07/01 11:19:06 INFO mapred.Task:  Using ResourceCalculatorProcessTree : [ ]
16/07/01 11:19:06 INFO mapred.LocalJobRunner: map task executor complete.
16/07/01 11:19:06 WARN mapred.LocalJobRunner: job_local276107063_0001
java.lang.Exception: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
    at org.apache.hadoop.mapred.LocalJobRunner$Job.runTasks(LocalJobRunner.java:462)
    at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:522)
Caused by: java.lang.RuntimeException: java.lang.reflect.InvocationTargetException
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:134)
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:745)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
    at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
    at java.util.concurrent.FutureTask.run(FutureTask.java:262)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.reflect.InvocationTargetException
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
    at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:132)
    ... 8 more
Caused by: java.lang.NoClassDefFoundError: sum_count
    at Average$TokenizerMapper.<init>(Average.java:24)
    ... 13 more
Caused by: java.lang.ClassNotFoundException: sum_count
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    ... 14 more
16/07/01 11:19:06 INFO mapreduce.Job: Job job_local276107063_0001 running in uber mode : false
16/07/01 11:19:06 INFO mapreduce.Job:  map 0% reduce 0%
16/07/01 11:19:06 INFO mapreduce.Job: Job job_local276107063_0001 failed with state FAILED due to: NA
16/07/01 11:19:06 INFO mapreduce.Job: Counters: 0
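The stack trace shows the real problem: Caused by: java.lang.ClassNotFoundException: sum_count. The map task cannot load the helper class, and even if it could, a plain Java class will not work as a map output value: everything Hadoop shuffles between the map and reduce phases must implement org.apache.hadoop.io.Writable so the framework can serialize and deserialize it. Define the value type as a public Writable class in its own file instead: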
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.hadoop.io.Writable;

public class SumCount implements Writable {

    public int sum;
    public int count;

    @Override
    public void write(DataOutput out) throws IOException {
        // serialize both fields in a fixed order
        out.writeInt(sum);
        out.writeInt(count);
    }

    @Override
    public void readFields(DataInput in) throws IOException {
        // read the fields back in the same order they were written
        sum = in.readInt();
        count = in.readInt();
    }
}
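The job must also be told the map output types explicitly, because they now differ from the job's final output types (Text and IntWritable):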
job.setMapOutputKeyClass(Text.class);
job.setMapOutputValueClass(SumCount.class);
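Finally, the mapper and reducer type parameters have to be updated to match, and the job.setCombinerClass(IntSumReducer.class) line should be removed: a combiner must consume and produce the map output types, but IntSumReducer emits IntWritable values that the reducer cannot read back as SumCount. A minimal sketch of the corrected classes (same logic as the original; it assumes the SumCount class above and the imports already present in Average.java):

public static class TokenizerMapper
     extends Mapper<Object, Text, Text, SumCount> {

  private final Text word = new Text("average");
  private final SumCount sc = new SumCount();

  @Override
  public void map(Object key, Text value, Context context)
      throws IOException, InterruptedException {
    StringTokenizer itr = new StringTokenizer(value.toString());
    int sum = 0;
    int count = 0;
    while (itr.hasMoreTokens()) {
      // every token is expected to be an integer
      sum += Integer.parseInt(itr.nextToken());
      count++;
    }
    sc.sum = sum;
    sc.count = count;
    context.write(word, sc);   // value type now matches the declaration
  }
}

public static class IntSumReducer
     extends Reducer<Text, SumCount, Text, IntWritable> {

  private final IntWritable result = new IntWritable();

  @Override
  public void reduce(Text key, Iterable<SumCount> values, Context context)
      throws IOException, InterruptedException {
    int wholesum = 0;
    int wholecount = 0;
    for (SumCount val : values) {
      wholesum += val.sum;
      wholecount += val.count;
    }
    // integer average over all numbers seen in all splits
    result.set(wholesum / wholecount);
    context.write(key, result);
  }
}

With these changes the job should run; note that wholesum / wholecount is integer division, so the average is truncated toward zero.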