Java: finding a percentage in Hadoop MapReduce


I am trying to analyse some flight data (about 20 GB) with the MapReduce framework.

I need to find the percentage of flights that were delayed.

If a flight departs at most 5 minutes early or 5 minutes late, I say it is not delayed; otherwise it is delayed.

I compute this in the map method.

I am fairly sure that the map method and the reduce method of class IntSumReducer (which sums the delayed and the not-delayed flights) work correctly, but I do not know how to find the percentage of delayed flights.

So I think I need to edit the reduce method of the Reduce class.
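
In other words, the figure I am after is simply delayed / (delayed + notDelayed); for example, with made-up counts (plain Java, nothing Hadoop-specific):

// Sketch of the target calculation; the counts below are made up, not from my data.
long delayed = 120000L;       // flights classified as "Delay"
long notDelayed = 880000L;    // flights classified as "NotDelay"
double delayedPct = 100.0 * delayed / (delayed + notDelayed);
System.out.printf("delayed: %.2f%%%n", delayedPct);   // prints "delayed: 12.00%"

My current code is below.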

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class FlightAnalyse {

  public static class TokenizerMapper
          extends Mapper<Object, Text, Text, IntWritable>{

      private final static IntWritable one = new IntWritable(1);
      private Text delayOrNot = new Text();

      public void map(Object key, Text value, Context context
      ) throws IOException, InterruptedException {

        String[] columns = value.toString().split(",");
        if(columns.length > 5){
            int actualDepTime = 0;
            int scheduledDepTime = 0;

            try{
                actualDepTime = (int) Double.parseDouble(columns[4]);
                scheduledDepTime = (int) Double.parseDouble(columns[5]);
            }
            catch(NumberFormatException nfe){
                return;
            }

            //convert the HHMM departure times to minutes
            actualDepTime = ((int) actualDepTime / 100) * 60 + actualDepTime % 100;
            scheduledDepTime = ((int) scheduledDepTime / 100) * 60 + scheduledDepTime % 100;

            int diff = actualDepTime - scheduledDepTime;

            //if the difference is at most 5 minutes
            if(diff <= 5 && diff >= -5)
                delayOrNot.set("NotDelay");
            else
                delayOrNot.set("Delay");

            context.write(delayOrNot, one);
          }
      }
  }

  public static class IntSumReducer
          extends Reducer<Text,IntWritable,Text,IntWritable> {
      private IntWritable result = new IntWritable();

      public void reduce(Text key, Iterable<IntWritable> values,
                         Context context
      ) throws IOException, InterruptedException {
          int sum = 0;
          for (IntWritable val : values) {
              sum += val.get();
          }

          result.set(sum);
          context.write(key, result);
      }
  }

  public static class Reduce
          extends Reducer<Text,IntWritable,Text,FloatWritable> {
      private FloatWritable result = new FloatWritable();
      Float persentage = 0f;
      Float numOfonTime = 0f;
      Float count = 0f;
      public void reduce(Text key, Iterable<IntWritable> values,
                         Context context
      ) throws IOException, InterruptedException {

          for (IntWritable val : values) {
              count += val.get();
              if(key.toString() == "NotDelay")
                numOfonTime += val.get();
          }

          persentage = numOfonTime/count;
          result.set(persentage);

          Text sumText = new Text("persentage: ");
          context.write(sumText, result);
      }
  }

  public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      Job job = Job.getInstance(conf, "Flight Analysis");
      job.setJarByClass(FlightAnalyse.class);
      job.setMapperClass(TokenizerMapper.class);
      job.setCombinerClass(IntSumReducer.class);
      job.setReducerClass(Reduce.class);
      job.setOutputKeyClass(Text.class);
      job.setOutputValueClass(IntWritable.class);
      FileInputFormat.addInputPath(job, new Path(args[0]));
      FileOutputFormat.setOutputPath(job, new Path(args[1]));
      System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}

The error was that I was not using the equals method; a very simple mistake. I will not close the question, since the code might be useful to someone later.
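
For anyone hitting the same thing: in my original Reduce class the key was compared with key.toString() == "NotDelay", and == compares object references, not character content. A minimal standalone illustration (plain Java, no Hadoop needed; EqualsDemo is just a throwaway name):

public class EqualsDemo {
    public static void main(String[] args) {
        // Text.toString() builds a fresh String object, which new String(...) stands in for here.
        String fromText = new String("NotDelay");

        System.out.println(fromText == "NotDelay");       // false: different object references
        System.out.println(fromText.equals("NotDelay"));  // true:  same character content
    }
}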

Below is the correct code, with a few small changes:

import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class FlightAnalyse {

  public static class TokenizerMapper
          extends Mapper<Object, Text, Text, IntWritable>{

      private final static IntWritable one = new IntWritable(1);
      private Text delayOrNot = new Text();

      public void map(Object key, Text value, Context context
      ) throws IOException, InterruptedException {

        String[] columns = value.toString().split(",");
        if(columns.length > 15){
            int delay = 0;

            try{  
                delay = (int) Double.parseDouble(columns[15]); //delay in minutes

            }  
            catch(NumberFormatException nfe){  
                return;  
            }  

            //if the difference is at most 5 minutes
            if(delay <= 5 && delay >= -5)
                delayOrNot.set("NotDelay");
            else
                delayOrNot.set("Delay");

            context.write(delayOrNot, one);
          }
      }
  }

  public static class IntSumReducer
          extends Reducer<Text,IntWritable,Text,IntWritable> {
      private IntWritable result = new IntWritable();

      public void reduce(Text key, Iterable<IntWritable> values,
                         Context context
      ) throws IOException, InterruptedException {
          int sum = 0;
          for (IntWritable val : values) {
              sum += val.get();
          }

          result.set(sum);
          context.write(key, result);
      }
  }

  public static class Reduce
          extends Reducer<Text,IntWritable,Text,FloatWritable> {
      private FloatWritable result = new FloatWritable();
      // Instance fields: these keep accumulating across every key this reducer instance processes.
      Float persentage = 0f;
      Float numOfonTime = 0f;
      Float count = 0f;
      public void reduce(Text key, Iterable<IntWritable> values,
                         Context context
      ) throws IOException, InterruptedException {

          for (IntWritable val : values) {
              count += val.get();
              if(key.toString().equals("NotDelay"))
                numOfonTime += val.get();
          }

          persentage = numOfonTime/count;
          result.set(persentage);

          Text sumText = new Text("persentage of  " + key + ": ");
          context.write(sumText, result);
      }
  }

  public static void main(String[] args) throws Exception {
      Configuration conf = new Configuration();
      Job job = Job.getInstance(conf, "Flight Analysis");
      job.setJarByClass(FlightAnalyse.class);
      job.setMapperClass(TokenizerMapper.class);
      job.setCombinerClass(IntSumReducer.class);
      job.setReducerClass(Reduce.class);
      job.setOutputKeyClass(Text.class);
      job.setOutputValueClass(IntWritable.class);
      FileInputFormat.addInputPath(job, new Path(args[0]));
      FileOutputFormat.setOutputPath(job, new Path(args[1]));
      System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
}
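
One more note on the percentage itself: as the comment in the Reduce class points out, count and numOfonTime accumulate across keys, so the ratio written for a given key depends on the order in which the keys reach the reducer. If you only want a single overall figure, one possible variant (this is my own sketch, not part of the original code; the PercentageReducer name is made up) is to accumulate in reduce() and write the result once in cleanup(), with the job forced to a single reducer via job.setNumReduceTasks(1) so both keys land in the same task. It is a drop-in replacement for the Reduce class above and reuses the same imports:

  public static class PercentageReducer
          extends Reducer<Text, IntWritable, Text, FloatWritable> {

      private long total = 0;        // all flights seen by this reducer
      private long notDelayed = 0;   // flights keyed "NotDelay"

      @Override
      public void reduce(Text key, Iterable<IntWritable> values, Context context)
              throws IOException, InterruptedException {
          long sum = 0;
          for (IntWritable val : values) {
              sum += val.get();
          }
          total += sum;
          if (key.toString().equals("NotDelay")) {
              notDelayed += sum;
          }
      }

      @Override
      protected void cleanup(Context context) throws IOException, InterruptedException {
          // Runs once after all keys have been handled, so both counts are complete here.
          if (total > 0) {
              float delayedPct = 100f * (total - notDelayed) / total;
              context.write(new Text("percentage delayed"), new FloatWritable(delayedPct));
          }
      }
  }

The existing IntSumReducer can still be used as the combiner with this variant; it only pre-sums the per-key counts, which does not change the arithmetic.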