Java Hadoop 程序未执行——正在引发空指针异常

Java hadoop程序未执行。。。。正在引发空指针异常,java,hadoop,nullpointerexception,Java,Hadoop,Nullpointerexception,} 映射器类: public class signal_identifier { private static final Log LOG = LogFactory.getLog(signal_identifier.class); public static void main(String[] args) throws Exception { long t = cvGetTickCount(); Configuration conf = new Configuration()

}

映射器类:

// Driver class: configures and submits the TrafficSignalProcessing MapReduce job.
public class signal_identifier {
private static final Log LOG = LogFactory.getLog(signal_identifier.class);
/**
 * Configures and submits the TrafficSignalProcessing MapReduce job.
 *
 * BUG FIX: the original code called job.setOutputValueClass(TextOutputFormat.class),
 * which overwrote the IntWritable output *value* class with an OutputFormat type.
 * The intended call is job.setOutputFormatClass(TextOutputFormat.class).
 */
public static void main(String[] args) throws Exception {
    long t = cvGetTickCount();
    Configuration conf = new Configuration();
    // 30-minute task timeout so long-running video processing is not killed by Hadoop.
    long milliSeconds = 1800000;
    conf.setLong("mapred.task.timeout", milliSeconds);
    Job job = new Job(conf, "TrafficSignalProcessing");
    job.setJarByClass(signal_identifier.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(signal_mapper.class);
    job.setReducerClass(signal_reducer.class);

    job.setInputFormatClass(VideoInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.addInputPath(job, new Path("hdfs://localhost:9000/tmp/traffic_signal.mp4"));

    FileOutputFormat.setOutputPath(job, new Path("hdfs://localhost:9000/tmp/ouputv"));
    job.waitForCompletion(true);

}
  // Mapper over (Text fileName, VideoObject video) pairs: detects red/green
  // traffic-signal colours in the video and emits detection times in seconds.
  // NOTE(review): this appears to be a non-static inner class — Hadoop must be
  // able to instantiate the mapper reflectively; confirm it is static/top-level.
  public class signal_mapper extends Mapper<Text, VideoObject, Text, IntWritable> {

// NOTE(review): static mutable state shared by all helper methods; not safe
// for multi-threaded mappers — confirm one map task per JVM.
private static final Log LOG = LogFactory.getLog(signal_mapper.class); 
// Frame source opened on the local copy of the video file.
private static OpenCVFrameGrabber grabber;
// Most recently grabbed frame (written by queryFrame()).
private static IplImage currentFrame;
// Unused at class scope; queryFrame() shadows it with a local variable.
private static IplImage frame;
// HSV conversion of the cropped region of interest.
private static IplImage imgHSV;
// Binary mask of pixels inside the red hue range.
private static IplImage imgThresholdr;
// Binary mask of pixels inside the green hue range.
private static IplImage imgThresholdg;
// Scratch single-channel image (allocated in map() but never read).
private static IplImage imgC;

// HSV hue bounds for the red and green lamps (8-bit OpenCV hue spans 0-180).
static int LowerRed = 160;
static int UpperRed = 180;
static int LowerGreen = 40;
static int UpperGreen = 80;

CvArr mask;
//private static final int FOURCC = CV_FOURCC('X', 'V', 'I', 'D');
/**
 * Processes one video: grabs frames, thresholds the HSV image for the red and
 * green lamp hue ranges, and emits the offset (seconds since the first frame)
 * at which each colour is detected.
 *
 * BUG FIX (the NullPointerException in the question): the original signature
 * appended old-mapred-API parameters (OutputCollector, Reporter) to a class
 * extending the new-API Mapper. That method never overrode Mapper.map(), so
 * the framework ran the default identity mapper with mismatched key/value
 * types. The correct new-API signature is map(key, value, context) and
 * results are emitted via context.write().
 */
public void map(Text key, VideoObject value, Context context) throws IOException, InterruptedException {

    ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(value.getVideoByteArray());
    LOG.info("Log__VideoConverter__byteArray: "+ byteArrayInputStream.available());

    String fileName = key.toString();
    long differencer = 0;
    long differenceg = 0;
    long lStartTime = 0;
    String flag = "start";
    //LocalFileSystem fs = FileSystem.getLocal(context.getConfiguration());
    Path filePath = new Path("/tmp", fileName);
    System.out.println("File to Process :"+filePath.toString());
    // NOTE(review): the code that wrote the video bytes to filePath is
    // commented out, so the grabber may open a file that does not exist —
    // confirm the local copy is produced elsewhere.
    //FSDataOutputStream out = fs.create(filePath, true);
    //out.write(value.getVideoByteArray());
    //out.close();
    try{
        grabber = new OpenCVFrameGrabber(filePath.toString());
        grabber.start();
        CvMemStorage storage = CvMemStorage.create();
        CvSize frameSize = new CvSize(grabber.getImageWidth(), grabber.getImageHeight());
        currentFrame = cvCreateImage(frameSize, 8, 3);
        IplImage cropped;

        // Region of interest containing the traffic signal.
        CvRect r = new CvRect(250, 40, 350, 350);

        System.out.println("Video processing .........started");

        while(queryFrame()) {
            cvClearMemStorage(storage);
            if(flag.equals("start")){
                lStartTime = new Date().getTime();
            }
            cvSetImageROI(currentFrame, r);
            cropped = cvCreateImage(cvGetSize(currentFrame), currentFrame.depth(),currentFrame.nChannels());
            // Copy original image (only ROI) to the cropped image.
            cvCopy(currentFrame, cropped);

            imgHSV = cvCreateImage(cvGetSize(cropped), 8, 3);
            cvCvtColor(cropped, imgHSV, CV_BGR2HSV);

            imgThresholdr = cvCreateImage(cvGetSize(cropped), 8, 1);
            imgThresholdg = cvCreateImage(cvGetSize(cropped), 8, 1);
            imgC = cvCreateImage(cvGetSize(cropped),8,1);

            // Binary masks of pixels inside the red / green hue ranges.
            cvInRangeS(imgHSV, cvScalar(LowerRed,150,75,0), cvScalar(UpperRed, 255, 255, 0), imgThresholdr);
            cvInRangeS(imgHSV, cvScalar(LowerGreen,150,75,0), cvScalar(UpperGreen, 255, 255, 0), imgThresholdg);

            Dimension positionr = getCoordinates(imgThresholdr);
            int posr = positionr.width+positionr.height;
            Dimension positiong = getCoordinates(imgThresholdg);
            int posg = positiong.width+positiong.height;

            // Centroid-sum windows that identify the lamp positions in the ROI
            // — magic ranges; TODO confirm against the camera geometry.
            if(posr > 255 && posr < 265 ){
                flag = "red";
            }else {
                long lEndTime = new Date().getTime();
                differenceg = (lEndTime - lStartTime) - differencer;
                context.write(new Text("Green Color found at second- => "),new IntWritable((int)differenceg/1000));
            }

            if(posg > 430 && posg < 440){
                flag = "green"; 
            }else{
                long lEndTime = new Date().getTime();
                differencer = (lEndTime - lStartTime) - differenceg;
                context.write(new Text("Red Color found at second- => "),new IntWritable((int)differencer/1000));
            }
        }
        grabber.stop();
        System.out.println("Video processing .........Completed");

    }catch(Exception e) {
        // NOTE(review): swallowing hides grab/decode failures from Hadoop;
        // consider rethrowing as IOException so the task can be retried.
        e.printStackTrace();
    }

}

/**
 * Grabs the next video frame into the shared currentFrame buffer.
 *
 * @return true when a frame was grabbed and converted; false when the stream
 *         is exhausted or any grabbing error occurs (errors are suppressed).
 */
private static boolean queryFrame() throws Exception {
    try {
        IplImage grabbed = grabber.grab();
        if (grabbed == null) {
            return false;
        }
        cvConvertImage(grabbed, currentFrame, 0);
        return true;
    } catch (Exception e) {
        // Covers com.googlecode.javacv.FrameGrabber.Exception and everything
        // else; every failure is treated the same as end-of-stream.
        return false;
    }
}

/**
 * Returns the centroid (x, y) of the white pixels in a binary threshold
 * image, computed from first-order spatial moments.
 *
 * BUG FIX: when the mask contains no white pixels the zeroth moment (area)
 * is 0 and the original code divided by zero, casting Infinity/NaN to int.
 * An empty mask now yields (0, 0), which the caller's position-window
 * heuristics treat as "colour not found".
 */
static Dimension getCoordinates(IplImage thresholdImage) {
      CvMoments moments = new CvMoments();
      cvMoments(thresholdImage, moments, 1);
      double momX10 = cvGetSpatialMoment(moments, 1, 0);
      double momY01 = cvGetSpatialMoment(moments, 0, 1);
      double area = cvGetCentralMoment(moments, 0, 0);
      if (area == 0.0) {
          return new Dimension(0, 0);
      }
      return new Dimension((int) (momX10 / area), (int) (momY01 / area));
  }
公共类信号映射器扩展映射器{
私有静态最终日志=LogFactory.getLog(信号映射器.class);
私有静态OpenCVFrameGrabber抓取器;
私有静态IplImage-currentFrame;
专用静态IplImage框架;
专用静态IplImage imgHSV;
私有静态IplImage imgThresholdr;
私有静态IplImage imgThresholdg;
专用静态IplImage-imgC;
静态积分下限=160;
静态int UpperRed=180;
静态int-LowerGreen=40;
静态int UpperGreen=80;
CvArr掩模;
//私有静态final int-FOURCC=CV_-FOURCC('X','V','I','D');
公共void映射(文本键、VideoObject值、上下文上下文、OutputCollector输出、Reporter报告器)抛出IOException、InterruptedException{
ByteArrayInputStream ByteArrayInputStream=新的ByteArrayInputStream(value.getVideoByteArray());
LOG.info(“LOG\uu VideoConverter\uuu byteArray:+byteArrayInputStream.available());
字符串文件名=key.toString();
int id=value.getId();
长差=0;
长差g=0;
长lStartTime=0;
String flag=“开始”;
//LocalFileSystem fs=FileSystem.getLocal(context.getConfiguration());
路径filePath=新路径(“/tmp”,文件名);
//路径resFile=新路径(“/tmp”、“res_u2;”+文件名);
System.out.println(“要处理的文件:+filePath.toString());
//FSDataOutputStream out=fs.create(filePath,true);
//out.write(value.getVideoByteArray());
//out.close();
试一试{
grabber=新的OpenCVFrameGrabber(filePath.toString());
grabber.start();
CvMemStorage=CvMemStorage.create();
CvSize frameSize=新的CvSize(grabber.getImageWidth(),grabber.getImageHeight());
currentFrame=cvCreateImage(frameSize,8,3);
IplImage裁剪;/=cvCreateImage(帧大小,8,3);
CvRect r=新的CvRect(250,40,350,350);
System.out.println(“视频处理……已启动”);
while(queryFrame()){
cvClearMemStorage(存储);
if(标志等于(“开始”)){
lStartTime=新日期().getTime();
}
cvSetImageROI(当前帧,r);
裁剪=cvCreateImage(cvGetSize(currentFrame)、currentFrame.depth()、currentFrame.nChannels());
//将原始图像(仅ROI)复制到裁剪后的图像
cvCopy(当前帧,裁剪);
imgHSV=cvCreateImage(cvGetSize(裁剪),8,3);
CVT颜色(裁剪、imgHSV、CV_BGR2HSV);
imgThresholdr=cvCreateImage(cvGetSize(裁剪),8,1);
imgThresholdg=cvCreateImage(cvGetSize(裁剪),8,1);
imgC=cvCreateImage(cvGetSize(裁剪),8,1);
cvInRangeS(imgHSV、cvScalar(下标150,75,0)、cvScalar(上标255,255,0)、imgThresholdr);
cvInRangeS(imgHSV,cvScalar(低绿色,150,75,0),cvScalar(高绿色,255,255,0),imgThresholdg);
尺寸定位器=获取坐标(imgThresholdr);
int posr=定位器宽度+定位器高度;
尺寸位置g=获取坐标(imgThresholdg);
int posg=位置G.宽度+位置G.高度;
//&&!flag.equalsIgnoreCase(“红色”)和&!flag.equalsIgnoreCase(“绿色”)
if(posr>255&&posr<265){
flag=“红色”;
}否则{
long lEndTime=new Date().getTime();
差分g=(lEndTime-lstartime)-差分器;
output.collect(新文本(“第二个绿色-=>”),新的intwriteable((int)differenceg/1000));
//System.out.println(“第二个绿色:“+differenceg/1000”);
}
如果(posg>430&&posg<440){
flag=“绿色”;
}否则{
long lEndTime=new Date().getTime();
差分器=(lEndTime-lstartime)-differenceg;
output.collect(新文本(“第二个红色-=>”),新的intwriteable((int)differenticer/1000));
//System.out.println(“第二个红色:“+differencer/1000”);
}
}
抓取器。停止();
System.out.println(“视频处理……完成”);
}捕获(例外e){
e、 printStackTrace();
}
}
私有静态布尔queryFrame()引发异常{
试试{
IplImage frame=grabber.grab();
如果(帧!=null){
cvConvertImage(帧,当前帧,0);
返回true;
}否则{
返回false;
}
}catch(com.googlecode.javacv.FrameGrabber.Exception fge){
返回false;
}
捕获(例外e){
返回false;
}
}
静态标注getCoordinates(IplImage thresholdImage){
int posX=0;
int-posY=0;
CV矩=新CV矩();
CVM矩(阈值图像,矩,1);
双momX10=cvGetSpatialMoment(矩,1,0);
双momY01=cvGetSpatialMoment(矩,0,1);
双面积=cvGetCentralMoment(矩,0,0);
posX=(int)(momX10/面积);
posY=(int)(momY01/区域);
返回新维度(posX、posY);
}
}

减速器等级:

// Driver class: configures and submits the TrafficSignalProcessing MapReduce job.
// NOTE(review): this is a duplicate of the driver posted earlier on the page,
// not the reducer the preceding heading announces.
public class signal_identifier {
private static final Log LOG = LogFactory.getLog(signal_identifier.class);
/**
 * Configures and submits the TrafficSignalProcessing MapReduce job.
 *
 * BUG FIX: the original code called job.setOutputValueClass(TextOutputFormat.class),
 * which overwrote the IntWritable output *value* class with an OutputFormat type.
 * The intended call is job.setOutputFormatClass(TextOutputFormat.class).
 */
public static void main(String[] args) throws Exception {
    long t = cvGetTickCount();
    Configuration conf = new Configuration();
    // 30-minute task timeout so long-running video processing is not killed by Hadoop.
    long milliSeconds = 1800000;
    conf.setLong("mapred.task.timeout", milliSeconds);
    Job job = new Job(conf, "TrafficSignalProcessing");
    job.setJarByClass(signal_identifier.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(signal_mapper.class);
    job.setReducerClass(signal_reducer.class);

    job.setInputFormatClass(VideoInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    FileInputFormat.addInputPath(job, new Path("hdfs://localhost:9000/tmp/traffic_signal.mp4"));

    FileOutputFormat.setOutputPath(job, new Path("hdfs://localhost:9000/tmp/ouputv"));
    job.waitForCompletion(true);

}
  // Mapper over (Text fileName, VideoObject video) pairs: detects red/green
  // traffic-signal colours in the video and emits detection times in seconds.
  // NOTE(review): this appears to be a non-static inner class — Hadoop must be
  // able to instantiate the mapper reflectively; confirm it is static/top-level.
  public class signal_mapper extends Mapper<Text, VideoObject, Text, IntWritable> {

// NOTE(review): static mutable state shared by all helper methods; not safe
// for multi-threaded mappers — confirm one map task per JVM.
private static final Log LOG = LogFactory.getLog(signal_mapper.class); 
// Frame source opened on the local copy of the video file.
private static OpenCVFrameGrabber grabber;
// Most recently grabbed frame (written by queryFrame()).
private static IplImage currentFrame;
// Unused at class scope; queryFrame() shadows it with a local variable.
private static IplImage frame;
// HSV conversion of the cropped region of interest.
private static IplImage imgHSV;
// Binary mask of pixels inside the red hue range.
private static IplImage imgThresholdr;
// Binary mask of pixels inside the green hue range.
private static IplImage imgThresholdg;
// Scratch single-channel image (allocated in map() but never read).
private static IplImage imgC;

// HSV hue bounds for the red and green lamps (8-bit OpenCV hue spans 0-180).
static int LowerRed = 160;
static int UpperRed = 180;
static int LowerGreen = 40;
static int UpperGreen = 80;

CvArr mask;
//private static final int FOURCC = CV_FOURCC('X', 'V', 'I', 'D');
/**
 * Processes one video: grabs frames, thresholds the HSV image for the red and
 * green lamp hue ranges, and emits the offset (seconds since the first frame)
 * at which each colour is detected.
 *
 * BUG FIX (the NullPointerException in the question): the original signature
 * appended old-mapred-API parameters (OutputCollector, Reporter) to a class
 * extending the new-API Mapper. That method never overrode Mapper.map(), so
 * the framework ran the default identity mapper with mismatched key/value
 * types. The correct new-API signature is map(key, value, context) and
 * results are emitted via context.write().
 */
public void map(Text key, VideoObject value, Context context) throws IOException, InterruptedException {

    ByteArrayInputStream byteArrayInputStream = new ByteArrayInputStream(value.getVideoByteArray());
    LOG.info("Log__VideoConverter__byteArray: "+ byteArrayInputStream.available());

    String fileName = key.toString();
    long differencer = 0;
    long differenceg = 0;
    long lStartTime = 0;
    String flag = "start";
    //LocalFileSystem fs = FileSystem.getLocal(context.getConfiguration());
    Path filePath = new Path("/tmp", fileName);
    System.out.println("File to Process :"+filePath.toString());
    // NOTE(review): the code that wrote the video bytes to filePath is
    // commented out, so the grabber may open a file that does not exist —
    // confirm the local copy is produced elsewhere.
    //FSDataOutputStream out = fs.create(filePath, true);
    //out.write(value.getVideoByteArray());
    //out.close();
    try{
        grabber = new OpenCVFrameGrabber(filePath.toString());
        grabber.start();
        CvMemStorage storage = CvMemStorage.create();
        CvSize frameSize = new CvSize(grabber.getImageWidth(), grabber.getImageHeight());
        currentFrame = cvCreateImage(frameSize, 8, 3);
        IplImage cropped;

        // Region of interest containing the traffic signal.
        CvRect r = new CvRect(250, 40, 350, 350);

        System.out.println("Video processing .........started");

        while(queryFrame()) {
            cvClearMemStorage(storage);
            if(flag.equals("start")){
                lStartTime = new Date().getTime();
            }
            cvSetImageROI(currentFrame, r);
            cropped = cvCreateImage(cvGetSize(currentFrame), currentFrame.depth(),currentFrame.nChannels());
            // Copy original image (only ROI) to the cropped image.
            cvCopy(currentFrame, cropped);

            imgHSV = cvCreateImage(cvGetSize(cropped), 8, 3);
            cvCvtColor(cropped, imgHSV, CV_BGR2HSV);

            imgThresholdr = cvCreateImage(cvGetSize(cropped), 8, 1);
            imgThresholdg = cvCreateImage(cvGetSize(cropped), 8, 1);
            imgC = cvCreateImage(cvGetSize(cropped),8,1);

            // Binary masks of pixels inside the red / green hue ranges.
            cvInRangeS(imgHSV, cvScalar(LowerRed,150,75,0), cvScalar(UpperRed, 255, 255, 0), imgThresholdr);
            cvInRangeS(imgHSV, cvScalar(LowerGreen,150,75,0), cvScalar(UpperGreen, 255, 255, 0), imgThresholdg);

            Dimension positionr = getCoordinates(imgThresholdr);
            int posr = positionr.width+positionr.height;
            Dimension positiong = getCoordinates(imgThresholdg);
            int posg = positiong.width+positiong.height;

            // Centroid-sum windows that identify the lamp positions in the ROI
            // — magic ranges; TODO confirm against the camera geometry.
            if(posr > 255 && posr < 265 ){
                flag = "red";
            }else {
                long lEndTime = new Date().getTime();
                differenceg = (lEndTime - lStartTime) - differencer;
                context.write(new Text("Green Color found at second- => "),new IntWritable((int)differenceg/1000));
            }

            if(posg > 430 && posg < 440){
                flag = "green"; 
            }else{
                long lEndTime = new Date().getTime();
                differencer = (lEndTime - lStartTime) - differenceg;
                context.write(new Text("Red Color found at second- => "),new IntWritable((int)differencer/1000));
            }
        }
        grabber.stop();
        System.out.println("Video processing .........Completed");

    }catch(Exception e) {
        // NOTE(review): swallowing hides grab/decode failures from Hadoop;
        // consider rethrowing as IOException so the task can be retried.
        e.printStackTrace();
    }

}

/**
 * Grabs the next video frame into the shared currentFrame buffer.
 *
 * @return true when a frame was grabbed and converted; false when the stream
 *         is exhausted or any grabbing error occurs (errors are suppressed).
 */
private static boolean queryFrame() throws Exception {
    try {
        IplImage grabbed = grabber.grab();
        if (grabbed == null) {
            return false;
        }
        cvConvertImage(grabbed, currentFrame, 0);
        return true;
    } catch (Exception e) {
        // Covers com.googlecode.javacv.FrameGrabber.Exception and everything
        // else; every failure is treated the same as end-of-stream.
        return false;
    }
}

/**
 * Returns the centroid (x, y) of the white pixels in a binary threshold
 * image, computed from first-order spatial moments.
 *
 * BUG FIX: when the mask contains no white pixels the zeroth moment (area)
 * is 0 and the original code divided by zero, casting Infinity/NaN to int.
 * An empty mask now yields (0, 0), which the caller's position-window
 * heuristics treat as "colour not found".
 */
static Dimension getCoordinates(IplImage thresholdImage) {
      CvMoments moments = new CvMoments();
      cvMoments(thresholdImage, moments, 1);
      double momX10 = cvGetSpatialMoment(moments, 1, 0);
      double momY01 = cvGetSpatialMoment(moments, 0, 1);
      double area = cvGetCentralMoment(moments, 0, 0);
      if (area == 0.0) {
          return new Dimension(0, 0);
      }
      return new Dimension((int) (momX10 / area), (int) (momY01 / area));
  }
public class signal_reducer extends Reducer<Text, IntWritable, Text, IntWritable> {
    public void reduce(Text key, Iterator<IntWritable> values,
            OutputCollector<Text, IntWritable> output, Reporter reporter)
            throws IOException {
        int sum = 0;
        while (values.hasNext()) {
            sum += values.next().get();
        }
        output.collect(key, new IntWritable(sum));
    }
}

等等,我将发布异常的堆栈跟踪

请发布异常的堆栈跟踪,这样我们就可以获得有关空指针的信息… 14/04/21 16:34:43 WARN mapred.LocalJobRunner: job_local492819261_0001 java.lang.Exception: java.lang.NullPointerException at org.apache.hadoop.mapred.LocalJobRunner$Job.run(LocalJobRunner.java:354) Caused by: java.lang.NullPointerException at org.apache.hadoop.io.s…