Hadoop Image Processing Interface (HIPI) Sample Program

I am studying HIPI and started with a sample program.

I cannot execute it because it always throws the following exception:

hadoop jar Desktop/edureka/workspace/jars/SampleProgramHIPI.jar hdfs:/video/sampleimages.hib hdfs:/video/sampleimages.output
15/10/16 15:59:53 WARN util.NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
Exception in thread "main" java.lang.NoClassDefFoundError: hipi/imagebundle/mapreduce/ImageBundleInputFormat
    at SampleProgram.run(SampleProgram.java:67)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:70)
    at org.apache.hadoop.util.ToolRunner.run(ToolRunner.java:84)
    at SampleProgram.main(SampleProgram.java:86)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:497)
    at org.apache.hadoop.util.RunJar.run(RunJar.java:221)
    at org.apache.hadoop.util.RunJar.main(RunJar.java:136)
Caused by: java.lang.ClassNotFoundException: hipi.imagebundle.mapreduce.ImageBundleInputFormat
    at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    ... 10 more
The build path has already been updated with the given library (hipi/core/Build/libs/hipi-2.1.0.jar).

I have checked almost everywhere! Please help me.

Here is the sample program I am trying to execute:

package hipi.image.examples;
import hipi.imagebundle.mapreduce.ImageBundleInputFormat;
import hipi.image.FloatImage;
import hipi.image.ImageHeader;
//import org.hipi.imagebundle.mapreduce.HibInputFormat;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import java.io.IOException; 
@SuppressWarnings("unused")
public class SampleProgram extends Configured implements Tool { 
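  // Mapper: computes the average RGB value of a single image and emits it under a shared key (1).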
  public static class HelloWorldMapper extends Mapper<ImageHeader, FloatImage, IntWritable, FloatImage> {
    public void map(ImageHeader key, FloatImage value, Context context) 
      throws IOException, InterruptedException {
         if (value != null && value.getWidth() > 1 && value.getHeight() > 1 && value.getBands() == 3) {           
            int w = value.getWidth();
            int h = value.getHeight();
            float[] valData = value.getData();
            float[] avgData = {0,0,0};
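            // Pixel data is interleaved per pixel (R, G, B), hence the *3 + band indexing below.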
            for (int j = 0; j < h; j++) {
              for (int i = 0; i < w; i++) {
                avgData[0] += valData[(j*w+i)*3+0]; 
                avgData[1] += valData[(j*w+i)*3+1]; 
                avgData[2] += valData[(j*w+i)*3+2]; 
              }
            }
            FloatImage avg = new FloatImage(1, 1, 3, avgData);
            avg.scale(1.0f/(float)(w*h));          
            context.write(new IntWritable(1), avg);
          } 
    }
  }
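  // Reducer: averages the per-image averages received under the shared key and writes the result as text.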
  public static class HelloWorldReducer extends Reducer<IntWritable, FloatImage, IntWritable, Text> {
    public void reduce(IntWritable key, Iterable<FloatImage> values, Context context) 
      throws IOException, InterruptedException {
        FloatImage avg = new FloatImage(1, 1, 3);
        int total = 0;
        for (FloatImage val : values) {
          avg.add(val);
          total++;
        }
        if (total > 0) {
          avg.scale(1.0f / total);         
          float[] avgData = avg.getData();
          String result = String.format("Average pixel value: %f %f %f", avgData[0], avgData[1], avgData[2]);       
          context.write(key, new Text(result));
        }
    }
  }
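  // Configures the MapReduce job: HIPI bundle input, text output, and the mapper/reducer above.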
  public int run(String[] args) throws Exception {
    if (args.length != 2) {
      System.out.println("Usage: helloWorld <input HIB> <output directory>");
      System.exit(0);
    }  
    Job job = Job.getInstance();
    job.setInputFormatClass(ImageBundleInputFormat.class);  
    job.setOutputFormatClass(TextOutputFormat.class);

    job.setJarByClass(SampleProgram.class);
    job.setMapperClass(HelloWorldMapper.class);
    job.setReducerClass(HelloWorldReducer.class);

    job.setMapOutputKeyClass(IntWritable.class);
    job.setMapOutputValueClass(FloatImage.class);

    job.setOutputKeyClass(IntWritable.class);
    job.setOutputValueClass(Text.class);    

    FileInputFormat.setInputPaths(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, new Path(args[1]));
    boolean success = job.waitForCompletion(true);    
    return success ? 0 : 1;
  }  
  public static void main(String[] args) throws Exception {
    ToolRunner.run(new SampleProgram(), args);
    System.exit(0);
  }
}

Please try the -libjars option as shown below. -libjars uploads the given JARs to the cluster and then makes them available on the classpath of every mapper and reducer instance. If you want to add additional libraries (JARs) to the driver/client classpath, you can use the HADOOP_CLASSPATH environment variable:

export HADOOP_CLASSPATH=hipi-0.0.1.jar
hadoop jar ~/Desktop/edureka/workspace/jars/SampleProgramHIPI.jar hipi.image.examples.SampleProgram hdfs:/video/sampleimages.hib hdfs:/video/sampleimages.output
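For completeness, a -libjars invocation would look something like the following sketch (the HIPI jar path here is an assumption; because SampleProgram is run through ToolRunner, Hadoop's GenericOptionsParser picks up -libjars before the program arguments):

export HADOOP_CLASSPATH=hipi-0.0.1.jar
hadoop jar ~/Desktop/edureka/workspace/jars/SampleProgramHIPI.jar hipi.image.examples.SampleProgram -libjars hipi-0.0.1.jar hdfs:/video/sampleimages.hib hdfs:/video/sampleimages.output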

Thanks to Sarath for the valuable input. I have switched HIPI back to the earlier 2.0 release and built it with build.xml.