Java Hadoop错误。ClassCastException:org.apache.Hadoop.io.LongWritable无法强制转换为org.apache.Hadoop.io.Text

Java Hadoop错误。ClassCastException:org.apache.Hadoop.io.LongWritable无法强制转换为org.apache.Hadoop.io.Text,java,hadoop,mapreduce,Java,Hadoop,Mapreduce,我的计划如下: public static class MapClass extends Mapper<Text, Text, Text, LongWritable> { public void map(Text key, Text value, Context context) throws IOException, InterruptedException { // your map code goes here

我的计划如下:

    public static class MapClass extends Mapper<Text, Text, Text, LongWritable> {

        public void map(Text key, Text value, Context context) throws IOException, InterruptedException {
            // your map code goes here
            String[] fields = value.toString().split(",");

            for(String str : fields) {
                context.write(new Text(str), new LongWritable(1L));
            }
        }
    }
   public int run(String args[]) throws Exception {
        Job job = new Job();
        job.setJarByClass(TopOS.class);

        job.setMapperClass(MapClass.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.setJobName("TopOS");
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(LongWritable.class);
        job.setNumReduceTasks(0);
        boolean success = job.waitForCompletion(true);
        return success ? 0 : 1;
    }

    public static void main(String args[]) throws Exception {
        int ret = ToolRunner.run(new TopOS(), args);
        System.exit(ret);
    }
}
为什么会出现以下错误?我怎样才能避开这件事

Hadoop : java.lang.ClassCastException: org.apache.hadoop.io.LongWritable cannot be cast to org.apache.hadoop.io.Text
你能用吗

//Set the key class for the job output data.
job.setOutputKeyClass(Class<?> theClass)
//Set the value class for job outputs
job.setOutputValueClass(Class<?> theClass) 
//设置作业输出数据的键类。
job.setOutputKeyClass(类的类)
//设置作业输出的值类
job.setOutputValueClass(类的类)

而不是setMapOutputKeyClass和setMapOutputValueClass方法。

在我看来,代码中只有一个小错误

当您使用平面文本文件作为输入时,fixed key类是LongWritable(您不需要/使用的),value类是ist Text

将映射器中的keyClass设置为Object,以便在不使用它的情况下加下划线,就可以消除错误

这是我稍微修改过的代码

package org.woopi.stackoverflow.q22853574;

import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

public class MapReduceJob {

  public static class MapClass extends Mapper<Object, Text, Text, LongWritable> {

    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
        // your map code goes here
        String[] fields = value.toString().split(",");

        for(String str : fields) {
            context.write(new Text(str), new LongWritable(1L));
        }
    }
  }

    public int run(String args[]) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(MapReduceJob.class);

        job.setMapperClass(MapClass.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.setJobName("MapReduceJob");
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        job.setNumReduceTasks(0);
        job.setInputFormatClass(TextInputFormat.class);
        boolean success = job.waitForCompletion(true);
        return success ? 0 : 1;
    }

  public static void main(String args[]) throws Exception {
    MapReduceJob j = new MapReduceJob();
    int ret = j.run(args);
    System.exit(ret);
  }
package org.woopi.stackoverflow.q22853574;
导入org.apache.hadoop.io.*;
导入org.apache.hadoop.mapreduce.lib.input.*;
导入org.apache.hadoop.mapreduce.lib.output.*;
导入org.apache.hadoop.mapreduce.*;
导入org.apache.hadoop.fs.Path;
导入java.io.IOException;
导入org.apache.hadoop.conf.Configuration;
公共类MapReduceJob{
公共静态类映射器类扩展映射器{
公共void映射(对象键、文本值、上下文上下文)引发IOException、InterruptedException{
//你的地图代码在这里
字符串[]字段=value.toString().split(“,”);
for(字符串str:fields){
编写(新文本(str)、新长可写(1L));
}
}
}
公共int运行(字符串args[])引发异常{
Configuration conf=新配置();
Job Job=Job.getInstance(conf);
job.setJarByClass(MapReduceJob.class);
job.setMapperClass(MapClass.class);
setInputPaths(作业,新路径(args[0]);
setOutputPath(作业,新路径(args[1]);
job.setJobName(“MapReduceJob”);
job.setOutputKeyClass(Text.class);
job.setOutputValueClass(LongWritable.class);
job.setNumReduceTasks(0);
setInputFormatClass(TextInputFormat.class);
布尔值success=job.waitForCompletion(true);
返回成功?0:1;
}
公共静态void main(字符串args[])引发异常{
MapReduceJob j=新的MapReduceJob();
int-ret=j.run(args);
系统退出(ret);
}
我希望这有帮助


Martin

我看不出哪里违反了继承树。输出键是文本,而LongWritable是输出值。你能解释一下吗?我不明白你的意思。
Mapper
应该改为
Mapper
。键是当前行的文件偏移量。它的类型是LongWritable。
package org.woopi.stackoverflow.q22853574;

import org.apache.hadoop.io.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.fs.Path;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;

public class MapReduceJob {

  public static class MapClass extends Mapper<Object, Text, Text, LongWritable> {

    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {
        // your map code goes here
        String[] fields = value.toString().split(",");

        for(String str : fields) {
            context.write(new Text(str), new LongWritable(1L));
        }
    }
  }

    public int run(String args[]) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf);
        job.setJarByClass(MapReduceJob.class);

        job.setMapperClass(MapClass.class);

        FileInputFormat.setInputPaths(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));

        job.setJobName("MapReduceJob");
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(LongWritable.class);
        job.setNumReduceTasks(0);
        job.setInputFormatClass(TextInputFormat.class);
        boolean success = job.waitForCompletion(true);
        return success ? 0 : 1;
    }

  public static void main(String args[]) throws Exception {
    MapReduceJob j = new MapReduceJob();
    int ret = j.run(args);
    System.exit(ret);
  }