MapReduce: java.lang.ClassCastException: class nyc311.NYC311_Writable at java.lang.Class.asSubclass(Class.java:3404)


Here is my basic MapReduce program. I am trying to create a Writable class, NYC311_Writable, to hold the data, and I am getting a java.lang.ClassCastException. Any suggestions?

Here is the error:

17/04/11 14:54:05 INFO mapred.MapTask: kvstart = 26214396; length = 6553600
17/04/11 14:54:05 WARN mapred.MapTask: Unable to initialize MapOutputCollector org.apache.hadoop.mapred.MapTask$MapOutputBuffer
java.lang.ClassCastException: class nyc311.NYC311_Writable
    at java.lang.Class.asSubclass(Class.java:3404)
    at org.apache.hadoop.mapred.JobConf.getOutputKeyComparator(JobConf.java:887)
    at org.apache.hadoop.mapred.MapTask$MapOutputBuffer.init(MapTask.java:1004)
    at org.apache.hadoop.mapred.MapTask.createSortingCollector(MapTask.java:402)
    at org.apache.hadoop.mapred.MapTask.access$100(MapTask.java:81)
    at org.apache.hadoop.mapred.MapTask$NewOutputCollector.<init>(MapTask.java:698)
    at org.apache.hadoop.mapred.MapTask.runNewMapper(MapTask.java:770)
    at org.apache.hadoop.mapred.MapTask.run(MapTask.java:341)
    at org.apache.hadoop.mapred.LocalJobRunner$Job$MapTaskRunnable.run(LocalJobRunner.java:243)
    at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    at java.util.concurrent.FutureTask.run(FutureTask.java:266)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
The Writable class:

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package nyc311;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;

/**
 *
 * @author dhaval
 */
public class NYC311_Writable implements Writable{

    private String Incident_Zip;


    public NYC311_Writable() {
    }

    public NYC311_Writable(String Incident_Zip) {
        this.Incident_Zip = Incident_Zip;
    }

    public String getIncident_Zip() {
        return Incident_Zip;
    }

    public void setIncident_Zip(String Incident_Zip) {
        this.Incident_Zip = Incident_Zip;
    }


    @Override
    public void write(DataOutput d) throws IOException {
        WritableUtils.writeString(d,Incident_Zip);
    }

    @Override
    public void readFields(DataInput di) throws IOException {
        Incident_Zip = WritableUtils.readString(di);

    }

    public static NYC311_Writable read(DataInput in) throws IOException {
         NYC311_Writable w = new NYC311_Writable();
         w.readFields(in);
         return w;
       }

}
The Mapper:

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package nyc311;

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 *
 * @author dhaval
 */
public class NYC311_Mapper extends Mapper<Object, Text, NYC311_Writable, IntWritable>{

       private IntWritable count = new IntWritable(1);
       private Text zip = new Text();

    @Override
    protected void map(Object key, Text value, Context context) throws IOException, InterruptedException {

        String[] line = value.toString().split(",");    

        if (line[8].matches(".*[a-z].*")) { 

        }
        else{        

            NYC311_Writable nyc_data = new NYC311_Writable();

            nyc_data.setIncident_Zip(line[8]);
            context.write(nyc_data, count);       

        }

    }

}
The Reducer:

/*
 * To change this license header, choose License Headers in Project Properties.
 * To change this template file, choose Tools | Templates
 * and open the template in the editor.
 */
package nyc311;

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 *
 * @author dhaval
 */
public class NYC311_Reducer extends Reducer<NYC311_Writable, IntWritable, NYC311_Writable, IntWritable>{

    private IntWritable count = new IntWritable();

    @Override
    protected void reduce(NYC311_Writable key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {

    int sum = 0;

    for (IntWritable val : values) {
        sum += val.get();
    }

    count.set(sum);
    context.write(key, count);

    }

}

I think you are missing Comparable; here is a blog post to guide you.
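
In case it is useful, here is a minimal sketch of what that change could look like for the class posted above: switching from Writable to WritableComparable<NYC311_Writable> and adding compareTo, plus hashCode/equals so the default HashPartitioner sends equal zip codes to the same reducer. The field and serialization logic are copied from the question; only the comparison methods are new.

package nyc311;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableUtils;

// Same class as in the question, but implementing WritableComparable so the
// shuffle can sort it when it is used as the map output key.
public class NYC311_Writable implements WritableComparable<NYC311_Writable> {

    private String Incident_Zip;

    public NYC311_Writable() {
    }

    public String getIncident_Zip() {
        return Incident_Zip;
    }

    public void setIncident_Zip(String Incident_Zip) {
        this.Incident_Zip = Incident_Zip;
    }

    @Override
    public void write(DataOutput d) throws IOException {
        WritableUtils.writeString(d, Incident_Zip);
    }

    @Override
    public void readFields(DataInput di) throws IOException {
        Incident_Zip = WritableUtils.readString(di);
    }

    // Defines the sort order of map output keys: plain lexicographic order on the zip.
    @Override
    public int compareTo(NYC311_Writable other) {
        return Incident_Zip.compareTo(other.Incident_Zip);
    }

    // The default HashPartitioner uses hashCode() to pick a reducer, so keep it
    // consistent with equals()/compareTo().
    @Override
    public int hashCode() {
        return Incident_Zip == null ? 0 : Incident_Zip.hashCode();
    }

    @Override
    public boolean equals(Object o) {
        return o instanceof NYC311_Writable
                && Objects.equals(Incident_Zip, ((NYC311_Writable) o).Incident_Zip);
    }
}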

Could you list the command you used to run the jar file?
hadoop jar /home/dhaval/NetBeansProjects/NYC311/dist/NYC311.jar /NY311_2011 /Y_NY311_Out (/NY311_2011 is my csv input file and /Y_NY311_2011_Out is the output directory).
I think you are missing Comparable; here is a blog post to guide you.
Is it necessary to use WritableComparable? I don't want to do a secondary sort at this point.
The program works when I use WritableComparable, thank you very much for the help. But I am still not clear why it fails when I use only Writable.
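
For what it is worth, the stack trace already points at the cause: with no sort comparator configured, JobConf.getOutputKeyComparator falls back to getMapOutputKeyClass().asSubclass(WritableComparable.class), so a map output key class that only implements Writable cannot be cast, and the MapOutputCollector fails to initialize. Implementing WritableComparable does not mean a secondary sort; compareTo only defines the ordering the shuffle needs for any key. If the key class really has to stay a plain Writable, the other route, sketched below under that assumption, is to supply the comparator explicitly via job.setSortComparatorClass. The NYC311_KeyComparator class name is made up here; the field and getter names follow the question.

package nyc311;

import java.io.IOException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.RawComparator;

// Sketch: an explicit sort comparator for NYC311_Writable keys, so the key
// class itself does not have to implement WritableComparable.
public class NYC311_KeyComparator implements RawComparator<NYC311_Writable> {

    private final DataInputBuffer buffer1 = new DataInputBuffer();
    private final DataInputBuffer buffer2 = new DataInputBuffer();
    private final NYC311_Writable key1 = new NYC311_Writable();
    private final NYC311_Writable key2 = new NYC311_Writable();

    // Deserialize both keys from the shuffle buffers and delegate to the object compare.
    @Override
    public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        try {
            buffer1.reset(b1, s1, l1);
            buffer2.reset(b2, s2, l2);
            key1.readFields(buffer1);
            key2.readFields(buffer2);
            return compare(key1, key2);
        } catch (IOException e) {
            throw new IllegalArgumentException("Could not deserialize NYC311_Writable key", e);
        }
    }

    @Override
    public int compare(NYC311_Writable a, NYC311_Writable b) {
        return a.getIncident_Zip().compareTo(b.getIncident_Zip());
    }
}

The driver (not shown in the question) would then register it with something like job.setSortComparatorClass(NYC311_KeyComparator.class).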