
Linux MapReduce Package Error


Which packages do these three programs need? How can I combine the three programs into one program? And how do I run this MapReduce program in Eclipse?

Please help me run this program successfully.

Operating system: Linux

Errors faced:

  • The method TryParseInt(String) is undefined for the type MaxPubYear.MaxPubYearReducer
  • The method setInputFormatClass(Class) in the type Job is not applicable for the arguments (Class)

Mapper and reducer code:

    public static class MaxPubYearMapper extends Mapper<LongWritable, Text, IntWritable, Text>
    {
        public void map(LongWritable key, Text value, Context context)
                throws IOException, InterruptedException
        {
            String delim = "\t";
            Text valtosend = new Text();
            String tokens[] = value.toString().split(delim);
            if (tokens.length == 2)
            {
                valtosend.set(tokens[0] + ";" + tokens[1]);
                context.write(new IntWritable(1), valtosend);
            }
        }
    }
    
    public static class MaxPubYearReducer extends Reducer<IntWritable, Text, Text, IntWritable>
    {
        public void reduce(IntWritable key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException
        {
            int maxiValue = Integer.MIN_VALUE;
            String maxiYear = "";
            for (Text value : values)
            {
                String token[] = value.toString().split(";");
                if (token.length == 2 && TryParseInt(token[1]).intValue() > maxiValue)
                {
                    maxiValue = TryParseInt(token[1]);
                    maxiYear = token[0];
                }
            }
            context.write(new Text(maxiYear), new IntWritable(maxiValue));
        }
    }
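
For reference, TryParseInt is not a method from the JDK or from Hadoop, which is exactly why Eclipse reports it as undefined. The call sites above pass a String and compare the result via intValue(), so the reducer expects a helper roughly like the following (a minimal sketch; the fallback value on a parse failure is an assumption):

    // Hypothetical helper: parse a String into an Integer, falling back to
    // Integer.MIN_VALUE so the comparison in the reducer never throws.
    public static Integer TryParseInt(String trimmed) {
        try {
            return Integer.parseInt(trimmed);
        } catch (NumberFormatException e) {
            return Integer.MIN_VALUE;
        }
    }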
    

Write them all together in one class.

The required packages are listed below. Note that the code uses the new MapReduce API, so the imports must come from org.apache.hadoop.mapreduce, not the old org.apache.hadoop.mapred package; mixing the two is what produces the setInputFormatClass error you are seeing.

    package org.myorg;
    
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    
There may be a few extras in there, since I copied them from my own code.

    package org.myorg;
    
    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.LongWritable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapreduce.Job;
    import org.apache.hadoop.mapreduce.Mapper;
    import org.apache.hadoop.mapreduce.Reducer;
    import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
    import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
    import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
    
    public class MaxPubYear {  // file must be named MaxPubYear.java; main() below references MaxPubYear.class
        public static class MaxPubYearMapper extends Mapper<LongWritable, Text, IntWritable, Text>
        {
            // The first job's output lines look like "<year>\t<count>"; re-key
            // every line under the constant key 1 so one reducer sees them all.
            public void map(LongWritable key, Text value, Context context)
                    throws IOException, InterruptedException
            {
                String delim = "\t";
                Text valtosend = new Text();
                String tokens[] = value.toString().split(delim);
                if (tokens.length == 2)
                {
                    valtosend.set(tokens[0] + ";" + tokens[1]);
                    context.write(new IntWritable(1), valtosend);
                }
            }
        }
    
        public static class MaxPubYearReducer extends Reducer<IntWritable, Text, Text, IntWritable>
        {
            // Scan all "<year>;<count>" values and keep the year with the highest count.
            public void reduce(IntWritable key, Iterable<Text> values, Context context)
                    throws IOException, InterruptedException
            {
                int maxiValue = Integer.MIN_VALUE;
                String maxiYear = "";
                for (Text value : values)
                {
                    String token[] = value.toString().split(";");
                    if (token.length == 2 && TryParseInt(token[1]).intValue() > maxiValue)
                    {
                        maxiValue = TryParseInt(token[1]);
                        maxiYear = token[0];
                    }
                }
                context.write(new Text(maxiYear), new IntWritable(maxiValue));
            }
        }

        // The missing helper that caused the "TryParseInt is undefined" error;
        // a minimal version as sketched earlier:
        public static Integer TryParseInt(String trimmed) {
            try {
                return Integer.parseInt(trimmed);
            } catch (NumberFormatException e) {
                return Integer.MIN_VALUE;
            }
        }
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();

            // First job: the frequency count (your first program). FrequencyMapper
            // and FrequencyReducer must also be pasted into this class,
            // alongside the two classes above.
            Job job = new Job(conf, "Frequency");
            job.setJarByClass(MaxPubYear.class);
            job.setOutputKeyClass(Text.class);
            job.setOutputValueClass(IntWritable.class);

            job.setMapperClass(FrequencyMapper.class);
            job.setCombinerClass(FrequencyReducer.class);
            job.setReducerClass(FrequencyReducer.class);

            job.setOutputFormatClass(TextOutputFormat.class);
            job.setInputFormatClass(TextInputFormat.class);

            FileInputFormat.addInputPath(job, new Path(args[0]));
            FileOutputFormat.setOutputPath(job, new Path(args[1] + "_temp"));
            int exitCode = job.waitForCompletion(true) ? 0 : 1;

            if (exitCode == 0)
            {
                // Second job: read the first job's output and pick the maximum year.
                Job secondJob = new Job(conf, "Maximum Publication year");
                secondJob.setJarByClass(MaxPubYear.class);

                secondJob.setOutputKeyClass(Text.class);
                secondJob.setOutputValueClass(IntWritable.class);

                secondJob.setMapOutputKeyClass(IntWritable.class);
                secondJob.setMapOutputValueClass(Text.class);

                secondJob.setMapperClass(MaxPubYearMapper.class);
                secondJob.setReducerClass(MaxPubYearReducer.class);

                FileInputFormat.addInputPath(secondJob, new Path(args[1] + "_temp"));
                FileOutputFormat.setOutputPath(secondJob, new Path(args[1]));
                System.exit(secondJob.waitForCompletion(true) ? 0 : 1);
            }
        }
    }
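
To answer the "how do I create a jar file" part: once everything is in one file, one way to compile and run it from the command line is sketched below (the hadoop-core jar location and the jar/path names are assumptions; adjust them to your installation):

    # compile against the Hadoop jar and package the classes
    mkdir -p classes
    javac -classpath /usr/lib/hadoop/hadoop-core-*.jar -d classes MaxPubYear.java
    jar -cvf maxpubyear.jar -C classes .

    # run it: args[0] is the input path, args[1] the final output path;
    # the intermediate output goes to "<output>_temp", as coded in main()
    hadoop jar maxpubyear.jar org.myorg.MaxPubYear input output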
    
    
Comments:

  • I'm not sure what your question is: the title mentions an error. Which error are we talking about?
  • I want to combine the three pieces of code above into one program and create a jar file. I don't know how to do that, or which packages are needed.
  • Hi Antariksha, I am new to Java and I'm finding it hard to combine all three pieces of code into one. Could you show me how to put them in one class?
  • Edited my answer. You also need to add the jar file hadoop-core-*.jar to your classpath.
  • I tried it and got some errors. I've taken a screenshot. Can you tell me how to add a screenshot here?
  • I don't know. Why not paste the exception by editing the question? You can also try dragging the image here.
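
Regarding the classpath comment above: in Eclipse this is usually done via Project → Properties → Java Build Path → Libraries → Add External JARs..., then selecting the hadoop-core-*.jar from your Hadoop installation so Eclipse can compile against the Hadoop classes.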