Using multiple mappers for multiple output directories in Hadoop MapReduce


I want to run two mappers that produce two different outputs in two different directories. The output of the first mapper (whose path is passed as an argument) should be fed as the input of the second mapper. I have this code in the driver class:

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.mapreduce.lib.output.MultipleOutputs;
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;


public class Export_Column_Mapping 
{
    private static String[] Detail_output_column_array = new String[27];
    private static String[] Shop_output_column_array = new String[8];
    private static String details_output = null ;
    private static String Shop_output = null;

    public static void main(String[] args) throws Exception 
    {

        String Output_filetype = args[3];
        String Input_column_number = args[4];
        String Output_column_number = args[5];

        Configuration Detailsconf = new Configuration(false);

        Detailsconf.setStrings("output_filetype",Output_filetype);
        Detailsconf.setStrings("Input_column_number",Input_column_number);
        Detailsconf.setStrings("Output_column_number",Output_column_number);

        Job Details = new Job(Detailsconf," Export_Column_Mapping");

        Details.setJarByClass(Export_Column_Mapping.class);
        Details.setJobName("DetailsFile_Job");

        Details.setMapperClass(DetailFile_Mapper.class);
        Details.setNumReduceTasks(0);

        Details.setInputFormatClass(TextInputFormat.class);
        Details.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.setInputPaths(Details, new Path(args[0])); 
        FileOutputFormat.setOutputPath(Details, new Path(args[1]));

        if(Details.waitForCompletion(true))
        {

        Configuration Shopconf = new Configuration();

        Job Shop = new Job(Shopconf,"Export_Column_Mapping");
        Shop.setJarByClass(Export_Column_Mapping.class);
        Shop.setJobName("ShopFile_Job");

        Shop.setMapperClass(ShopFile_Mapper.class);
        Shop.setNumReduceTasks(0);

        Shop.setInputFormatClass(TextInputFormat.class);
        Shop.setOutputFormatClass(TextOutputFormat.class);

        FileInputFormat.setInputPaths(Shop, new Path(args[1])); 
        FileOutputFormat.setOutputPath(Shop, new Path(args[2]));

        MultipleOutputs.addNamedOutput(Shop, "text", TextOutputFormat.class,LongWritable.class, Text.class);
        System.exit(Shop.waitForCompletion(true) ? 0 : 1);
        }
    }

    public static class DetailFile_Mapper extends Mapper<LongWritable,Text,Text,Text>
    {   
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException 
        {
            String str_Output_filetype = context.getConfiguration().get("output_filetype"); 

            String str_Input_column_number = context.getConfiguration().get("Input_column_number");
            String[] input_columns_number = str_Input_column_number.split(",");

            String str_Output_column_number= context.getConfiguration().get("Output_column_number");    
            String[] output_columns_number = str_Output_column_number.split(",");

            String str_line = value.toString();
            String[] input_column_array = str_line.split(",");

            try
            {

                for(int i = 0;i<=input_column_array.length+1; i++)
                {
                    int int_outputcolumn = Integer.parseInt(output_columns_number[i]);
                    int int_inputcolumn = Integer.parseInt(input_columns_number[i]);

                    if((int_inputcolumn != 0) && (int_outputcolumn != 0) && output_columns_number.length == input_columns_number.length)
                    {

                        Detail_output_column_array[int_outputcolumn-1] = input_column_array[int_inputcolumn-1];


                        if(details_output != null)
                        {
                            details_output = details_output+"       "+ Detail_output_column_array[int_outputcolumn-1];
                            Shop_output = Shop_output+"     "+ Shop_output_column_array[int_outputcolumn-1];

                        }else
                        {
                            details_output = Detail_output_column_array[int_outputcolumn-1];
                            Shop_output =  Shop_output_column_array[int_outputcolumn-1];

                        }
                    }
                }

            }catch (Exception e)
            {

            }
            context.write(null,new Text(details_output));
        }
    }
    public static class ShopFile_Mapper extends Mapper<LongWritable,Text,Text,Text>
    {   
        public void map(LongWritable key, Text value, Context context) throws IOException, InterruptedException 
        {
            try
            {

                for(int i = 0;i<=Shop_output_column_array.length; i++)
                {
                    Shop_output_column_array[0] = Detail_output_column_array[0];
                    Shop_output_column_array[1] = Detail_output_column_array[1];
                    Shop_output_column_array[2] = Detail_output_column_array[2];
                    Shop_output_column_array[3] = Detail_output_column_array[3];
                    Shop_output_column_array[4] = Detail_output_column_array[14];

                    if(details_output != null)
                    {
                        Shop_output = Shop_output+"     "+ Shop_output_column_array[i];

                    }else
                    {
                        Shop_output =  Shop_output_column_array[i-1];

                    }
                }
            }catch (Exception e){

            }
            context.write(null,new Text(Shop_output));
        }
    }

}
There is something called JobControl that you can use to achieve this.

Suppose you have two jobs, A and B:

ControlledJob A = new ControlledJob(JobConf for A);
ControlledJob B = new ControlledJob(JobConf for B);
B.addDependingJob(A);

JobControl jControl = new JobControl("Name");
jControl.addJob(A);
jControl.addJob(B);
Thread runJControl = new Thread(jControl);
runJControl.start();
while (!jControl.allFinished()) {
    code = jControl.getFailedJobList().size() == 0 ? 0 : 1;
    Thread.sleep(1000);
}
System.exit(code);
Initialize code at the beginning, like this:

int code = 1;

In your case, the first job is your first mapper with zero reducers and the second job is your second mapper with zero reducers. The configuration should make sure that the input path of B is the same as the output path of A.
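
Putting that together for this question, here is a minimal, untested sketch of such a driver. It reuses the DetailFile_Mapper and ShopFile_Mapper classes and the argument layout from the question, plus the ControlledJob/JobControl classes from org.apache.hadoop.mapreduce.lib.jobcontrol; the class name Export_Column_Mapping_Driver and the local variable names are placeholders of mine, not part of the original code:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
import org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class Export_Column_Mapping_Driver
{
    public static void main(String[] args) throws Exception
    {
        // Job A: DetailFile_Mapper, map-only, reads args[0] and writes args[1].
        Configuration detailsConf = new Configuration();
        detailsConf.setStrings("output_filetype", args[3]);
        detailsConf.setStrings("Input_column_number", args[4]);
        detailsConf.setStrings("Output_column_number", args[5]);

        Job details = new Job(detailsConf, "DetailsFile_Job");
        details.setJarByClass(Export_Column_Mapping.class);
        details.setMapperClass(Export_Column_Mapping.DetailFile_Mapper.class);
        details.setNumReduceTasks(0);
        FileInputFormat.setInputPaths(details, new Path(args[0]));
        FileOutputFormat.setOutputPath(details, new Path(args[1]));

        // Job B: ShopFile_Mapper, map-only, reads A's output (args[1]) and writes args[2].
        Job shop = new Job(new Configuration(), "ShopFile_Job");
        shop.setJarByClass(Export_Column_Mapping.class);
        shop.setMapperClass(Export_Column_Mapping.ShopFile_Mapper.class);
        shop.setNumReduceTasks(0);
        FileInputFormat.setInputPaths(shop, new Path(args[1]));
        FileOutputFormat.setOutputPath(shop, new Path(args[2]));

        // Wrap both jobs and declare that B may only start after A has succeeded.
        ControlledJob cjDetails = new ControlledJob(details, null);
        ControlledJob cjShop = new ControlledJob(shop, null);
        cjShop.addDependingJob(cjDetails);

        JobControl jControl = new JobControl("Export_Column_Mapping");
        jControl.addJob(cjDetails);
        jControl.addJob(cjShop);

        // Run the JobControl in its own thread and poll until both jobs finish.
        int code = 1;
        Thread runJControl = new Thread(jControl);
        runJControl.start();
        while (!jControl.allFinished())
        {
            code = jControl.getFailedJobList().size() == 0 ? 0 : 1;
            Thread.sleep(1000);
        }
        jControl.stop();
        System.exit(code);
    }
}

The dependency replaces the if (Details.waitForCompletion(true)) chaining in the original driver: JobControl holds ShopFile_Job back until DetailsFile_Job has completed successfully.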

code = jControl.getFailedJobList().size() == 0 ? 0 : 1; Could you explain what this code does?
I get this error when running the program, even though I have set all the required parameters: org.apache.hadoop.mapred.InvalidJobConfException: Output directory not set.
When you use FileOutputFormat.setOutputPath, specify the conf value and the path; when you use TextInputFormat, specify the job value and the path. Make that change... Is this right? Details.setInputFormatClass(TextInputFormat.class); FileOutputFormat.setOutputPath(Details, new Path(args[1]);
It should be FileOutputFormat.setOutputPath(Details, new Path(args[1]))
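
On the InvalidJobConfException mentioned in the comments: that exception is thrown when a job reaches submission without an output directory, so the usual check is that FileOutputFormat.setOutputPath has actually been called (with its closing parenthesis) on each Job object before it is run. A minimal sketch using the names from the question:

// With the org.apache.hadoop.mapreduce API, input and output paths are set
// on the Job object; a missing setOutputPath call produces
// "InvalidJobConfException: Output directory not set".
FileInputFormat.setInputPaths(Details, new Path(args[0]));
FileOutputFormat.setOutputPath(Details, new Path(args[1]));

FileInputFormat.setInputPaths(Shop, new Path(args[1]));   // job A's output is job B's input
FileOutputFormat.setOutputPath(Shop, new Path(args[2]));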