Hadoop: naming the part files output by the reducer


I am trying to split one large Avro file in HDFS into multiple Avro files based on the value of a field in the input records. The sections below show my mapper, reducer, and driver.

Everything works fine, but the output files are named 01-r-00000.avro, 02-r-00000.avro

instead of stock-r-00000.avro, stock-r-00001.avro.

What am I missing?

Thanks

Mapper:

  public static class CustomFileSplitMapper extends Mapper<AvroKey<GenericRecord>, NullWritable, AvroKey<String>, AvroValue<GenericRecord>> {
    @Override
    public void map(AvroKey<GenericRecord> key, NullWritable value, Context context)
        throws IOException, InterruptedException {
      GenericRecord record = key.datum();
      LOGGER.info(record);
      // Key each record by its "date" field. Avro may hand string fields back
      // as Utf8, so call toString() rather than casting to String.
      AvroValue<GenericRecord> outValue = new AvroValue<GenericRecord>(record);
      context.write(new AvroKey<String>(record.get("date").toString()), outValue);
    }
  }
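
For context, the job only assumes the input records carry a string field named "date" in yyyyMMdd form (see the date parsing in the reducer). A hypothetical input schema, sketched with Avro's SchemaBuilder; every field except "date" is made up for illustration:

  // Hypothetical input schema; only the "date" field is required by this job.
  Schema stockSchema = SchemaBuilder.record("StockQuote").fields()
      .requiredString("date")    // yyyyMMdd; becomes the grouping key
      .requiredString("symbol")  // illustrative only
      .requiredDouble("close")   // illustrative only
      .endRecord();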

You have to append the desired prefix to the baseOutputPath that you pass to the multiple-outputs write call; AvroMultipleOutputs uses the last segment of that path as the part-file name prefix:

  multipleOutput.write(multipleOutputName, output, NullWritable.get(), outKeyPath + "stock");

Please refer to the blog for more details. The corrected call for this job is shown after the reducer below.

Reducer:

  public static class CustomFileSplitReducer extends Reducer<AvroKey<String>, AvroValue<GenericRecord>, AvroKey<GenericRecord>, NullWritable> {
    private AvroMultipleOutputs amos;
    private String outputPath;

    @Override
    protected void setup(Context context) {
      outputPath = context.getConfiguration().get("outputPath");
      amos = new AvroMultipleOutputs(context);
    }

    @Override
    public void reduce(AvroKey<String> key, Iterable<AvroValue<GenericRecord>> values, Context context)
        throws IOException, InterruptedException {
      for (AvroValue<GenericRecord> value : values) {
        // Turn the yyyyMMdd key into a daily/yyyy/MM/dd directory path.
        String datePath = "daily" + File.separator + LocalDate.parse(key.datum().toString(),
            DateTimeFormatter.ofPattern("yyyyMMdd")).format(DateTimeFormatter.ofPattern("yyyy/MM/dd"));
        GenericRecord record = value.datum();
        // The last segment of this baseOutputPath is the day-of-month ("dd"),
        // which is why the files come out as 01-r-00000.avro, 02-r-00000.avro.
        amos.write("stock", new AvroKey<GenericRecord>(record), NullWritable.get(),
            outputPath + File.separator + datePath);
      }
    }

    @Override
    public void cleanup(Context context) throws IOException, InterruptedException {
      amos.close();
    }
  }
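
Applying the fix to this reducer means making "stock" the last segment of the baseOutputPath; a minimal sketch of the corrected write call, using the same variables as above:

      // "stock" is now the last path segment, so the part files come out as
      // <outputPath>/daily/yyyy/MM/dd/stock-r-00000.avro instead of dd-r-00000.avro.
      amos.write("stock", new AvroKey<GenericRecord>(record), NullWritable.get(),
          outputPath + File.separator + datePath + File.separator + "stock");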
Driver:

Configuration conf = new Configuration();
conf.set("outputPath", props.getString("outputPath"));
Job job = Job.getInstance(conf, "CustomFileSplitter");
job.setJarByClass(CustomFileSplitter.class);
job.setMapperClass(CustomFileSplitMapper.class);
job.setReducerClass(CustomFileSplitReducer.class);
FileInputFormat.addInputPath(job, new Path(props.getString("inputPath")));
FileOutputFormat.setOutputPath(job, new Path(props.getString("outputPath")));
job.setInputFormatClass(AvroKeyInputFormat.class);
LazyOutputFormat.setOutputFormatClass(job, AvroKeyOutputFormat.class);
job.setMapOutputKeyClass(AvroKey.class);
job.setMapOutputValueClass(AvroValue.class);
Schema schema = SchemaExtractor.extract(new Path(props.getString("inputPath")));

AvroJob.setMapOutputKeySchema(job, Schema.create(Schema.Type.STRING));
AvroJob.setMapOutputValueSchema(job, schema);
AvroJob.setOutputKeySchema(job, schema);
AvroJob.setInputKeySchema(job, schema);
AvroMultipleOutputs.addNamedOutput(job, "stock", AvroKeyOutputFormat.class, schema);
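
The driver snippet stops before the job is actually submitted; presumably it ends with the standard submit-and-wait boilerplate:

// Submit the job and block until it completes (not shown in the original snippet).
boolean success = job.waitForCompletion(true);
System.exit(success ? 0 : 1);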
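
SchemaExtractor is not a Hadoop or Avro class and its code is not shown; a hypothetical stand-in could read the writer schema straight out of the Avro file header, for example:

// Hypothetical stand-in for the SchemaExtractor.extract() helper used above.
// Every Avro data file embeds its writer schema in the file header.
public static Schema extract(Path avroFile, Configuration conf) throws IOException {
  try (DataFileReader<GenericRecord> reader = new DataFileReader<>(
      new FsInput(avroFile, conf), new GenericDatumReader<GenericRecord>())) {
    return reader.getSchema();
  }
}

(FsInput is org.apache.avro.mapred.FsInput; the question's version takes only a Path, so it presumably obtains a Configuration internally.)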