Spring 能够读取多个文件,然后为每个源文件写入内容
读取文件工作正常,但写入文件工作不正常。我希望读取多个文件,然后使用
MultiResourceItemWriter
分别编写,如:
读取文件:
source/abc.csv
source/cbd.csv
source/efg.csv
应分别编写文件,如:
target/abc.csv
target/cbd.csv
target/efg.csv
但目前它将所有数据放在一个文件中
/**
 * Builds a {@link MultiResourceItemWriter} that writes {@code FooCsv} items
 * under the configured destination resource.
 *
 * <p>NOTE(review): {@code MultiResourceItemWriter} rolls over by item count
 * (see {@code setItemCountLimitPerResource}) into suffixed files derived from
 * the single base resource — it does not produce one output file per input
 * file. To mirror source files one-to-one, partition the step per input file
 * instead (see {@code MultiResourcePartitioner}).
 *
 * @param folder base output resource, injected from {@code directory.destination}
 * @return the configured writer bean
 * @throws Exception propagated from the delegate writer factory
 */
@Bean
public MultiResourceItemWriter<FooCsv> multipleCsvWriter(@Value("${directory.destination}") Resource folder) throws Exception {
    MultiResourceItemWriter<FooCsv> writer = new MultiResourceItemWriter<>();
    writer.setResource(folder);
    // Fix: the original passed an undefined variable 'file'; the only
    // Resource in scope here is the 'folder' parameter.
    writer.setDelegate(csvWriter(folder));
    return writer;
}
@Bean
公共MultiResourceItemWriter MultipecsWriter(@Value(${directory.destination}”)资源文件夹)引发异常{
MultiResourceItemWriter writer=新的MultiResourceItemWriter();
writer.setResource(文件夹);
writer.setDelegate(csvWriter(文件));
返回作者;
}
注意,这类似于从源文件夹复制并粘贴到目标文件夹。有两种方法可以做到这一点——
您可以使用
MultiResourcePartitioner
实现同样的功能。下面是批处理配置示例
// Partitioned Spring Batch job: one slave-step partition per input file.
// Each partition's ExecutionContext carries the file URL under the key
// "fileName" (the MultiResourcePartitioner default), which the step-scoped
// reader and writer beans pick up via SpEL late binding.
@Configuration
@EnableBatchProcessing
public class BatchConfig {
// Resolves Ant-style location patterns (e.g. file:source/*.csv) into Resource arrays.
@Autowired
private ResourcePatternResolver resourcePatternResolver;
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
public DataSource dataSource;
// Used below to look up the scoped proxies for the partitioner/reader/writer beans.
@Autowired
ApplicationContext context;
// Job-scoped so the 'srcDir' job parameter can be injected at launch time.
@Bean
@JobScope
public MultiResourcePartitioner paritioner(@Value("#{jobParameters[srcDir]}") String src) throws IOException {
Resource[] resources = resourcePatternResolver.getResources(src);
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
// NOTE(review): this partition(1) call runs before setResources(...) and its
// return value is discarded — the framework itself invokes partition(gridSize)
// when the partition step executes. Looks like dead code; confirm and remove.
partitioner.partition(1);
partitioner.setResources(resources);
return partitioner;
}
// Step-scoped reader: 'fileName' comes from this partition's ExecutionContext.
@Bean
@StepScope
public FlatFileItemReader<String> reader(@Value("#{stepExecutionContext[fileName]}") Resource file) {
FlatFileItemReader<String> reader = new FlatFileItemReader<String>();
reader.setResource(file);
// Pass lines through unchanged — this job only copies file content.
reader.setLineMapper(new PassThroughLineMapper());
return reader;
}
// Step-scoped writer: target path = destDir job parameter + source file name.
@Bean
@StepScope
public FlatFileItemWriter<String> writer(@Value("#{jobParameters[destDir]}") String dest,
@Value("#{stepExecutionContext[fileName]}") Resource file) {
// Plain concatenation: assumes 'destDir' ends with a path separator
// (e.g. file:target/) — TODO confirm with the job launcher.
String destFile = dest + file.getFilename();
System.out.println(destFile);
FlatFileItemWriter<String> writer = new FlatFileItemWriter<String>();
writer.setLineAggregator(new PassThroughLineAggregator<>());
writer.setResource(resourcePatternResolver.getResource(destFile));
return writer;
}
// Single-step job; RunIdIncrementer allows relaunching with the same parameters.
@Bean
public Job kpJob() {
return jobBuilderFactory.get("kpJob").incrementer(new RunIdIncrementer()).flow(step1()).end().build();
}
// Master step: fans the slave step out over the partitions produced above.
@Bean
public Step step1() {
// getBean returns the @JobScope proxy rather than calling paritioner(...) directly.
Partitioner partitioner = context.getBean(MultiResourcePartitioner.class);
return stepBuilderFactory.get("step1").partitioner(slaveStep()).partitioner("step1.slave", partitioner).build();
}
// Slave step: chunk-copies one file (10 lines per transaction).
@Bean
public Step slaveStep() {
// getBean returns the @StepScope proxies so each partition gets its own instances.
ItemReader<String> reader = context.getBean(FlatFileItemReader.class);
ItemWriter<String> writer = context.getBean(FlatFileItemWriter.class);
return stepBuilderFactory.get("step1.slave").<String, String>chunk(10).reader(reader).writer(writer).build();
}
}
@Configuration
@EnableBatchProcessing
public class BatchConfig {
@Autowired
private ResourcePatternResolver resourcePatternResolver;
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
public DataSource dataSource;
@Autowired
ApplicationContext context;
@Bean
@JobScope
public MultiResourcePartitioner paritioner(@Value("#{jobParameters[srcDir]}") String src) throws IOException {
Resource[] resources = resourcePatternResolver.getResources(src);
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
partitioner.partition(1);
partitioner.setResources(resources);
return partitioner;
}
@Bean
@StepScope
public FlatFileItemReader<String> reader(@Value("#{stepExecutionContext[fileName]}") Resource file) {
FlatFileItemReader<String> reader = new FlatFileItemReader<String>();
reader.setResource(file);
reader.setLineMapper(new PassThroughLineMapper());
return reader;
}
@Bean
@StepScope
public FlatFileItemWriter<String> writer(@Value("#{jobParameters[destDir]}") String dest,
@Value("#{stepExecutionContext[fileName]}") Resource file) {
String destFile = dest + file.getFilename();
System.out.println(destFile);
FlatFileItemWriter<String> writer = new FlatFileItemWriter<String>();
writer.setLineAggregator(new PassThroughLineAggregator<>());
writer.setResource(resourcePatternResolver.getResource(destFile));
return writer;
}
@Bean
public Job kpJob() {
return jobBuilderFactory.get("kpJob").incrementer(new RunIdIncrementer()).flow(step1()).end().build();
}
@Bean
public Step step1() {
Partitioner partitioner = context.getBean(MultiResourcePartitioner.class);
return stepBuilderFactory.get("step1").partitioner(slaveStep()).partitioner("step1.slave", partitioner).build();
}
@Bean
public Step slaveStep() {
ItemReader<String> reader = context.getBean(FlatFileItemReader.class);
ItemWriter<String> writer = context.getBean(FlatFileItemWriter.class);
return stepBuilderFactory.get("step1.slave").<String, String>chunk(10).reader(reader).writer(writer).build();
}
}
并将
srcDir
和 destDir
作为作业参数传递。您可以使用MultiResourcePartitioner
实现相同的功能。下面是批处理配置示例
// Partitioned Spring Batch job: one slave-step partition per input file.
// Each partition's ExecutionContext carries the file URL under the key
// "fileName" (the MultiResourcePartitioner default), which the step-scoped
// reader and writer beans pick up via SpEL late binding.
@Configuration
@EnableBatchProcessing
public class BatchConfig {
// Resolves Ant-style location patterns (e.g. file:source/*.csv) into Resource arrays.
@Autowired
private ResourcePatternResolver resourcePatternResolver;
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
public DataSource dataSource;
// Used below to look up the scoped proxies for the partitioner/reader/writer beans.
@Autowired
ApplicationContext context;
// Job-scoped so the 'srcDir' job parameter can be injected at launch time.
@Bean
@JobScope
public MultiResourcePartitioner paritioner(@Value("#{jobParameters[srcDir]}") String src) throws IOException {
Resource[] resources = resourcePatternResolver.getResources(src);
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
// NOTE(review): this partition(1) call runs before setResources(...) and its
// return value is discarded — the framework itself invokes partition(gridSize)
// when the partition step executes. Looks like dead code; confirm and remove.
partitioner.partition(1);
partitioner.setResources(resources);
return partitioner;
}
// Step-scoped reader: 'fileName' comes from this partition's ExecutionContext.
@Bean
@StepScope
public FlatFileItemReader<String> reader(@Value("#{stepExecutionContext[fileName]}") Resource file) {
FlatFileItemReader<String> reader = new FlatFileItemReader<String>();
reader.setResource(file);
// Pass lines through unchanged — this job only copies file content.
reader.setLineMapper(new PassThroughLineMapper());
return reader;
}
// Step-scoped writer: target path = destDir job parameter + source file name.
@Bean
@StepScope
public FlatFileItemWriter<String> writer(@Value("#{jobParameters[destDir]}") String dest,
@Value("#{stepExecutionContext[fileName]}") Resource file) {
// Plain concatenation: assumes 'destDir' ends with a path separator
// (e.g. file:target/) — TODO confirm with the job launcher.
String destFile = dest + file.getFilename();
System.out.println(destFile);
FlatFileItemWriter<String> writer = new FlatFileItemWriter<String>();
writer.setLineAggregator(new PassThroughLineAggregator<>());
writer.setResource(resourcePatternResolver.getResource(destFile));
return writer;
}
// Single-step job; RunIdIncrementer allows relaunching with the same parameters.
@Bean
public Job kpJob() {
return jobBuilderFactory.get("kpJob").incrementer(new RunIdIncrementer()).flow(step1()).end().build();
}
// Master step: fans the slave step out over the partitions produced above.
@Bean
public Step step1() {
// getBean returns the @JobScope proxy rather than calling paritioner(...) directly.
Partitioner partitioner = context.getBean(MultiResourcePartitioner.class);
return stepBuilderFactory.get("step1").partitioner(slaveStep()).partitioner("step1.slave", partitioner).build();
}
// Slave step: chunk-copies one file (10 lines per transaction).
@Bean
public Step slaveStep() {
// getBean returns the @StepScope proxies so each partition gets its own instances.
ItemReader<String> reader = context.getBean(FlatFileItemReader.class);
ItemWriter<String> writer = context.getBean(FlatFileItemWriter.class);
return stepBuilderFactory.get("step1.slave").<String, String>chunk(10).reader(reader).writer(writer).build();
}
}
@Configuration
@EnableBatchProcessing
public class BatchConfig {
@Autowired
private ResourcePatternResolver resourcePatternResolver;
@Autowired
private JobBuilderFactory jobBuilderFactory;
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
public DataSource dataSource;
@Autowired
ApplicationContext context;
@Bean
@JobScope
public MultiResourcePartitioner paritioner(@Value("#{jobParameters[srcDir]}") String src) throws IOException {
Resource[] resources = resourcePatternResolver.getResources(src);
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
partitioner.partition(1);
partitioner.setResources(resources);
return partitioner;
}
@Bean
@StepScope
public FlatFileItemReader<String> reader(@Value("#{stepExecutionContext[fileName]}") Resource file) {
FlatFileItemReader<String> reader = new FlatFileItemReader<String>();
reader.setResource(file);
reader.setLineMapper(new PassThroughLineMapper());
return reader;
}
@Bean
@StepScope
public FlatFileItemWriter<String> writer(@Value("#{jobParameters[destDir]}") String dest,
@Value("#{stepExecutionContext[fileName]}") Resource file) {
String destFile = dest + file.getFilename();
System.out.println(destFile);
FlatFileItemWriter<String> writer = new FlatFileItemWriter<String>();
writer.setLineAggregator(new PassThroughLineAggregator<>());
writer.setResource(resourcePatternResolver.getResource(destFile));
return writer;
}