Java: How to partition a step in Spring Batch?

Tags: java, spring, spring-boot, spring-batch

I am learning Spring Batch and have written a simple application to try it out. Per my requirements, I read from a single CSV file, apply some transformation, and insert the records into a database.

I have the following configuration:

    @Bean
    public Step step1(JdbcBatchItemWriter<Person> writer) {
        return stepBuilderFactory.get("step1")
                .<Person, Person>chunk(10)
                .reader(reader())
                .processor(processor())
                .writer(writer)
                .build();
    }

   @Bean
    public Job importUserJob(JobCompletionNotificationListener listener, Step step1, Step step2) {
        return jobBuilderFactory.get("importUserJob")
                .incrementer(new RunIdIncrementer())
                .listener(listener)
                .listener(new JobExecutionListener() {
                    @Override
                    public void beforeJob(JobExecution jobExecution) {
                        System.out.println("!!!!!!!!!!!!!SECOND_LISTENER_BEFORE!!!!!!!!!!!!!!!!");
                    }

                    @Override
                    public void afterJob(JobExecution jobExecution) {
                        System.out.println("!!!!!!!!!!!!!SECOND_LISTENER_AFTER!!!!!!!!!!!!!!!!");

                    }
                })
                .flow(step1)
                .next(step2)
                .end()
                .build();
    }

public FlatFileItemReader reader() {
    return new FlatFileItemReaderBuilder()
        .name("csvPersonReader")
        .resource(csvResource)
        .delimited()
        .names(new String[]{"firstName", "lastName"})
        .fieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
            setTargetType(Person.class);
        }})
        .build();

}
Now I want to execute that step using 10 threads. As far as I understand, I need to use the partitioning feature. I have found several examples of it, but they all use XML configuration; I would prefer Java configuration.

How can I achieve that?

P.S. I tried the following approach:

@Bean
public Step step1(JdbcBatchItemWriter<Person> writer) {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setCorePoolSize(1);
    TaskletStep step1 = stepBuilderFactory.get("step1")
            .<Person, Person>chunk(10)
            .reader(reader())
            .processor(processor())
            .writer(writer)
            .taskExecutor(taskExecutor)
            .build();

    return step1;
}

But my application just hangs. Besides, that is not partitioning: it still runs on a single machine only.

Your configuration is wrong; follow the configuration below. You need to decide on the logic by which you want to partition. Look at how the partitioner's partition method builds the map and adds entries to the execution context.

Follow the code below:

@Bean
public Step step1(JdbcBatchItemWriter<Person> writer) {
    return stepBuilderFactory.get("partitionerStep")
            .partitioner("slaveStep", partitioner())
            .step(slaveStep(writer))
            .taskExecutor(taskExecutor())
            .build();
}

@Bean
public CustomPartitioner partitioner() {
    CustomPartitioner partitioner = new CustomPartitioner();
    return partitioner;
}

public class CustomPartitioner implements Partitioner {

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {
        Map<String, ExecutionContext> map = new HashMap<>(gridSize);
        int i = 0;
        // "resources" is whatever you decide to split the work by,
        // e.g. one Resource per partition
        for (Resource resource : resources) {
            ExecutionContext context = new ExecutionContext();
            context.putString("keyName", ""); // depends on the logic you want to split on
            map.put("partitionKey" + i, context);
            i++;
        }
        return map;
    }
}

@Bean
public TaskExecutor taskExecutor() {
    ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
    taskExecutor.setMaxPoolSize(10);
    taskExecutor.setCorePoolSize(10);
    taskExecutor.setQueueCapacity(10);
    taskExecutor.afterPropertiesSet();
    return taskExecutor;
}

@Bean
public Step slaveStep(JdbcBatchItemWriter<Person> writer)
        throws UnexpectedInputException, MalformedURLException, ParseException {
    return stepBuilderFactory.get("slaveStep")
            .<Person, Person>chunk(10)
            .reader(reader())
            .processor(processor())
            .writer(writer)
            .build();
}
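The CustomPartitioner above only writes a placeholder value into each partition's ExecutionContext, and nothing in the answer shows how the slave step consumes it. Below is a minimal sketch of that missing piece, assuming each partition carries a per-partition CSV file path under the "keyName" key; the key name, the file-per-partition split, and FileSystemResource are assumptions, not part of the original answer.

@Bean
@StepScope
public FlatFileItemReader<Person> reader(
        @Value("#{stepExecutionContext['keyName']}") String fileName) {
    return new FlatFileItemReaderBuilder<Person>()
            .name("csvPersonReader")
            // one resource per partition (assumed layout for this sketch)
            .resource(new FileSystemResource(fileName))
            .delimited()
            .names(new String[]{"firstName", "lastName"})
            .fieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
                setTargetType(Person.class);
            }})
            .build();
}

Because the reader is @StepScope, each worker thread gets its own reader instance bound to its own partition's execution context. The slave step should then take the reader as an injected parameter (or call reader(null), which the scoped proxy resolves at step execution time) rather than calling reader() directly.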

You can use the code below to implement batch partitioning:

@Configuration
public class DemoJobBatchConfiguration {

    private static final Logger LOGGER = LoggerFactory.getLogger(DemoJobBatchConfiguration.class);

    @Autowired
    private JobBuilderFactory jobBuilderFactory;

    @Autowired
    private StepBuilderFactory stepBuilderFactory;

    @Autowired
    @Qualifier("applicaionDS")
    public DataSource dataSource;

    @Autowired
    UserWritter userWriter;

    @Bean("demoJob")
    public Job partitionJob(JobNotificationListener listener, JobBuilderFactory jobBuilderFactory,
            @Qualifier("demoPartitionStep") Step demoPartitionStep) {
        return jobBuilderFactory.get("demoJob").incrementer(new RunIdIncrementer()).listener(listener)
                .start(demoPartitionStep).build();
    }

    @Bean(name = "demoPartitionStep")
    public Step demoPartitionStep(Step demoSlaveStep, StepBuilderFactory stepBuilderFactory) {
        return stepBuilderFactory.get("demoPartitionStep").partitioner("demoPartitionStep", demoPartitioner())
                .gridSize(21).step(demoSlaveStep).taskExecutor(jobTaskExecutor()).build();
    }

    @Bean(name = "demoPartitioner", destroyMethod = "")
    public Partitioner demoPartitioner() {
        DemoPartitioner partitioner = new DemoPartitioner();
        // partitioner.partition(20);
        return partitioner;
    }

    @Bean
    public Step demoSlaveStep(ItemReader<User> demoReader, ItemProcessor<User, User> demoJobProcessor) {
        return stepBuilderFactory.get("demoSlaveStep").<User, User>chunk(3).reader(demoReader)
                .processor(demoJobProcessor).writer(userWriter).build();
    }

    @Bean(name = "demoReader")
    @StepScope
    public JdbcCursorItemReader<User> demoReader(@Value("#{stepExecutionContext[SQL]}") String SQL,
            @Value("#{jobParameters[JOB_PARM]}") String jobParm,
            @Value("#{jobExecutionContext[jobExecutionParameter]}") String jobExecutionParameter) {
        LOGGER.info("---------------------- demoReader ------------------------------- " + SQL);
        LOGGER.info(" jobParm : " + jobParm);
        LOGGER.info(" jobExecutionParameter : " + jobExecutionParameter);

        JdbcCursorItemReader<User> reader = new JdbcCursorItemReader<>();
        reader.setDataSource(this.dataSource);
        reader.setFetchSize(200);
        reader.setRowMapper(new BeanPropertyRowMapper<>(User.class));
        reader.setSql(SQL);
        return reader;
    }

    @Bean(name = "demoJobProcessor")
    @StepScope
    public ItemProcessor<User, User> demoJobProcessor() throws Exception {
        LOGGER.info(" DemoJobBatchConfiguration: demoJobProcessor  ");
        return new UserProcessor();
    }

    /*
     * @Bean public ItemWriter<User> demoWriter() { return users -> { for (User user
     * : users) { if (LOGGER.isInfoEnabled()) { LOGGER.info("user read is :: " +
     * user.toString()); } } if (LOGGER.isInfoEnabled()) {
     * LOGGER.info("%%%%%%%%%%%%%%%%%%%%% demoWriter %%%%%%%%%%%%%%%%%%%%% "); } };
     * }
     */

    @Bean
    public TaskExecutor jobTaskExecutor() {
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        // there are 21 sites currently hence we have 21 threads
        taskExecutor.setMaxPoolSize(30);
        taskExecutor.setCorePoolSize(25);
        taskExecutor.afterPropertiesSet();
        return taskExecutor;
    }

}
public class DemoPartitioner implements Partitioner {

    @Override
    public Map<String, ExecutionContext> partition(int gridSize) {

        Map<String, ExecutionContext> result = new HashMap<String, ExecutionContext>();

        int range = 3;
        int fromId = 1;
        int toId = range;

        for (int i = fromId; i <= gridSize;) {
            ExecutionContext executionContext = new ExecutionContext();
            String SQL = "SELECT * FROM CUSTOMER WHERE ID BETWEEN " + fromId + " AND " + toId;
            System.out.println("SQL : " + SQL);
            executionContext.putInt("fromId", fromId);
            executionContext.putInt("toId", toId);
            executionContext.putString("SQL", SQL);
            executionContext.putString("name", "Thread" + i);
            result.put("partition" + i, executionContext);
            fromId = toId + 1;
            i = fromId;
            toId += range;
        }
        return result;
    }

}
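As a usage note, here is a rough sketch of how the late-bound values in demoReader could be supplied at runtime. Only the property names (SQL, JOB_PARM, jobExecutionParameter) come from the configuration above; the listener body, the parameter values, and the runDemoJob method are assumptions.

// Inside JobNotificationListener (assumed), before any step executes:
@Override
public void beforeJob(JobExecution jobExecution) {
    // makes #{jobExecutionContext[jobExecutionParameter]} resolvable in demoReader
    jobExecution.getExecutionContext().putString("jobExecutionParameter", "some-value");
}

// Launching demoJob with the JOB_PARM job parameter:
@Autowired
private JobLauncher jobLauncher;

@Autowired
@Qualifier("demoJob")
private Job demoJob;

public void runDemoJob() throws Exception {
    JobParameters jobParameters = new JobParametersBuilder()
            .addString("JOB_PARM", "some-value")
            .addLong("time", System.currentTimeMillis()) // keeps each launch unique
            .toJobParameters();
    jobLauncher.run(demoJob, jobParameters);
}

The per-partition SQL itself never comes from the launcher: DemoPartitioner writes it into each partition's step execution context, which is why demoReader is declared @StepScope.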