Java 如何在Spring Boot中并行运行两个批处理文件
我是Spring Batch的新手,我有两个批处理文件:一个从用户表读取数据,另一个从员工表读取数据。如何并行运行这两个批处理文件?是否可以为它们安排不同的cron作业?如果可能的话,请推荐最好的方法。Application.java 批处理文件1
@配置
@启用批处理
公共类BatchConfig2{
@自动连线
公共建筑商建筑商工厂;
@自动连线
公共StepBuilderFactory StepBuilderFactory;
@自动连线
公共数据源;
@豆子
公共JdbcCursorItemReader(){
JdbcCursorItemReader=新的JdbcCursorItemReader();
reader.setDataSource(数据源);
reader.setSql(“从用户选择id、名称”);
setRowMapper(新的UserRowMapper());
返回读取器;
}
公共类UserRowMapper实现了RowMapper{
@凌驾
公共用户mapRow(ResultSet rs,int rowNum)抛出SQLException{
用户=新用户();
user.setId(rs.getInt(“id”);
user.setName(rs.getString(“name”);
返回用户;
}
}
@豆子
公共UserItemProcessor处理器(){
返回新的UserItemProcessor();
}
@豆子
公共FlatFileItemWriter编写器(){
FlatFileItemWriter writer=新的FlatFileItemWriter();
writer.setResource(新类路径资源(“users.csv”);
writer.setLineAggregator(新的DelimitedLineAggregator(){
{
setDelimiter(“,”);
setFieldExtractor(新BeanWrapperFieldExtractor(){
{
集合名(新字符串[]{“id”,“name”});
}
});
}
});
返回作者;
}
@豆子
公共步骤第1步(){
返回stepBuilderFactory.get(“step1”).chunk(10).reader(reader()).processor(processor())
.writer(writer()).build();
}
@豆子
公共作业exportUserJob(){
返回jobBuilderFactory.get(“exportUserJob2”).incrementer(新的RunIdIncrementer()).flow(step1()).end().build();
}
}
批处理文件2
@配置
@启用批处理
公共类BatchConfig2{
@自动连线
公共建筑商建筑商工厂;
@自动连线
公共StepBuilderFactory StepBuilderFactory;
@自动连线
公共数据源;
@豆子
公共JdbcCursorItemReader(){
JdbcCursorItemReader=新的JdbcCursorItemReader();
reader.setDataSource(数据源);
reader.setSql(“从员工中选择id和姓名”);
setRowMapper(新的UserRowMapper());
返回读取器;
}
公共类UserRowMapper实现了RowMapper{
@凌驾
公共用户mapRow(ResultSet rs,int rowNum)抛出SQLException{
用户=新用户();
user.setId(rs.getInt(“id”);
user.setName(rs.getString(“name”);
返回用户;
}
}
@豆子
公共UserItemProcessor处理器(){
返回新的UserItemProcessor();
}
@豆子
公共FlatFileItemWriter编写器(){
FlatFileItemWriter writer=新的FlatFileItemWriter();
writer.setResource(新类路径资源(“users.csv”);
writer.setLineAggregator(新的DelimitedLineAggregator(){
{
setDelimiter(“,”);
setFieldExtractor(新BeanWrapperFieldExtractor(){
{
集合名(新字符串[]{“id”,“name”});
}
});
}
});
返回作者;
}
@豆子
公共步骤第1步(){
返回stepBuilderFactory.get(“step1”).chunk(10).reader(reader()).processor(processor())
.writer(writer()).build();
}
@豆子
公共作业exportUserJob(){
返回jobBuilderFactory.get(“exportUserJob2”).incrementer(新的RunIdIncrementer()).flow(step1()).end().build();
}
}
您可以使用 TaskExecutor 手动定义作业来实现并行运行,请参考下面的代码。
(评论:没有人知道这一点吗?您的应用程序现在出了什么问题?它现在是如何工作的?)
可能是:
@SpringBootApplication
@EnableScheduling
public class SpringBatchJobApplication {

    @Autowired
    JobLauncher jobLauncher;

    // The qualifier must be the BEAN name, i.e. the @Bean method name
    // ("exportUserJob" in BatchConfig) — not the configuration class name.
    @Autowired
    @Qualifier("exportUserJob")
    private Job job1;

    @Autowired
    @Qualifier("exportUserJob2")
    private Job job2;

    public static void main(String[] args) {
        SpringApplication.run(SpringBatchJobApplication.class, args);
    }

    /** Thread pool used by the async JobLauncher so the two jobs run in parallel. */
    @Bean
    public ThreadPoolTaskExecutor taskExecutor() {
        ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
        taskExecutor.setCorePoolSize(15);
        taskExecutor.setMaxPoolSize(20);
        taskExecutor.setQueueCapacity(30);
        return taskExecutor;
    }

    /** JobLauncher that hands each job run to the pool instead of blocking the scheduler thread. */
    @Bean
    public JobLauncher jobLauncher(ThreadPoolTaskExecutor taskExecutor, JobRepository jobRepository) {
        SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
        jobLauncher.setTaskExecutor(taskExecutor);
        jobLauncher.setJobRepository(jobRepository);
        return jobLauncher;
    }

    @Scheduled(cron = "*/1 * * * * *")
    public void run1() {
        launch(job1);
    }

    @Scheduled(cron = "*/2 * * * * *")
    public void run2() {
        launch(job2);
    }

    /**
     * Launches a job with a unique JobID parameter so every run gets a new JobInstance.
     * JobLauncher.run declares checked JobExecutionException subtypes — the original
     * scheduled methods did not handle them and would not compile.
     */
    private void launch(Job job) {
        JobParameters params = new JobParametersBuilder()
                .addString("JobID", String.valueOf(System.currentTimeMillis()))
                .toJobParameters();
        try {
            jobLauncher.run(job, params);
        } catch (JobExecutionException e) {
            // Preserve the cause; don't swallow launch failures silently.
            throw new IllegalStateException("Failed to launch job " + job.getName(), e);
        }
    }
}
@Configuration
@EnableBatchProcessing
public class BatchConfig { // renamed from BatchConfig2: both config classes shared that name

    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    public DataSource dataSource;

    /** Cursor-based reader streaming (id, name) rows from the user table. */
    @Bean
    public JdbcCursorItemReader<User> reader() {
        JdbcCursorItemReader<User> reader = new JdbcCursorItemReader<>();
        reader.setDataSource(dataSource);
        reader.setSql("SELECT id,name FROM user");
        reader.setRowMapper(new UserRowMapper());
        return reader;
    }

    /** Maps one JDBC row (columns: id, name) to a User. */
    public class UserRowMapper implements RowMapper<User> {
        @Override
        public User mapRow(ResultSet rs, int rowNum) throws SQLException {
            User user = new User();
            user.setId(rs.getInt("id"));
            user.setName(rs.getString("name"));
            return user;
        }
    }

    @Bean
    public UserItemProcessor processor() {
        return new UserItemProcessor();
    }

    /**
     * Writes users as a comma-delimited CSV.
     * NOTE(review): FlatFileItemWriter cannot write into a ClassPathResource once the
     * application is packaged as a jar — consider a FileSystemResource instead.
     */
    @Bean
    public FlatFileItemWriter<User> writer() {
        FlatFileItemWriter<User> writer = new FlatFileItemWriter<>();
        writer.setResource(new ClassPathResource("users.csv"));
        writer.setLineAggregator(new DelimitedLineAggregator<User>() {
            {
                setDelimiter(",");
                setFieldExtractor(new BeanWrapperFieldExtractor<User>() {
                    {
                        setNames(new String[] { "id", "name" });
                    }
                });
            }
        });
        return writer;
    }

    @Bean
    public Step step1() {
        return stepBuilderFactory.get("step1").<User, User>chunk(10).reader(reader()).processor(processor())
                .writer(writer()).build();
    }

    @Bean
    public Job exportUserJob() {
        // Job name fixed: both configs registered "exportUserJob2", which collides in the JobRepository.
        return jobBuilderFactory.get("exportUserJob").incrementer(new RunIdIncrementer()).flow(step1()).end().build();
    }
}
@Configuration
@EnableBatchProcessing
public class BatchConfig2 {

    @Autowired
    public JobBuilderFactory jobBuilderFactory;
    @Autowired
    public StepBuilderFactory stepBuilderFactory;
    @Autowired
    public DataSource dataSource;

    /** Cursor-based reader streaming (id, name) rows from the employee table. */
    @Bean
    public JdbcCursorItemReader<User> reader2() { // unique bean name: "reader" is taken by the user config
        JdbcCursorItemReader<User> reader = new JdbcCursorItemReader<>();
        reader.setDataSource(dataSource);
        reader.setSql("SELECT id,name FROM employee");
        reader.setRowMapper(new UserRowMapper());
        return reader;
    }

    /** Maps one JDBC row (columns: id, name) to a User. */
    public class UserRowMapper implements RowMapper<User> {
        @Override
        public User mapRow(ResultSet rs, int rowNum) throws SQLException {
            User user = new User();
            user.setId(rs.getInt("id"));
            user.setName(rs.getString("name"));
            return user;
        }
    }

    @Bean
    public UserItemProcessor processor2() {
        return new UserItemProcessor();
    }

    /**
     * Writes employees as a comma-delimited CSV.
     * Output fixed: both jobs previously wrote users.csv and clobbered each other when run in parallel.
     * NOTE(review): FlatFileItemWriter cannot write into a ClassPathResource once packaged as a jar.
     */
    @Bean
    public FlatFileItemWriter<User> writer2() {
        FlatFileItemWriter<User> writer = new FlatFileItemWriter<>();
        writer.setResource(new ClassPathResource("employees.csv"));
        writer.setLineAggregator(new DelimitedLineAggregator<User>() {
            {
                setDelimiter(",");
                setFieldExtractor(new BeanWrapperFieldExtractor<User>() {
                    {
                        setNames(new String[] { "id", "name" });
                    }
                });
            }
        });
        return writer;
    }

    @Bean
    public Step step2() { // unique step bean and step name: "step1" is taken by the user config
        return stepBuilderFactory.get("step2").<User, User>chunk(10).reader(reader2()).processor(processor2())
                .writer(writer2()).build();
    }

    @Bean
    public Job exportUserJob2() {
        return jobBuilderFactory.get("exportUserJob2").incrementer(new RunIdIncrementer()).flow(step2()).end().build();
    }
}