Spring Batch cannot load the JobBuilderFactory in my integration test
I have a configuration that works: it loads cell line data and publishes it to various recipients on a cell line topic. That runs fine, but when I try to load the JobLauncherTestUtils and JobRepositoryTestUtils in my integration test, I get an error saying that the JobBuilderFactory cannot be found. As you will see from my configuration, I do inject the JobBuilderFactory and StepBuilderFactory via Lombok-generated constructors, which delegate to Spring. Like I said, all of that works fine in the application, but not in the test. Here is the test configuration YAML file:
application-test.yml
spring:
  sql:
    init:
      schema-locations: classpath:db/migration
      platform: derby

jobmeta-ds:
  datasource:
    driver-class-name: org.apache.derby.jdbc.EmbeddedDriver
    url: jdbc:derby:support/jhhmeta;create=true
    password:
    jndi-name: false

cell-datasource:
  datasource:
    driver-class-name: oracle.jdbc.driver.OracleDriver
    url: jdbc:oracle:thin:@localhost:1521:xe
    password:
    jndi-name: false
Here are the datasources:
// CellDbConfig class
@Configuration
public class CellDbConfig {

    @Bean
    @ConfigurationProperties("cell-datasource")
    public DataSourceProperties cellLineDataSourceProperties() {
        return new DataSourceProperties();
    }

    @Bean(name = "cellDataSource")
    public DataSource cellDataSource() {
        HikariDataSource dataSource = cellLineDataSourceProperties().initializeDataSourceBuilder()
                .type(HikariDataSource.class)
                .build();
        return dataSource;
    }

    @Bean(name = "cellJdbcTemplate")
    public JdbcTemplate cellJdbcTemplate(@Qualifier("cellDataSource") DataSource cellDataSource) {
        return new JdbcTemplate(cellDataSource);
    }
}
Here is the other datasource configuration, used for the JobRepository metadata:
@Configuration
public class JobRepoMetadataDbConfig {

    @Primary
    @Bean
    @ConfigurationProperties("jobmeta.datasource")
    public DataSourceProperties jobMetadataProperties() {
        return new DataSourceProperties();
    }

    @Primary
    @Bean(name = "jobMetaDataSource")
    public DataSource dataSourceJobMeta() {
        DataSource dataSource = jobMetadataProperties().initializeDataSourceBuilder()
                .type(BasicDataSource.class)
                .build();
        return dataSource;
    }

    @Bean(name = "jobMetaJdbcTemplate")
    public JdbcTemplate jobMetaJdbcTemplate(@Qualifier("jobMetaDataSource") DataSource jobMetaDataSource) {
        return new JdbcTemplate(jobMetaDataSource);
    }
}
Here is the Spring Batch-specific configuration (JobRepository etc.):
@Configuration
@EnableBatchProcessing
@RequiredArgsConstructor
public class JobRepoConfig {

    @Qualifier("jobMetaDataSource")
    final DataSource jobMetaDataSource;

    @Bean
    AbstractPlatformTransactionManager jobTransactionManager() {
        return new ResourcelessTransactionManager();
    }

    @Bean
    public JobRepositoryFactoryBean jobRepositoryFactory() throws Exception {
        JobRepositoryFactoryBean factory = new JobRepositoryFactoryBean();
        factory.setDataSource(jobMetaDataSource);
        factory.setTransactionManager(jobTransactionManager());
        factory.afterPropertiesSet();
        return factory;
    }

    @Bean
    public JobRepository jobRepository() throws Exception {
        JobRepositoryFactoryBean jobRepositoryFactoryBean = new JobRepositoryFactoryBean();
        jobRepositoryFactoryBean.setDataSource(jobMetaDataSource);
        jobRepositoryFactoryBean.setTransactionManager(jobTransactionManager());
        jobRepositoryFactoryBean.setDatabaseType(DatabaseType.H2.getProductName());
        return jobRepositoryFactoryBean.getObject();
    }

    @Bean
    public SimpleJobLauncher launchAppJobLauncher() throws Exception {
        SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
        simpleJobLauncher.setJobRepository(jobRepository());
        return simpleJobLauncher;
    }
}
Here is the KafkaProducer configuration that publishes the cell line data:
@Configuration
@Slf4j
public class ProducerConfig {

    @Value("${spring.kafka.template.default-topic}")
    private String cellsTopic;

    @Bean
    public ProducerFactory<Long, CellVO> kafkaProducerFactory(KafkaProperties kafkaProperties) {
        var producerProperties = kafkaProperties.buildProducerProperties();
        var sslProperties = kafkaProperties.getSsl().buildProperties();
        Map<String, Object> props = new HashMap<>(producerProperties);
        if (!CollectionUtils.isEmpty(sslProperties)) {
            props.putAll(sslProperties);
        }
        return new DefaultKafkaProducerFactory<>(props);
    }

    @Bean
    public KafkaTemplate<Long, CellVO> kafkaTemplate(ProducerFactory<Long, CellVO> kafkaProducerFactory) {
        KafkaTemplate<Long, CellVO> kafkaTemplate = new KafkaTemplate<>(kafkaProducerFactory);
        kafkaTemplate.setDefaultTopic(cellsTopic);
        return kafkaTemplate;
    }
}
Here is the Spring Batch test class:
@SpringBatchTest
@SpringBootTest
@ActiveProfiles("test")
@Tag("integration")
@EnableAutoConfiguration
public class CellCongTest {

    @Autowired
    private JobLauncherTestUtils jobLauncherTestUtils;

    @Autowired
    private JobRepositoryTestUtils jobRepositoryTestUtils;

    @Test
    public void testSuccessfulLoad() throws Exception {
    }
}
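For context, once the application context loads, the test body would exercise the job through these utilities roughly along the following lines (a sketch only; the parameters mirror the jobParameters bean further down and the assertion is illustrative):

// Rough sketch of the eventual test body (illustrative only). Additional imports needed:
//   org.springframework.batch.core.BatchStatus, JobExecution, JobParameters, JobParametersBuilder
//   and static org.junit.jupiter.api.Assertions.assertEquals
@Test
public void testSuccessfulLoad() throws Exception {
    JobParameters jobParameters = new JobParametersBuilder()
            .addString("jobId", UUID.randomUUID().toString())
            .addLong("time", System.currentTimeMillis())
            .toJobParameters();

    // launch the job registered with JobLauncherTestUtils and wait for completion
    JobExecution jobExecution = jobLauncherTestUtils.launchJob(jobParameters);
    assertEquals(BatchStatus.COMPLETED, jobExecution.getStatus());

    // clean up the executions this test created in the job metadata tables
    jobRepositoryTestUtils.removeJobExecutions();
}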
Finally, here is the batch job itself:
@Configuration
@EnableScheduling
@RequiredArgsConstructor
@Slf4j
public class CellBatchJobConfig {

    final JobBuilderFactory jobBuilderFactory;
    final JobLauncher jobAppJobLauncher;
    final StepBuilderFactory stepBuilderFactory;
    final KafkaTemplate<Long, CellVO> kafkaTemplate;
    final KafkaItemWriteListener kafkaItemWriteListener;

    final static String CELL_LINE_JOB = "CELL_LINE_JOB";

    @Value("${chunk-size}")
    private int chunkSize;

    @Qualifier("cellDataSource")
    final DataSource cellDataSource;

    @Bean
    public JdbcPagingItemReader<CellVO> cellDataReader(PagingQueryProvider pagingQueryProvider) {
        return new JdbcPagingItemReaderBuilder<CellVO>()
                .name("cellDataReader")
                .dataSource(cellDataSource)
                .queryProvider(pagingQueryProvider)
                .pageSize(chunkSize)
                .rowMapper(new CellRowMapper())
                .build();
    }

    @Bean
    public PagingQueryProvider pagingQueryProvider() {
        OraclePagingQueryProvider pagingQueryProvider = new OraclePagingQueryProvider();
        final Map<String, Order> sortKeys = new HashMap<>();
        sortKeys.put("CELL_ID", Order.ASCENDING);
        pagingQueryProvider.setSortKeys(sortKeys);
        pagingQueryProvider.setSelectClause(" CELL_ID, CELL_TYPE, SITE, CELL_QUALITY_LINE ");
        pagingQueryProvider.setFromClause(" FROM DCV.CELL_LINES");
        return pagingQueryProvider;
    }

    @Bean
    public KafkaItemWriter<Long, CellVO> kafkaItemWriter() throws Exception {
        KafkaItemWriter<Long, CellVO> kafkaItemWriter = new KafkaItemWriter<>();
        kafkaItemWriter.setKafkaTemplate(kafkaTemplate);
        kafkaItemWriter.setItemKeyMapper(CellVO::getLocationId);
        kafkaItemWriter.setDelete(false);
        kafkaItemWriter.afterPropertiesSet();
        return kafkaItemWriter;
    }

    @Bean
    public Step loadCellLines() throws Exception {
        return stepBuilderFactory.get("step1")
                .<CellVO, CellVO>chunk(chunkSize)
                .reader(cellDataReader(pagingQueryProvider()))
                .writer(kafkaItemWriter())
                .listener(kafkaItemWriteListener)
                .build();
    }

    @Bean
    public Job cellLineJob() throws Exception {
        return jobBuilderFactory.get(CELL_LINE_JOB)
                .incrementer(new RunIdIncrementer())
                .start(loadCellLines())
                .build();
    }

    @Bean("jobParameters")
    JobParameters jobParameters() {
        JobParameters jobParameters = new JobParametersBuilder()
                .addString("jobId", UUID.randomUUID().toString())
                .addDate("date", new Date())
                .addLong("time", System.currentTimeMillis())
                .toJobParameters();
        return jobParameters;
    }

    @Scheduled(cron = "0 0 5 * * *")
    public Job runCellLineJob() throws Exception {
        kafkaItemWriteListener.setItems(new ArrayList<>());
        return jobBuilderFactory.get(CELL_LINE_JOB)
                .incrementer(new RunIdIncrementer())
                .start(loadCellLines())
                .build();
    }
}
Unfortunately, the test fails with a message that it could not load the application context. The error is as follows:
Caused by: org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'jobLauncherTestUtils':
Unsatisfied dependency expressed through method 'setJob' parameter 0; nested exception is org.springframework.beans.factory.UnsatisfiedDependencyException: Error creating bean with name 'cellBatchJobConfig': Unsatisfied dependency expressed through constructor parameter 0; nested exception is org.springframework.beans.factory.NoSuchBeanDefinitionException: No qualifying bean of type 'org.springframework.batch.core.configuration.annotation.JobBuilderFactory' available: expected at least 1 bean which qualifies as autowire candidate. Dependency annotations: {}
One thing I did try was to inject the job manually, but that did not work either. I don't even understand why it can find the Job in the actual configuration but not in the test:
@Configuration
class JobLaunchUtilsCellLine {

    @Autowired
    @Qualifier("cellLineJob")
    Job cellLineJob;

    @Bean
    public JobLauncherTestUtils cellLineJobLauncherUtils() {
        JobLauncherTestUtils jobLauncherTestUtils = new JobLauncherTestUtils();
        jobLauncherTestUtils.setJob(cellLineJob);
        return jobLauncherTestUtils;
    }
}
I then inject it like this in the Spring Batch test, but it doesn't work:
@Qualifier("cellLineJobLauncherUtils")
@Autowired
JobLauncherTestUtils cellLineJobLauncherUtils;
However, it still complains that the JobBuilderFactory bean does not exist.
Solution 1:
We encountered the same issue when we added a new scheduled job configuration. Here is how it has been addressed:
- Create the JobLaunchUtils configuration (similar to yours):
import org.springframework.batch.test.JobLauncherTestUtils
import org.springframework.batch.test.JobRepositoryTestUtils
import org.springframework.context.annotation.Bean

class JobSpecConfiguration {

    @Bean
    JobLauncherTestUtils getJobLauncherTestUtils() {
        new JobLauncherTestUtils()
    }

    @Bean
    JobRepositoryTestUtils getJobRepositoryTestUtils() {
        new JobRepositoryTestUtils()
    }
}
- Remove @SpringBatchTest and add the needed configuration via @ContextConfiguration. We were able to solve the bean dependency issue by doing this:
import org.spockframework.spring.SpringBean
import org.springframework.batch.core.Job
import org.springframework.batch.test.JobLauncherTestUtils
import org.springframework.batch.test.JobRepositoryTestUtils
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.beans.factory.annotation.Qualifier
import org.springframework.boot.test.context.TestConfiguration
import org.springframework.context.annotation.Bean
import org.springframework.context.annotation.Primary
import org.springframework.test.annotation.DirtiesContext
import org.springframework.test.context.ActiveProfiles
import org.springframework.test.context.ContextConfiguration
import spock.lang.Specification

@DirtiesContext
@ContextConfiguration(classes = [Application, TestConfig])
@ActiveProfiles(['test', 'kafka'])
class SubmitFilesJobSpec extends Specification {

    @Autowired
    private JobLauncherTestUtils jobLauncherTestUtils

    @Autowired
    private JobRepositoryTestUtils jobRepositoryTestUtils

    @SpringBean
    private SomeRepo someRepo = Mock()

    def cleanup() {
        jobRepositoryTestUtils.removeJobExecutions()
    }

    // some unit test that we have
    def "Verify batch run"() {
        given: "At least 1 Open Record"
        def record = defaultData()
        someRepo.findTop1ByStatus(_) >> record

        when: "A batch job has been triggered"
        def jobExecution = jobLauncherTestUtils.launchJob(BaseJobExecution.getJobParameters(null))

        then: "Job will be completed with at least 1 persisted/processed record"
        2 == jobExecution.getStepExecutions().size()
        jobExecution.getStepExecutions().forEach(stepExecution -> {
            1 == stepExecution.getWriteCount()
        })
        "SOME_JOB_NAME" == jobExecution.getJobInstance().getJobName()
        "COMPLETED" == jobExecution.getExitStatus().getExitCode()
    }

    @TestConfiguration
    static class TestConfig extends JobSpecConfiguration {

        @Override
        @Bean
        JobLauncherTestUtils getJobLauncherTestUtils() {
            new JobLauncherTestUtils() {
                @Override
                @Autowired
                void setJob(@Qualifier("submitFilesJob") Job job) {
                    super.setJob(job)
                }
            }
        }
    }
}
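For a plain Java/JUnit setup like the one in the question, a rough equivalent of the same idea might look like the sketch below. BatchTestConfig is a hypothetical name; the cellLineJob and jobMetaDataSource qualifiers are taken from the question's configuration, and Spring Batch 4.x setters on the test utilities are assumed:

// Sketch of the same approach in Java (Spring Batch 4.x assumed): pin the job by qualifier
// instead of relying on @SpringBatchTest autowiring setJob(Job).
import javax.sql.DataSource;

import org.springframework.batch.core.Job;
import org.springframework.batch.core.repository.JobRepository;
import org.springframework.batch.test.JobLauncherTestUtils;
import org.springframework.batch.test.JobRepositoryTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.test.context.TestConfiguration;
import org.springframework.context.annotation.Bean;

@TestConfiguration
public class BatchTestConfig {

    @Bean
    public JobLauncherTestUtils jobLauncherTestUtils() {
        // anonymous subclass narrows the autowired setJob to the specific job under test;
        // jobLauncher and jobRepository are still injected into the utility by Spring
        return new JobLauncherTestUtils() {
            @Override
            @Autowired
            public void setJob(@Qualifier("cellLineJob") Job job) {
                super.setJob(job);
            }
        };
    }

    @Bean
    public JobRepositoryTestUtils jobRepositoryTestUtils(JobRepository jobRepository,
                                                         @Qualifier("jobMetaDataSource") DataSource dataSource) {
        JobRepositoryTestUtils utils = new JobRepositoryTestUtils();
        utils.setJobRepository(jobRepository);
        utils.setDataSource(dataSource);
        return utils;
    }
}

The test class would then reference this configuration (for example via @ContextConfiguration or @Import) instead of relying on @SpringBatchTest to register the test utilities.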
Sources
This article follows the attribution requirements of Stack Overflow and is licensed under CC BY-SA 3.0.
Source: Stack Overflow
| Solution | Source |
|---|---|
| Solution 1 | czarmayne |