I want to use Spring Batch with OSGi to run a job daily.
Here is what I did:
@Component
@EnableBatchProcessing
public class BatchConfiguration {
private JobBuilderFactory jobs;
public JobBuilderFactory getJobs() {
return jobs;
}
public void setJobs(JobBuilderFactory jobs) {
this.jobs = jobs;
}
private StepBuilderFactory steps;
private EmployeeRepository employeeRepository; //spring data repository
public EmployeeRepository getEmployeeRepository() {
return employeeRepository;
}
@Reference
public void setEmployeeRepository(EmployeeRepository employeeRepository) {
this.employeeRepository = employeeRepository;
}
public Step syncEmployeesStep() throws Exception{
RepositoryItemWriter writer = new RepositoryItemWriter();
writer.setRepository(employeeRepository);
writer.setMethodName("save");
return steps.get("syncEmployeesStep")
.<Employee, Employee> chunk(10)
.reader(reader())
.writer(writer)
.build();
}
public Job importEmpJob()throws Exception {
return jobs.get("importEmpJob")
.incrementer(new RunIdIncrementer())
.start(syncEmployeesStep())
.next(syncEmployeesStep())
.build();
}
public ItemReader<Employee> reader() throws Exception {
String jpqlQuery = "select a from Employee a";
ServerEMF entityManager = new ServerEMF();
JpaPagingItemReader<Employee> reader = new JpaPagingItemReader<>();
reader.setQueryString(jpqlQuery);
reader.setEntityManagerFactory(entityManager.getEntityManagerFactory());
reader.setPageSize(3);
reader.afterPropertiesSet();
reader.setSaveState(true);
return reader;
}
}
Here I want to run this job to sync between two databases. My problem is how to run this job inside OSGi.
@EnableScheduling
@Component
public class JobRunner {
private JobLauncher jobLauncher;
private Job job ;
private BatchConfiguration batchConfig;
//private JobBuilderFactory jobs;
//private JobRepository jobrepo;
final static Logger logger = LoggerFactory.getLogger(BatchConfiguration.class);
BundleContext ctx;
@SuppressWarnings("rawtypes")
ServiceTracker servicetracker;
@Activate
public void start(BundleContext context) {
batchConfig = new BatchConfiguration();
//jobs = new JobBuilderFactory(jobRepository)
try {
job = batchConfig.importEmpJob(); //job is null because i don't know how to use it
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
ctx = context;
servicetracker= new ServiceTracker(ctx, BatchConfiguration.class, null);
servicetracker.open();
new Thread() {
public void run() { findAndRunJob(); }
}.start();
}
@Deactivate
public void stop() {
servicetracker.close();
}
@Scheduled(fixedRate = 5000)
protected void findAndRunJob() {
logger.info("job created.");
try {
String dateParam = new Date().toString();
JobParameters param = new JobParametersBuilder().addString("date", dateParam).toJobParameters();
System.out.println(dateParam);
JobExecution execution = jobLauncher.run(job, param);
System.out.println("Exit Status : " + execution.getStatus());
} catch (Exception e) {
//e.printStackTrace();
}
}
For sure, I got a java.lang.NullPointerException because the job is null.
Could anyone help me with that?
After updates:
@Component
@EnableBatchProcessing
public class BatchConfiguration {
private EmployeeRepository employeeRepository; //spring data repository
public EmployeeRepository getEmployeeRepository() {
return employeeRepository;
}
@Reference
public void setEmployeeRepository(EmployeeRepository employeeRepository) {
this.employeeRepository = employeeRepository;
}
public Step syncEmployeesStep() throws Exception{
RepositoryItemWriter writer = new RepositoryItemWriter();
writer.setRepository(employeeRepository);
writer.setMethodName("save");
return steps.get("syncEmployeesStep")
.<Employee, Employee> chunk(10)
.reader(reader())
.writer(writer)
.build();
}
public Job importEmpJob(JobRepository jobRepository, PlatformTransactionManager transactionManager)throws Exception {
JobBuilderFactory jobs= new JobBuilderFactory(jobRepository);
StepBuilderFactory stepBuilderFactory = new StepBuilderFactory(jobRepository, transactionManager);
return jobs.get("importEmpJob")
.incrementer(new RunIdIncrementer())
.start(syncEmployeesStep())
.next(syncEmployeesStep())
.build();
}
public ItemReader<Employee> reader() throws Exception {
String jpqlQuery = "select a from Employee a";
ServerEMF entityManager = new ServerEMF();
JpaPagingItemReader<Employee> reader = new JpaPagingItemReader<>();
reader.setQueryString(jpqlQuery);
reader.setEntityManagerFactory(entityManager.getEntityManagerFactory());
reader.setPageSize(3);
reader.afterPropertiesSet();
reader.setSaveState(true);
return reader;
}
}
Job runner class:
private JobLauncher jobLauncher;
private PlatformTransactionManager transactionManager;
private JobRepository jobRepository;
Job importEmpJob;
private BatchConfiguration batchConfig;
@SuppressWarnings("deprecation")
@Activate
public void start(BundleContext context) {
try {
batchConfig = new BatchConfiguration();
this.transactionManager = new ResourcelessTransactionManager();
MapJobRepositoryFactoryBean repositorybean = new MapJobRepositoryFactoryBean();
repositorybean.setTransactionManager(transactionManager);
this.jobRepository = repositorybean.getJobRepository(); //error after executing this statement
// setup job launcher
SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
simpleJobLauncher.setTaskExecutor(new SyncTaskExecutor());
simpleJobLauncher.setJobRepository(jobRepository);
this.jobLauncher = simpleJobLauncher;
//System.out.println(job);
} catch (Exception e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
ctx = context;
configAdminTracker = new ServiceTracker(ctx, BatchConfiguration.class.getName(), null);
configAdminTracker.open();
new Thread() {
public void run() { findAndRunJob(); }
}.start();
}
@Deactivate
public void stop() {
configAdminTracker.close();
}
protected void findAndRunJob() {
logger.info("job created.");
try {
String dateParam = new Date().toString();
// creating the job
Job job = batchConfig.importEmpJob(jobRepository, transactionManager);
// running the job
JobExecution execution = this.jobLauncher.run(job, new JobParameters());
System.out.println("Exit Status : " + execution.getStatus());
} catch (Exception e) {
//e.printStackTrace();
}
}
What I am getting after running is "java.lang.IllegalArgumentException: interface org.springframework.batch.core.repository.JobRepository is not visible from class loader". Could anyone help me with that error?
In Short
If you're just trying to kick off something simple and don't need all the Spring Batch goodness, I would look into the Apache Sling Commons Scheduler, which has a simple job processor on top of Quartz for scheduling [1].
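With the whiteboard pattern, the Sling scheduler picks up any Runnable service registered with a scheduler property. A minimal sketch, assuming OSGi Declarative Services (the class name and cron expression are illustrative):

import org.osgi.service.component.annotations.Component;

// Registered as a Runnable with a scheduler.expression property;
// Sling Commons Scheduler runs it daily at midnight.
@Component(
        service = Runnable.class,
        property = { "scheduler.expression=0 0 0 * * ?" }
)
public class DailySyncJob implements Runnable {

    @Override
    public void run() {
        // call the employee synchronization logic here
    }
}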
In General
There are a couple of considerations here, depending on what you are trying to do. Are you deploying the Spring Batch jars to the OSGi container with the assumption that the code written for the jobs (steps, tasks, etc.) will live in separate bundles? OSGi's purpose is to develop modular code, so my answer assumes that this is your end goal.
The folks at Pivotal have dropped OSGi support from their artifacts, so to make it work you'll need to determine what you need to export from the Batch jar files. This can be done with bnd; I would recommend checking out the new bnd Maven plugin [2]. I would configure Export-Package to export the interfaces you need to write the jobs, so that you can write the jobs in separate, modular bundles. Then I would probably embed the Spring Batch jars in a bundle and write a small wrapper around the JobLauncher. This confines all the actual batch code to a single classloader, so you don't have to worry about OSGi trying to pull in classes dynamically. The downside is that this prevents you from using many of the Batch annotations outside of the Spring Batch bundle you created, but it provides the modularity you are looking for by implementing this type of solution with OSGi.
[1] https://sling.apache.org/documentation/bundles/apache-sling-eventing-and-job-handling.html
[2] http://njbartlett.name/2015/03/27/announcing-bnd-maven-plugin.html
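As an illustration of the wrapper idea, the bundle that embeds the Spring Batch jars could expose a small Declarative Services component around the JobLauncher. A rough sketch (class names are illustrative, and the in-memory repository mirrors the setup already shown in the question):

import org.osgi.service.component.annotations.Activate;
import org.osgi.service.component.annotations.Component;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecution;
import org.springframework.batch.core.JobParameters;
import org.springframework.batch.core.launch.JobLauncher;
import org.springframework.batch.core.launch.support.SimpleJobLauncher;
import org.springframework.batch.core.repository.support.MapJobRepositoryFactoryBean;
import org.springframework.batch.support.transaction.ResourcelessTransactionManager;

// Lives in the bundle that embeds the Spring Batch jars, so all batch classes
// are loaded by a single classloader; job bundles only see this service.
@Component(service = BatchJobRunner.class)
public class BatchJobRunner {

    private JobLauncher jobLauncher;

    @Activate
    void activate() throws Exception {
        MapJobRepositoryFactoryBean repositoryFactory = new MapJobRepositoryFactoryBean();
        repositoryFactory.setTransactionManager(new ResourcelessTransactionManager());
        repositoryFactory.afterPropertiesSet();

        SimpleJobLauncher launcher = new SimpleJobLauncher();
        launcher.setJobRepository(repositoryFactory.getObject());
        launcher.afterPropertiesSet();
        this.jobLauncher = launcher;
    }

    public JobExecution run(Job job, JobParameters parameters) throws Exception {
        return jobLauncher.run(job, parameters);
    }
}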
Related
I tried to implement an example of Spring Boot batch processing from DB to CSV.
The issue I cannot solve is that the values in the CSV file are not sorted by id and the column titles are not shown.
Here is the output in the CSV file (the first value is the id):
3,9,2013-04-15,Japan,jheino3#mayoclinic.com,Jard,MALE,Heino,87cdda81-45d0-451a-a62f-f8450eae1b64
2,23,1999-01-25,Panama,nloynes2#woothemes.com,Natala,FEMALE,Loynes,24be24e6-525f-42de-855d-52d4fef21608
6,9,2013-02-16,China,rcossans4#harvard.edu,Roseline,FEMALE,Cossans,06f70b0d-2c98-4f46-b933-528499ab91b3
4,8,2014-05-09,Indonesia,jcarlaw1#t.co,Jilleen,FEMALE,Carlaw,c722e6d5-9024-49c5-80e0-c2555f1eb9cc
1,22,2000-08-15,China,gspearing0#flickr.com,Ginnie,FEMALE,Spearing,fa26fa96-97d3-4e8e-856a-fdf07499e13e
5,22,2000-03-18,Indonesia,rgillino6#china.com.cn,Rainer,MALE,Gillino,5302a199-f313-4a24-9550-d643001d9faf
I want all values to be sorted by id.
How can I do that?
Here is the batch configuration class:
@Configuration // Informs Spring that this class contains configurations
@EnableBatchProcessing // Enables batch processing for the application
@RequiredArgsConstructor
public class BatchConfiguration {
private final JobBuilderFactory jobBuilderFactory;
private final StepBuilderFactory stepBuilderFactory;
private final UserRepository userRepository;
Date now = new Date(); // java.util.Date, NOT java.sql.Date or java.sql.Timestamp!
String format1 = new SimpleDateFormat("yyyy-MM-dd'-'HH-mm-ss-SSS",Locale.forLanguageTag("tr-TR")).format(now);
private Resource outputResource = new FileSystemResource("output/customers_" + format1 + ".csv");
@Bean
public RepositoryItemReader<User> reader(){
RepositoryItemReader<User> repositoryItemReader = new RepositoryItemReader<>();
repositoryItemReader.setRepository(userRepository);
repositoryItemReader.setMethodName("findAll");
final HashMap<String, Sort.Direction> sorts = new HashMap<>();
sorts.put("id", Sort.Direction.ASC);
repositoryItemReader.setSort(sorts);
return repositoryItemReader;
}
@Bean
public FlatFileItemWriter<User> writer() {
FlatFileItemWriter<User> writer = new FlatFileItemWriter<>();
writer.setResource(outputResource);
writer.setAppendAllowed(true);
writer.setLineAggregator(new DelimitedLineAggregator<User>() {
{
setDelimiter(",");
setFieldExtractor(new BeanWrapperFieldExtractor<User>() {
{
setNames(new String[]{"id", "age", "birthday", "country", "email", "firstName", "gender", "lastName", "personId"});
}
});
}
});
return writer;
}
@Bean
public UserProcessor processor() {
return new UserProcessor();
}
@Bean
public UserJobExecutionNotificationListener stepExecutionListener() {
return new UserJobExecutionNotificationListener(userRepository);
}
@Bean
public UserStepCompleteNotificationListener jobExecutionListener() {
return new UserStepCompleteNotificationListener();
}
@Bean
public Step step1() {
return stepBuilderFactory.get("csv-step").<User, User>chunk(10)
.reader(reader())
.processor(processor())
.writer(writer())
.listener(stepExecutionListener())
.taskExecutor(taskExecutor())
.build();
}
@Bean
public Job runJob() {
return jobBuilderFactory.get("importuserjob")
.listener(jobExecutionListener())
.flow(step1()).end().build();
}
@Bean
public TaskExecutor taskExecutor() {
SimpleAsyncTaskExecutor asyncTaskExecutor = new SimpleAsyncTaskExecutor();
asyncTaskExecutor.setConcurrencyLimit(10);
return asyncTaskExecutor;
}
}
Here is the link of the example: Link
After I added a FlatFileHeaderCallback to the FlatFileItemWriter, the issue was fixed.
Here is the code snippet:
writer.setHeaderCallback(new FlatFileHeaderCallback() {
@Override
public void writeHeader(Writer writer) throws IOException {
for(int i=0;i<headers.length;i++){
if(i!=headers.length-1)
writer.append(headers[i] + ",");
else
writer.append(headers[i]);
}
}
});
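Assuming headers is the same String[] of column names passed to the field extractor, the callback can also be written more compactly as a lambda:

// headers is assumed to be the same array used in setNames(...) above
writer.setHeaderCallback(w -> w.write(String.join(",", headers)));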
I have a multi-module Maven project with several modules (parent, service, updater1, updater2).
The @SpringBootApplication is in the 'service' module and the others don't have artifacts.
'updater1' is a module which has a Kafka listener and an HTTP client; when it receives a Kafka event it launches a request to an external API. I want to create integration tests in this module with Testcontainers, so I've created the containers and a Kafka producer to send a KafkaTemplate message to my consumer.
My problem is that the Kafka producer is autowired as null, so the tests throw a NullPointerException. I think it is a Spring configuration problem, but I can't find it. Can you help me? Thanks!
This is my test class:
@ExtendWith(SpringExtension.class)
@ContextConfiguration(classes = {KafkaConfiguration.class, CacheConfiguration.class, ClientConfiguration.class})
public class InvoicingTest {
@ClassRule
public static final Containers containers = Containers.Builder.aContainer()
.withKafka()
.withServer()
.build();
private final MockHttpClient mockHttpClient =
new MockHttpClient(containers.getHost(SERVER),
containers.getPort(SERVER));
@Autowired
private KafkaEventProducer kafkaEventProducer;
@BeforeEach
@Transactional
void setUp() {
mockHttpClient.reset();
}
@Test
public void createElementSuccesfullResponse() throws ExecutionException, InterruptedException, TimeoutException {
mockHttpClient.whenPost("/v1/endpoint")
.respond(HttpStatusCode.OK_200);
kafkaEventProducer.produce("src/test/resources/event/invoiceCreated.json");
mockHttpClient.verify();
}
}
And this is the event producer:
@Component
public class KafkaEventProducer {
private final KafkaTemplate<String, String> kafkaTemplate;
private final String topic;
@Autowired
KafkaEventProducer(KafkaTemplate<String, String> kafkaTemplate,
@Value("${kafka.topic.invoicing.name}") String topic){
this.kafkaTemplate = kafkaTemplate;
this.topic = topic;
}
public void produce(String event){
kafkaTemplate.send(topic, event);
}
}
You haven't detailed how KafkaEventProducer is implemented (is it a @Component?), and your test class is annotated neither with @SpringBootTest nor with the @RunWith runner.
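For reference, a Spring Boot test that autowires beans such as KafkaEventProducer is usually bootstrapped roughly like this (a sketch; the application class name is illustrative):

// JUnit 5: @SpringBootTest starts the full application context, so the
// producer bean can be @Autowired into the test.
@SpringBootTest(classes = InvoicingApplication.class)
public class InvoicingTest {

    @Autowired
    private KafkaEventProducer kafkaEventProducer;
    // ...
}

// With JUnit 4 you would additionally need the runner:
// @RunWith(SpringRunner.class)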
Check out this sample, using the Apache KafkaProducer directly:
import org.apache.kafka.clients.producer.KafkaProducer;
public void sendRecord(String topic, String event) {
try (KafkaProducer<String, byte[]> producer = new KafkaProducer<>(producerProps(bootstrapServers, false))) {
send(producer, topic, event);
}
}
where
public void send(KafkaProducer<String, byte[]> producer, String topic, String event) {
try {
ProducerRecord<String, byte[]> record = new ProducerRecord<>(topic, event.getBytes());
producer.send(record).get();
} catch (InterruptedException | ExecutionException e) {
fail("Not expected exception: " + e.getMessage());
}
}
protected Properties producerProps(String bootstrapServer, boolean transactional) {
Properties producerProperties = new Properties();
producerProperties.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServer);
producerProperties.put(KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class.getName());
producerProperties.put(VALUE_SERIALIZER_CLASS_CONFIG, ByteArraySerializer.class.getName());
if (transactional) {
producerProperties.put(TRANSACTIONAL_ID_CONFIG, "my-transactional-id");
}
return producerProperties;
}
and bootstrapServers is taken from the Kafka container:
KafkaContainer kafka = new KafkaContainer();
kafka.start();
bootstrapServers = kafka.getBootstrapServers();
I am using both Spring Boot and Spring Batch in a Maven multi-module project for parsing CSV files and storing data in a MySQL database.
When running the batch module using my BatchLauncher class (shared below) I get a BeanCurrentlyInCreationException caused by getDataBase(), which I use to configure my MySQL database.
When I remove this method, Spring Boot automatically chooses an embedded database of type H2.
BatchLauncher class :
@Slf4j
public class BatchLauncher {
public static void main(String[] args) {
try {
Launcher.launchWithConfig("My Batch", BatchConfig.class, false);
}catch (Exception ex) {
log.error(ex.getMessage());
}
}
}
Launcher class :
@Slf4j
public class Launcher {
private Launcher() {}
public static void launchWithConfig(String batchName, Class<?> configClass, boolean oncePerDayMax) throws JobExecutionException, BatchException {
try {
// Check the spring profiles used
log.info("Start batch \"" + batchName + "\" with profiles : " + System.getProperty("spring.profiles.active"));
// Load configuration
@SuppressWarnings("resource")
AnnotationConfigApplicationContext context = new AnnotationConfigApplicationContext(configClass);
JobLauncher jobLauncher = context.getBean(JobLauncher.class);
Job job = context.getBean(Job.class);
//Authorize only one execution of each job per day
JobParameters jobParameters = new JobParameters();
JobExecution execution = jobLauncher.run(job, jobParameters);
if(!BatchStatus.COMPLETED.equals(execution.getStatus())) {
throw new BatchException("Unknown error while executing batch : " + batchName);
}
}catch (Exception ex){
log.error("Exception",ex);
throw new BatchException(ex.getMessage());
}
}
}
BatchConfig class :
@Slf4j
@Configuration
@EnableAutoConfiguration(exclude = {DataSourceAutoConfiguration.class, DataSourceTransactionManagerAutoConfiguration.class, HibernateJpaAutoConfiguration.class})
@EnableBatchProcessing
@ComponentScan(basePackages = {
"fr.payet.flad.batch.tasklet",
"fr.payet.flad.batch.mapper"
})
@Import({CoreConfig.class})
public class BatchConfig {
private StepBuilderFactory steps;
private JobBuilderFactory jobBuilderFactory;
private ReadInputTasklet readInputTasklet;
public BatchConfig(StepBuilderFactory steps, JobBuilderFactory jobBuilderFactory, ReadInputTasklet readInputTasklet) {
this.steps = steps;
this.jobBuilderFactory = jobBuilderFactory;
this.readInputTasklet = readInputTasklet;
}
@Bean
public DataSource getDataBase(){
return DataSourceBuilder
.create()
.driverClassName("com.mysql.jdbc.Driver")
.url("jdbc:mysql://localhost:3306/myDb?useSSL=false")
.username("myuser")
.password("mypwd")
.build();
}
@Bean
public Step readInputStep() {
return steps.get("readInputStep")
.tasklet(readInputTasklet)
.build();
}
@Bean
public Job readCsvJob() {
return jobBuilderFactory.get("readCsvJob")
.incrementer(new RunIdIncrementer())
.flow(readInputStep())
.end()
.build();
}
}
The solution was to create a custom DataSourceConfiguration class annotated with @Configuration in which I set my own database, like this:
@Bean
public DataSource getDataBase(){
return DataSourceBuilder
.create()
.driverClassName("com.mysql.jdbc.Driver")
.url("jdbc:mysql://localhost:3306/myDB?useSSL=false")
.username("myUser")
.password("myPwd")
.build();
}
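In other words, the bean above lives in its own configuration class, which then has to be visible to the batch configuration. A minimal sketch (the class name is illustrative; if it is not picked up by component scanning, import it explicitly):

@Configuration
public class DataSourceConfiguration {

    // the getDataBase() @Bean shown above goes here
}

// and in BatchConfig:
// @Import({CoreConfig.class, DataSourceConfiguration.class})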
I am trying to build a Spring Batch application where the batch job is built dynamically (not from Spring-managed beans) and launched using a JobLauncher. The job is built based on a source file and a few other details, such as the target store. Based on these details I have to build a job with the corresponding reader/writer.
I am able to build and launch synchronous as well as multi-threaded jobs successfully. I am trying to scale the application up to handle large files using the Partition SPI, but I am not able to find a way to pass the correct partition to the step.
In a normal application the @StepScope annotation is used, so Spring creates a separate reader for each step, and late binding (@Value) passes the StepExecution information (the file path) to the reader, as in the sketch below.
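For reference, the standard step-scoped, late-bound reader being referred to looks roughly like this in a Spring-managed configuration (a sketch, not the code in question):

@Bean
@StepScope
public FlatFileItemReader<Row> partitionedReader(
        @Value("#{stepExecutionContext['" + PARTITION_KEY_NAME + "']}") String fileUrl) throws MalformedURLException {
    // the key must match the key name set on the MultiResourcePartitioner,
    // which stores each resource's URL string under that key
    FlatFileItemReader<Row> reader = new FlatFileItemReader<>();
    reader.setResource(new UrlResource(fileUrl));
    // line mapper setup omitted for brevity
    return reader;
}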
Is there any way to achieve my use case without using Step scope?
class CustomJobBuilder {
//JobInfo contains table name, source file etc...
Job build(JobInfo jobInfo) throws Exception {
return jobBuilderFactory
.get(jobInfo.getName())
.start(masterStep())
.build();
}
private Step masterStep() throws Exception {
Step importFileStep = importFileStep();
return stepBuilderFactory
.get("masterStep")
.partitioner(importFileStep.getName(), partitioner())
.step(importFileStep)
.gridSize(6)
.taskExecutor(new SimpleAsyncTaskExecutor())
.build();
}
private MultiResourcePartitioner partitioner() throws IOException {
MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
partitioner.setKeyName(PARTITION_KEY_NAME);
ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver();
partitioner.setResources(patternResolver.getResources(jobInfo.getFilePath())); //*.csv
return partitioner;
}
private Step importFileStep() throws Exception {
JdbcBatchItemWriter<Row> successRecordsWriter = dbWriter();
FlatFileItemWriter<Row> failedRecordsWriter = errorWriter();
return stepBuilderFactory
.get("importFile")
.<Row, Row>chunk(CHUNK_SIZE)
.reader(csvReader(null))
.processor(processor())
.writer(writer(successRecordsWriter, failedRecordsWriter))
.stream(failedRecordsWriter)
.build();
}
//Problem here. Passing filePath to CSV Reader dynamically
private ItemReader<Row> csvReader(@Value("#{stepExecutionContext['" + PARTITION_KEY_NAME + "']}") String filePath) {
DefaultLineMapper<Row> lineMapper = new DefaultLineMapper<>();
DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
tokenizer.setNames(jobInfo.getColumns());
lineMapper.setLineTokenizer(tokenizer);
lineMapper.setFieldSetMapper(new CustomFieldSetMapper(jobInfo.getColumns()));
lineMapper.afterPropertiesSet();
FlatFileItemReader<Row> reader = new FlatFileItemReader<>();
reader.setLinesToSkip(1);
reader.setResource(new FileSystemResource(filePath));
reader.setLineMapper(lineMapper);
return reader;
}
}
class CustomJobLauncher {
JobParameters jobParameters = new JobParametersBuilder()
.addString("id", UUID.randomUUID().toString())
.toJobParameters();
JobExecution jobExecution;
try {
CustomJobBuilder jobBuilder = new CustomJobBuilder();
jobBuilder.setJobBuilderFactory(jobBuilderFactory);
jobBuilder.setDataSource(getDataSource(objectDto.getDataStore()));
jobBuilder.setStepBuilderFactory(stepBuilderFactory);
jobExecution = jobLauncher.run(jobBuilder.build(jobInfo), jobParameters);
jobExecution.getAllFailureExceptions().forEach(Throwable::printStackTrace);
} catch (Exception e) {
LOGGER.error("Failed", e);
}
}
I have solved the problem by mimicking MessageChannelPartitionHandler and StepExecutionRequestHandler.
Instead of relying on BeanFactoryStepLocator to get the step from the bean factory, I have re-constructed the step on the slave and executed it.
You have to be cautious about constructing the new Step, because it has to be exactly the same on all slaves, otherwise it would lead to processing/writing inconsistencies.
// PartitionHandler - partition method
public Collection<StepExecution> handle(StepExecutionSplitter stepExecutionSplitter,
final StepExecution masterStepExecution) throws Exception {
final Set<StepExecution> split = stepExecutionSplitter.split(masterStepExecution, gridSize);
if(CollectionUtils.isEmpty(split)) {
return null;
}
int count = 0;
for (StepExecution stepExecution : split) {
Message<PartitionExecutionRequest> request = createMessage(count++, split.size(),
new PartitionExecutionRequest(stepExecution.getJobExecutionId(), stepExecution.getId(), RequestContextProvider.getRequestInfo(), jobInfo, object),
replyChannel);
if (logger.isDebugEnabled()) {
logger.debug("Sending request: " + request);
}
messagingGateway.send(request);
}
if(!pollRepositoryForResults) {
return receiveReplies(replyChannel);
}
else {
return pollReplies(masterStepExecution, split);
}
}
//On the slave
@MessageEndpoint
public class PartitionExecutionRequestHandler {
private static final Logger LOGGER = LoggerFactory.getLogger(PartitionExecutionRequestHandler.class);
private BatchBeanProvider batchBeanProvider;
public void setBatchBeanProvider(BatchBeanProvider batchBeanProvider) {
this.batchBeanProvider = batchBeanProvider;
}
@ServiceActivator
public StepExecution handle(PartitionExecutionRequest request) {
StepExecution stepExecution = null;
try {
before(request);
Long jobExecutionId = request.getJobExecutionId();
Long stepExecutionId = request.getStepExecutionId();
stepExecution = batchBeanProvider.getJobExplorer().getStepExecution(jobExecutionId, stepExecutionId);
if (stepExecution == null) {
throw new NoSuchStepException("No StepExecution could be located for this request: " + request);
}
try {
CustomJobCreator jobCreator = new CustomJobCreator(batchBeanProvider, request.getJobInfo(), request.getObject());
jobCreator.afterPropertiesSet();
ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver();
Resource resource = patternResolver.getResource(stepExecution.getExecutionContext().getString(CustomJobCreator.PARTITION_KEY_NAME));
Step step = jobCreator.partitionStep(resource.getFile().getAbsolutePath());
step.execute(stepExecution);
} catch (JobInterruptedException e) {
stepExecution.setStatus(BatchStatus.STOPPED);
// The receiver should update the stepExecution in repository
} catch (Throwable e) {
stepExecution.addFailureException(e);
stepExecution.setStatus(BatchStatus.FAILED);
// The receiver should update the stepExecution in repository
}
} catch (Exception e) {
// log and fall through; the (possibly null) stepExecution is returned as-is
LOGGER.error("Failed to handle request: " + request, e);
}
return stepExecution;
}
}
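The post does not show CustomJobCreator. Conceptually, partitionStep rebuilds exactly the same import step used on the master, but bound to a single partition's file; a sketch of what it might look like (the class and helper names are assumptions based on the question's code):

// Inside the hypothetical CustomJobCreator
public Step partitionStep(String filePath) throws Exception {
    FlatFileItemReader<Row> reader = new FlatFileItemReader<>();
    reader.setLinesToSkip(1);
    reader.setResource(new FileSystemResource(filePath));
    reader.setLineMapper(lineMapper()); // same line mapper as the master's step
    return stepBuilderFactory
            .get("importFile")
            .<Row, Row>chunk(CHUNK_SIZE)
            .reader(reader)
            .processor(processor())
            .writer(writer())
            .build();
}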
I followed this sample for Spring Batch with Boot.
When you run the main method the job is executed.
This way I can't figure out how one can control the job execution: for example, how to schedule a job, get access to the job execution, or set job parameters.
I tried to register my own JobLauncher
@Bean
public JobLauncher jobLauncher(JobRepository jobRepo){
SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
simpleJobLauncher.setJobRepository(jobRepo);
return simpleJobLauncher;
}
but when I try to use it in the main method:
public static void main(String[] args) {
ConfigurableApplicationContext ctx = SpringApplication.run(Application.class, args);
JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
//try catch removed for readability
jobLauncher.run(ctx.getBean(Job.class), new JobParameters());
}
The job is again executed when the context is loaded, and I get a JobInstanceAlreadyCompleteException when I try to run it manually.
Is there a way to prevent the automatic job execution?
The job execution can be prevented by setting
spring.batch.job.enabled=false
in application.properties. Alternatively, you can use spring.batch.job.names, which takes a comma-delimited list of the job names that will be run.
Taken from here: how to stop spring batch scheduled jobs from running at first time when executing the code?
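With the automatic run disabled, you can launch the job yourself and avoid JobInstanceAlreadyCompleteException by making the job parameters unique on each run; a sketch:

public static void main(String[] args) throws Exception {
    ConfigurableApplicationContext ctx = SpringApplication.run(Application.class, args);
    JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
    Job job = ctx.getBean(Job.class);
    // a changing parameter yields a new JobInstance on every launch
    JobParameters params = new JobParametersBuilder()
            .addLong("run.ts", System.currentTimeMillis())
            .toJobParameters();
    jobLauncher.run(job, params);
}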
You can trigger the execution of a Job from a REST controller endpoint:
@RestController
@RequestMapping(value="/job/")
public class JobLauncherController {
private static final Log LOG = LogFactory.getLog(JobLauncherController.class);
@Autowired
private JobLauncher jobLauncher;
@Autowired
private Job job;
@Autowired
private JobRepository jobRepository;
@Autowired
private JobRegistry jobRegistry;
@RequestMapping("/launchjob/{jobName}")
public String handle(@PathVariable("jobName") String jobName, @RequestBody Map<String,Object> request) throws Exception {
try {
request.put("timeJobStarted", DateUtil.getDateFormatted(new Date(), DateUtil.DATE_UUUUMMDDHHMMSS));
Map<String,Object> mapMessage = this.enrichJobMessage(request);
Map<String, JobParameter> jobParameters = new HashMap<>();
mapMessage.forEach((k,v)->{
MapperUtil.castParameter(jobParameters, k, v);
});
jobParameters.put(Field.Batch.JOB_INSTANCE_NAME, new JobParameter(jobName));
jobLauncher.run(job, new JobParameters(jobParameters));
assertNotNull(jobRegistry.getJob(job.getName()));
}catch( NoSuchJobException ex){
jobRegistry.register(new ReferenceJobFactory(job));
} catch (Exception e) {
LOG.error(e.getMessage(),e);
}
return "Done";
}
public static void castParameter(Map<String, JobParameter> jobParameters, String k, Object v){
if(v instanceof String){
jobParameters.put(k, new JobParameter((String)v));
}else if(v instanceof Date){
jobParameters.put(k, new JobParameter((Date)v));
}else if(v instanceof Double){
jobParameters.put(k, new JobParameter((Double)v));
}else if(v instanceof Long){
jobParameters.put(k, new JobParameter((Long)v));
}else{
DslJson dslJson = new DslJson<>();
JsonWriter writer = dslJson.newWriter();
try {
dslJson.serialize(writer,v);
jobParameters.put(k, new JobParameter(writer.toString()));
} catch (IOException e) {
LOG.warn(e.getMessage(), e);
}
}
}
}