Spring Batch - Custom Job - Dynamically Passing Partition FileName - java

I am trying to build a Spring Batch application where the batch job is built dynamically (not as Spring-managed beans) and launched using JobLauncher. The job is built from a source file and a few other pieces of information, such as the target store. Based on these details I have to build a Job with the corresponding reader/writer.
I am able to build and launch synchronous as well as multi-threaded jobs successfully. I am now trying to scale the application up to handle large files using the partitioning SPI, but I cannot find a way to pass the correct partition to the step.
In a normal application the @StepScope annotation is used, so Spring creates a separate reader for each step, and late binding (@Value) passes the StepExecution information (the file path) to the reader.
Is there any way to achieve my use case without using step scope?
class CustomJobBuilder {

    // JobInfo contains the table name, source file, etc.
    Job build(JobInfo jobInfo) throws Exception {
        return jobBuilderFactory
                .get(jobInfo.getName())
                .start(masterStep())
                .build();
    }

    private Step masterStep() throws Exception {
        Step importFileStep = importFileStep();
        return stepBuilderFactory
                .get("masterStep")
                .partitioner(importFileStep.getName(), partitioner())
                .step(importFileStep)
                .gridSize(6)
                .taskExecutor(new SimpleAsyncTaskExecutor())
                .build();
    }

    private MultiResourcePartitioner partitioner() throws IOException {
        MultiResourcePartitioner partitioner = new MultiResourcePartitioner();
        partitioner.setKeyName(PARTITION_KEY_NAME);
        ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver();
        partitioner.setResources(patternResolver.getResources(jobInfo.getFilePath())); // *.csv
        return partitioner;
    }

    private Step importFileStep() throws Exception {
        JdbcBatchItemWriter<Row> successRecordsWriter = dbWriter();
        FlatFileItemWriter<Row> failedRecordsWriter = errorWriter();
        return stepBuilderFactory
                .get("importFile")
                .<Row, Row>chunk(CHUNK_SIZE)
                .reader(csvReader(null))
                .processor(processor())
                .writer(writer(successRecordsWriter, failedRecordsWriter))
                .stream(failedRecordsWriter)
                .build();
    }

    // Problem here: passing filePath to the CSV reader dynamically
    private ItemReader<Row> csvReader(@Value("#{stepExecutionContext['" + PARTITION_KEY_NAME + "']}") String filePath) {
        DefaultLineMapper<Row> lineMapper = new DefaultLineMapper<>();
        DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
        tokenizer.setNames(jobInfo.getColumns());
        lineMapper.setLineTokenizer(tokenizer);
        lineMapper.setFieldSetMapper(new CustomFieldSetMapper(jobInfo.getColumns()));
        lineMapper.afterPropertiesSet();
        FlatFileItemReader<Row> reader = new FlatFileItemReader<>();
        reader.setLinesToSkip(1);
        reader.setResource(new FileSystemResource(filePath));
        reader.setLineMapper(lineMapper);
        return reader;
    }
}

class CustomJobLauncher {

    void launch(JobInfo jobInfo) {
        JobParameters jobParameters = new JobParametersBuilder()
                .addString("id", UUID.randomUUID().toString())
                .toJobParameters();
        JobExecution jobExecution;
        try {
            CustomJobBuilder jobBuilder = new CustomJobBuilder();
            jobBuilder.setJobBuilderFactory(jobBuilderFactory);
            jobBuilder.setDataSource(getDataSource(objectDto.getDataStore()));
            jobBuilder.setStepBuilderFactory(stepBuilderFactory);
            jobExecution = jobLauncher.run(jobBuilder.build(jobInfo), jobParameters);
            jobExecution.getAllFailureExceptions().forEach(Throwable::printStackTrace);
        } catch (Exception e) {
            LOGGER.error("Failed", e);
        }
    }
}

I have solved the problem by mimicking
MessageChannelRemotePartitionHandler and StepExecutionRequestHandler.
Instead of relying on BeanFactoryStepLocator to get the step from the
bean factory, I have reconstructed the step on the slave and executed
it.
You have to be cautious about constructing the new Step: it has to be exactly the same on all slaves, otherwise it would lead to processing/writing inconsistencies.
// PartitionHandler - partition method
public Collection<StepExecution> handle(StepExecutionSplitter stepExecutionSplitter,
        final StepExecution masterStepExecution) throws Exception {
    final Set<StepExecution> split = stepExecutionSplitter.split(masterStepExecution, gridSize);
    if (CollectionUtils.isEmpty(split)) {
        return null;
    }
    int count = 0;
    for (StepExecution stepExecution : split) {
        Message<PartitionExecutionRequest> request = createMessage(count++, split.size(),
                new PartitionExecutionRequest(stepExecution.getJobExecutionId(), stepExecution.getId(),
                        RequestContextProvider.getRequestInfo(), jobInfo, object),
                replyChannel);
        if (logger.isDebugEnabled()) {
            logger.debug("Sending request: " + request);
        }
        messagingGateway.send(request);
    }
    if (!pollRepositoryForResults) {
        return receiveReplies(replyChannel);
    }
    else {
        return pollReplies(masterStepExecution, split);
    }
}
//On the slave
#MessageEndpoint
public class PartitionExecutionRequestHandler {
private static final Logger LOGGER = LoggerFactory.getLogger(PartitionExecutionRequestHandler.class);
private BatchBeanProvider batchBeanProvider;
public void setBatchBeanProvider(BatchBeanProvider batchBeanProvider) {
this.batchBeanProvider = batchBeanProvider;
}
#ServiceActivator
public StepExecution handle(PartitionExecutionRequest request) {
StepExecution stepExecution = null;
try {
before(request);
Long jobExecutionId = request.getJobExecutionId();
Long stepExecutionId = request.getStepExecutionId();
stepExecution = batchBeanProvider.getJobExplorer().getStepExecution(jobExecutionId, stepExecutionId);
if (stepExecution == null) {
throw new NoSuchStepException("No StepExecution could be located for this request: " + request);
}
try {
CustomJobCreator jobCreator = new CustomJobCreator(batchBeanProvider, request.getJobInfo(), request.getObject());
jobCreator.afterPropertiesSet();
ResourcePatternResolver patternResolver = new PathMatchingResourcePatternResolver();
Resource resource = patternResolver.getResource(stepExecution.getExecutionContext().getString(CustomJobCreator.PARTITION_KEY_NAME));
Step step = jobCreator.partitionStep(resource.getFile().getAbsolutePath());
step.execute(stepExecution);
} catch (JobInterruptedException e) {
stepExecution.setStatus(BatchStatus.STOPPED);
// The receiver should update the stepExecution in repository
} catch (Throwable e) {
stepExecution.addFailureException(e);
stepExecution.setStatus(BatchStatus.FAILED);
// The receiver should update the stepExecution in repository
}
}
return stepExecution;
}
}
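For reference, CustomJobCreator.partitionStep(...) is not shown in the original post. The following is a hypothetical sketch of what it could look like, assuming the same jobInfo, Row, processor, and writer helpers as in CustomJobBuilder above; the batchBeanProvider.getStepBuilderFactory() accessor is also an assumption:

// Hypothetical sketch: rebuild the worker step with the partition's file path
// baked directly into the reader, so no step scope or late binding is required.
public Step partitionStep(String filePath) throws Exception {
    DefaultLineMapper<Row> lineMapper = new DefaultLineMapper<>();
    DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer();
    tokenizer.setNames(jobInfo.getColumns());
    lineMapper.setLineTokenizer(tokenizer);
    lineMapper.setFieldSetMapper(new CustomFieldSetMapper(jobInfo.getColumns()));
    lineMapper.afterPropertiesSet();

    FlatFileItemReader<Row> reader = new FlatFileItemReader<>();
    reader.setLinesToSkip(1);
    reader.setResource(new FileSystemResource(filePath)); // the resolved partition file
    reader.setLineMapper(lineMapper);

    // Must be built exactly the same way on every slave (see the caveat above).
    return batchBeanProvider.getStepBuilderFactory()
            .get("importFile")
            .<Row, Row>chunk(CHUNK_SIZE)
            .reader(reader)
            .processor(processor())
            .writer(writer())
            .build();
}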

Related

Remove file via SftpOutboundGateway after processing file

I'm using Spring Integration to read files from an SFTP server, and everything works fine using an InboundChannelAdapter with Java configuration.
Now I want to modify my process to remove all processed files from the SFTP server. Therefore I want to use an SFTP outbound gateway with Java configuration. This is my new code, with a few modifications based on https://docs.spring.io/spring-integration/docs/5.0.0.BUILD-SNAPSHOT/reference/html/sftp.html#sftp-outbound-gateway:
@Configuration
public class SftpConfiguration {

    @Value("${sftp.host}")
    String sftpHost = "";

    @Value("${sftp.user}")
    String sftpUser = "";

    @Value("${sftp.pass}")
    String sftpPass = "";

    @Value("${sftp.port}")
    Integer sftpPort = 0;

    @Bean
    public SessionFactory<LsEntry> sftpSessionFactory() {
        DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(true);
        factory.setHost(sftpHost);
        factory.setPort(sftpPort);
        factory.setUser(sftpUser);
        factory.setPassword(sftpPass);
        factory.setAllowUnknownKeys(true);
        return new CachingSessionFactory<LsEntry>(factory);
    }

    @Bean
    public SftpInboundFileSynchronizer sftpInboundFileSynchronizer() {
        SftpInboundFileSynchronizer fileSynchronizer = new SftpInboundFileSynchronizer(sftpSessionFactory());
        fileSynchronizer.setDeleteRemoteFiles(false);
        fileSynchronizer.setRemoteDirectory("/upload/");
        return fileSynchronizer;
    }

    @Bean
    @InboundChannelAdapter(channel = "sftpChannel", poller = @Poller(cron = "0 * * * * ?"))
    public MessageSource<File> sftpMessageSource() {
        SftpInboundFileSynchronizingMessageSource source =
                new SftpInboundFileSynchronizingMessageSource(sftpInboundFileSynchronizer());
        source.setLocalDirectory(new File("sftp-folder"));
        source.setAutoCreateLocalDirectory(true);
        return source;
    }

    @Bean
    @ServiceActivator(inputChannel = "sftpChannel")
    MessageHandler handler() {
        return new MessageHandler() {

            @Override
            public void handleMessage(Message<?> message) throws MessagingException {
                File f = (File) message.getPayload();
                try {
                    myProcessingClass.processFile(f);
                    SftpOutboundGateway sftpOG = new SftpOutboundGateway(sftpSessionFactory(), "rm",
                            "'/upload/" + f.getName() + "'");
                } catch (QuoCrmException e) {
                    logger.error("File [ Process with errors, file won't be deleted: " + e.getMessage() + "]");
                }
            }
        };
    }
}
The modifications are:
I configured my fileSynchronizer with setDeleteRemoteFiles(false), in order to remove files manually as my process dictates.
In my MessageHandler, I added an SftpOutboundGateway. If there is no exception, processing was successful and the file should be removed; if there is an exception, the file should not be deleted.
This code is not removing any file. Any suggestions?
You shouldn't create a new gateway for each request (which is what you are doing).
You are not doing anything with sftpOG after you create it anyway; you need to send a message to the gateway.
You can create a reply-producing handler and wire its output channel to the gateway (which should be its own @Bean).
Or you can simply use an SftpRemoteFileTemplate to remove the file - but again, you only need one; you don't need to create a new one for each request.
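To illustrate the SftpRemoteFileTemplate option, here is a minimal sketch (not part of the original answer) that reuses a single template for every delete; remove(String path) comes from Spring Integration's RemoteFileOperations:

@Bean
public SftpRemoteFileTemplate sftpRemoteFileTemplate() {
    // One reusable template backed by the same session factory as the inbound flow.
    return new SftpRemoteFileTemplate(sftpSessionFactory());
}

Inside the message handler, the delete then becomes a single call after successful processing:

myProcessingClass.processFile(f);
// Removes the remote copy; local file handling is unchanged.
sftpRemoteFileTemplate().remove("/upload/" + f.getName());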

Spring-batch : How to run processor with multi thread of ItemProcessor in spring batch?

My current usage scenario is as follows:
using FlatFileItemReader to read an input stream from a .txt file line by line
using ItemProcessor to process each line's data by invoking a remote service over HTTP
using FlatFileItemWriter to write the result of each request to a file
I would like to make the remote calls in step 2 multi-threaded within the ItemProcessor.
The main flow code is like below (with Spring Boot):
//read data
FlatFileItemReader<ItemProcessing> reader = read(batchReqRun);
//process data
ItemProcessor<ItemProcessing, ItemProcessing> processor = process(batchReqDef);
//write data
File localOutputDir = new File(localStoragePath + "/batch-results");
File localOutputFile = new File(localOutputDir, batchReqExec.getDestFile());
FlatFileItemWriter<ItemProcessing> writer = write(localOutputDir, localOutputFile);

StepExecutionListener stepExecListener = new StepExecutionListener() {

    @Override
    public void beforeStep(StepExecution stepExecution) {
        logger.info("Job {} step start {}", stepExecution.getJobExecutionId(), stepExecution.getStepName());
    }

    @Override
    public ExitStatus afterStep(StepExecution stepExecution) {
        logger.info("Job {} step end {}", stepExecution.getJobExecutionId(), stepExecution.getStepName());
        //.......ignore some code
        return finalStatus;
    }
};

Tasklet resultFileTasklet = new BatchFileResultTasklet(localOutputFile, httpClientService);
TaskletStep resultFileStep = stepBuilder.get("result")
        .tasklet(resultFileTasklet)
        .listener(stepExecListener)
        .build();

//create step
Step mainStep = stepBuilder.get("run")
        .<ItemProcessing, ItemProcessing>chunk(5)
        .faultTolerant()
        .skip(IOException.class).skip(SocketTimeoutException.class) //skip IOException here
        .skipLimit(2000)
        .reader(reader)
        .processor(processor)
        .writer(writer)
        .listener(stepExecListener)
        .listener(new ItemProcessorListener()) //add process listener
        .listener(skipExceptionListener) //add skip exception listener
        .build();

//create job
Job job = jobBuilder.get(batchReqExec.getId())
        .start(mainStep)
        .next(resultFileStep)
        .build();

JobParametersBuilder jobParamBuilder = new JobParametersBuilder();
//run job
JobExecution execution = jobLauncher.run(job, jobParamBuilder.toJobParameters());
Reading data looks like below:
private FlatFileItemReader<ItemProcessing> read(BatchRequestsRun batchReqRun) throws Exception {
    //prepare input file
    File localInputDir = new File(localStoragePath + "/batch-requests");
    if (!localInputDir.exists() || localInputDir.isFile()) {
        localInputDir.mkdir();
    }
    File localFile = new File(localInputDir, batchReqRun.getFileRef() + "-" + batchReqRun.getFile());
    if (!localFile.exists()) {
        httpClientService.getFileFromStorage(batchReqRun.getFileRef(), localFile);
    }
    FlatFileItemReader<ItemProcessing> reader = new FlatFileItemReader<ItemProcessing>();
    reader.setResource(new FileSystemResource(localFile));
    reader.setLineMapper(new DefaultLineMapper<ItemProcessing>() {
        {
            setLineTokenizer(new DelimitedLineTokenizer());
            setFieldSetMapper(new FieldSetMapper<ItemProcessing>() {
                @Override
                public ItemProcessing mapFieldSet(FieldSet fieldSet) throws BindException {
                    ItemProcessing item = new ItemProcessing();
                    item.setFieldSet(fieldSet);
                    return item;
                }
            });
        }
    });
    return reader;
}
Processing data looks like below:
private ItemProcessor<ItemProcessing, ItemProcessing> process(BatchRequestsDef batchReqDef) {
    ItemProcessor<ItemProcessing, ItemProcessing> processor = (input) -> {
        VelocityContext context = new VelocityContext();
        //.....ignore velocity code
        String responseBody = null;
        //send http invocation
        input.setResponseBody(httpClientService.process(batchReqDef, input));
        responseBody = input.getResponseBody();
        logger.info(responseBody);
        // using Groovy to parse the response
        Binding binding = new Binding();
        try {
            binding.setVariable("response", responseBody);
            GroovyShell shell = new GroovyShell(binding);
            Object result = shell.evaluate(batchReqDef.getConfig().getResponseHandler());
            input.setResult(result.toString());
        } catch (Exception e) {
            logger.error("parsing groovy script found exception:{},{}", e.getMessage(), e);
        }
        return input;
    };
    return processor;
}
The file-writing method is omitted here.
Can anyone help me implement the process method with multiple threads?
I believe Spring Batch reads one line of data and then processes that line (executing the ItemProcessor to invoke the remote service directly).
As we know, reading one line of data is much faster than invoking the HTTP service once.
So I want to read all the data (or some part of it) into memory (a List) with a single thread, and then make the remote calls with multiple threads in step 2.
(It's very easy using a Java thread pool, but I don't know how to implement it with Spring Batch.)
Please show me some code, thanks a lot!
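No answer is recorded in this thread, but one common pattern for exactly this case (sketched here as an assumption, not taken from the original thread) is to keep the reader single-threaded and wrap the existing processor and writer in AsyncItemProcessor/AsyncItemWriter from spring-batch-integration, so each item's HTTP call runs on a pool thread while chunk writing stays ordered:

// Sketch: the reader stays single-threaded; each item's remote call runs on a pool thread.
ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
taskExecutor.setCorePoolSize(10);
taskExecutor.setMaxPoolSize(10);
taskExecutor.afterPropertiesSet();

AsyncItemProcessor<ItemProcessing, ItemProcessing> asyncProcessor = new AsyncItemProcessor<>();
asyncProcessor.setDelegate(processor);        // the existing HTTP-invoking processor
asyncProcessor.setTaskExecutor(taskExecutor); // items are processed concurrently
asyncProcessor.afterPropertiesSet();

AsyncItemWriter<ItemProcessing> asyncWriter = new AsyncItemWriter<>();
asyncWriter.setDelegate(writer);              // the existing FlatFileItemWriter
asyncWriter.afterPropertiesSet();

// The chunk now carries Future<ItemProcessing> between processor and writer;
// the fault-tolerance settings from the original step are omitted for brevity.
Step mainStep = stepBuilder.get("run")
        .<ItemProcessing, Future<ItemProcessing>>chunk(5)
        .reader(reader)
        .processor(asyncProcessor)
        .writer(asyncWriter)
        .build();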

Spring Batch Integration Remote Chunking error - Message contained wrong job instance id [25] should have been [24]

I'm running into this bug (more info here), which appears to mean that for multi-threaded batches using remote chunking you can't use a common response channel. I'm not exactly sure how to proceed to get this working. Surely there's a way, because without it I can't see much benefit to remote chunking.
Here's my DSL config that creates a JobRequest:
@Bean
IntegrationFlow newPollingJobsAdapter(JobLaunchingGateway jobLaunchingGateway) {
    // Start by polling the DB for new PollingJobs according to the polling rate
    return IntegrationFlows.from(jdbcPollingChannelAdapter(),
            c -> c.poller(Pollers.fixedRate(10000)
                    // Do the polling on one of 10 threads.
                    .taskExecutor(Executors.newFixedThreadPool(10))
                    // pull out up to 100 new ids for each poll.
                    .maxMessagesPerPoll(100)))
            .log(LoggingHandler.Level.WARN)
            // The polling adapter above returns a list of ids. Split them out into
            // individual ids
            .split()
            // Now push each one onto a separate thread for batch processing.
            .channel(MessageChannels.executor(Executors.newFixedThreadPool(10)))
            .log(LoggingHandler.Level.WARN)
            // Transform each one into a JobLaunchRequest
            .<Long, JobLaunchRequest>transform(id -> {
                logger.warn("Creating job for ID {}", id);
                JobParametersBuilder builder = new JobParametersBuilder()
                        .addLong("polling-job-id", id, true);
                return new JobLaunchRequest(job, builder.toJobParameters());
            })
            .handle(jobLaunchingGateway)
            // TODO: Notify somebody? No idea yet
            .<JobExecution>handle(exec -> System.out.println("GOT EXECUTION: " + exec))
            .get();
}
Nothing in here is particularly special, no odd configs that I'm aware of.
The job itself is pretty straightforward, too:
/**
 * This is the definition of the entire batch process that runs polling.
 * @return
 */
@Bean
Job pollingJobJob() {
    return jobBuilderFactory.get("pollingJobJob")
            .incrementer(new RunIdIncrementer())
            // Ship it down to the slaves for actual processing
            .start(remoteChunkingStep())
            // Now mark it as complete
            .next(markCompleteStep())
            .build();
}

/**
 * Sends the job to a remote slave via an ActiveMQ-backed JMS queue.
 */
@Bean
TaskletStep remoteChunkingStep() {
    return stepBuilderFactory.get("polling-job-step-remote-chunking")
            .<Long, String>chunk(20)
            .reader(runningPollingJobItemReader)
            .processor(toJsonProcessor())
            .writer(chunkWriter)
            .build();
}

/**
 * This step just marks the PollerJob as Complete.
 */
@Bean
Step markCompleteStep() {
    return stepBuilderFactory.get("polling-job-step-mark-complete")
            // We want each PollerJob instance to be a separate job in batch, and the
            // reader is using the id passed in via job params to grab the one we want,
            // so we don't need a large chunk size. One at a time is fine.
            .<Long, Long>chunk(1)
            .reader(runningPollingJobItemReader)
            .processor(new PassThroughItemProcessor<Long>())
            .writer(this.completeStatusWriter)
            .build();
}
Here's the chunk writer config:
/**
 * This is part of the bridge between spring-batch and spring-integration. Nothing special or weird is going
 * on, so see the RemoteChunkHandlerFactoryBean for a description.
 */
@Bean
RemoteChunkHandlerFactoryBean<PollerJob> remoteChunkHandlerFactoryBean() {
    RemoteChunkHandlerFactoryBean<PollerJob> factory = new RemoteChunkHandlerFactoryBean<>();
    factory.setChunkWriter(chunkWriter);
    factory.setStep(remoteChunkingStep());
    return factory;
}

/**
 * This is the writer that will actually send the chunk to the slaves. Note that it also configures the
 * internal channel on which replies are expected.
 */
@Bean
@StepScope
ChunkMessageChannelItemWriter<String> chunkWriter() {
    ChunkMessageChannelItemWriter<String> writer = new ChunkMessageChannelItemWriter<>();
    writer.setMessagingOperations(batchMessagingTemplate());
    writer.setReplyChannel(batchResponseChannel());
    writer.setThrottleLimit(1000);
    return writer;
}
The problem seems to be that this last section sets up the ChunkMessageChannelItemWriter such that the reply channel is the same one used by all of the writers, despite each writer being step-scoped. It would seem that I need to add a replyChannel header to one of the messages, but I'm not sure where in the chain to do that or how to process it (if I need to at all).
Also, this is being sent to the slaves via JMS/ActiveMQ, and I'd like to avoid having a large number of nearly identical queues on ActiveMQ just to support this.
What are my options?
Given that you are using a shared JMS infrastructure, you will need a router to get the responses back to the correct chunk writer.
If you use prototype scope on the batchResponseChannel() @Bean, you'll get a unique channel for each writer.
I don't have time to figure out how to set up a chunked batch job, so the following simulates your environment (a non-singleton bean that needs a unique reply channel for each instance). Hopefully it's self-explanatory...
@SpringBootApplication
public class So44806067Application {

    public static void main(String[] args) {
        ConfigurableApplicationContext context = SpringApplication.run(So44806067Application.class, args);
        SomeNonSingletonNeedingDistinctRequestAndReplyChannels chunker1 = context
                .getBean(SomeNonSingletonNeedingDistinctRequestAndReplyChannels.class);
        SomeNonSingletonNeedingDistinctRequestAndReplyChannels chunker2 = context
                .getBean(SomeNonSingletonNeedingDistinctRequestAndReplyChannels.class);
        if (chunker1.equals(chunker2)) {
            throw new IllegalStateException("Expected different instances");
        }
        chunker1.sendSome();
        chunker2.sendSome();
        ChunkResponse results = chunker1.getResults();
        if (results == null) {
            throw new IllegalStateException("No results1");
        }
        if (results.getJobId() != 1L) {
            throw new IllegalStateException("Incorrect routing1");
        }
        results = chunker2.getResults();
        if (results == null) {
            throw new IllegalStateException("No results2");
        }
        if (results.getJobId() != 2L) {
            throw new IllegalStateException("Incorrect routing2");
        }
        context.close();
    }

    @Bean
    public Map<Long, PollableChannel> registry() {
        // TODO: should clean up entry for jobId when job completes.
        return new ConcurrentHashMap<>();
    }

    @Bean
    @Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
    public SomeNonSingletonNeedingDistinctRequestAndReplyChannels chunker() {
        MessagingTemplate template = template();
        final PollableChannel replyChannel = replyChannel();
        SomeNonSingletonNeedingDistinctRequestAndReplyChannels bean =
                new SomeNonSingletonNeedingDistinctRequestAndReplyChannels(template, replyChannel);
        AbstractSubscribableChannel requestChannel = (AbstractSubscribableChannel) template.getDefaultDestination();
        requestChannel.addInterceptor(new ChannelInterceptorAdapter() {

            @Override
            public Message<?> preSend(Message<?> message, MessageChannel channel) {
                registry().putIfAbsent(((ChunkRequest<?>) message.getPayload()).getJobId(), replyChannel);
                return message;
            }
        });
        BridgeHandler bridge = bridge();
        requestChannel.subscribe(bridge);
        return bean;
    }

    @Bean
    @Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
    public MessagingTemplate template() {
        MessagingTemplate messagingTemplate = new MessagingTemplate();
        messagingTemplate.setDefaultChannel(requestChannel());
        return messagingTemplate;
    }

    @Bean
    @Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
    public DirectChannel requestChannel() {
        return new DirectChannel();
    }

    @Bean
    @Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
    public PollableChannel replyChannel() {
        return new QueueChannel();
    }

    @Bean
    @Scope(ConfigurableBeanFactory.SCOPE_PROTOTYPE)
    public BridgeHandler bridge() {
        BridgeHandler bridgeHandler = new BridgeHandler();
        bridgeHandler.setOutputChannel(outboundChannel());
        return bridgeHandler;
    }

    @Bean
    public DirectChannel outboundChannel() {
        return new DirectChannel();
    }

    @Bean
    public DirectChannel masterReplyChannel() {
        return new DirectChannel();
    }

    @ServiceActivator(inputChannel = "outboundChannel")
    public void simulateJmsChannelAdapterPair(ChunkRequest<?> request) {
        masterReplyChannel()
                .send(new GenericMessage<>(new ChunkResponse(request.getSequence(), request.getJobId(), null)));
    }

    @Router(inputChannel = "masterReplyChannel")
    public MessageChannel route(ChunkResponse reply) {
        // TODO: error checking - missing reply channel for jobId
        return registry().get(reply.getJobId());
    }

    public static class SomeNonSingletonNeedingDistinctRequestAndReplyChannels {

        private final static AtomicLong jobIds = new AtomicLong();

        private final long jobId = jobIds.incrementAndGet();

        private final MessagingTemplate template;

        private final PollableChannel replyChannel;

        public SomeNonSingletonNeedingDistinctRequestAndReplyChannels(MessagingTemplate template,
                PollableChannel replyChannel) {
            this.template = template;
            this.replyChannel = replyChannel;
        }

        public void sendSome() {
            ChunkRequest<String> cr = new ChunkRequest<>(0, Collections.singleton("foo"), this.jobId, null);
            this.template.send(new GenericMessage<>(cr));
        }

        public ChunkResponse getResults() {
            @SuppressWarnings("unchecked")
            Message<ChunkResponse> received = (Message<ChunkResponse>) this.replyChannel.receive(10_000);
            if (received != null) {
                if (received.getPayload().getJobId().equals(this.jobId)) {
                    System.out.println("Got the right one");
                }
                else {
                    System.out.println(
                            "Got the wrong one " + received.getPayload().getJobId() + " instead of " + this.jobId);
                }
                return received.getPayload();
            }
            return null;
        }
    }
}

How can I use Spring Batch with OSGI

I want to use Spring Batch with OSGi to run a job daily.
Here is what I did:
@Component
@EnableBatchProcessing
public class BatchConfiguration {

    private JobBuilderFactory jobs;

    public JobBuilderFactory getJobs() {
        return jobs;
    }

    public void setJobs(JobBuilderFactory jobs) {
        this.jobs = jobs;
    }

    private StepBuilderFactory steps;

    private EmployeeRepository employeeRepository; // spring data repository

    public EmployeeRepository getEmployeeRepository() {
        return employeeRepository;
    }

    @Reference
    public void setEmployeeRepository(EmployeeRepository employeeRepository) {
        this.employeeRepository = employeeRepository;
    }

    public Step syncEmployeesStep() throws Exception {
        RepositoryItemWriter writer = new RepositoryItemWriter();
        writer.setRepository(employeeRepository);
        writer.setMethodName("save");
        return steps.get("syncEmployeesStep")
                .<Employee, Employee>chunk(10)
                .reader(reader())
                .writer(writer)
                .build();
    }

    public Job importEmpJob() throws Exception {
        return jobs.get("importEmpJob")
                .incrementer(new RunIdIncrementer())
                .start(syncEmployeesStep())
                .next(syncEmployeesStep())
                .build();
    }

    public ItemReader<Employee> reader() throws Exception {
        String jpqlQuery = "select a from Employee a";
        ServerEMF entityManager = new ServerEMF();
        JpaPagingItemReader<Employee> reader = new JpaPagingItemReader<Employee>();
        reader.setQueryString(jpqlQuery);
        reader.setEntityManagerFactory(entityManager.getEntityManagerFactory());
        reader.setPageSize(3);
        reader.afterPropertiesSet();
        reader.setSaveState(true);
        return reader;
    }
}
Here I want to run this job to sync between two databases. My problem is how to run this job inside OSGi.
@EnableScheduling
@Component
public class JobRunner {

    private JobLauncher jobLauncher;
    private Job job;
    private BatchConfiguration batchConfig;
    //private JobBuilderFactory jobs;
    //private JobRepository jobrepo;

    final static Logger logger = LoggerFactory.getLogger(BatchConfiguration.class);

    BundleContext ctx;

    @SuppressWarnings("rawtypes")
    ServiceTracker servicetracker;

    @Activate
    public void start(BundleContext context) {
        batchConfig = new BatchConfiguration();
        //jobs = new JobBuilderFactory(jobRepository)
        try {
            job = batchConfig.importEmpJob(); // job is null because I don't know how to use it
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
        ctx = context;
        servicetracker = new ServiceTracker(ctx, BatchConfiguration.class, null);
        servicetracker.open();
        new Thread() {
            public void run() { findAndRunJob(); }
        }.start();
    }

    @Deactivate
    public void stop() {
        servicetracker.close();
    }

    @Scheduled(fixedRate = 5000)
    protected void findAndRunJob() {
        logger.info("job created.");
        try {
            String dateParam = new Date().toString();
            JobParameters param = new JobParametersBuilder().addString("date", dateParam).toJobParameters();
            System.out.println(dateParam);
            JobExecution execution = jobLauncher.run(job, param);
            System.out.println("Exit Status : " + execution.getStatus());
        } catch (Exception e) {
            //e.printStackTrace();
        }
    }
}
Sure enough, I got a java.lang.NullPointerException because the job is null.
Could anyone help me with that?
After updates:
@Component
@EnableBatchProcessing
public class BatchConfiguration {

    private StepBuilderFactory steps;

    private EmployeeRepository employeeRepository; // spring data repository

    public EmployeeRepository getEmployeeRepository() {
        return employeeRepository;
    }

    @Reference
    public void setEmployeeRepository(EmployeeRepository employeeRepository) {
        this.employeeRepository = employeeRepository;
    }

    public Step syncEmployeesStep() throws Exception {
        RepositoryItemWriter writer = new RepositoryItemWriter();
        writer.setRepository(employeeRepository);
        writer.setMethodName("save");
        return steps.get("syncEmployeesStep")
                .<Employee, Employee>chunk(10)
                .reader(reader())
                .writer(writer)
                .build();
    }

    public Job importEmpJob(JobRepository jobRepository, PlatformTransactionManager transactionManager) throws Exception {
        JobBuilderFactory jobs = new JobBuilderFactory(jobRepository);
        this.steps = new StepBuilderFactory(jobRepository, transactionManager);
        return jobs.get("importEmpJob")
                .incrementer(new RunIdIncrementer())
                .start(syncEmployeesStep())
                .next(syncEmployeesStep())
                .build();
    }

    public ItemReader<Employee> reader() throws Exception {
        String jpqlQuery = "select a from Employee a";
        ServerEMF entityManager = new ServerEMF();
        JpaPagingItemReader<Employee> reader = new JpaPagingItemReader<Employee>();
        reader.setQueryString(jpqlQuery);
        reader.setEntityManagerFactory(entityManager.getEntityManagerFactory());
        reader.setPageSize(3);
        reader.afterPropertiesSet();
        reader.setSaveState(true);
        return reader;
    }
}
The job runner class:
private JobLauncher jobLauncher;
private PlatformTransactionManager transactionManager;
private JobRepository jobRepository;
Job importEmpJob;
private BatchConfiguration batchConfig;

@SuppressWarnings("deprecation")
@Activate
public void start(BundleContext context) {
    try {
        batchConfig = new BatchConfiguration();
        this.transactionManager = new ResourcelessTransactionManager();
        MapJobRepositoryFactoryBean repositorybean = new MapJobRepositoryFactoryBean();
        repositorybean.setTransactionManager(transactionManager);
        this.jobRepository = repositorybean.getJobRepository(); // error after executing this statement
        // setup job launcher
        SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
        simpleJobLauncher.setTaskExecutor(new SyncTaskExecutor());
        simpleJobLauncher.setJobRepository(jobRepository);
        this.jobLauncher = simpleJobLauncher;
        //System.out.println(job);
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
    ctx = context;
    configAdminTracker = new ServiceTracker(ctx, BatchConfiguration.class.getName(), null);
    configAdminTracker.open();
    new Thread() {
        public void run() { findAndRunJob(); }
    }.start();
}

@Deactivate
public void stop() {
    configAdminTracker.close();
}

protected void findAndRunJob() {
    logger.info("job created.");
    try {
        String dateParam = new Date().toString();
        // creating the job
        Job job = batchConfig.importEmpJob(jobRepository, transactionManager);
        // running the job
        JobExecution execution = this.jobLauncher.run(job, new JobParameters());
        System.out.println("Exit Status : " + execution.getStatus());
    } catch (Exception e) {
        //e.printStackTrace();
    }
}
What I get after running is "java.lang.IllegalArgumentException: interface org.springframework.batch.core.repository.JobRepository is not visible from class loader". Could anyone help me with that error?
In Short
If you're just trying to kick off something simple and don't need all the Spring Batch goodness, I would look into the Apache Sling Commons Scheduler, which has a simple job processor on top of Quartz for scheduling [1].
In General
There are a couple of considerations here depending on what you are trying to do. Are you deploying the Spring Batch jars to the OSGi container with the assumption that the code written for the jobs (steps, tasks, etc.) will live in separate bundles? OSGi's purpose is to develop modular code, so my answer assumes that this is your end goal.
The folks at Pivotal have dropped OSGi support on their artifacts, so to make it work you'll need to determine what you need to export from the Batch jar files. This can be done with BND; I would recommend checking out the new BND Maven plugin [2]. I would configure the Export-Package header to export the interfaces you need to write the jobs, so that you can write the jobs in separate modular bundles.
Then I would probably embed the Spring Batch jars in a bundle and write a small wrapper around the JobLauncher. This confines all the actual batch code to a single classloader, so that you don't have to worry about OSGi trying to pull in classes dynamically. The downside is that this prevents you from using many of the batch annotations outside of the Spring Batch bundle you created, but it provides the modularity you'd be looking for when implementing this type of solution with OSGi.
[1] https://sling.apache.org/documentation/bundles/apache-sling-eventing-and-job-handling.html
[2] http://njbartlett.name/2015/03/27/announcing-bnd-maven-plugin.html
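As a rough illustration of the "small wrapper around the JobLauncher" idea (a sketch under the assumptions above, not part of the original answer), the wrapper could be the only batch-aware service that other bundles ever see:

// Hypothetical wrapper: the single Spring Batch entry point exported to other
// bundles, so all batch classes stay inside one bundle classloader.
public class BatchJobService {

    private final JobLauncher jobLauncher;
    private final Map<String, Job> jobs = new ConcurrentHashMap<>();

    public BatchJobService(JobLauncher jobLauncher) {
        this.jobLauncher = jobLauncher;
    }

    // Bundles contributing jobs register them by name.
    public void register(Job job) {
        jobs.put(job.getName(), job);
    }

    // Callers pass only plain strings; no Spring Batch types leak out except JobExecution.
    public JobExecution run(String jobName, Map<String, String> params) throws Exception {
        JobParametersBuilder builder = new JobParametersBuilder();
        params.forEach(builder::addString);
        return jobLauncher.run(jobs.get(jobName), builder.toJobParameters());
    }
}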

How Spring Boot run batch jobs

I followed this sample for Spring Batch with Boot.
When you run the main method, the job is executed.
This way I can't figure out how to control the job execution, for example how to schedule a job, get access to the job execution, or set job parameters.
I tried to register my own JobLauncher
@Bean
public JobLauncher jobLauncher(JobRepository jobRepo) {
    SimpleJobLauncher simpleJobLauncher = new SimpleJobLauncher();
    simpleJobLauncher.setJobRepository(jobRepo);
    return simpleJobLauncher;
}
but when I try to use it in the main method:
public static void main(String[] args) {
    ConfigurableApplicationContext ctx = SpringApplication.run(Application.class, args);
    JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
    // try/catch removed for readability
    jobLauncher.run(ctx.getBean(Job.class), new JobParameters());
}
The job is again executed when the context loads, and I get a JobInstanceAlreadyCompleteException when I try to run it manually.
Is there a way to prevent the automatic job execution?
The job execution can be prevented by setting
spring.batch.job.enabled=false
in application.properties. Or you can use spring.batch.job.names, which takes a comma-delimited list of job names that will be run.
Taken from here: how to stop spring batch scheduled jobs from running at first time when executing the code?
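For example, here is a minimal sketch of a main method that disables the automatic run and launches the job manually, assuming the Application class from the question. The unique parameter also avoids the JobInstanceAlreadyCompleteException mentioned above, since every launch then creates a new job instance:

public static void main(String[] args) {
    SpringApplication app = new SpringApplication(Application.class);
    // Same effect as spring.batch.job.enabled=false in application.properties:
    // Boot will no longer run the job automatically at startup.
    app.setDefaultProperties(Collections.singletonMap("spring.batch.job.enabled", "false"));
    ConfigurableApplicationContext ctx = app.run(args);

    JobLauncher jobLauncher = ctx.getBean(JobLauncher.class);
    Job job = ctx.getBean(Job.class);
    JobParameters params = new JobParametersBuilder()
            .addString("run.id", UUID.randomUUID().toString()) // unique per launch
            .toJobParameters();
    // try/catch omitted for readability, as in the question.
    jobLauncher.run(job, params);
}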
You can also trigger the execution of a job with a POST to a REST controller:
@RestController
@RequestMapping(value = "/job/")
public class JobLauncherController {

    private static final Log LOG = LogFactory.getLog(JobLauncherController.class);

    @Autowired
    private JobLauncher jobLauncher;

    @Autowired
    private Job job;

    @Autowired
    private JobRepository jobRepository;

    @Autowired
    private JobRegistry jobRegistry;

    @RequestMapping("/launchjob/{jobName}")
    public String handle(@PathVariable("jobName") String jobName, @RequestBody Map<String, Object> request) throws Exception {
        try {
            request.put("timeJobStarted", DateUtil.getDateFormatted(new Date(), DateUtil.DATE_UUUUMMDDHHMMSS));
            Map<String, Object> mapMessage = this.enrichJobMessage(request);
            Map<String, JobParameter> jobParameters = new HashMap<>();
            mapMessage.forEach((k, v) -> {
                MapperUtil.castParameter(jobParameters, k, v);
            });
            jobParameters.put(Field.Batch.JOB_INSTANCE_NAME, new JobParameter(jobName));
            jobLauncher.run(job, new JobParameters(jobParameters));
            assertNotNull(jobRegistry.getJob(job.getName()));
        } catch (NoSuchJobException ex) {
            jobRegistry.register(new ReferenceJobFactory(job));
        } catch (Exception e) {
            LOG.error(e.getMessage(), e);
        }
        return "Done";
    }

    public static void castParameter(Map<String, JobParameter> jobParameters, String k, Object v) {
        if (v instanceof String) {
            jobParameters.put(k, new JobParameter((String) v));
        } else if (v instanceof Date) {
            jobParameters.put(k, new JobParameter((Date) v));
        } else if (v instanceof Double) {
            jobParameters.put(k, new JobParameter((Double) v));
        } else if (v instanceof Long) {
            jobParameters.put(k, new JobParameter((Long) v));
        } else {
            DslJson dslJson = new DslJson<>();
            JsonWriter writer = dslJson.newWriter();
            try {
                dslJson.serialize(writer, v);
                jobParameters.put(k, new JobParameter(writer.toString()));
            } catch (IOException e) {
                LOG.warn(e.getMessage(), e);
            }
        }
    }
}
