How to fail a spring job if exception is thrown? - java

I have a spring batch job which converts tsv file to json. I have specified the following listener which checks for exit code:
import org.springframework.batch.core.ExitStatus;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.StepExecutionListener;
public class StepResultListener implements StepExecutionListener {
#Override
public void beforeStep(StepExecution stepExecution) {
System.out.println("Called beforeStep().");
}
#Override
public ExitStatus afterStep(StepExecution stepExecution) {
System.out.println("Called afterStep().");
String exitCode = stepExecution.getExitStatus().getExitCode();
if (exitCode.equals("FAILED")){
return ExitStatus.FAILED;
}
return ExitStatus.COMPLETED;
}
}
I want to fail the whole job if the step fails:
#Bean
public Step stepOne(ObjectMapper mapper, IdProvider webIdProvider) {
return stepBuilderFactory.get("stepOne")
.<WebTaxonomy, WebTaxonomy> chunk(10)
.reader(readerWeb())
.processor(processorWeb(mapper, webIdProvider))
.writer(writerWeb(mapper))
.listener(new StepResultListener())
.build();
}
However it still proceeds to complete the job. Any ideas how I can achieve this ?
this is my batch configuration:
#Configuration
#EnableBatchProcessing
public class BatchConfiguration {
private static final String OUTPUT_DATA_JSON = "target/web_taxonomy.json";
private static final String OUTPUT_DATA_APP_JSON = "target/app_taxonomy.json";
#Autowired
public JobBuilderFactory jobBuilderFactory;
#Autowired
public StepBuilderFactory stepBuilderFactory;
#Autowired
private JobExplorer jobExplorer;
#Autowired
private JobRepository jobRepository;
#Bean
public FlatFileItemReader<WebTaxonomy> readerWeb() {
return new FlatFileItemReaderBuilder<WebTaxonomy>()
.name("webTaxonomyItemReader")
.resource(new ClassPathResource("/web_taxonomy.tsv"))
.linesToSkip(1)
.lineMapper(lineMapperWeb()).build();
}
#Bean
public DefaultLineMapper<WebTaxonomy> lineMapperWeb() {
DefaultLineMapper<WebTaxonomy> lineMapper = new DefaultLineMapper<>();
lineMapper.setLineTokenizer(lineTokenizerWeb());
lineMapper.setFieldSetMapper(new BeanWrapperFieldSetMapper<WebTaxonomy>() {
{
setTargetType(WebTaxonomy.class);
}
});
return lineMapper;
}
#Bean
public DelimitedLineTokenizer lineTokenizerWeb() {
DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(DelimitedLineTokenizer.DELIMITER_TAB);
tokenizer.setNames("taxonomy_id", "website_name", "brand_name", "publisher", "category", "category_2",
"category_3", "category_4", "category_5", "curated", "blacklist", "brand_1", "brand_2", "brand_3",
"brand_4", "brand_5", "brand_6", "regex", "host", "source", "creation_date");
return tokenizer;
}
#Autowired
#Qualifier("webIdProvider")
#Bean
public WebTaxonomyItemProcessor processorWeb(ObjectMapper mapper, IdProvider webIdProvider) {
Map<String, Long> webMapping = getMapping(mapper, "src/main/resources/web_taxonomy.json", "taxonomy_id", "website_name");
Long maxID = Collections.max(webMapping.values());
webIdProvider.setID(maxID);
return new WebTaxonomyItemProcessor(webMapping, webIdProvider);
}
#Bean
public Step stepOne(ObjectMapper mapper, IdProvider webIdProvider) {
return stepBuilderFactory.get("stepOne")
.<WebTaxonomy, WebTaxonomy> chunk(10)
.reader(readerWeb())
.processor(processorWeb(mapper, webIdProvider))
.writer(writerWeb(mapper))
.listener(new StepResultListener())
.build();
}
#Autowired
#Bean
public FlatFileItemWriter<WebTaxonomy> writerWeb(ObjectMapper mapper) {
return new FlatFileItemWriterBuilder<WebTaxonomy>()
.name("WebTaxonomyJsonFileItemWriter")
.resource(new FileSystemResource(OUTPUT_DATA_JSON))
.lineAggregator(new JsonLineAggregator<>(mapper))
.build();
}
#Bean
public Job updateWebTaxonomy(ObjectMapper mapper, IdProvider webIdProvider) {
return jobBuilderFactory.get("updateWebTaxonomy")
.incrementer(new RunIdIncrementer())
.start(stepOne(mapper, webIdProvider))
.build();
}
// App job
#Bean
public FlatFileItemReader<AppTaxonomy> readerApp() {
return new FlatFileItemReaderBuilder<AppTaxonomy>()
.name("appTaxonomyItemReader")
.resource(new ClassPathResource("/app_taxonomy.tsv"))
.linesToSkip(1)
.lineMapper(lineMapperApp()).build();
}
#Bean
public DefaultLineMapper<AppTaxonomy> lineMapperApp() {
DefaultLineMapper<AppTaxonomy> lineMapper = new DefaultLineMapper<>();
lineMapper.setLineTokenizer(lineTokenizerApp());
lineMapper.setFieldSetMapper(new BeanWrapperFieldSetMapper<AppTaxonomy>() {
{
setTargetType(AppTaxonomy.class);
}
});
return lineMapper;
}
#Bean
public DelimitedLineTokenizer lineTokenizerApp() {
DelimitedLineTokenizer tokenizer = new DelimitedLineTokenizer(DelimitedLineTokenizer.DELIMITER_TAB);
tokenizer.setNames("taxonomy_id", "app_name", "app_store", "app_package_name", "brand_name",
"publisher", "category", "category_2", "category_3", "category_4", "category_5", "blacklist", "brand_1",
"brand_2", "brand_3", "brand_4", "brand_5", "brand_6", "platform", "source", "creation_date");
return tokenizer;
}
#Autowired
#Qualifier("appIdProvider")
#Bean
public AppTaxonomyItemProcessor processorApp(ObjectMapper mapper, IdProvider appIDProvider) {
Map<String, Long> appMapping = getMapping(mapper,"src/main/resources/app_taxonomy.json", "taxonomy_id", "app_package_name");
Long maxID = Collections.max(appMapping.values());
appIDProvider.setID(maxID);
return new AppTaxonomyItemProcessor(appMapping, appIDProvider);
}
#Bean
public Step stepTwo(ObjectMapper mapper, IdProvider appIDProvider) {
return stepBuilderFactory.get("stepTwo")
.<AppTaxonomy, AppTaxonomy> chunk(10)
.reader(readerApp())
.processor(processorApp(mapper, appIDProvider))
.writer(writerApp(mapper))
.listener(new StepResultListener())
.build();
}
#Autowired
#Bean
public FlatFileItemWriter<AppTaxonomy> writerApp(ObjectMapper mapper) {
return new FlatFileItemWriterBuilder<AppTaxonomy>()
.name("AppTaxonomyJsonFileItemWriter")
.resource(new FileSystemResource(OUTPUT_DATA_APP_JSON))
.lineAggregator(new JsonLineAggregator<>(mapper))
.build();
}
#Bean
public Job updateAppTaxonomy(ObjectMapper mapper, IdProvider appIDProvider) {
return jobBuilderFactory.get("updateAppTaxonomy")
.incrementer(new RunIdIncrementer())
.start(stepTwo(mapper, appIDProvider))
.build();
}
#Bean
public ResourcelessTransactionManager batchTransactionManager() {
return new ResourcelessTransactionManager();
}
#Bean
public TaskExecutor taskExecutor() {
ThreadPoolTaskExecutor taskExecutor = new ThreadPoolTaskExecutor();
taskExecutor.setCorePoolSize(15);
taskExecutor.setMaxPoolSize(20);
taskExecutor.setQueueCapacity(30);
return taskExecutor;
}
#Bean
public JobLauncher batchJobLauncher() throws Exception {
SimpleJobLauncher jobLauncher = new SimpleJobLauncher();
jobLauncher.setTaskExecutor(taskExecutor()); // Or below line
// jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor());
jobLauncher.setJobRepository(jobRepository);
jobLauncher.afterPropertiesSet();
return jobLauncher;
}
#Bean
public JobOperator jobOperator(JobRegistry jobRegistry) throws Exception {
SimpleJobOperator jobOperator = new SimpleJobOperator();
jobOperator.setJobExplorer(jobExplorer);
jobOperator.setJobLauncher(batchJobLauncher());
jobOperator.setJobRegistry(jobRegistry);
jobOperator.setJobRepository(jobRepository);
return jobOperator;
}
#Bean
public JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor(JobRegistry jobRegistry) {
JobRegistryBeanPostProcessor jobRegistryBeanPostProcessor = new JobRegistryBeanPostProcessor();
jobRegistryBeanPostProcessor.setJobRegistry(jobRegistry);
return jobRegistryBeanPostProcessor;
}
#Bean
public ObjectMapper objectMapper(){
return new ObjectMapper();
}
public void run1(ObjectMapper mapper, IdProvider appIDProvider) {
Map<String, JobParameter> confMap = new HashMap<>();
confMap.put("time", new JobParameter(System.currentTimeMillis()));
JobParameters jobParameters = new JobParameters(confMap);
try {
batchJobLauncher().run(updateAppTaxonomy(mapper, appIDProvider), jobParameters);
} catch (Exception ex) {
System.err.println(ex.getMessage());
}
}
public void run2(ObjectMapper mapper, IdProvider webIdProvider) {
Map<String, JobParameter> confMap = new HashMap<>();
confMap.put("time", new JobParameter(System.currentTimeMillis()));
JobParameters jobParameters = new JobParameters(confMap);
try {
batchJobLauncher().run(updateWebTaxonomy(mapper, webIdProvider), jobParameters);
} catch (Exception ex) {
System.err.println(ex.getMessage());
}
}
public Map<String, Long> getMapping(ObjectMapper mapper, String path, String id, String value){
Map<String, Long> map = new HashMap<>();
try {
FileReader reader = new FileReader(path);
BufferedReader bufferedReader = new BufferedReader(reader);
String currentLine;
while((currentLine=bufferedReader.readLine()) != null) {
JsonNode node = mapper.readTree(currentLine);
long taxonomyID = node.get(id).longValue();
String key = node.get(value).textValue().toLowerCase();
map.put(key, taxonomyID);
}
} catch (Exception ex) {
ex.printStackTrace();
}
return map;
}
}

Related

Spring MVC MQTT inbound messages not coming

I configured MQTT in spring mvc application, howewever when I am trying to print something on console from command prompt, it is not printing. Code snnipet is <
#Bean
public MqttPahoClientFactory mqttClientFactory() {
DefaultMqttPahoClientFactory factory = new DefaultMqttPahoClientFactory();
MqttConnectOptions options = new MqttConnectOptions();
options.setServerURIs(new String[] { "tcp://localhost:1883" });
options.setUserName("admin");
String pass = "123456";
options.setPassword(pass.toCharArray());
options.setCleanSession(true);
factory.setConnectionOptions(options);
return factory;
}
#Bean
public MessageChannel mqttInputChannel() {
return new DirectChannel();
}
#Bean
public MessageProducer inbound() {
MqttPahoMessageDrivenChannelAdapter adapter = new MqttPahoMessageDrivenChannelAdapter("serverIn",
mqttClientFactory(), "#");
adapter.setCompletionTimeout(5000);
adapter.setConverter(new DefaultPahoMessageConverter());
adapter.setQos(2);
adapter.setOutputChannel(mqttInputChannel());
//adapter.setOutputChannelName("mqttInputChannel");
return adapter;
}
#Bean
#ServiceActivator(inputChannel = "mqttInputChannel")
public MessageHandler handler() {
return new MessageHandler() {
#Override
public void handleMessage(Message<?> message) throws MessagingException {
String topic = message.getHeaders().get(MqttHeaders.RECEIVED_TOPIC).toString();
if(topic.equals("myTopic")) {
System.out.println("This is the topic");
}
System.out.println(message.getPayload());
}
};
}
>
I have installed Mosquitto on my Windows machine, and from there I need to check whether this handler method is working by trying to print. But I could not.
I made the below separate configuration file and it worked.
#EnableIntegration
#IntegrationComponentScan(basePackages = "com.dr")
#Configuration
public class MqttBeansConfiguration {
#Bean
public MqttPahoClientFactory mqttClientFactory() {//config
DefaultMqttPahoClientFactory factory = new DefaultMqttPahoClientFactory();
MqttConnectOptions options = new MqttConnectOptions();
//propeties
options.setServerURIs(new String[] { "localhost:1883" });
//options.setUserName("admin");
//String pass = "123456";
//options.setPassword(pass.toCharArray());
//options.setCleanSession(true);
factory.setConnectionOptions(options);
return factory;
}
#Bean
public MessageChannel mqttInputChannel() {//config
return new DirectChannel();
}
#Bean
public MessageProducer inbound() { //config
MqttPahoMessageDrivenChannelAdapter adapter = new MqttPahoMessageDrivenChannelAdapter("serverIn",
mqttClientFactory(), "#");
adapter.setCompletionTimeout(5000);
adapter.setConverter(new DefaultPahoMessageConverter());
adapter.setQos(2);
adapter.setOutputChannel(mqttInputChannel());
return adapter;
}
#Bean
#ServiceActivator(inputChannel = "mqttInputChannel")
public MessageHandler handler() {
return new MessageHandler() {
#Override
public void handleMessage(Message<?> message) throws MessagingException {
String topic = message.getHeaders().get(MqttHeaders.RECEIVED_TOPIC).toString();
if(topic.equals("myTopic")) {
System.out.println("This is the topic");
}
System.out.println(message.getPayload());
}
};
}
/*#Bean
public MessageChannel mqttOutboundChannel() {//leave it
return new DirectChannel();
}
#Bean
#ServiceActivator(inputChannel = "mqttOutboundChannel") //leave it
public MessageHandler mqttOutbound() {
//clientId is generated using a random number
MqttPahoMessageHandler messageHandler = new MqttPahoMessageHandler("serverOut", mqttClientFactory());
messageHandler.setAsync(true);
messageHandler.setDefaultTopic("#");
messageHandler.setDefaultRetained(false);
return messageHandler;
}
*/
}

Quartz Scheduler task suddenly not running

The task has started, but it is never executed. Does anyone know what the problem is?
The problem is that every time I run the code, it gives me only the output below.
Here's the final log
2020-11-02 17:03:30.852 DEBUG 9264 --- [nio-8089-exec-7] c.z.t.c.quartz.QuartzScheduleManager : addNewCronJob() called with: jobInfo = [JobInfo(className=class com.zx.tzgl.cronJob.YearPlanJob, identifier=25ddaab4-3e3b-45e9-84a3-77ff7ca4c049, groupName=zx, cornExpr=00 18 17 02 11 ? 2020, params={PLAY_YEAR_SWITCH_PARAMS={"planType":"1","isOpen":"1","enable":"1","annual":"2020至2025","closeTime":1604307810844,"id":"1"}})]
2020-11-02 17:03:30.852 INFO 9264 --- [nio-8089-exec-7] org.quartz.core.QuartzScheduler : Scheduler SchedulerFactory_$_NON_CLUSTERED started.
Here's my using quartz' version
<dependency>
<groupId>org.quartz-scheduler</groupId>
<artifactId>quartz</artifactId>
<version>2.3.2</version>
</dependency>
Here's the quartz.properties file content
org.quartz.scheduler.instanceName = DefaultQuartzScheduler
org.quartz.scheduler.rmi.export = false
org.quartz.scheduler.rmi.proxy = false
org.quartz.scheduler.wrapJobExecutionInUserTransaction = false
org.quartz.threadPool.class = org.quartz.simpl.SimpleThreadPool
org.quartz.threadPool.threadCount = 100
org.quartz.threadPool.threadPriority = 5
org.quartz.threadPool.threadsInheritContextClassLoaderOfInitializingThread = true
org.quartz.jobStore.misfireThreshold = 5000
#org.quartz.jobStore.class = org.quartz.simpl.RAMJobStore
org.quartz.jobStore.class = org.quartz.impl.jdbcjobstore.JobStoreTX
org.quartz.jobStore.driverDelegateClass=org.quartz.impl.jdbcjobstore.StdJDBCDelegate
org.quartz.jobStore.useProperties=true
org.quartz.jobStore.tablePrefix = QRTZ_
org.quartz.jobStore.dataSource = myQuartzDB
org.quartz.dataSource.myQuartzDB.driver:oracle.jdbc.OracleDriver
#org.quartz.dataSource.myQuartzDB.URL:jdbc:oracle:thin:@//xx.xx.xx.xx:xxxx/cdb1
#org.quartz.dataSource.myQuartzDB.user:xxxx
#org.quartz.dataSource.myQuartzDB.password:xxxx
org.quartz.dataSource.myQuartzDB.URL:jdbc:oracle:thin:@//xx.xx.x.xx:xxxx/szorcl
org.quartz.dataSource.myQuartzDB.user:xxxx
org.quartz.dataSource.myQuartzDB.password:xxxx
org.quartz.dataSource.myQuartzDB.maxConnections:10
org.quartz.jobStore.isClustered=false
Here's Configuartion
#Configuration
public class QuartzConfig {
#Resource
private MyJobFactory myJobFactory;
#Bean(name = "SchedulerFactory")
public SchedulerFactoryBean schedulerFactoryBean() throws IOException {
SchedulerFactoryBean factory = new SchedulerFactoryBean();
factory.setQuartzProperties(quartzProperties());
factory.setJobFactory(myJobFactory);
return factory;
}
#Bean
public Properties quartzProperties() throws IOException {
PropertiesFactoryBean propertiesFactoryBean = new PropertiesFactoryBean();
propertiesFactoryBean.setLocation(new ClassPathResource("config/quartz.properties"));
propertiesFactoryBean.afterPropertiesSet();
return propertiesFactoryBean.getObject();
}
#Bean
public QuartzInitializerListener executorListener() {
return new QuartzInitializerListener();
}
#Bean(name = "quartzScheduler")
public Scheduler scheduler() throws IOException {
return schedulerFactoryBean().getScheduler();
}
}
#Component
public class MyJobFactory extends AdaptableJobFactory {
#Autowired
private AutowireCapableBeanFactory capableBeanFactory;
#Override
protected Object createJobInstance(TriggerFiredBundle bundle) throws Exception {
Object jobInstance = super.createJobInstance(bundle);
capableBeanFactory.autowireBean(jobInstance);
return jobInstance;
}
}
Here's the schduler manager
#Component
#Slf4j
public class QuartzScheduleManager implements QuartzScheduleI {
#Resource(name = "quartzScheduler")
private Scheduler scheduler;
#Override #SneakyThrows
public void checkAndAddCornJob(JobInfo jobInfo) {
if (isExistJob(jobInfo)) {
resumeJob(jobInfo);
} else {
addNewCronJob(jobInfo);
}
}
#Override #SneakyThrows
public boolean isExistJob(JobInfo jobInfo) {
return scheduler.checkExists(new JobKey(jobInfo.getIdentifier(), jobInfo.getGroupName()));
}
#Override #SneakyThrows
public boolean isExistJob2(JobInfo jobInfo) {
boolean isExist = false;
JobKey jobKey1 = new JobKey(jobInfo.getIdentifier(), jobInfo.getGroupName());
List<? extends Trigger> triggers1 = scheduler.getTriggersOfJob(jobKey1);
if (triggers1.size() > 0) {
for (Trigger tg : triggers1) {
if ((tg instanceof CronTrigger) || (tg instanceof SimpleTrigger)) {
isExist = true;
}
}
}
return isExist;
}
/** Registers a brand-new cron-triggered job with the (now started) scheduler. */
private void addNewCronJob(JobInfo jobInfo) throws Exception {
    scheduler.start();
    // Describe the job itself.
    JobDetail jobDetail = JobBuilder.newJob(jobInfo.getClassName())
            .withIdentity(jobInfo.getIdentifier(), jobInfo.getGroupName())
            .build();
    // Copy any caller-supplied parameters into the job's data map.
    if (ObjectUtil.isNotNull(jobInfo.getParams()) && !jobInfo.getParams().isEmpty()) {
        jobInfo.getParams().forEach((paramKey, paramValue) -> jobDetail.getJobDataMap().put(paramKey, paramValue));
    }
    // Cron trigger sharing the job's identity.
    CronScheduleBuilder scheduleBuilder = CronScheduleBuilder.cronSchedule(jobInfo.getCornExpr());
    CronTrigger trigger = TriggerBuilder.newTrigger()
            .withIdentity(jobInfo.getIdentifier(), jobInfo.getGroupName())
            .forJob(jobDetail)
            .withSchedule(scheduleBuilder)
            .build();
    try {
        scheduler.scheduleJob(jobDetail, trigger);
    } catch (SchedulerException e) {
        // NOTE(review): wrapping with only e.getMessage() drops the cause.
        throw new CustomException(ResultCode.PARAM_ERROR.getCode(), e.getMessage());
    }
}
Here's the job listener
public class YearPlanJob implements Job {
#Override public void execute(JobExecutionContext jobExecutionContext) {
JobDetail jobDetail = jobExecutionContext.getJobDetail();
JobDataMap jobDataMap;
if (!jobDetail.getJobDataMap().isEmpty()) {
jobDataMap = jobDetail.getJobDataMap();
} else {
jobDataMap = jobExecutionContext.getTrigger().getJobDataMap();
}
}
}
Here's my test code
/** Schedules a one-shot test job; returns the generated job identifier. */
public String testQuartzTask() {
    Date date = DateUtil.parse("2020-11-02 17:18:00", "yyyy-MM-dd HH:mm:ss");
    PlanapplyYear planapplyYear = new PlanapplyYear();
    planapplyYear.setCloseTime(new Date());
    String jsonParams = JSONUtil.toJsonStr(planapplyYear);
    JobInfo jobInfo = new JobInfo();
    jobInfo.setClassName(YearPlanJob.class);
    jobInfo.setIdentifier(UUID.randomUUID().toString());
    jobInfo.setGroupName("zx");
    jobInfo.setCornExpr(CronDateUtils.getCron(date));
    jobInfo.setParams(ImmutableMap.of(PLAN_YEAR_SWITCH, jsonParams));
    mQuartzScheduleManager.checkAndAddCornJob(jobInfo);
    // FIX(review): the method declares a String return type but had no
    // return statement (compile error); return the job id for the caller.
    return jobInfo.getIdentifier();
}

How to implement simple echo socket service in Spring Integration DSL

Please,
could you help with implementation of a simple, echo style, Heartbeat TCP socket service in Spring Integration DSL? More precisely how to plug Adapter/Handler/Gateway to IntegrationFlows on the client and server side. Practical examples are hard to come by for Spring Integration DSL and TCP/IP client/server communication.
I think, I nailed most of the code, it's just that bit about plugging everything together in the IntegrationFlow.
There is an sample echo service in SI examples, but it's written in the "old" XML configuration and I really struggle to transform it to the configuration by code.
My Heartbeat service is a simple server waiting for client to ask "status", responding with "OK".
No #ServiceActivator, no #MessageGateways, no proxying, everything explicit and verbose; driven by a plain JDK scheduled executor on client side; server and client in separate configs and projects.
HeartbeatClientConfig
#Configuration
#EnableIntegration
public class HeartbeatClientConfig {
#Bean
public MessageChannel outboudChannel() {
return new DirectChannel();
}
#Bean
public PollableChannel inboundChannel() {
return new QueueChannel();
}
#Bean
public TcpNetClientConnectionFactory connectionFactory() {
TcpNetClientConnectionFactory connectionFactory = new TcpNetClientConnectionFactory("localhost", 7777);
return connectionFactory;
}
#Bean
public TcpReceivingChannelAdapter heartbeatReceivingMessageAdapter(
TcpNetClientConnectionFactory connectionFactory,
MessageChannel inboundChannel) {
TcpReceivingChannelAdapter heartbeatReceivingMessageAdapter = new TcpReceivingChannelAdapter();
heartbeatReceivingMessageAdapter.setConnectionFactory(connectionFactory);
heartbeatReceivingMessageAdapter.setOutputChannel(inboundChannel); // ???
heartbeatReceivingMessageAdapter.setClientMode(true);
return heartbeatReceivingMessageAdapter;
}
#Bean
public TcpSendingMessageHandler heartbeatSendingMessageHandler(
TcpNetClientConnectionFactory connectionFactory) {
TcpSendingMessageHandler heartbeatSendingMessageHandler = new TcpSendingMessageHandler();
heartbeatSendingMessageHandler.setConnectionFactory(connectionFactory);
return heartbeatSendingMessageHandler;
}
#Bean
public IntegrationFlow heartbeatClientFlow(
TcpNetClientConnectionFactory connectionFactory,
TcpReceivingChannelAdapter heartbeatReceivingMessageAdapter,
TcpSendingMessageHandler heartbeatSendingMessageHandler,
MessageChannel outboudChannel) {
return IntegrationFlows
.from(outboudChannel) // ??????
.// adapter ???????????
.// gateway ???????????
.// handler ???????????
.get();
}
#Bean
public HeartbeatClient heartbeatClient(
MessageChannel outboudChannel,
PollableChannel inboundChannel) {
return new HeartbeatClient(outboudChannel, inboundChannel);
}
}
HeartbeatClient
public class HeartbeatClient {
private final MessageChannel outboudChannel;
private final PollableChannel inboundChannel;
private final Logger log = LogManager.getLogger(HeartbeatClient.class);
public HeartbeatClient(MessageChannel outboudChannel, PollableChannel inboundChannel) {
this.inboundChannel = inboundChannel;
this.outboudChannel = outboudChannel;
}
#EventListener
public void initializaAfterContextIsReady(ContextRefreshedEvent event) {
log.info("Starting Heartbeat client...");
start();
}
public void start() {
Executors.newSingleThreadScheduledExecutor().scheduleAtFixedRate(() -> {
while (true) {
try {
log.info("Sending Heartbeat");
outboudChannel.send(new GenericMessage<String>("status"));
Message<?> message = inboundChannel.receive(1000);
if (message == null) {
log.error("Heartbeat timeouted");
} else {
String messageStr = new String((byte[]) message.getPayload());
if (messageStr.equals("OK")) {
log.info("Heartbeat OK response received");
} else {
log.error("Unexpected message content from server: " + messageStr);
}
}
} catch (Exception e) {
log.error(e);
}
}
}, 0, 10000, TimeUnit.SECONDS);
}
}
HeartbeatServerConfig
#Configuration
#EnableIntegration
public class HeartbeatServerConfig {
#Bean
public MessageChannel outboudChannel() {
return new DirectChannel();
}
#Bean
public PollableChannel inboundChannel() {
return new QueueChannel();
}
#Bean
public TcpNetServerConnectionFactory connectionFactory() {
TcpNetServerConnectionFactory connectionFactory = new TcpNetServerConnectionFactory(7777);
return connectionFactory;
}
#Bean
public TcpReceivingChannelAdapter heartbeatReceivingMessageAdapter(
TcpNetServerConnectionFactory connectionFactory,
MessageChannel outboudChannel) {
TcpReceivingChannelAdapter heartbeatReceivingMessageAdapter = new TcpReceivingChannelAdapter();
heartbeatReceivingMessageAdapter.setConnectionFactory(connectionFactory);
heartbeatReceivingMessageAdapter.setOutputChannel(outboudChannel);
return heartbeatReceivingMessageAdapter;
}
#Bean
public TcpSendingMessageHandler heartbeatSendingMessageHandler(
TcpNetServerConnectionFactory connectionFactory) {
TcpSendingMessageHandler heartbeatSendingMessageHandler = new TcpSendingMessageHandler();
heartbeatSendingMessageHandler.setConnectionFactory(connectionFactory);
return heartbeatSendingMessageHandler;
}
#Bean
public IntegrationFlow heartbeatServerFlow(
TcpReceivingChannelAdapter heartbeatReceivingMessageAdapter,
TcpSendingMessageHandler heartbeatSendingMessageHandler,
MessageChannel outboudChannel) {
return IntegrationFlows
.from(heartbeatReceivingMessageAdapter) // ???????????????
.handle(heartbeatSendingMessageHandler) // ???????????????
.get();
}
#Bean
public HeartbeatServer heartbeatServer(
PollableChannel inboundChannel,
MessageChannel outboudChannel) {
return new HeartbeatServer(inboundChannel, outboudChannel);
}
}
HeartbeatServer
public class HeartbeatServer {
private final PollableChannel inboundChannel;
private final MessageChannel outboudChannel;
private final Logger log = LogManager.getLogger(HeartbeatServer.class);
public HeartbeatServer(PollableChannel inboundChannel, MessageChannel outboudChannel) {
this.inboundChannel = inboundChannel;
this.outboudChannel = outboudChannel;
}
#EventListener
public void initializaAfterContextIsReady(ContextRefreshedEvent event) {
log.info("Starting Heartbeat");
start();
}
public void start() {
Executors.newSingleThreadExecutor().execute(() -> {
while (true) {
try {
Message<?> message = inboundChannel.receive(1000);
if (message == null) {
log.error("Heartbeat timeouted");
} else {
String messageStr = new String((byte[]) message.getPayload());
if (messageStr.equals("status")) {
log.info("Heartbeat received");
outboudChannel.send(new GenericMessage<>("OK"));
} else {
log.error("Unexpected message content from client: " + messageStr);
}
}
} catch (Exception e) {
log.error(e);
}
}
});
}
}
Bonus question
Why can a channel be set on TcpReceivingChannelAdapter (inbound adapter) but not on TcpSendingMessageHandler (outbound adapter)?
UPDATE
Here is the full project source code, ready to git clone, if anyone is interested:
https://bitbucket.org/espinosa/spring-integration-tcp-demo
I will try to put all suggested solutions there.
It's much simpler with the DSL...
#SpringBootApplication
#EnableScheduling
public class So55154418Application {
public static void main(String[] args) {
SpringApplication.run(So55154418Application.class, args);
}
#Bean
public IntegrationFlow server() {
return IntegrationFlows.from(Tcp.inboundGateway(Tcp.netServer(1234)))
.transform(Transformers.objectToString())
.log()
.handle((p, h) -> "OK")
.get();
}
#Bean
public IntegrationFlow client() {
return IntegrationFlows.from(Gate.class)
.handle(Tcp.outboundGateway(Tcp.netClient("localhost", 1234)))
.transform(Transformers.objectToString())
.handle((p, h) -> {
System.out.println("Received:" + p);
return null;
})
.get();
}
#Bean
#DependsOn("client")
public Runner runner(Gate gateway) {
return new Runner(gateway);
}
public static class Runner {
private final Gate gateway;
public Runner(Gate gateway) {
this.gateway = gateway;
}
#Scheduled(fixedDelay = 5000)
public void run() {
this.gateway.send("foo");
}
}
public interface Gate {
void send(String out);
}
}
Or, get the reply from the Gate method...
#Bean
public IntegrationFlow client() {
return IntegrationFlows.from(Gate.class)
.handle(Tcp.outboundGateway(Tcp.netClient("localhost", 1234)))
.transform(Transformers.objectToString())
.get();
}
#Bean
#DependsOn("client")
public Runner runner(Gate gateway) {
return new Runner(gateway);
}
public static class Runner {
private final Gate gateway;
public Runner(Gate gateway) {
this.gateway = gateway;
}
#Scheduled(fixedDelay = 5000)
public void run() {
String reply = this.gateway.sendAndReceive("foo"); // null for timeout
System.out.println("Received:" + reply);
}
}
public interface Gate {
#Gateway(replyTimeout = 5000)
String sendAndReceive(String out);
}
Bonus:
Consuming endpoints are actually comprised of 2 beans; a consumer and a message handler. The channel goes on the consumer. See here.
EDIT
An alternative, for a single bean for the client...
#Bean
public IntegrationFlow client() {
return IntegrationFlows.from(() -> "foo",
e -> e.poller(Pollers.fixedDelay(Duration.ofSeconds(5))))
.handle(Tcp.outboundGateway(Tcp.netClient("localhost", 1234)))
.transform(Transformers.objectToString())
.handle((p, h) -> {
System.out.println("Received:" + p);
return null;
})
.get();
}
For anyone interested, here is one of the working solutions I made with help from Gary Russell. All credits to Gary Russell. Full project source code here.
Highlights:
IntegrationFlows: Use only inbound and outbound Gateways.
No Adapters or Channels needed; no ServiceActivators or Message Gate proxies.
No need for ScheduledExecutor or Executors; the client and server code got significantly simpler.
IntegrationFlows directly calls methods on client class and server class; I like this type of explicit connection.
Split client class on two parts, two methods: request producing part and response processing part; this way it can be better chained to flows.
explicitly define clientConnectionFactory/serverConnectionFactory. This way more things can be explicitly configured later.
HeartbeatClientConfig
#Bean
public IntegrationFlow heartbeatClientFlow(
TcpNetClientConnectionFactory clientConnectionFactory,
HeartbeatClient heartbeatClient) {
return IntegrationFlows.from(heartbeatClient::send, e -> e.poller(Pollers.fixedDelay(Duration.ofSeconds(5))))
.handle(Tcp.outboundGateway(clientConnectionFactory))
.handle(heartbeatClient::receive)
.get();
}
HeartbeatClient
/** Heartbeat client endpoints: produces "status" requests, checks "OK" replies. */
public class HeartbeatClient {

    private final Logger log = LogManager.getLogger(HeartbeatClient.class);

    /** Builds the outbound heartbeat request message. */
    public GenericMessage<String> send() {
        log.info("Sending Heartbeat");
        return new GenericMessage<String>("status");
    }

    /** Consumes the raw server reply; logs whether it is the expected "OK". */
    public Object receive(byte[] payload, MessageHeaders messageHeaders) { // LATER: use transformer() to receive String here
        String reply = new String(payload);
        if (!"OK".equals(reply)) {
            log.error("Unexpected message content from server: " + reply);
        } else {
            log.info("Heartbeat OK response received");
        }
        return null; // nothing further in the flow
    }
}
HeartbeatServerConfig
#Bean
public IntegrationFlow heartbeatServerFlow(
TcpNetServerConnectionFactory serverConnectionFactory,
HeartbeatServer heartbeatServer) {
return IntegrationFlows
.from(Tcp.inboundGateway(serverConnectionFactory))
.handle(heartbeatServer::processRequest)
.get();
}
HeartbeatServer
/** Heartbeat server endpoint: answers "OK" to a "status" request. */
public class HeartbeatServer {

    private final Logger log = LogManager.getLogger(HeartbeatServer.class);

    /** Replies "OK" to "status"; anything else is logged and dropped (null reply). */
    public Message<String> processRequest(byte[] payload, MessageHeaders messageHeaders) {
        String request = new String(payload);
        if (!"status".equals(request)) {
            log.error("Unexpected message content from client: " + request);
            return null;
        }
        log.info("Heartbeat received");
        return new GenericMessage<>("OK");
    }
}

Spring FTP Inbound Channel Adapter log every certain time to the FTP

quick question
So with the FTP inbound channel adapter, how do I log in to the remote FTP server, for example, every 10 minutes? Is the poller's fixed rate what controls this? The poller is for polling, but does it stay logged in to the remote server?
I have this:
#Bean
#InboundChannelAdapter(value = "stream", poller = #Poller(fixedRate = "1000"))
public MessageSource<InputStream> ftpMessageSource() {
FtpStreamingMessageSource messageSource = new FtpStreamingMessageSource(template(), null);
messageSource.setRemoteDirectory(remotedirectory);
messageSource.setFilter(filter());
return messageSource;
}
or the poller METADATA trigger:
#Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
PollerMetadata pollerMetadata = new PollerMetadata();
pollerMetadata.setTrigger(new PeriodicTrigger(5000));
return pollerMetadata;
}
Or how do I log in every 10 minutes and then poll all new files — by setting a Thread.sleep()?
_______EDIT___
// Boot entry point: starts the Spring context, which wires up the FTP flow.
public static void main(String[] args) {
SpringApplication.run(FtpinboundApp.class, args);
}
#Bean
public SessionFactory<FTPFile> ftpSessionFactory() {
DefaultFtpSessionFactory sf = new DefaultFtpSessionFactory();
sf.setHost(remotehost);
sf.setPort(remoteport);
sf.setUsername(remoteuser);
sf.setPassword(remotepassword);
return new CachingSessionFactory<FTPFile>(sf);
}
#Bean
#ServiceActivator(inputChannel = "data", adviceChain = "after")
public MessageHandler handler() {
return new MessageHandler() {
#Override
public void handleMessage(Message<?> message) throws MessagingException {
try {
httpposthgfiles.getHGFilesfromRestful(message.getPayload().toString());
httppost990.get990fromRestful(message.getPayload().toString());
} catch (IOException e) {
logger.error(e);
} catch (Exception e) {
logger.error(e);
}
}
};
}
#Bean
public ExpressionEvaluatingRequestHandlerAdvice after() {
ExpressionEvaluatingRequestHandlerAdvice advice = new ExpressionEvaluatingRequestHandlerAdvice();
advice.setOnSuccessExpression("#template.remove(headers['file_remoteDirectory'] + headers['file_remoteFile'])");
advice.setPropagateEvaluationFailures(true);
return advice;
}
#Bean
#InboundChannelAdapter(value = "stream", poller = #Poller(fixedRate = "1000"))
public MessageSource<InputStream> ftpMessageSource() {
FtpStreamingMessageSource messageSource = new FtpStreamingMessageSource(template(), null);
messageSource.setRemoteDirectory(remotedirectory);
messageSource.setFilter(filter());
return messageSource;
}
/**
 * Builds the composite filter applied to remote FTP listings: a simple
 * filename-pattern match combined with the persistent accept-once filter.
 */
public FileListFilter<FTPFile> filter() {
CompositeFileListFilter<FTPFile> composite = new CompositeFileListFilter<>();
composite.addFilter(new FtpSimplePatternFileListFilter("xxxx_aaa204*"));
composite.addFilter(acceptOnceFilter());
return composite;
}
#Bean
public FtpPersistentAcceptOnceFileListFilter acceptOnceFilter() {
FtpPersistentAcceptOnceFileListFilter filter = new FtpPersistentAcceptOnceFileListFilter(meta(), "xxxx_aaa204");
filter.setFlushOnUpdate(true);
return filter;
}
#Bean
public ConcurrentMetadataStore meta() {
PropertiesPersistingMetadataStore meta = new PropertiesPersistingMetadataStore();
meta.setBaseDirectory("/tmp/foo");
meta.setFileName("ftpStream.properties");
return meta;
}
#Bean
#Transformer(inputChannel = "stream", outputChannel = "data")
public org.springframework.integration.transformer.Transformer transformer() {
return new StreamTransformer("UTF-8");
}
#Bean
public FtpRemoteFileTemplate template() {
return new FtpRemoteFileTemplate(ftpSessionFactory());
}
#Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata defaultPoller() {
PollerMetadata pollerMetadata = new PollerMetadata();
pollerMetadata.setTrigger(new PeriodicTrigger(5000));
return pollerMetadata;
}
It will only stay logged in if you use a CachingSessionFactory.
It's better not to sleep and tie up a thread like that, but use the task scheduler (which is what the poller does).
new PeriodicTrigger(600_000) will schedule a task to log in and check for files once every 10 minutes.

Start new job with quartz in Java

I would like to understand how to make one job start another job after it finishes, using Quartz in Java.
My Jobs Bean:
#Bean
public JobDetailFactoryBean jobDownload() {
JobDetailFactoryBean j = new JobDetailFactoryBean();
j.setJobClass(DownloadJob.class);
j.setName("jobDownload");
j.setGroup("group1");
return j;
}
#Bean
public JobDetailFactoryBean jobRegex() {
JobDetailFactoryBean j = new JobDetailFactoryBean();
j.setJobClass(RegexJob.class);
j.setName("jobRegex");
j.setGroup("group1");
return j;
}
My Trigger Bean:
#Bean
public CronTriggerFactoryBean triggerDownload() {
CronTriggerFactoryBean cron = new CronTriggerFactoryBean();
cron.setName("triggerDownload");
cron.setGroup("group1");
cron.setCronExpression("0 15 17 * * ?");
cron.setJobDetail(jobConfiguration.jobDownload().getObject());
return cron;
}
My Scheduler:
#Bean
public SchedulerFactoryBean sched() {
SchedulerFactoryBean sched = new SchedulerFactoryBean();
try {
AutowiringSpringBeanJobFactory jobFactory = new AutowiringSpringBeanJobFactory();
jobFactory.setApplicationContext(applicationContext);
sched.setJobFactory(jobFactory);
// Agendando execução das triggers
sched.setTriggers(triggerConfiguration.triggerDownload()
.getObject());
sched.start();
} catch (Exception e) {
e.printStackTrace();
}
return sched;
}
My Job Executer:
#Service
#Transactional
public class DownloadJob implements Job {
#Autowired
private RegexService service;
#Override
public void execute(JobExecutionContext j) throws JobExecutionException {
try {
DownloadService t = new DownloadService();
File file = t
.fazerDownload(
"nameFile",
"urlName");
service.aplicaTreeRegex(3L, file, 789L);
} catch (Exception e) {
e.printStackTrace();
}
}
}
I would like jobDownload to start jobRegex after it finishes. How can I do that?

Categories