Spring Integration File reading - java

I am a newbie to Spring Integration. I am working on a solution, but I am stuck on a specific issue while using the inbound file adapter (FileReadingMessageSource).
I have to read files from different directories, process them, and save the files to different directories. As I understand it, the directory name is fixed at the start of the flow.
Can someone help me change the directory name for different requests?
I attempted the following. First of all, I am not sure whether this is the correct way to go about it, and it only worked for one directory. I think the poller was waiting for more files and never came back to read another directory.
@SpringBootApplication
@EnableIntegration
@IntegrationComponentScan
public class SiSampleFileProcessor {

    @Autowired
    MyFileProcessor myFileProcessor;

    @Value("${si.outdir}")
    String outDir;

    @Autowired
    Environment env;

    public static void main(String[] args) throws IOException {
        ConfigurableApplicationContext ctx = new SpringApplication(SiSampleFileProcessor.class).run(args);
        FileProcessingService gateway = ctx.getBean(FileProcessingService.class);
        boolean process = true;
        while (process) {
            System.out.println("Please enter the input Directory: ");
            String inDir = new Scanner(System.in).nextLine();
            if (inDir.isEmpty() || inDir.equals("exit")) {
                process = false;
            } else {
                System.out.println("Processing... " + inDir);
                gateway.processFilesin(inDir);
            }
        }
        ctx.close();
    }

    @MessagingGateway(defaultRequestChannel = "requestChannel")
    public interface FileProcessingService {
        String processFilesin(String inputDir);
    }

    @Bean(name = PollerMetadata.DEFAULT_POLLER)
    public PollerMetadata poller() {
        return Pollers.fixedDelay(1000).get();
    }

    @Bean
    public MessageChannel requestChannel() {
        return new DirectChannel();
    }

    @ServiceActivator(inputChannel = "requestChannel")
    @Bean
    GenericHandler<String> fileReader() {
        return new GenericHandler<String>() {
            @Override
            public Object handle(String p, Map<String, Object> map) {
                FileReadingMessageSource fileSource = new FileReadingMessageSource();
                fileSource.setDirectory(new File(p));
                Message<File> msg;
                while ((msg = fileSource.receive()) != null) {
                    fileInChannel().send(msg);
                }
                return null; // Not sure what to return!
            }
        };
    }

    @Bean
    public MessageChannel fileInChannel() {
        return MessageChannels.queue("fileIn").get();
    }

    @Bean
    public IntegrationFlow fileProcessingFlow() {
        return IntegrationFlows.from(fileInChannel())
                .handle(myFileProcessor)
                .handle(Files.outboundAdapter(new File(outDir)).autoCreateDirectory(true).get())
                .get();
    }
}
EDIT: Based on Gary's response, I replaced some methods as follows:
@MessagingGateway(defaultRequestChannel = "requestChannel")
public interface FileProcessingService {
    boolean processFilesin(String inputDir);
}

@ServiceActivator(inputChannel = "requestChannel")
public boolean fileReader(String inDir) {
    FileReadingMessageSource fileSource = new FileReadingMessageSource();
    fileSource.setDirectory(new File(inDir));
    fileSource.afterPropertiesSet();
    fileSource.start();
    Message<File> msg;
    while ((msg = fileSource.receive()) != null) {
        fileInChannel().send(msg);
    }
    fileSource.stop();
    System.out.println("Sent all files in directory: " + inDir);
    return true;
}
Now it is working as expected.

You can use this code:
FileProcessor.java
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;
@Component
public class FileProcessor {

    private static final String HEADER_FILE_NAME = "file_name";
    private static final String MSG = "%s received. Content: %s";

    public void process(Message<String> msg) {
        String fileName = (String) msg.getHeaders().get(HEADER_FILE_NAME);
        String content = msg.getPayload();
        //System.out.println(String.format(MSG, fileName, content));
        System.out.println(content);
    }
}
LastModifiedFileFilter.java
package com.example.demo;
import org.springframework.integration.file.filters.AbstractFileListFilter;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
public class LastModifiedFileFilter extends AbstractFileListFilter<File> {

    private final Map<String, Long> files = new HashMap<>();
    private final Object monitor = new Object();

    @Override
    protected boolean accept(File file) {
        synchronized (this.monitor) {
            Long previousModifiedTime = files.put(file.getName(), file.lastModified());
            return previousModifiedTime == null || previousModifiedTime != file.lastModified();
        }
    }
}
Main class: DemoApplication.java
package com.example.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import org.apache.commons.io.FileUtils;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.Aggregator;
import org.springframework.integration.annotation.InboundChannelAdapter;
import org.springframework.integration.annotation.Poller;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.integration.core.MessageSource;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.dsl.channel.MessageChannels;
import org.springframework.integration.dsl.core.Pollers;
import org.springframework.integration.file.FileReadingMessageSource;
import org.springframework.integration.file.filters.CompositeFileListFilter;
import org.springframework.integration.file.filters.SimplePatternFileListFilter;
import org.springframework.integration.file.transformer.FileToStringTransformer;
import org.springframework.integration.scheduling.PollerMetadata;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.PollableChannel;
import org.springframework.stereotype.Component;
@SpringBootApplication
@Configuration
public class DemoApplication {

    private static final String DIRECTORY = "E:/usmandata/logs/input/";

    public static void main(String[] args) throws IOException, InterruptedException {
        SpringApplication.run(DemoApplication.class, args);
    }

    @Bean
    public IntegrationFlow processFileFlow() {
        return IntegrationFlows
                .from("fileInputChannel")
                .transform(fileToStringTransformer())
                .handle("fileProcessor", "process").get();
    }

    @Bean
    public MessageChannel fileInputChannel() {
        return new DirectChannel();
    }

    @Bean
    @InboundChannelAdapter(value = "fileInputChannel", poller = @Poller(fixedDelay = "1000"))
    public MessageSource<File> fileReadingMessageSource() {
        CompositeFileListFilter<File> filters = new CompositeFileListFilter<>();
        filters.addFilter(new SimplePatternFileListFilter("*.log"));
        filters.addFilter(new LastModifiedFileFilter());
        FileReadingMessageSource source = new FileReadingMessageSource();
        source.setAutoCreateDirectory(true);
        source.setDirectory(new File(DIRECTORY));
        source.setFilter(filters);
        return source;
    }

    @Bean
    public FileToStringTransformer fileToStringTransformer() {
        return new FileToStringTransformer();
    }

    @Bean
    public FileProcessor fileProcessor() {
        return new FileProcessor();
    }
}

The FileReadingMessageSource uses a DirectoryScanner internally; it is normally set up by Spring after the properties are injected. Since you are managing the object outside of Spring, you need to call the Spring bean initialization and lifecycle methods afterPropertiesSet(), start(), and stop().
Call stop() when receive() returns null.
> return null; // Not sure what to return!
If you return nothing, your calling thread will hang in the gateway, waiting for a response. You could change the gateway to return void or, since your gateway is expecting a String, just return some value.
However, your calling code is not looking at the result anyway.
> gateway.processFilesin(inDir);
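For instance, a minimal sketch of that void-returning variant (same gateway, just no reply expected):

    @MessagingGateway(defaultRequestChannel = "requestChannel")
    public interface FileProcessingService {
        void processFilesin(String inputDir); // void: the caller does not block waiting for a reply
    }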
Also, remove the @Bean from the @ServiceActivator; with that style, the bean type must be MessageHandler.
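For reference, a minimal sketch of that @Bean-plus-@ServiceActivator style, where the bean itself is a MessageHandler (the handler body here is a placeholder, not the actual implementation):

    @Bean
    @ServiceActivator(inputChannel = "requestChannel")
    public MessageHandler fileReaderHandler() {
        // MessageHandler is a functional interface: void handleMessage(Message<?> message)
        return message -> {
            String inDir = (String) message.getPayload();
            // ... set up and drain a FileReadingMessageSource for inDir, as in the edit above
        };
    }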

Related

Infinite Loop with authenticate spring

I'm at a loss. I upgraded an application from Spring Boot 2.1 to 2.6 and from Wicket 8.0 to 9.6. I had two circular-reference issues that I fixed, but now I get an infinite loop when I start the application with the H2 database, because of authenticate. And I'm not sure what is happening there.
So this is the part of the stack trace that keeps repeating. IntelliJ cuts off the beginning; I'm not sure what to do about that:
at com.xyz.ufa.app.TestUFSession.<init>(TestUFSession.java:15)
at jdk.internal.reflect.GeneratedConstructorAccessor102.newInstance(Unknown Source)
at java.base/jdk.internal.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
at java.base/java.lang.reflect.Constructor.newInstance(Constructor.java:490)
at org.apache.wicket.authroles.authentication.AuthenticatedWebApplication.newSession(AuthenticatedWebApplication.java:108)
at org.apache.wicket.Application.fetchCreateAndSetSession(Application.java:1527)
at org.apache.wicket.Session.get(Session.java:194)
at org.apache.wicket.protocol.http.WebSession.get(WebSession.java:41)
at com.giffing.wicket.spring.boot.starter.configuration.extensions.external.spring.security.SecureWebSession.authenticate(SecureWebSession.java:48)
at org.apache.wicket.authroles.authentication.AuthenticatedWebSession.signIn(AuthenticatedWebSession.java:66)
at com.xyz.ufa.app.TestUFSession.<init>(TestUFSession.java:15)
Here is the TestUFSession class:
import com.xyz.ufa.frontend.config.UFSession;
import org.apache.wicket.request.Request;
/**
 * Helper session to log in automatically.
 */
public class TestUFSession extends UFSession {

    public TestUFSession(Request request) {
        super(request);
        signIn("admin", "admin"); // this calls authenticate
    }
}
And here is the UFSession class:
import com.giffing.wicket.spring.boot.starter.configuration.extensions.external.spring.security.SecureWebSession;
import java.util.Locale;
import lombok.Getter;
import org.apache.wicket.request.Request;
@Getter
public class UFSession extends SecureWebSession {

    private String username;
    private Locale locale;

    public UFSession(Request request) {
        super(request);
        locale = request.getLocale();
    }

    @Override
    public void signOut() {
        username = null;
        super.signOut();
    }
}
And here is the WebSecurityConfiguration class:
import com.xyz.uf.common.ApplicationProfile;
import java.util.Optional;
import lombok.extern.slf4j.Slf4j;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Import;
import org.springframework.context.annotation.Profile;
import org.springframework.ldap.core.support.LdapContextSource;
import org.springframework.security.authentication.AuthenticationManager;
import org.springframework.security.authentication.AuthenticationProvider;
import org.springframework.security.authentication.AuthenticationServiceException;
import org.springframework.security.config.annotation.authentication.builders.AuthenticationManagerBuilder;
import org.springframework.security.config.annotation.authentication.configurers.userdetails.DaoAuthenticationConfigurer;
import org.springframework.security.config.annotation.web.builders.HttpSecurity;
import org.springframework.security.config.annotation.web.configuration.WebSecurityConfigurerAdapter;
import org.springframework.security.core.authority.mapping.GrantedAuthoritiesMapper;
import org.springframework.security.crypto.password.NoOpPasswordEncoder;
import org.springframework.security.crypto.password.PasswordEncoder;
import org.springframework.security.ldap.authentication.AbstractLdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.LdapAuthenticationProvider;
import org.springframework.security.ldap.authentication.PasswordComparisonAuthenticator;
import org.springframework.security.ldap.authentication.ad.ActiveDirectoryLdapAuthenticationProvider;
import org.springframework.security.ldap.search.FilterBasedLdapUserSearch;
import org.springframework.security.ldap.search.LdapUserSearch;
import org.springframework.security.ldap.userdetails.DefaultLdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapAuthoritiesPopulator;
import org.springframework.security.ldap.userdetails.LdapUserDetailsService;
@Slf4j
@Configuration
@Import(LdapProperties.class)
@ComponentScan(basePackages = {"com.xyz.ufa.security.userinfo"})
public class WebSecurityConfiguration extends WebSecurityConfigurerAdapter {

    private final String AUTHENTICATION_PROVIDER_BEAN_NAME = "authenticationProvider";

    @Autowired
    private LdapProperties ldapProperties;

    @Bean(name = "authenticationManager")
    @Override
    public AuthenticationManager authenticationManagerBean() throws AuthenticationServiceException {
        try {
            return super.authenticationManagerBean();
        } catch (Exception e) {
            log.error("Error in authenticationManagerBean", e);
            throw new AuthenticationServiceException(e.getMessage(), e);
        }
    }

    @Override
    protected void configure(HttpSecurity httpSecurity) throws AuthenticationServiceException {
        try {
            httpSecurity
                    .csrf().disable()
                    .authorizeRequests()
                    .antMatchers("/*").permitAll()
                    .antMatchers("/restservice/**").hasAuthority(UFARole.TECHNICAL_ADMIN)
                    .and().httpBasic()
                    .and().logout().permitAll();
            httpSecurity.headers().frameOptions().disable();
        } catch (Exception e) {
            throw new AuthenticationServiceException(String.format("Could not configure %s with csrf disabled and matching Pattern /*.", httpSecurity), e);
        }
    }

    @Override
    public void configure(AuthenticationManagerBuilder authenticationManagerBuilder) throws Exception {
        try {
            AbstractLdapAuthenticationProvider authentProvider = (AbstractLdapAuthenticationProvider) getApplicationContext().getBean(AUTHENTICATION_PROVIDER_BEAN_NAME);
            authentProvider.setAuthoritiesMapper(authoritiesMapper());
            DaoAuthenticationConfigurer<AuthenticationManagerBuilder, LdapUserDetailsService> configurer = authenticationManagerBuilder
                    .authenticationProvider(authentProvider)
                    .userDetailsService(ldapUserDetailsService());
            passwordEncoder.ifPresent(configurer::passwordEncoder);
        } catch (Exception e) {
            throw new AuthenticationServiceException("Could not configure authentication manager ", e);
        }
    }

    @Bean
    public LdapUserDetailsService ldapUserDetailsService() {
        LdapUserDetailsService userDetailsService = new LdapUserDetailsService(userSearch(), ldapAuthoritiesPopulator());
        return userDetailsService;
    }

    @Bean
    public LdapUserSearch userSearch() {
        return new FilterBasedLdapUserSearch(ldapProperties.getUserSearchBase(), ldapProperties.getUserSearchFilter(), contextSource());
    }

    @Bean
    public GrantedAuthoritiesMapper authoritiesMapper() {
        return new GrantAuthoritiesMapperWithEnvTag(ldapProperties.getEnv());
    }

    @Bean
    public LdapContextSource contextSource() {
        LdapContextSource ldapContextSource = new LdapContextSource();
        ldapContextSource.setUrl(ldapProperties.getServerUrl());
        ldapContextSource.setAnonymousReadOnly(true);
        return ldapContextSource;
    }

    @Bean
    public LdapAuthoritiesPopulator ldapAuthoritiesPopulator() {
        DefaultLdapAuthoritiesPopulator ldapAuthoritiesPopulator = new DefaultLdapAuthoritiesPopulator(contextSource(), ldapProperties.getGroupSearchBase());
        ldapAuthoritiesPopulator.setGroupSearchFilter(ldapProperties.getGroupSearchFilter());
        ldapAuthoritiesPopulator.setGroupRoleAttribute(ldapProperties.getGroupRoleAttribute());
        ldapAuthoritiesPopulator.setRolePrefix("");
        ldapAuthoritiesPopulator.setConvertToUpperCase(false);
        return ldapAuthoritiesPopulator;
    }

    @Bean(name = AUTHENTICATION_PROVIDER_BEAN_NAME)
    @Profile(value = { ApplicationProfile.Values.TEST, ApplicationProfile.Values.PROD })
    public AuthenticationProvider activeDirectory() {
        ActiveDirectoryLdapAuthenticationProvider authenticationProvider = new ActiveDirectoryLdapAuthenticationProvider("HRE.LOC", ldapProperties.getServerUrl());
        authenticationProvider.setSearchFilter(ldapProperties.getUserSearchFilter());
        return authenticationProvider;
    }

    @Bean(name = AUTHENTICATION_PROVIDER_BEAN_NAME)
    @Profile(value = { ApplicationProfile.Values.DEFAULT, ApplicationProfile.Values.DEV })
    public AuthenticationProvider defaultAuthenticationProvider() {
        PasswordComparisonAuthenticator authenticator = new PasswordComparisonAuthenticator(contextSource());
        authenticator.setPasswordAttributeName("userPassword");
        passwordEncoder.ifPresent(authenticator::setPasswordEncoder);
        authenticator.setUserSearch(userSearch());
        LdapAuthenticationProvider authenticationProvider = new LdapAuthenticationProvider(authenticator, ldapAuthoritiesPopulator());
        return authenticationProvider;
    }

    /**
     * This bean is optional and not available for some profiles. Password encoder is only required for
     * embedded LDAP; for productive Active Directory it is not used.
     */
    @Bean("passwordEncoder")
    @Profile(value = { ApplicationProfile.Values.DEFAULT, ApplicationProfile.Values.DEV })
    public static PasswordEncoder passwordEncoder() {
        return NoOpPasswordEncoder.getInstance();
    }

    @Autowired(required = false)
    @Qualifier("passwordEncoder")
    private Optional<PasswordEncoder> passwordEncoder;
}
The profile used for the test is default.
Does anyone have any ideas?
Edit: So Martin helped me understand the problem, but I'm too dumb to fix it. The TestUFSession class is registered here via ReflectionTestUtils:
@SpringBootApplication
public class TestApplicationWithH2Database {

    @Autowired
    private UFyWicketWebApplication webApplication;

    @Value("${test.autologin.active}")
    private boolean testAutologinActive;

    public static void main(String[] args) {
        new SpringApplicationBuilder()
                .sources(Application.class)
                .run(args);
    }

    @PostConstruct
    public void PostConstruct() {
        if (testAutologinActive) {
            ReflectionTestUtils.setField(webApplication, "sessionClass", TestUFSession.class);
        }
    }
}
I tried to make signIn() a method in TestUFSession and call it like this:

    TestUFSession testUFSession = (TestUFSession) ReflectionTestUtils.getField(webApplication, "sessionClass");
    testUFSession.signIn();

but got a ClassCastException.
You will need to call signIn("admin", "admin") after instantiating the TestUFSession, not from inside its constructor.
From the stack trace we can see that com.giffing.wicket.spring.boot.starter.configuration.extensions.external.spring.security.SecureWebSession.authenticate(SecureWebSession.java:48) tries to look up the WebSession via its static get() method; because the session being constructed is not yet bound, Wicket creates another new session, whose constructor calls signIn() again, and that leads to the infinite loop. (The ClassCastException in your edit happens because the sessionClass field holds a Class object, not a session instance.)
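A minimal sketch of the idea, assuming the login can be triggered from test code after the session exists (autoLogin is a hypothetical name, not a Wicket API):

    public class TestUFSession extends UFSession {

        public TestUFSession(Request request) {
            super(request); // no signIn() here, so newSession() can complete and bind the session
        }

        // call this after the session has been created, e.g. from the test setup
        public void autoLogin() {
            signIn("admin", "admin");
        }
    }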

Error with StreamIdentifier when using MultiStreamTracker in kinesis

I'm getting an error with StreamIdentifier when trying to use MultiStreamTracker in a Kinesis consumer application.
java.lang.IllegalArgumentException: Unable to deserialize StreamIdentifier from first-stream-name
What is causing this error? I can't find a good example of using the tracker with Kinesis.
The stream name works when using a consumer with a single stream, so I'm not sure what is happening. It looks like the consumer is trying to parse the accountId and streamCreationEpoch, but when I create the identifiers I am using the singleStreamInstance method. Is the stream name required to have these values? They appear to be optional from the code.
This test is part of a complete example on GitHub.
package kinesis.localstack.example;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import com.amazonaws.services.kinesis.producer.KinesisProducer;
import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.localstack.LocalStackContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.cloudwatch.CloudWatchAsyncClient;
import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient;
import software.amazon.awssdk.services.kinesis.KinesisAsyncClient;
import software.amazon.kinesis.common.ConfigsBuilder;
import software.amazon.kinesis.common.InitialPositionInStream;
import software.amazon.kinesis.common.InitialPositionInStreamExtended;
import software.amazon.kinesis.common.KinesisClientUtil;
import software.amazon.kinesis.common.StreamConfig;
import software.amazon.kinesis.common.StreamIdentifier;
import software.amazon.kinesis.coordinator.Scheduler;
import software.amazon.kinesis.exceptions.InvalidStateException;
import software.amazon.kinesis.exceptions.ShutdownException;
import software.amazon.kinesis.lifecycle.events.InitializationInput;
import software.amazon.kinesis.lifecycle.events.LeaseLostInput;
import software.amazon.kinesis.lifecycle.events.ProcessRecordsInput;
import software.amazon.kinesis.lifecycle.events.ShardEndedInput;
import software.amazon.kinesis.lifecycle.events.ShutdownRequestedInput;
import software.amazon.kinesis.processor.FormerStreamsLeasesDeletionStrategy;
import software.amazon.kinesis.processor.FormerStreamsLeasesDeletionStrategy.NoLeaseDeletionStrategy;
import software.amazon.kinesis.processor.MultiStreamTracker;
import software.amazon.kinesis.processor.ShardRecordProcessor;
import software.amazon.kinesis.processor.ShardRecordProcessorFactory;
import software.amazon.kinesis.retrieval.KinesisClientRecord;
import software.amazon.kinesis.retrieval.polling.PollingConfig;
import static java.util.stream.Collectors.toList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.testcontainers.containers.localstack.LocalStackContainer.Service.CLOUDWATCH;
import static org.testcontainers.containers.localstack.LocalStackContainer.Service.DYNAMODB;
import static org.testcontainers.containers.localstack.LocalStackContainer.Service.KINESIS;
import static software.amazon.kinesis.common.InitialPositionInStream.TRIM_HORIZON;
import static software.amazon.kinesis.common.StreamIdentifier.singleStreamInstance;
@Testcontainers
public class KinesisMultiStreamTest {

    static class TestProcessorFactory implements ShardRecordProcessorFactory {

        private final TestKinesisRecordService service;

        public TestProcessorFactory(TestKinesisRecordService service) {
            this.service = service;
        }

        @Override
        public ShardRecordProcessor shardRecordProcessor() {
            throw new UnsupportedOperationException("must have streamIdentifier");
        }

        public ShardRecordProcessor shardRecordProcessor(StreamIdentifier streamIdentifier) {
            return new TestRecordProcessor(service, streamIdentifier);
        }
    }

    static class TestRecordProcessor implements ShardRecordProcessor {

        public final TestKinesisRecordService service;
        public final StreamIdentifier streamIdentifier;

        public TestRecordProcessor(TestKinesisRecordService service, StreamIdentifier streamIdentifier) {
            this.service = service;
            this.streamIdentifier = streamIdentifier;
        }

        @Override
        public void initialize(InitializationInput initializationInput) {
        }

        @Override
        public void processRecords(ProcessRecordsInput processRecordsInput) {
            service.addRecord(streamIdentifier, processRecordsInput);
        }

        @Override
        public void leaseLost(LeaseLostInput leaseLostInput) {
        }

        @Override
        public void shardEnded(ShardEndedInput shardEndedInput) {
            try {
                shardEndedInput.checkpointer().checkpoint();
            } catch (Exception e) {
                throw new IllegalStateException(e);
            }
        }

        @Override
        public void shutdownRequested(ShutdownRequestedInput shutdownRequestedInput) {
        }
    }

    static class TestKinesisRecordService {

        private List<ProcessRecordsInput> firstStreamRecords = Collections.synchronizedList(new ArrayList<>());
        private List<ProcessRecordsInput> secondStreamRecords = Collections.synchronizedList(new ArrayList<>());

        public void addRecord(StreamIdentifier streamIdentifier, ProcessRecordsInput processRecordsInput) {
            if (streamIdentifier.streamName().contains(firstStreamName)) {
                firstStreamRecords.add(processRecordsInput);
            } else if (streamIdentifier.streamName().contains(secondStreamName)) {
                secondStreamRecords.add(processRecordsInput);
            } else {
                throw new IllegalStateException("no list for stream " + streamIdentifier);
            }
        }

        public List<ProcessRecordsInput> getFirstStreamRecords() {
            return Collections.unmodifiableList(firstStreamRecords);
        }

        public List<ProcessRecordsInput> getSecondStreamRecords() {
            return Collections.unmodifiableList(secondStreamRecords);
        }
    }

    public static final String firstStreamName = "first-stream-name";
    public static final String secondStreamName = "second-stream-name";
    public static final String partitionKey = "partition-key";

    DockerImageName localstackImage = DockerImageName.parse("localstack/localstack:latest");

    @Container
    public LocalStackContainer localstack = new LocalStackContainer(localstackImage)
            .withServices(KINESIS, CLOUDWATCH)
            .withEnv("KINESIS_INITIALIZE_STREAMS", firstStreamName + ":1," + secondStreamName + ":1");

    public Scheduler scheduler;
    public TestKinesisRecordService service = new TestKinesisRecordService();
    public KinesisProducer producer;

    @BeforeEach
    void setup() {
        KinesisAsyncClient kinesisClient = KinesisClientUtil.createKinesisAsyncClient(
                KinesisAsyncClient.builder().endpointOverride(localstack.getEndpointOverride(KINESIS)).region(Region.of(localstack.getRegion()))
        );
        DynamoDbAsyncClient dynamoClient = DynamoDbAsyncClient.builder().region(Region.of(localstack.getRegion())).endpointOverride(localstack.getEndpointOverride(DYNAMODB)).build();
        CloudWatchAsyncClient cloudWatchClient = CloudWatchAsyncClient.builder().region(Region.of(localstack.getRegion())).endpointOverride(localstack.getEndpointOverride(CLOUDWATCH)).build();

        MultiStreamTracker tracker = new MultiStreamTracker() {
            private List<StreamConfig> configs = List.of(
                    new StreamConfig(singleStreamInstance(firstStreamName), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)),
                    new StreamConfig(singleStreamInstance(secondStreamName), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)));

            @Override
            public List<StreamConfig> streamConfigList() {
                return configs;
            }

            @Override
            public FormerStreamsLeasesDeletionStrategy formerStreamsLeasesDeletionStrategy() {
                return new NoLeaseDeletionStrategy();
            }
        };

        ConfigsBuilder configsBuilder = new ConfigsBuilder(tracker, "KinesisPratTest", kinesisClient, dynamoClient, cloudWatchClient, UUID.randomUUID().toString(), new TestProcessorFactory(service));

        scheduler = new Scheduler(
                configsBuilder.checkpointConfig(),
                configsBuilder.coordinatorConfig(),
                configsBuilder.leaseManagementConfig(),
                configsBuilder.lifecycleConfig(),
                configsBuilder.metricsConfig(),
                configsBuilder.processorConfig().callProcessRecordsEvenForEmptyRecordList(false),
                configsBuilder.retrievalConfig()
        );

        new Thread(scheduler).start();
        producer = producer();
    }

    @AfterEach
    public void teardown() throws ExecutionException, InterruptedException, TimeoutException {
        producer.destroy();
        Future<Boolean> gracefulShutdownFuture = scheduler.startGracefulShutdown();
        gracefulShutdownFuture.get(60, TimeUnit.SECONDS);
    }

    public KinesisProducer producer() {
        var configuration = new KinesisProducerConfiguration()
                .setVerifyCertificate(false)
                .setCredentialsProvider(localstack.getDefaultCredentialsProvider())
                .setMetricsCredentialsProvider(localstack.getDefaultCredentialsProvider())
                .setRegion(localstack.getRegion())
                .setCloudwatchEndpoint(localstack.getEndpointOverride(CLOUDWATCH).getHost())
                .setCloudwatchPort(localstack.getEndpointOverride(CLOUDWATCH).getPort())
                .setKinesisEndpoint(localstack.getEndpointOverride(KINESIS).getHost())
                .setKinesisPort(localstack.getEndpointOverride(KINESIS).getPort());
        return new KinesisProducer(configuration);
    }

    @Test
    void testFirstStream() {
        String expected = "Hello";
        producer.addUserRecord(firstStreamName, partitionKey, ByteBuffer.wrap(expected.getBytes(StandardCharsets.UTF_8)));
        var result = await().timeout(600, TimeUnit.SECONDS)
                .until(() -> service.getFirstStreamRecords().stream()
                        .flatMap(r -> r.records().stream())
                        .map(KinesisClientRecord::data)
                        .map(r -> StandardCharsets.UTF_8.decode(r).toString())
                        .collect(toList()), records -> records.size() > 0);
        assertThat(result).anyMatch(r -> r.equals(expected));
    }

    @Test
    void testSecondStream() {
        String expected = "Hello";
        producer.addUserRecord(secondStreamName, partitionKey, ByteBuffer.wrap(expected.getBytes(StandardCharsets.UTF_8)));
        var result = await().timeout(600, TimeUnit.SECONDS)
                .until(() -> service.getSecondStreamRecords().stream()
                        .flatMap(r -> r.records().stream())
                        .map(KinesisClientRecord::data)
                        .map(r -> StandardCharsets.UTF_8.decode(r).toString())
                        .collect(toList()), records -> records.size() > 0);
        assertThat(result).anyMatch(r -> r.equals(expected));
    }
}
Here is the error I am getting.
[Thread-9] ERROR software.amazon.kinesis.coordinator.Scheduler - Worker.run caught exception, sleeping for 1000 milli seconds!
java.lang.IllegalArgumentException: Unable to deserialize StreamIdentifier from first-stream-name
at software.amazon.kinesis.common.StreamIdentifier.multiStreamInstance(StreamIdentifier.java:75)
at software.amazon.kinesis.coordinator.Scheduler.getStreamIdentifier(Scheduler.java:1001)
at software.amazon.kinesis.coordinator.Scheduler.buildConsumer(Scheduler.java:917)
at software.amazon.kinesis.coordinator.Scheduler.createOrGetShardConsumer(Scheduler.java:899)
at software.amazon.kinesis.coordinator.Scheduler.runProcessLoop(Scheduler.java:419)
at software.amazon.kinesis.coordinator.Scheduler.run(Scheduler.java:330)
at java.base/java.lang.Thread.run(Thread.java:829)
According to the documentation:
The serialized stream identifier should be of the following format: account-id:StreamName:streamCreationTimestamp
So your code should look like this:

    private List<StreamConfig> configs = List.of(
            new StreamConfig(multiStreamInstance("111111111:multiStreamTest-1:12345"), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)),
            new StreamConfig(multiStreamInstance("111111111:multiStreamTest-2:12389"), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)));

Note: this also changes the leaseKey format to account-id:StreamName:streamCreationTimestamp:ShardId.
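As a hedged sketch of composing that serialized form from its parts (the account ID and creation epoch below are placeholders; use the real values for your streams, for example as reported by a describe-stream call):

    // Hypothetical values: substitute your AWS account ID and the stream's creation epoch
    String accountId = "111111111";
    long creationEpoch = 12345L;
    String serialized = String.join(":", accountId, "multiStreamTest-1", Long.toString(creationEpoch));
    // multiStreamInstance expects exactly the account-id:StreamName:creationEpoch format
    StreamIdentifier id = StreamIdentifier.multiStreamInstance(serialized);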

Spring-Batch: Testing custom itemReader

I am trying to test my custom itemReader:
@Bean
@StepScope
MyMultiLineItemReader itemReader(@Value("#{stepExecutionContext['fileName']}") String filename) throws MalformedURLException {
    MyMultiLineItemReader itemReader = new MyMultiLineItemReader();
    itemReader.setDelegate(myFlatFileItemReader(filename));
    return itemReader;
}

@Bean
@StepScope
public FlatFileItemReader<String> myFlatFileItemReader(@Value("#{stepExecutionContext['fileName']}") String filename) throws MalformedURLException {
    return new FlatFileItemReaderBuilder<String>()
            .name("myFlatFileItemReader")
            .resource(new UrlResource(filename))
            .lineMapper(new PassThroughLineMapper())
            .build();
}
My test class looks like this:
@Test
public void givenMockedStep_whenReaderCalled_thenSuccess() throws Exception {
    // given
    JobExecution jobExecution = new JobExecution(5L);
    ExecutionContext ctx = new ExecutionContext();
    ctx.put("fileName", "src/main/resources/data/input.txt");
    jobExecution.setExecutionContext(ctx);
    JobSynchronizationManager.register(jobExecution);
    StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(ctx);
    // when
    StepScopeTestUtils.doInStepScope(stepExecution, () -> {
        ...
    });
}
When I run the test case, the process fails because the fileName parameter is null.
I am looking for the right way to test this itemReader.
Thanks
You don't need to create a JobExecution and register it with the JobSynchronizationManager to test a step-scoped component. Mocking a step execution and using it in StepScopeTestUtils.doInStepScope is enough. Here is a complete example:
import java.util.ArrayList;
import java.util.List;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ExecutionContext;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.builder.FlatFileItemReaderBuilder;
import org.springframework.batch.item.file.mapping.PassThroughLineMapper;
import org.springframework.batch.test.MetaDataInstanceFactory;
import org.springframework.batch.test.StepScopeTestUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit4.SpringRunner;
@RunWith(SpringRunner.class)
@ContextConfiguration(classes = StepScopedComponentTest.MyConfiguration.class)
public class StepScopedComponentTest {

    @Autowired
    private FlatFileItemReader<String> reader;

    @Test
    public void givenMockedStep_whenReaderCalled_thenSuccess() throws Exception {
        // given
        ExecutionContext ctx = new ExecutionContext();
        ctx.put("fileName", "data/input.txt");
        StepExecution stepExecution = MetaDataInstanceFactory.createStepExecution(ctx);

        // when
        List<String> items = StepScopeTestUtils.doInStepScope(stepExecution, () -> {
            List<String> result = new ArrayList<>();
            String item;
            reader.open(stepExecution.getExecutionContext());
            while ((item = reader.read()) != null) {
                result.add(item);
            }
            reader.close();
            return result;
        });

        // then
        Assert.assertEquals(2, items.size());
        Assert.assertEquals("foo", items.get(0));
        Assert.assertEquals("bar", items.get(1));
    }

    @Configuration
    @EnableBatchProcessing
    static class MyConfiguration {

        @Bean
        @StepScope
        public FlatFileItemReader<String> myFlatFileItemReader(@Value("#{stepExecutionContext['fileName']}") String filename) {
            return new FlatFileItemReaderBuilder<String>()
                    .name("myFlatFileItemReader")
                    .resource(new ClassPathResource(filename))
                    .lineMapper(new PassThroughLineMapper())
                    .build();
        }
    }
}
This test passes assuming the classpath resource data/input.txt contains the two lines foo and bar.
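For completeness, that resource would live at src/test/resources/data/input.txt (path assumed for a standard Maven/Gradle layout) with the contents:

    foo
    bar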

Spring Integration Java DSL SFTP how to get remote SFTP server information in handler

I am trying to download files from multiple SFTP servers and then process those files. But I cannot get information about the remote SFTP server (such as the IP address and remote directory) in the MessageHandler, i.e. which server the file being handled came from. Instead, the payload only contains the information of the downloaded file at its local path. Here is the source code, which I based on this guide:
How to dynamically define file filter pattern for Spring Integration SFTP Inbound Adapter?
SFTIntegration.java
import com.jcraft.jsch.ChannelSftp.LsEntry;
import java.io.File;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Consumer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.NullChannel;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.dsl.Pollers;
import org.springframework.integration.dsl.SourcePollingChannelAdapterSpec;
import org.springframework.integration.expression.FunctionExpression;
import org.springframework.integration.file.remote.aop.RotatingServerAdvice;
import org.springframework.integration.file.remote.session.DelegatingSessionFactory;
import org.springframework.integration.file.remote.session.SessionFactory;
import org.springframework.integration.scheduling.PollerMetadata;
import org.springframework.integration.sftp.dsl.Sftp;
import org.springframework.integration.sftp.dsl.SftpInboundChannelAdapterSpec;
import org.springframework.integration.sftp.session.DefaultSftpSessionFactory;
import org.springframework.messaging.MessageChannel;
import org.springframework.stereotype.Component;
/**
* flow.
*/
@Configuration
@Component
public class SFTIntegration {

    public static final String TIMEZONE_UTC = "UTC";
    public static final String TIMESTAMP_FORMAT_OF_FILES = "yyyyMMddHHmmssSSS";
    public static final String TEMPORARY_FILE_SUFFIX = ".part";
    public static final int POLLER_FIXED_PERIOD_DELAY = 60000;
    public static final int MAX_MESSAGES_PER_POLL = 100;

    private static final Logger LOG = LoggerFactory.getLogger(SFTIntegration.class);
    private static final String CHANNEL_INTERMEDIATE_STAGE = "intermediateChannel";

    @Autowired
    private ImportHandler importHandler;

    /** database access repository */
    private final SFTPServerConfigRepo SFTPServerConfigRepo;

    @Value("${sftp.local.directory.download:${java.io.tmpdir}/localDownload}")
    private String localTemporaryPath;

    public SFTIntegration(final SFTPServerConfigRepo SFTPServerConfigRepo) {
        this.SFTPServerConfigRepo = SFTPServerConfigRepo;
    }

    /**
     * The default poller with 5s, 100 messages, RotatingServerAdvice and transaction.
     *
     * @return default poller.
     */
    @Bean(name = PollerMetadata.DEFAULT_POLLER)
    public PollerMetadata poller() {
        return Pollers
                .fixedDelay(POLLER_FIXED_PERIOD_DELAY)
                .advice(advice())
                .maxMessagesPerPoll(MAX_MESSAGES_PER_POLL)
                .transactional()
                .get();
    }

    /**
     * The direct channel for the flow.
     *
     * @return MessageChannel
     */
    @Bean
    public MessageChannel stockIntermediateChannel() {
        return new DirectChannel();
    }

    /**
     * Get the files from a remote directory. Add a timestamp to the filename
     * and write them to a local temporary folder.
     *
     * @return IntegrationFlow
     */
    @Bean
    public IntegrationFlow collectionInboundFlowFromSFTPServer() {
        // Source definition
        final SftpInboundChannelAdapterSpec sourceSpec = Sftp.inboundAdapter(delegatingSFtpSessionFactory())
                .preserveTimestamp(true)
                .patternFilter("*.*")
                .deleteRemoteFiles(true)
                .maxFetchSize(MAX_MESSAGES_PER_POLL)
                .remoteDirectory("/")
                .localDirectory(new File(localTemporaryPath))
                .temporaryFileSuffix(TEMPORARY_FILE_SUFFIX)
                .localFilenameExpression(new FunctionExpression<String>(s -> {
                    final int fileTypeSepPos = s.lastIndexOf('.');
                    return DateTimeFormatter
                            .ofPattern(TIMESTAMP_FORMAT_OF_FILES)
                            .withZone(ZoneId.of(TIMEZONE_UTC))
                            .format(Instant.now())
                            + "_"
                            + s.substring(0, fileTypeSepPos)
                            + s.substring(fileTypeSepPos);
                }));

        // Poller definition
        final Consumer<SourcePollingChannelAdapterSpec> collectionInboundPoller = endpointConfigurer -> endpointConfigurer
                .id("collectionInboundPoller")
                .autoStartup(true)
                .poller(poller());

        return IntegrationFlows
                .from(sourceSpec, collectionInboundPoller)
                .transform(File.class, p -> {
                    // log step
                    LOG.info("flow=collectionInboundFlowFromSFTPServer, message=incoming file: " + p);
                    return p;
                })
                .channel(CHANNEL_INTERMEDIATE_STAGE)
                .get();
    }

    @Bean
    public IntegrationFlow collectionIntermediateStageChannel() {
        return IntegrationFlows
                .from(CHANNEL_INTERMEDIATE_STAGE)
                .handle(importHandler)
                .channel(new NullChannel())
                .get();
    }

    public DefaultSftpSessionFactory createNewSftpSessionFactory(final SFTPServerConfig pc) {
        final DefaultSftpSessionFactory factory = new DefaultSftpSessionFactory(false);
        factory.setHost(pc.getServerIp());
        factory.setPort(pc.getPort());
        factory.setUser(pc.getUsername());
        factory.setPassword(pc.getPassword());
        factory.setAllowUnknownKeys(true);
        return factory;
    }

    @Bean
    public DelegatingSessionFactory<LsEntry> delegatingSFtpSessionFactory() {
        final List<SFTPServerConfig> partnerConnections = SFTPServerConfigRepo.findAll();
        if (partnerConnections.isEmpty()) {
            return null;
        }
        final Map<Object, SessionFactory<LsEntry>> factories = new LinkedHashMap<>(10);
        for (SFTPServerConfig pc : partnerConnections) {
            // create a factory for every key containing server type, url and port
            if (factories.get(pc.getKey()) == null) {
                factories.put(pc.getKey(), createNewSftpSessionFactory(pc));
            }
        }
        // use the first SF as the default
        return new DelegatingSessionFactory<>(factories, factories.values().iterator().next());
    }

    @Bean
    public RotatingServerAdvice advice() {
        final List<SFTPServerConfig> sftpConnections = SFTPServerConfigRepo.findAll();
        final List<RotatingServerAdvice.KeyDirectory> keyDirectories = new ArrayList<>();
        for (SFTPServerConfig pc : sftpConnections) {
            keyDirectories.add(new RotatingServerAdvice.KeyDirectory(pc.getKey(), pc.getServerPath()));
        }
        return new RotatingServerAdvice(delegatingSFtpSessionFactory(), keyDirectories, true);
    }
}
ImportHandler.java
import org.springframework.messaging.Message;
import org.springframework.stereotype.Service;
@Service
public class ImportHandler {

    public void handle(Message<?> message) {
        System.out.println("Hello " + message);
        System.out.println(message.getPayload());
        System.out.println(message.getHeaders());
        // How can I get the remote server's IP address and remote directory here, i.e. where the file came from?
    }
}
If you have any ideas, please let me know. Thank you so much!
It's not currently supported; please open a new feature request.

Spring TCP support High CPU utilisation

I'm using Spring TCP support to create a TCP server.
I noticed that the CPU runs at 91% after I send only one request.
This is my code:
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.IntegrationComponentScan;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.ip.tcp.TcpInboundGateway;
import org.springframework.integration.ip.tcp.connection.AbstractServerConnectionFactory;
import org.springframework.integration.ip.tcp.connection.MessageConvertingTcpMessageMapper;
import org.springframework.integration.ip.tcp.connection.TcpNetServerConnectionFactory;
import org.springframework.integration.ip.tcp.serializer.AbstractByteArraySerializer;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.converter.ByteArrayMessageConverter;
import org.springframework.messaging.converter.MessageConverter;
@Configuration
@IntegrationComponentScan
public class TCPServerConfig {

    @Value("${tcp.listener.port}")
    private int port;

    @Bean
    public MessageConvertingTcpMessageMapper mapper(final MessageConverter messageConverter) {
        return new MessageConvertingTcpMessageMapper(messageConverter);
    }

    @Bean
    public MessageConverter messageConverter() {
        return new ByteArrayMessageConverter();
    }

    @Bean
    public TcpInboundGateway tcpInGate(final AbstractServerConnectionFactory connectionFactory) {
        final TcpInboundGateway inGate = new TcpInboundGateway();
        inGate.setConnectionFactory(connectionFactory);
        inGate.setRequestChannel(fromTcp());
        return inGate;
    }

    @Bean
    public MessageChannel fromTcp() {
        return new DirectChannel();
    }

    @Bean
    public AbstractServerConnectionFactory serverCF(
            final AbstractByteArraySerializer byteArraySerializer) {
        final TcpNetServerConnectionFactory connectionFactory =
                new TcpNetServerConnectionFactory(this.port);
        connectionFactory.setDeserializer(byteArraySerializer);
        connectionFactory.setSerializer(byteArraySerializer);
        return connectionFactory;
    }

    @Bean
    public AbstractByteArraySerializer byteArraySerializer() {
        return new ByteArrayCustomeSerializer();
    }
}
And this is how I convert and log the messages:
import static org.apache.commons.lang3.StringUtils.EMPTY;
import static org.apache.commons.lang3.StringUtils.isBlank;
import java.nio.charset.Charset;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.ArrayUtils;
import org.springframework.integration.annotation.MessageEndpoint;
import org.springframework.integration.annotation.ServiceActivator;
import org.springframework.integration.annotation.Transformer;
@Slf4j
@MessageEndpoint
class TCPMessageProcessor {

    @Transformer(inputChannel = "fromTcp", outputChannel = "toProcess")
    public String convertInput(final byte[] bytes) {
        if (ArrayUtils.isEmpty(bytes)) {
            return EMPTY;
        }
        String inboundMessage = new String(bytes, Charset.forName("ASCII"));
        log.info("Converted the message to string: '{}'. Handing it to the processor", inboundMessage);
        return inboundMessage;
    }

    @ServiceActivator(inputChannel = "toProcess")
    public String process(final String message) {
        if (isBlank(message)) {
            return EMPTY;
        }
        log.info("Started processing message '{}'", message);
        return "some response";
    }
}
I added logging and found that once the application receives the first request, it serves it correctly, but then calls ByteArrayCustomeSerializer#doDeserialize multiple times every second with an input stream that yields an empty byte array. Can anyone provide insight into why this is happening and how to avoid this behavior?
This is from org.springframework.integration.ip.tcp.connection.TcpNetConnection:
"threadId":"pool-1-thread-3", "message":"Message received GenericMessage [payload=byte[0], headers={ip_tcp_remotePort=*****, ip_connectionId=localhost:*****:*****:d313c398-fc80-48dd-b9c1-f447c9172f09, ip_localInetAddress=/127.0.0.1, ip_address=127.0.0.1, id=1fd69791-c300-787b-c5cc-281a360ae8f4, ip_hostname=localhost, timestamp=1521623822108}]"
It's most likely a bug in your ByteArrayCustomeSerializer; edit the question to show the code.
Most likely you are not detecting the end of stream.
If the stream closes between messages, you need to throw a SoftEndOfStreamException to signal that the socket has closed in an "expected" fashion. If the stream closes during message deserialization, throw some other exception.
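As an illustration, here is a minimal sketch of that end-of-stream handling in a custom deserializer; the framing logic is a placeholder for whatever message delimiting your protocol actually uses:

    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import org.springframework.integration.ip.tcp.serializer.AbstractByteArraySerializer;
    import org.springframework.integration.ip.tcp.serializer.SoftEndOfStreamException;

    public class ByteArrayCustomeSerializer extends AbstractByteArraySerializer {

        @Override
        public byte[] deserialize(InputStream inputStream) throws IOException {
            int firstByte = inputStream.read();
            if (firstByte < 0) {
                // socket closed between messages: signal an "expected" close
                // instead of returning an empty array over and over
                throw new SoftEndOfStreamException("stream closed between messages");
            }
            // ... read the remainder of the frame here; if the stream ends
            // mid-message, throw an IOException instead
            return new byte[] { (byte) firstByte }; // placeholder framing
        }

        @Override
        public void serialize(byte[] bytes, OutputStream outputStream) throws IOException {
            outputStream.write(bytes);
        }
    }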
