spring-kafka how to run consumer in background - java

I found a spring-kafka tutorial where a producer and a consumer are created. However, the program is only run through a test case, and when the test case ends the consumer stops.
How can I keep the consumer running in the background so that I can send it some test messages from my terminal command line?
SpringKafkaExampleApplication.java
package com.howtoprogram.kafka;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
@SpringBootApplication
public class SpringKafkaExampleApplication {
public static void main(String[] args) {
SpringApplication.run(SpringKafkaExampleApplication.class,
args);
}
}
KafkaProducerConfig.java
package com.howtoprogram.kafka;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
@Configuration
@EnableKafka
public class KafkaProducerConfig {
@Bean
public ProducerFactory<String, String> producerFactory() {
return new DefaultKafkaProducerFactory<>(producerConfigs());
}
@Bean
public Map<String, Object> producerConfigs() {
Map<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG,
"localhost:9092");
props.put(ProducerConfig.RETRIES_CONFIG, 0);
props.put(ProducerConfig.BATCH_SIZE_CONFIG, 16384);
props.put(ProducerConfig.LINGER_MS_CONFIG, 1);
props.put(ProducerConfig.BUFFER_MEMORY_CONFIG, 33554432);
props.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
StringSerializer.class);
props.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
StringSerializer.class);
return props;
}
@Bean
public KafkaTemplate<String, String> kafkaTemplate() {
return new KafkaTemplate<String, String>(producerFactory());
}
}
KafkaConsumerConfig.java
package com.howtoprogram.kafka;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
@Configuration
@EnableKafka
public class KafkaConsumerConfig {
@Bean
KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
factory.setConcurrency(3);
factory.getContainerProperties().setPollTimeout(3000);
return factory;
}
@Bean
public ConsumerFactory<String, String> consumerFactory() {
return new DefaultKafkaConsumerFactory<>(consumerConfigs());
}
@Bean
public Map<String, Object> consumerConfigs() {
Map<String, Object> propsMap = new HashMap<>();
propsMap.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
propsMap.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, false);
propsMap.put(ConsumerConfig.AUTO_COMMIT_INTERVAL_MS_CONFIG, "100");
propsMap.put(ConsumerConfig.SESSION_TIMEOUT_MS_CONFIG, "15000");
propsMap.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
propsMap.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
propsMap.put(ConsumerConfig.GROUP_ID_CONFIG, "group1");
propsMap.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "earliest");
return propsMap;
}
@Bean
public Listener listener() {
return new Listener();
}
}
Listener.java
package com.howtoprogram.kafka;
import java.util.concurrent.CountDownLatch;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.springframework.kafka.annotation.KafkaListener;
public class Listener {
public final CountDownLatch countDownLatch1 = new CountDownLatch(1);
@KafkaListener(id = "foo", topics = "topic1", group = "group1")
public void listen(ConsumerRecord<?, ?> record) {
System.out.println(record);
countDownLatch1.countDown();
}
}
SpringKafkaExampleApplicationTests.java
package com.howtoprogram.kafka;
import static org.assertj.core.api.Assertions.assertThat;
import java.util.concurrent.TimeUnit;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.SendResult;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.test.context.junit4.SpringRunner;
import org.springframework.util.concurrent.ListenableFuture;
import org.springframework.util.concurrent.ListenableFutureCallback;
@RunWith(SpringRunner.class)
@SpringBootTest
public class SpringKafkaExampleApplicationTests {
@Autowired
private KafkaTemplate<String, String> kafkaTemplate;
@Autowired
private Listener listener;
@Test
public void contextLoads() throws InterruptedException {
ListenableFuture<SendResult<String, String>> future = kafkaTemplate.send("topic1", "ABC");
future.addCallback(new ListenableFutureCallback<SendResult<String, String>>() {
@Override
public void onSuccess(SendResult<String, String> result) {
System.out.println("success");
}
@Override
public void onFailure(Throwable ex) {
System.out.println("failed");
}
});
System.out.println(Thread.currentThread().getId());
assertThat(this.listener.countDownLatch1.await(60, TimeUnit.SECONDS)).isTrue();
}
}
Please help!

We run our Kafka consumer in a while(true) loop inside an @Scheduled method on a Spring bean: https://docs.spring.io/spring/docs/current/spring-framework-reference/html/scheduling.html
This way you can also delay the consumption of messages while the rest of your application is initialising.
@Scheduled(initialDelay = 5000L, fixedDelay = 10000L)
public void process() {
    while (keepRunning) {
        try {
            ConsumerRecords<String, String> records = consumer.poll(500);
            // do processing here
        } catch (Exception e) {
            // log the failure and decide whether to keep running
        }
    }
}
The fixedDelay is a bit strange: a value has to be supplied, but since the method never returns it is effectively ignored.
It might be tempting to start the consumer in a @PostConstruct method instead, but that way Spring keeps thinking the bean is still in its init phase (so don't do this, as Artem Bilan mentions below).
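For reference, the keepRunning flag above can simply be a volatile field on the same bean, flipped by a shutdown callback. A minimal sketch (the field and class names here are illustrative, not part of the original answer):
@Component
public class KafkaPollingBean {

    private volatile boolean keepRunning = true;

    @PreDestroy // javax.annotation (or jakarta.annotation on newer Spring versions)
    public void shutdown() {
        // Flipping the flag lets the while-loop in process() finish its current iteration and exit.
        keepRunning = false;
    }
}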

Add this code to your main, capturing the context returned by SpringApplication.run():
ConfigurableApplicationContext ctx = SpringApplication.run(SpringKafkaExampleApplication.class, args);
System.out.println("Hit 'Enter' to terminate");
System.in.read();
ctx.close();
System.exit(0);
And your program won't exit until you press Enter in the console.
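If you'd rather not depend on console input, a common alternative is to simply block the main thread; the @KafkaListener containers keep consuming on their own threads. A minimal sketch (assuming the same SpringKafkaExampleApplication class):
public static void main(String[] args) throws InterruptedException {
    SpringApplication.run(SpringKafkaExampleApplication.class, args);
    // Block forever; Ctrl-C / SIGTERM still runs Spring's shutdown hook and closes the context.
    new java.util.concurrent.CountDownLatch(1).await();
}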

Related

Liferay Kafka integration

I couldn't find any online reference for integrating Kafka with Liferay.
These are the requirements that need to be implemented:
Push a message to a Kafka topic
Poll and receive messages from a Kafka topic
I tried the code below, but I am unable to receive a message after pushing one from the Kafka console producer.
Dependencies :
compileInclude "org.springframework.kafka:spring-kafka:2.9.2"
compileInclude "org.apache.kafka:kafka-streams:3.3.1"
Code:
import org.apache.kafka.common.serialization.Serdes;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.annotation.EnableKafkaStreams;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import java.util.HashMap;
import java.util.Map;
import static org.apache.kafka.streams.StreamsConfig.APPLICATION_ID_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.BOOTSTRAP_SERVERS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG;
import static org.apache.kafka.streams.StreamsConfig.DEFAULT_VALUE_SERDE_CLASS_CONFIG;
@Configuration
@EnableKafka
@EnableKafkaStreams
public class KafkaConfig {
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
KafkaStreamsConfiguration kStreamsConfig() {
Map<String, Object> props = new HashMap<>();
props.put(APPLICATION_ID_CONFIG, "streams-app");
props.put(BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
props.put(DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
props.put(DEFAULT_VALUE_SERDE_CLASS_CONFIG, Serdes.String().getClass().getName());
return new KafkaStreamsConfiguration(props);
}
}
import com.liferay.portal.kernel.log.Log;
import com.liferay.portal.kernel.log.LogFactoryUtil;
import org.osgi.service.component.annotations.Component;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.scheduling.annotation.Async;
import org.springframework.transaction.annotation.Transactional;
@Component(immediate = true)
public class KafkaMessageReceiver {
public static Log _log = LogFactoryUtil.getLog(KafkaMessageReceiver.class);
@Async
@KafkaListener(
topics = "liferay-topic",
concurrency = "2"
)
@Transactional
public void handleMessage(String payload) {
_log.info(payload);
}
}
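For context, a plain @KafkaListener only fires when a listener container factory and consumer factory are registered, and the configuration above only sets up Kafka Streams. A minimal sketch of the missing consumer-side beans (the broker address and group id here are assumptions):
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
@Configuration
@EnableKafka
public class KafkaListenerBeans {

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> props = new HashMap<>();
        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, "localhost:9092");
        props.put(ConsumerConfig.GROUP_ID_CONFIG, "liferay-group");
        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(props);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        // The bean name "kafkaListenerContainerFactory" is what @KafkaListener looks up by default.
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }
}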

How can I send a video byte by byte from the producer, collect each byte at the consumer, and convert them back into an array?

This is the producer config.
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.core.DefaultKafkaProducerFactory;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.core.ProducerFactory;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class KafkaProducerConfig {
@Value("${spring.kafka.bootstrap-servers}")
private String bootStrapServers;
public Map<String, Object> producerConfig() {
HashMap<String, Object> props = new HashMap<>();
props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServers);
props.put("key.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put("value.serializer", "org.apache.kafka.common.serialization.StringSerializer");
props.put(ProducerConfig.MAX_REQUEST_SIZE_CONFIG, "20971520");
return props;
}
@Bean
public ProducerFactory<String, String> producerFactory() {
return new DefaultKafkaProducerFactory<String, String>(producerConfig());
}
@Bean
public KafkaTemplate<String, String> kafkaTemplate(ProducerFactory<String, String> producerFactory) {
return new KafkaTemplate<String, String>(producerFactory);
}
}
This is the consumer config.
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.config.KafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import java.util.HashMap;
import java.util.Map;
@Configuration
public class KafkaConsumerConfig {
@Value("${spring.kafka.bootstrap-servers}")
private String bootStrapServers;
public Map<String, Object> consumerConfig() {
HashMap<String, Object> props = new HashMap<>();
props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootStrapServers);
props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
props.put("value.deserializer", "org.apache.kafka.common.serialization.ByteArrayDeserializer");
return props;
}
@Bean
public ConsumerFactory<String, String> consumerFactory() {
return new DefaultKafkaConsumerFactory<String, String>(consumerConfig());
}
@Bean
public KafkaListenerContainerFactory<ConcurrentMessageListenerContainer<String, String>> factory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
return factory;
}
}
This is the sender that uses the KafkaTemplate.
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
@Service
public class KafkaSender {
@Autowired
private KafkaTemplate<String, String> kafkaTemplate;
String kafkaTopic = "testTopic";
public void send() {
byte[] array = null;
try {
array = Files.readAllBytes(Paths.get("Test.webm"));
String kafkaTopic = "testTopic";
String encoded = java.util.Base64.getEncoder().encodeToString(array);
kafkaTemplate.send(kafkaTopic, encoded);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
This is the listener.
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
@Component
public class Listener {
@KafkaListener(topics = "testTopic", groupId = "foo")
public void listenGroupFoo(String message){
byte[] decoded = java.util.Base64.getDecoder().decode(message);
try {
FileOutputStream out;
out = new FileOutputStream("video1.mp4");
out.write(decoded);
out.close();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
}
Currently I am sending the whole byte array, but because of Kafka's record size limit I cannot send larger files, e.g. 1 GB.
Please let me know how I can send a video byte by byte from the producer, collect the bytes at the consumer, and convert them all back into an array.
send byte by byte of a video from producer
Literally? Then don't use StringSerializer. You'd loop over the array and use ByteArraySerializer:
byte[] array = Files.readAllBytes(Paths.get("Test.webm"));
String kafkaTopic = "testTopic";
for (byte b : array) {
kafkaTemplate.send(kafkaTopic, new byte[] { b });
}
But
You can only ever produce one file into the same topic at a time - multiple producers will have mixed file bytes
You must modify Kafka producer properties to use transactions, no retries, and only one in flight request max. Otherwise, bytes get dropped, duplicated, or reordered.
Your topic can only have one partition. Otherwise, bytes get reordered
Now, you could chunk the file into larger byte slices, but then re-ordering matters even more.
As far as the consumer goes, there's no straightforward way to know which byte is the end of the file/stream, so you'd need some if statement in the listener/poll loop to detect it.
Ultimately, Kafka is not designed for file transfers or A/V streaming, and the largest reasonable record size is only a few MB.
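If you do go the chunking route mentioned above, a rough sketch of the producer side might look like this (the topic name, chunk size, and index-as-key convention are assumptions, and the injected KafkaTemplate would need ByteArraySerializer configured for values):
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Arrays;
import org.springframework.kafka.core.KafkaTemplate;
public class ChunkedFileSender {

    private final KafkaTemplate<String, byte[]> template;

    public ChunkedFileSender(KafkaTemplate<String, byte[]> template) {
        this.template = template;
    }

    public void sendInChunks(Path file) throws IOException {
        byte[] all = Files.readAllBytes(file);
        int chunkSize = 512 * 1024; // 512 KB per record, well under the default max.request.size
        for (int start = 0, index = 0; start < all.length; start += chunkSize, index++) {
            byte[] chunk = Arrays.copyOfRange(all, start, Math.min(start + chunkSize, all.length));
            // The key carries the chunk index so the consumer can detect gaps or reordering.
            template.send("testTopic", String.valueOf(index), chunk);
        }
    }
}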

Configure SpringBoot Kafka streams unit tests

I have a Spring Boot Kafka Streams Maven app. I use spring-boot-starter-parent 2.4.4 for my Spring Boot dependencies and kafka-streams 2.7.0.
My tests fail with a
java.lang.NullPointerException
when trying to load my application configuration from either
resources/application.yml, test/resources/application-test.resources, or test/resources/application.yml.
I have a config class with the annotations below and getters and setters for fields that have the same names as in application.yml.
package com.acme.rtc.configuration;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Configuration;
import org.springframework.stereotype.Component;
@ConfigurationProperties(prefix = "topics")
@Component
@Configuration
public class ConfigProps {
private String MATRIXX_ADJ_EVENT_TOPIC;
private String OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
private String OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
private String EVENTS_NO_MVNO;
public void setMATRIXX_ADJ_EVENT_TOPIC(String MATRIXX_ADJ_EVENT_TOPIC) {
this.MATRIXX_ADJ_EVENT_TOPIC = MATRIXX_ADJ_EVENT_TOPIC;
}
public void setOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC(String OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC) {
this.OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC = OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
}
public void setOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC(String OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC) {
this.OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC = OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
}
public String getEVENTS_NO_MVNO() {
return EVENTS_NO_MVNO;
}
public void setEVENTS_NO_MVNO(String EVENTS_NO_MVNO) {
this.EVENTS_NO_MVNO = EVENTS_NO_MVNO;
}
public String getMATRIXX_ADJ_EVENT_TOPIC() {
return MATRIXX_ADJ_EVENT_TOPIC;
}
public String getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC() {
return OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC;
}
public String getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC() {
return OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC;
}
}
I am autowiring this class in my test and application classes,
@Autowired
ConfigProps cp;
and accessing fields with cp.getBootstrapServerHost(), but this results in a NullPointerException in my test class, while it resolves properly in my application class...
My test class looks like this
package distinct;
import com.acme.rtc.configuration.ConfigProps;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.*;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import com.acme.rtc.configuration.KafkaConfiguration;
import com.acme.rtc.configuration.TopologyConfiguration;
import org.springframework.test.context.ContextConfiguration;
import org.springframework.test.context.junit.jupiter.SpringJUnitConfig;
import java.util.List;
import static java.util.Collections.singletonList;
import static java.util.stream.Collectors.toList;
import static org.junit.jupiter.api.Assertions.assertEquals;
@SpringBootTest
@ContextConfiguration(classes = TopologyConfiguration.class)
@SpringJUnitConfig
public class TestWithTopologyTestDriver {
private TestInputTopic<String, String> inputTopicWrong;
private TestOutputTopic<String, String> outputTopicWrong;
private TestInputTopic<String, String> inputTopicRight;
private TestOutputTopic<String, String> outputTopicRight;
private TopologyTestDriver topologyTestDriver;
@Autowired
ConfigProps configProps;
@BeforeEach
public void setUp() {
KafkaProperties properties = new KafkaProperties();
properties.setBootstrapServers(singletonList("localhost:9092"));
KafkaStreamsConfiguration config = new KafkaConfiguration(properties).getStreamsConfig();
StreamsBuilder sb = new StreamsBuilder();
Topology topology = new TopologyConfiguration().createTopology(sb);
topologyTestDriver = new TopologyTestDriver(topology, config.asProperties());
inputTopicWrong =
topologyTestDriver.createInputTopic(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), new StringSerializer(),
new StringSerializer());
outputTopicWrong =
topologyTestDriver.createOutputTopic(configProps.getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC(), new StringDeserializer(),
new StringDeserializer());
inputTopicRight =
topologyTestDriver.createInputTopic(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), new StringSerializer(),
new StringSerializer());
outputTopicRight =
topologyTestDriver.createOutputTopic(configProps.getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC(), new StringDeserializer(),
new StringDeserializer());
}
@AfterEach
public void tearDown() {
topologyTestDriver.close();
}
@Test
void wrongDistinctTopology() {
testTopology(inputTopicWrong, outputTopicWrong);
}}
where TopologyConfiguration is my application topology and looks like this:
package com.acme.rtc.configuration;
import com.fasterxml.jackson.databind.JsonNode;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.admin.NewTopic;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.common.serialization.Serde;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.common.serialization.Serializer;
import org.apache.kafka.common.serialization.Deserializer;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.*;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.env.Environment;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import org.springframework.stereotype.Component;
@Configuration
@ConfigurationProperties(prefix = "topics")
@Component
@RequiredArgsConstructor
public class TopologyConfiguration {
@Autowired
Environment env;
@Autowired
ConfigProps configProps;
private void acmeStreamsTopoloy(StreamsBuilder streamsBuilder) {
Deserializer<JsonNode> jsonDeserializer = new JsonDeserializer();
Serializer<JsonNode> jsonSerializer = new JsonSerializer();
Serde<JsonNode> jsonSerde = Serdes.serdeFrom(jsonSerializer, jsonDeserializer);
System.out.println("ConfigProps.getMattrix: "+configProps.getMATRIXX_ADJ_EVENT_TOPIC());
KStream<String, String> inputStream =
streamsBuilder.stream(configProps.getMATRIXX_ADJ_EVENT_TOPIC(), Consumed.with(Serdes.String(), Serdes.String()));
KStream<String, String>[] branches = inputStream.branch(
(key, value)-> value.contains("KGN"),
(key, value)-> value.contains("LEB"),
(key, value)->true);
branches[0].to(configProps.getOUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC());
branches[1].to(configProps.getOUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC());
branches[2].to(configProps.getEVENTS_NO_MVNO());
}
@Bean
public Topology createTopology(StreamsBuilder streamsBuilder) {
acmeStreamsTopoloy(streamsBuilder);
return streamsBuilder.build();
}
}
My KafkaConfiguration class
package com.acme.rtc.configuration;
import lombok.RequiredArgsConstructor;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.errors.LogAndContinueExceptionHandler;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.autoconfigure.kafka.KafkaProperties;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.annotation.KafkaStreamsDefaultConfiguration;
import org.springframework.kafka.config.KafkaStreamsConfiguration;
import org.springframework.kafka.support.Acknowledgment;
import org.springframework.kafka.support.serializer.JsonDeserializer;
import org.springframework.kafka.support.serializer.JsonSerializer;
import java.util.HashMap;
import java.util.Map;
@Configuration
@RequiredArgsConstructor
public class KafkaConfiguration {
public static final String APP_ID = "acme-stream-rtc";
private final KafkaProperties kafkaProperties;
@Autowired
@Bean(name = KafkaStreamsDefaultConfiguration.DEFAULT_STREAMS_CONFIG_BEAN_NAME)
public KafkaStreamsConfiguration getStreamsConfig() {
Map<String, Object> props = new HashMap<>();
props.put(StreamsConfig.APPLICATION_ID_CONFIG, APP_ID);
props.put(StreamsConfig.NUM_STREAM_THREADS_CONFIG, 2);
props.put(StreamsConfig.BOOTSTRAP_SERVERS_CONFIG, kafkaProperties.getBootstrapServers());
props.put(JsonSerializer.ADD_TYPE_INFO_HEADERS,false);
KafkaStreamsConfiguration streamsConfig = new KafkaStreamsConfiguration(props);
return streamsConfig;
}
}
My application.yml has the right syntax etc.
spring:
  kafka:
    bootstrap-servers: localhost:9092
    json:
      value:
        default:
          type: true
  kafka:
    streams:
      properties:
        default:
          value:
            serde: org.springframework.kafka.support.serializer.JsonSerde
    admin:
      security:
        protocol: SSL
    ssl:
      trust-store-location: ${TRUSTSTORE_LOCATION}
      trust-store-password: ${TRUSTSTORE_PASSWORD}
      key-store-location: ${KEYSTORE_LOCATION}
      key-store-password: ${KEYSTORE_PASSWORD}
      key-password: ${KEY_PASSWORD}
topics:
  MATRIXX_ADJ_EVENT_TOPIC: input-matrixx-adj-event
  OUTPUT_MNVO_KGN_ADJ_EVENT_TOPIC: output-KGN-adj-event
  OUTPUT_MNVO_LEB_ADJ_EVENT_TOPIC: output-LEB-adj-event
  EVENTS_NO_MVNO: events-no-mvno-spec
My main class
package com.acme.rtc;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.kafka.annotation.EnableKafkaStreams;
@SpringBootApplication
@EnableKafkaStreams
public class StreamProcessing {
public static void main(String[] args) {
SpringApplication.run(StreamProcessing.class, args);
}
}
I am not sure if I am missing any context when Autowiring my ConfigProps class or if I need further annotations on my test class.
For JUnit 4 you need @RunWith(SpringJUnit4ClassRunner.class) alongside @ContextConfiguration.
For JUnit 5, use @SpringJUnitConfig.
For proper loading of properties, though, you need @SpringBootTest.
Boot 2.4 uses JUnit 5.
And you should not have @ConfigurationProperties on the test.
EDIT
I just tested it with no problems.
@Configuration
public class Config {
@Bean
String str() {
return "str";
}
}
@ConfigurationProperties(prefix = "foo")
@Component
public class MyProps {
String bar;
public String getBar() {
return this.bar;
}
public void setBar(String bar) {
this.bar = bar;
}
@Override
public String toString() {
return "MyProps [bar=" + this.bar + "]";
}
}
@SpringBootApplication
public class So67078244Application {
public static void main(String[] args) {
SpringApplication.run(So67078244Application.class, args);
}
}
@SpringBootTest
class So67078244ApplicationTests {
@Autowired
MyProps props;
@Test
void contextLoads() {
System.out.println(this.props);
}
}
foo.bar=baz
MyProps [bar=baz]
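Applied to the question above, that advice suggests a test class along these lines (a sketch only, not verified against the full project): just @SpringBootTest, with no @ContextConfiguration or @ConfigurationProperties, so the full Boot context and application.yml are loaded:
@SpringBootTest
public class TestWithTopologyTestDriver {

    @Autowired
    ConfigProps configProps;

    @Test
    void propertiesAreBound() {
        // With the full Boot context loaded, the "topics" section of application.yml is bound here.
        System.out.println(configProps.getMATRIXX_ADJ_EVENT_TOPIC());
    }
}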

Is there any way to load @ImportResource dynamically based on the active profile?

I have the class below, which is responsible for starting my application. Is there any way to make the @ImportResource annotation dynamic?
For my test coverage I would like to load a different resource. I am trying to avoid running my unit tests with my parallel configuration, because the results are intermittent.
import java.security.cert.CertificateException;
import java.security.cert.X509Certificate;
import org.apache.http.client.HttpClient;
import org.apache.http.conn.ssl.NoopHostnameVerifier;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.ssl.SSLContextBuilder;
import org.apache.http.ssl.TrustStrategy;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ImportResource;
import org.springframework.http.client.BufferingClientHttpRequestFactory;
import org.springframework.http.client.HttpComponentsClientHttpRequestFactory;
import org.springframework.validation.beanvalidation.LocalValidatorFactoryBean;
import org.springframework.web.client.RestTemplate;
import br.com.bradesco.ciar.srv.agorainv.infrastructure.LoggingClientHttpRequestInterceptor;
@SpringBootApplication
@ImportResource("spring/*.xml")
public class Application {
@Value("${rest.timeout.read}")
private int readTimeout;
@Value("${rest.timeout.connect}")
private int connectTimeout;
public static void main(final String[] args) {
SpringApplication.run(Application.class, args);
}
@Bean
public HttpClient httpClient() {
try {
return HttpClients.custom().setSSLHostnameVerifier(NoopHostnameVerifier.INSTANCE)
.setSSLContext(new SSLContextBuilder().loadTrustMaterial(null, new TrustStrategy() {
@Override
public boolean isTrusted(final X509Certificate[] arg0, final String arg1) throws CertificateException {
return true;
}
}).build()).build();
}
catch (final Exception e) {
throw new RuntimeException(e);
}
}
@Bean
public HttpComponentsClientHttpRequestFactory httpRequestFactory() {
final HttpComponentsClientHttpRequestFactory requestFactory = new HttpComponentsClientHttpRequestFactory(httpClient());
requestFactory.setReadTimeout(readTimeout);
requestFactory.setConnectTimeout(connectTimeout);
return requestFactory;
}
@Bean
public RestTemplate restTemplate() {
final RestTemplate restTemplate = new RestTemplate(new BufferingClientHttpRequestFactory(httpRequestFactory()));
restTemplate.getInterceptors().add(new LoggingClientHttpRequestInterceptor());
return restTemplate;
}
@Bean
public LocalValidatorFactoryBean restValidator() {
return new LocalValidatorFactoryBean();
}
}
You could try moving the @ImportResource annotation to a separate configuration class annotated with @Profile, e.g.:
@Profile("!Test")
@Configuration
@ImportResource("spring/*.xml")
class ResourceConfiguration {}
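Your tests can then activate that profile so the XML resources are skipped; a minimal sketch (the test class name is illustrative):
@SpringBootTest
@ActiveProfiles("Test")
class ApplicationTests {

    @Test
    void contextLoadsWithoutXmlImports() {
        // ResourceConfiguration is skipped because the "Test" profile is active,
        // so spring/*.xml is never imported.
    }
}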

MongoDb equivalent of writer in Spring Batch?

I'm following the Spring guide for creating a batch service. It implements ItemWriter using JdbcBatchItemWriter, so could you please help me write the MongoDB equivalent of the following code using MongoItemWriter?
I found two tutorials using MongoDB, but they use XML files to define beans and seem outdated.
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {
// tag::readerwriterprocessor[]
@Bean
public ItemWriter<Person> writer(DataSource dataSource) {
JdbcBatchItemWriter<Person> writer = new JdbcBatchItemWriter<Person>();
writer.setItemSqlParameterSourceProvider(new BeanPropertyItemSqlParameterSourceProvider<Person>());
writer.setSql("INSERT INTO people (first_name, last_name) VALUES (:firstName, :lastName)");
writer.setDataSource(dataSource);
return writer;
}
// end::readerwriterprocessor[]
@Bean
public JdbcTemplate jdbcTemplate(DataSource dataSource) {
return new JdbcTemplate(dataSource);
}
}
package hello;
import com.mongodb.DBAddress;
import com.mongodb.MongoClient;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.batch.core.Job;
import org.springframework.batch.core.JobExecutionListener;
import org.springframework.batch.core.Step;
import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing;
import org.springframework.batch.core.configuration.annotation.JobBuilderFactory;
import org.springframework.batch.core.configuration.annotation.StepBuilderFactory;
import org.springframework.batch.core.launch.support.RunIdIncrementer;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;
import org.springframework.batch.item.data.MongoItemWriter;
import org.springframework.batch.item.file.FlatFileItemReader;
import org.springframework.batch.item.file.mapping.BeanWrapperFieldSetMapper;
import org.springframework.batch.item.file.mapping.DefaultLineMapper;
import org.springframework.batch.item.file.transform.DelimitedLineTokenizer;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.core.io.ClassPathResource;
import org.springframework.data.mongodb.MongoDbFactory;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.data.mongodb.core.SimpleMongoDbFactory;
@Configuration
@EnableBatchProcessing
public class BatchConfiguration {
private static final Logger log = LoggerFactory.getLogger(BatchConfiguration.class);
// tag::readerwriterprocessor[]
@Bean
public ItemReader<Person> reader() {
FlatFileItemReader<Person> reader = new FlatFileItemReader<Person>();
reader.setResource(new ClassPathResource("sample-data.csv"));
reader.setLineMapper(new DefaultLineMapper<Person>() {{
setLineTokenizer(new DelimitedLineTokenizer() {{
setNames(new String[] { "firstName", "lastName" });
}});
setFieldSetMapper(new BeanWrapperFieldSetMapper<Person>() {{
setTargetType(Person.class);
}});
}});
return reader;
}
@Bean
public ItemProcessor<Person, Person> processor() {
return new PersonItemProcessor();
}
@Bean
public ItemWriter<Person> writer() {
MongoItemWriter<Person> writer = new MongoItemWriter<Person>();
try {
writer.setTemplate(mongoTemplate());
} catch (Exception e) {
log.error(e.toString());
}
writer.setCollection("people");
return writer;
}
// end::readerwriterprocessor[]
// tag::jobstep[]
@Bean
public Job importUserJob(JobBuilderFactory jobs, Step s1, JobExecutionListener listener) {
return jobs.get("importUserJob")
.incrementer(new RunIdIncrementer())
.listener(listener)
.flow(s1)
.end()
.build();
}
@Bean
public Step step1(StepBuilderFactory stepBuilderFactory, ItemReader<Person> reader,
ItemWriter<Person> writer, ItemProcessor<Person, Person> processor) {
return stepBuilderFactory.get("step1")
.<Person, Person> chunk(10)
.reader(reader)
.processor(processor)
.writer(writer)
.build();
}
// end::jobstep[]
@Bean
public MongoDbFactory mongoDbFactory() throws Exception {
return new SimpleMongoDbFactory(new MongoClient(), "db-name");
}
@Bean
public MongoTemplate mongoTemplate() throws Exception {
MongoTemplate mongoTemplate = new MongoTemplate(mongoDbFactory());
return mongoTemplate;
}
}
