Error with StreamIdentifier when using MultiStreamTracker in kinesis - java

I'm getting an error with StreamIdentifier when trying to use MultiStreamTracker in a Kinesis consumer application.
java.lang.IllegalArgumentException: Unable to deserialize StreamIdentifier from first-stream-name
What is causing this error? I can't find a good example of using the tracker with Kinesis.
The stream name works when using a consumer with a single stream, so I'm not sure what is happening. It looks like the consumer is trying to parse the accountId and streamCreationEpoch, but when I create the identifiers I am using the singleStreamInstance method. Is the stream name required to have these values? They appear to be optional from the code.
This test is part of a complete example on github.
package kinesis.localstack.example;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import com.amazonaws.services.kinesis.producer.KinesisProducer;
import com.amazonaws.services.kinesis.producer.KinesisProducerConfiguration;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.localstack.LocalStackContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import org.testcontainers.utility.DockerImageName;
import software.amazon.awssdk.regions.Region;
import software.amazon.awssdk.services.cloudwatch.CloudWatchAsyncClient;
import software.amazon.awssdk.services.dynamodb.DynamoDbAsyncClient;
import software.amazon.awssdk.services.kinesis.KinesisAsyncClient;
import software.amazon.kinesis.common.ConfigsBuilder;
import software.amazon.kinesis.common.InitialPositionInStream;
import software.amazon.kinesis.common.InitialPositionInStreamExtended;
import software.amazon.kinesis.common.KinesisClientUtil;
import software.amazon.kinesis.common.StreamConfig;
import software.amazon.kinesis.common.StreamIdentifier;
import software.amazon.kinesis.coordinator.Scheduler;
import software.amazon.kinesis.exceptions.InvalidStateException;
import software.amazon.kinesis.exceptions.ShutdownException;
import software.amazon.kinesis.lifecycle.events.InitializationInput;
import software.amazon.kinesis.lifecycle.events.LeaseLostInput;
import software.amazon.kinesis.lifecycle.events.ProcessRecordsInput;
import software.amazon.kinesis.lifecycle.events.ShardEndedInput;
import software.amazon.kinesis.lifecycle.events.ShutdownRequestedInput;
import software.amazon.kinesis.processor.FormerStreamsLeasesDeletionStrategy;
import software.amazon.kinesis.processor.FormerStreamsLeasesDeletionStrategy.NoLeaseDeletionStrategy;
import software.amazon.kinesis.processor.MultiStreamTracker;
import software.amazon.kinesis.processor.ShardRecordProcessor;
import software.amazon.kinesis.processor.ShardRecordProcessorFactory;
import software.amazon.kinesis.retrieval.KinesisClientRecord;
import software.amazon.kinesis.retrieval.polling.PollingConfig;
import static java.util.stream.Collectors.toList;
import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;
import static org.testcontainers.containers.localstack.LocalStackContainer.Service.CLOUDWATCH;
import static org.testcontainers.containers.localstack.LocalStackContainer.Service.DYNAMODB;
import static org.testcontainers.containers.localstack.LocalStackContainer.Service.KINESIS;
import static software.amazon.kinesis.common.InitialPositionInStream.TRIM_HORIZON;
import static software.amazon.kinesis.common.StreamIdentifier.singleStreamInstance;
@Testcontainers
public class KinesisMultiStreamTest {
static class TestProcessorFactory implements ShardRecordProcessorFactory {
private final TestKinesisRecordService service;
public TestProcessorFactory(TestKinesisRecordService service) {
this.service = service;
}
@Override
public ShardRecordProcessor shardRecordProcessor() {
throw new UnsupportedOperationException("must have streamIdentifier");
}
public ShardRecordProcessor shardRecordProcessor(StreamIdentifier streamIdentifier) {
return new TestRecordProcessor(service, streamIdentifier);
}
}
static class TestRecordProcessor implements ShardRecordProcessor {
public final TestKinesisRecordService service;
public final StreamIdentifier streamIdentifier;
public TestRecordProcessor(TestKinesisRecordService service, StreamIdentifier streamIdentifier) {
this.service = service;
this.streamIdentifier = streamIdentifier;
}
@Override
public void initialize(InitializationInput initializationInput) {
}
@Override
public void processRecords(ProcessRecordsInput processRecordsInput) {
service.addRecord(streamIdentifier, processRecordsInput);
}
@Override
public void leaseLost(LeaseLostInput leaseLostInput) {
}
@Override
public void shardEnded(ShardEndedInput shardEndedInput) {
try {
shardEndedInput.checkpointer().checkpoint();
} catch (Exception e) {
throw new IllegalStateException(e);
}
}
@Override
public void shutdownRequested(ShutdownRequestedInput shutdownRequestedInput) {
}
}
static class TestKinesisRecordService {
private List<ProcessRecordsInput> firstStreamRecords = Collections.synchronizedList(new ArrayList<>());
private List<ProcessRecordsInput> secondStreamRecords = Collections.synchronizedList(new ArrayList<>());
public void addRecord(StreamIdentifier streamIdentifier, ProcessRecordsInput processRecordsInput) {
if(streamIdentifier.streamName().contains(firstStreamName)) {
firstStreamRecords.add(processRecordsInput);
} else if(streamIdentifier.streamName().contains(secondStreamName)) {
secondStreamRecords.add(processRecordsInput);
} else {
throw new IllegalStateException("no list for stream " + streamIdentifier);
}
}
public List<ProcessRecordsInput> getFirstStreamRecords() {
return Collections.unmodifiableList(firstStreamRecords);
}
public List<ProcessRecordsInput> getSecondStreamRecords() {
return Collections.unmodifiableList(secondStreamRecords);
}
}
public static final String firstStreamName = "first-stream-name";
public static final String secondStreamName = "second-stream-name";
public static final String partitionKey = "partition-key";
DockerImageName localstackImage = DockerImageName.parse("localstack/localstack:latest");
@Container
public LocalStackContainer localstack = new LocalStackContainer(localstackImage)
.withServices(KINESIS, CLOUDWATCH)
.withEnv("KINESIS_INITIALIZE_STREAMS", firstStreamName + ":1," + secondStreamName + ":1");
public Scheduler scheduler;
public TestKinesisRecordService service = new TestKinesisRecordService();
public KinesisProducer producer;
@BeforeEach
void setup() {
KinesisAsyncClient kinesisClient = KinesisClientUtil.createKinesisAsyncClient(
KinesisAsyncClient.builder().endpointOverride(localstack.getEndpointOverride(KINESIS)).region(Region.of(localstack.getRegion()))
);
DynamoDbAsyncClient dynamoClient = DynamoDbAsyncClient.builder().region(Region.of(localstack.getRegion())).endpointOverride(localstack.getEndpointOverride(DYNAMODB)).build();
CloudWatchAsyncClient cloudWatchClient = CloudWatchAsyncClient.builder().region(Region.of(localstack.getRegion())).endpointOverride(localstack.getEndpointOverride(CLOUDWATCH)).build();
MultiStreamTracker tracker = new MultiStreamTracker() {
private List<StreamConfig> configs = List.of(
new StreamConfig(singleStreamInstance(firstStreamName), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)),
new StreamConfig(singleStreamInstance(secondStreamName), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)));
@Override
public List<StreamConfig> streamConfigList() {
return configs;
}
@Override
public FormerStreamsLeasesDeletionStrategy formerStreamsLeasesDeletionStrategy() {
return new NoLeaseDeletionStrategy();
}
};
ConfigsBuilder configsBuilder = new ConfigsBuilder(tracker, "KinesisPratTest", kinesisClient, dynamoClient, cloudWatchClient, UUID.randomUUID().toString(), new TestProcessorFactory(service));
scheduler = new Scheduler(
configsBuilder.checkpointConfig(),
configsBuilder.coordinatorConfig(),
configsBuilder.leaseManagementConfig(),
configsBuilder.lifecycleConfig(),
configsBuilder.metricsConfig(),
configsBuilder.processorConfig().callProcessRecordsEvenForEmptyRecordList(false),
configsBuilder.retrievalConfig()
);
new Thread(scheduler).start();
producer = producer();
}
@AfterEach
public void teardown() throws ExecutionException, InterruptedException, TimeoutException {
producer.destroy();
Future<Boolean> gracefulShutdownFuture = scheduler.startGracefulShutdown();
gracefulShutdownFuture.get(60, TimeUnit.SECONDS);
}
public KinesisProducer producer() {
var configuration = new KinesisProducerConfiguration()
.setVerifyCertificate(false)
.setCredentialsProvider(localstack.getDefaultCredentialsProvider())
.setMetricsCredentialsProvider(localstack.getDefaultCredentialsProvider())
.setRegion(localstack.getRegion())
.setCloudwatchEndpoint(localstack.getEndpointOverride(CLOUDWATCH).getHost())
.setCloudwatchPort(localstack.getEndpointOverride(CLOUDWATCH).getPort())
.setKinesisEndpoint(localstack.getEndpointOverride(KINESIS).getHost())
.setKinesisPort(localstack.getEndpointOverride(KINESIS).getPort());
return new KinesisProducer(configuration);
}
@Test
void testFirstStream() {
String expected = "Hello";
producer.addUserRecord(firstStreamName, partitionKey, ByteBuffer.wrap(expected.getBytes(StandardCharsets.UTF_8)));
var result = await().timeout(600, TimeUnit.SECONDS)
.until(() -> service.getFirstStreamRecords().stream()
.flatMap(r -> r.records().stream())
.map(KinesisClientRecord::data)
.map(r -> StandardCharsets.UTF_8.decode(r).toString())
.collect(toList()), records -> records.size() > 0);
assertThat(result).anyMatch(r -> r.equals(expected));
}
@Test
void testSecondStream() {
String expected = "Hello";
producer.addUserRecord(secondStreamName, partitionKey, ByteBuffer.wrap(expected.getBytes(StandardCharsets.UTF_8)));
var result = await().timeout(600, TimeUnit.SECONDS)
.until(() -> service.getSecondStreamRecords().stream()
.flatMap(r -> r.records().stream())
.map(KinesisClientRecord::data)
.map(r -> StandardCharsets.UTF_8.decode(r).toString())
.collect(toList()), records -> records.size() > 0);
assertThat(result).anyMatch(r -> r.equals(expected));
}
}
Here is the error I am getting.
[Thread-9] ERROR software.amazon.kinesis.coordinator.Scheduler - Worker.run caught exception, sleeping for 1000 milli seconds!
java.lang.IllegalArgumentException: Unable to deserialize StreamIdentifier from first-stream-name
at software.amazon.kinesis.common.StreamIdentifier.multiStreamInstance(StreamIdentifier.java:75)
at software.amazon.kinesis.coordinator.Scheduler.getStreamIdentifier(Scheduler.java:1001)
at software.amazon.kinesis.coordinator.Scheduler.buildConsumer(Scheduler.java:917)
at software.amazon.kinesis.coordinator.Scheduler.createOrGetShardConsumer(Scheduler.java:899)
at software.amazon.kinesis.coordinator.Scheduler.runProcessLoop(Scheduler.java:419)
at software.amazon.kinesis.coordinator.Scheduler.run(Scheduler.java:330)
at java.base/java.lang.Thread.run(Thread.java:829)

According to the documentation:
The serialized stream identifier should be of the following format: account-id:StreamName:streamCreationTimestamp
So your code should be like this:
private List<StreamConfig> configs = List.of(
new StreamConfig(multiStreamInstance("111111111:multiStreamTest-1:12345"), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)),
new StreamConfig(multiStreamInstance("111111111:multiStreamTest-2:12389"), InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)));
Note: this will also change the leaseKey format to account-id:StreamName:streamCreationTimestamp:ShardId.
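Applied to the test above, a minimal sketch of the tracker (the account id and creation epochs are placeholders for illustration only; use the values your Kinesis endpoint actually reports for each stream):
MultiStreamTracker tracker = new MultiStreamTracker() {
    // Placeholder serialized identifiers in account-id:StreamName:streamCreationEpoch form
    private final List<StreamConfig> configs = List.of(
            new StreamConfig(multiStreamInstance("000000000000:" + firstStreamName + ":1"),
                    InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)),
            new StreamConfig(multiStreamInstance("000000000000:" + secondStreamName + ":1"),
                    InitialPositionInStreamExtended.newInitialPosition(TRIM_HORIZON)));
    @Override
    public List<StreamConfig> streamConfigList() {
        return configs;
    }
    @Override
    public FormerStreamsLeasesDeletionStrategy formerStreamsLeasesDeletionStrategy() {
        return new NoLeaseDeletionStrategy();
    }
};
This assumes the static import is switched from StreamIdentifier.singleStreamInstance to StreamIdentifier.multiStreamInstance.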

Related

Springboot - Save entity in 'normal' class

I'm pretty new to Spring Boot and Java in general, and because we got this in school I'm fiddling around.
I'm now trying to save an entity outside of the Spring Boot entities, repositories, or RestControllers with the following code:
InfMApplication.java:
package com.domain.springboot;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import com.domain.springboot.repositories.MovieRepository;
import com.domain.springboot.services.MovieImport;
@SpringBootApplication
public class InfMApplication {
public static void main(String[] args) {
SpringApplication.run(InfMApplication.class, args);
MovieImport movieImport = new MovieImport();
movieImport.saveToDb();
}
}
MovieImport.java:
package com.domain.springboot.services;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpRequest.BodyPublishers;
import java.net.http.HttpResponse;
import java.net.http.HttpResponse.BodyHandlers;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import org.springframework.web.bind.annotation.CrossOrigin;
import java.io.*;
import java.net.URL;
import com.google.gson.Gson;
import com.domain.omdbapi.entities.Movie;
import com.domain.omdbapi.entities.SearchResponse;
import com.domain.omdbapi.entities.SearchResult;
import com.domain.springboot.repositories.ComplexRepository;
import com.domain.springboot.repositories.DocumentRepository;
import com.domain.springboot.repositories.MovieRepository;
import com.domain.springboot.repositories.SimpleRepository;
@Service
public class MovieImport {
private final MovieRepository movieRepository;
public MovieImport(MovieRepository movieRepository){
this.movieRepository = movieRepository;
}
public void main() {
String randomImdbId = fetchRandomMovie();
Movie movie = fetchMovieDetails(randomImdbId);
saveToDb(movie);
}
public void saveToDb(Movie movie) {
com.domain.springboot.entities.Movie springbootMovie = new com.domain.springboot.entities.Movie(movie.Title, movie.imdbID);
this.movieRepository.save(springbootMovie);
}
public String fetchRandomMovie() {
String randomWord = getRandomWord();
String url = "https://www.omdbapi.com/?apikey=<API_KEY>&type=movie&s=" + randomWord;
HttpClient client = HttpClient.newHttpClient();
HttpRequest request = HttpRequest.newBuilder(
URI.create(url))
.header("accept", "application/json")
.build();
HttpResponse<String> response = null;
try {
response = client.send(request, BodyHandlers.ofString());
} catch (Exception e) {
System.out.println(e);
}
Gson gson = new Gson();
SearchResponse searchResponse = gson.fromJson(response.body(), SearchResponse.class);
int randomIndex = new Random().nextInt(0, searchResponse.getSearch().length);
SearchResult randomResult = searchResponse.getSearch()[randomIndex];
return randomResult.getImdbID();
}
public Movie fetchMovieDetails(String imdbId) {
String url = "https://www.omdbapi.com/?apikey=<API_KEY>&type=movie&plot=full&i=" + imdbId;
HttpClient client = HttpClient.newHttpClient();
HttpRequest request = HttpRequest.newBuilder(
URI.create(url))
.header("accept", "application/json")
.build();
HttpResponse<String> response = null;
try {
response = client.send(request, BodyHandlers.ofString());
} catch (Exception e) {
System.out.println(e);
}
Gson gson = new Gson();
Movie movie = gson.fromJson(response.body(), Movie.class);
return movie;
}
public String getRandomWord() {
URL resource = getClass().getClassLoader().getResource("Wordlist.txt");
List<String> words = new ArrayList<>();
try {
File file = new File(resource.toURI());
words = Files.readAllLines(file.toPath(), StandardCharsets.UTF_8);
} catch (Exception e) {
e.printStackTrace();
}
int randomIndex = new Random().nextInt(0, words.size());
return words.get(randomIndex);
}
}
If I use "this.movieRepository.save(movieObject);" to save a movie in the MovieRestController the same way, it works. I also tried adding the "@Autowired" annotation, but this didn't work.
I always get the error
java.lang.NullPointerException: Cannot invoke "com.domain.springboot.repositories.MovieRepository.save(Object)" because "this.movieRepository" is null
How can I get to use the movieRepository in other Java classes like in the RestControllers?
java.lang.NullPointerException: Cannot invoke
"com.domain.springboot.repositories.MovieRepository.save(Object)"
because "this.movieRepository" is null
The above error is to be expected if we look at the code you shared:
public class MovieImport {
private MovieRepository movieRepository;
public void saveToDb() {
// Create movie
com.domain.springboot.entities.Movie springbootMovie = new com.domain.springboot.entities.Movie("Iron Man", "284cb8fgf");
this.movieRepository.save(springbootMovie);
}
}
You have to correct certain things in your code base.
First, you're not initializing the movieRepository, and that is why you're getting the NullPointerException. Since you're using Spring Boot, you can use constructor injection to have the field initialized by the Spring container. Also, this class should be scanned by Spring, so you should put an annotation such as @Component or @Service on top of it.
The following will work if your MovieImport and MovieRepository classes are scanned by Spring Boot.
package com.domain;
import com.domain.omdbapi.entities.Movie;
import com.domain.springboot.repositories.MovieRepository;
@Service
public class MovieImport {
private final MovieRepository movieRepository;
public MovieImport(MovieRepository movieRepository){
this.movieRepository = movieRepository;
}
public void saveToDb() {
// Create movie
com.domain.springboot.entities.Movie springbootMovie = new com.domain.springboot.entities.Movie("Iron Man", "284cb8fgf");
this.movieRepository.save(springbootMovie);
}
}
Updated
@SpringBootApplication
public class InfMApplication implements CommandLineRunner {
@Autowired
private MovieImport movieImport;
public static void main(String[] args) {
SpringApplication.run(InfMApplication.class, args);
}
@Override
public void run(String... args) throws Exception {
movieImport.saveToDb();
}
}
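This works because SpringApplication.run first builds the application context (injecting movieImport) and only then invokes run(...) on the CommandLineRunner; the original code created MovieImport with new, outside the container, so nothing was ever injected into it.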

kafka streams abandoned cart development - session window

I am attempting to build out a Kafka Streams app that takes in records from an input topic with a simple JSON payload (id and timestamp included; the key is a simple 3-digit string, and no schema is required). For the output topic, I wish to produce only the records which have been abandoned for 30 minutes or more (session window). Based on this link, I have begun to develop a Kafka Streams app:
package io.confluent.developer;
import org.apache.kafka.clients.admin.AdminClient;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.serialization.Serdes;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.streams.KafkaStreams;
import org.apache.kafka.streams.KeyValue;
import org.apache.kafka.streams.StreamsBuilder;
import org.apache.kafka.streams.StreamsConfig;
import org.apache.kafka.streams.Topology;
import org.apache.kafka.streams.kstream.Consumed;
import org.apache.kafka.streams.kstream.Produced;
import org.apache.kafka.streams.kstream.SessionWindows;
import java.io.FileInputStream;
import java.io.IOException;
import java.time.Duration;
import java.time.Instant;
import java.time.ZoneId;
import java.time.format.DateTimeFormatter;
import java.time.format.FormatStyle;
import java.time.temporal.ChronoUnit;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.CountDownLatch;
public class SessionWindow {
private final DateTimeFormatter timeFormatter = DateTimeFormatter.ofLocalizedTime(FormatStyle.LONG)
.withLocale(Locale.US)
.withZone(ZoneId.systemDefault());
public Topology buildTopology(Properties allProps) {
final StreamsBuilder builder = new StreamsBuilder();
final String inputTopic = allProps.getProperty("input.topic.name");
final String outputTopic = allProps.getProperty("output.topic.name");
builder.stream(inputTopic, Consumed.with(Serdes.String(), Serdes.String()))
.groupByKey()
.windowedBy(SessionWindows.ofInactivityGapAndGrace(Duration.ofMinutes(5), Duration.ofSeconds(10)))
.count()
.toStream()
.map((windowedKey, count) -> {
String start = timeFormatter.format(windowedKey.window().startTime());
String end = timeFormatter.format(windowedKey.window().endTime());
String sessionInfo = String.format("Session info started: %s ended: %s with count %s", start, end, count);
return KeyValue.pair(windowedKey.key(), sessionInfo);
})
.to(outputTopic, Produced.with(Serdes.String(), Serdes.String()));
return builder.build();
}
public Properties loadEnvProperties(String fileName) throws IOException {
Properties allProps = new Properties();
FileInputStream input = new FileInputStream(fileName);
allProps.load(input);
input.close();
return allProps;
}
public static void main(String[] args) throws Exception {
if (args.length < 1) {
throw new IllegalArgumentException("This program takes one argument: the path to an environment configuration file.");
}
SessionWindow tw = new SessionWindow();
Properties allProps = tw.loadEnvProperties(args[0]);
allProps.put(StreamsConfig.DEFAULT_KEY_SERDE_CLASS_CONFIG, Serdes.String().getClass());
allProps.put(StreamsConfig.DEFAULT_TIMESTAMP_EXTRACTOR_CLASS_CONFIG, ClickEventTimestampExtractor.class);
Topology topology = tw.buildTopology(allProps);
ClicksDataGenerator dataGenerator = new ClicksDataGenerator(allProps);
dataGenerator.generate();
final KafkaStreams streams = new KafkaStreams(topology, allProps);
final CountDownLatch latch = new CountDownLatch(1);
// Attach shutdown handler to catch Control-C.
Runtime.getRuntime().addShutdownHook(new Thread("streams-shutdown-hook") {
@Override
public void run() {
streams.close(Duration.ofSeconds(5));
latch.countDown();
}
});
try {
streams.cleanUp();
streams.start();
latch.await();
} catch (Throwable e) {
System.exit(1);
}
System.exit(0);
}
static class ClicksDataGenerator {
final Properties properties;
public ClicksDataGenerator(final Properties properties) {
this.properties = properties;
}
public void generate() {
properties.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
properties.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
}
}
}
package io.confluent.developer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.processor.TimestampExtractor;
public class ClickEventTimestampExtractor implements TimestampExtractor {
@Override
public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) {
System.out.println(record.value());
return record.getTimestamp();
}
}
I am having issues with the following:
Getting the code to compile - I keep getting this error (I am new to Java, so please bear with me). What is the correct way to call getTimestamp?
error: cannot find symbol
return record.getTimestamp();
^
symbol: method getTimestamp()
location: variable record of type ConsumerRecord<Object,Object>
1 error
I am also not sure if the timestamp extractor will work for this particular scenario. I read here that 'The Timestamp extractor can only give you one timestamp'. Does that mean that if there are multiple messages with different keys this won't work? Some clarification or examples would help.
thanks!
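For the compile error: a minimal sketch of the extractor, assuming the standard kafka-clients API, where the accessor on ConsumerRecord is timestamp() rather than getTimestamp():
package io.confluent.developer;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.streams.processor.TimestampExtractor;
public class ClickEventTimestampExtractor implements TimestampExtractor {
    @Override
    public long extract(ConsumerRecord<Object, Object> record, long previousTimestamp) {
        // ConsumerRecord exposes the record's timestamp via timestamp(), not getTimestamp()
        return record.timestamp();
    }
}
On the second point, the extractor is invoked once for every record, whatever its key, so having multiple messages with different keys is not a problem for it.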

Block texture is loaded successfully, but the 'BlockItem' texture isn't, Minecraft Forge 1.15.2

I've tried many tutorials and looked for documentation, but haven't succeeded yet.
I'll show you my code, the file directory, and an image of the problem.
testmod/TestMod.java :
package com.SkySibe.testmod;
import net.minecraftforge.common.MinecraftForge;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.event.lifecycle.FMLClientSetupEvent;
import net.minecraftforge.fml.event.lifecycle.FMLCommonSetupEvent;
import net.minecraftforge.fml.javafmlmod.FMLJavaModLoadingContext;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
// The value here should match an entry in the META-INF/mods.toml file
@Mod("testmod")
public class TestMod
{
// Directly reference a log4j logger.
private static final Logger LOGGER = LogManager.getLogger();
public static final String MOD_ID = "testmod";
public static TestMod instance;
public TestMod() {
// Register the setup method for modloading
FMLJavaModLoadingContext.get().getModEventBus().addListener(this::setup);
// Register the doClientStuff method for modloading
FMLJavaModLoadingContext.get().getModEventBus().addListener(this::doClientStuff);
instance = this;
// Register ourselves for server and other game events we are interested in
MinecraftForge.EVENT_BUS.register(this);
}
private void setup(final FMLCommonSetupEvent event)
{
}
private void doClientStuff(final FMLClientSetupEvent event) {
}
}
testmod/init/BlockInit.java :
package com.SkySibe.testmod.init;
import com.SkySibe.testmod.TestMod;
import net.minecraft.block.Block;
import net.minecraft.block.OreBlock;
import net.minecraft.block.SoundType;
import net.minecraft.block.material.Material;
import net.minecraft.item.BlockItem;
import net.minecraft.item.Item;
import net.minecraft.item.ItemGroup;
import net.minecraftforge.common.ToolType;
import net.minecraftforge.event.RegistryEvent;
import net.minecraftforge.eventbus.api.SubscribeEvent;
import net.minecraftforge.fml.common.Mod;
import net.minecraftforge.fml.common.Mod.EventBusSubscriber.Bus;
import net.minecraftforge.registries.ObjectHolder;
@ObjectHolder(TestMod.MOD_ID)
@Mod.EventBusSubscriber(modid = TestMod.MOD_ID, bus = Bus.MOD)
public class BlockInit {
public static final Block ruby_ore = null;
@SubscribeEvent
public static void registerBlocks(final RegistryEvent.Register<Block> event) {
event.getRegistry().register(new OreBlock(Block.Properties.create(Material.ROCK).hardnessAndResistance(3.0F, 3.0F).sound(SoundType.STONE).harvestTool(ToolType.PICKAXE)).setRegistryName("ruby_ore"));
}
@SubscribeEvent
public static void registerBlockItems(final RegistryEvent.Register<Item> event) {
event.getRegistry().register(new BlockItem(ruby_ore,new Item.Properties().group(ItemGroup.BUILDING_BLOCKS)).setRegistryName("ruby_ore"));
}
}
assets/testmod/blockstates/ruby_ore.json :
{
"variants": {
"": { "model": "testmod:block/ruby_ore"}
}
}
assets/testmod/models/block/ruby_ore.json :
{
"parent": "block/cube_all",
"textures": {
"all": "testmod:blocks/ruby_ore"
}
}
assets/testmod/models/item/ruby_ore.json :
{
"parent": "testmod:block/ruby_block"
}
(From comment)
You didn't reference the right model in the file assets/testmod/models/item/ruby_ore.json; the parent points at testmod:block/ruby_block, which doesn't exist. It should be:
{
"parent": "testmod:block/ruby_ore"
}

Spring Boot Autowired Repository null [duplicate]

This question already has answers here:
Why is my Spring @Autowired field null?
(21 answers)
Closed 3 years ago.
I am creating a Netty UDP server using the Spring framework. I have 3 classes and 1 interface.
UDPServer.java
package com.example.nettyUDPserver;
import java.net.InetAddress;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.data.jpa.repository.config.EnableJpaRepositories;
import org.springframework.stereotype.Component;
import akka.actor.ActorRef;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioDatagramChannel;
public class UDPServer {
private int port;
ActorRef serverActor = null;
public UDPServer(int port) {
this.port = port;
}
public void run() throws Exception {
final NioEventLoopGroup group = new NioEventLoopGroup();
try {
final Bootstrap b = new Bootstrap();
b.group(group)
.channel(NioDatagramChannel.class)
.option(ChannelOption.SO_BROADCAST, true)
.handler(new ChannelInitializer<NioDatagramChannel>() {
@Override
public void initChannel(final NioDatagramChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
p.addLast(new IncomingPacketHandler());
}
});
Integer pPort = port;
InetAddress address = InetAddress.getLocalHost();
//InetAddress address = InetAddress.getByName("192.168.1.53");
System.out.println("Localhost address is: " + address.toString());
b.bind(address, pPort).sync().channel().closeFuture().await();
} finally {
group.shutdownGracefully().sync();
}
}
public static void main(String[] args) throws Exception {
int port = 6001;
new UDPServer(port).run();
}
}
IncomingPacketHandler.java
package com.example.nettyUDPserver;
import java.net.InetAddress;
import java.nio.charset.StandardCharsets;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.stereotype.Component;
import com.example.dao.SensorRepository;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import io.netty.buffer.ByteBuf;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.socket.DatagramPacket;
@ComponentScan("com.example.dao")
public class IncomingPacketHandler extends SimpleChannelInboundHandler<DatagramPacket> {
@Autowired
SensorRepository repo;
IncomingPacketHandler(){
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, DatagramPacket packet) throws Exception {
final InetAddress srcAddr = packet.sender().getAddress();
ByteBuf buffer = packet.content();
packet.replace(buffer);
int len = buffer.readableBytes();
byte[] message = new byte[len];
buffer.readBytes(message);
String str = new String(message, StandardCharsets.UTF_8);
ObjectMapper mapper = new ObjectMapper();
JsonNode actualObj = mapper.readTree(str);
int id = actualObj.get("sensor_id").asInt();
String status = actualObj.get("status").asText();
System.out.println("==========================================================");
System.out.println("Source address of datagram received: " + srcAddr.toString());
System.out.println("String message received: " + str);
show();
}
public void show() {
System.out.println("In show function, we will perform our CRUD operations");
System.out.println(repo);
// try {
// this.repo.findAll().forEach(x -> System.out.println(x));
// } catch (NullPointerException e) {
// e.printStackTrace();
// }
}
}
Sensor.java
package com.example.models;
import javax.persistence.Entity;
import javax.persistence.Id;
@Entity
public class Sensor {
@Id
private int sensor_id;
private String status;
private double batLev;
public int getSensor_id() {
return sensor_id;
}
public void setSensor_id(int sensor_id) {
this.sensor_id = sensor_id;
}
public String getStatus() {
return status;
}
public void setStatus(String status) {
this.status = status;
}
public double getBatLev() {
return batLev;
}
public void setBatLev(double batLev) {
this.batLev = batLev;
}
}
SensorRepository.java
package com.example.dao;
import org.springframework.data.repository.CrudRepository;
import org.springframework.stereotype.Repository;
import com.example.models.Sensor;
@Repository
public interface SensorRepository extends CrudRepository<Sensor, Integer> {
}
I am running my server in the class UDPServer.java and I can successfully receive and decode datagrams. The problem is with the SensorRepository in the IncomingPacketHandler.java class. I am using the @Autowired annotation on the field and the @Repository annotation on the interface, but when I print the value of the autowired repository, it is null, so I cannot make SQL queries. Any ideas?
UPDATE
Thank you for your answers guys, much appreciated. I am denoting the IncomingPacketHandler class as a component and I am autowiring it in the UDPServer class. When I run it I get this:
[nioEventLoopGroup-2-1] DEBUG io.netty.channel.DefaultChannelPipeline - Discarded inbound message DatagramPacket(/192.168.61.64:59905 => /192.168.61.64:6001, PooledUnsafeDirectByteBuf(ridx: 0, widx: 38, cap: 2048)) that reached at the tail of the pipeline. Please check your pipeline configuration.
This is probably out of the scope of this question, but maybe you can point me in the right direction. Thank you once again.
Your class IncomingPacketHandler is not managed by Spring; it is created manually by you:
ChannelPipeline p = ch.pipeline();
p.addLast(new IncomingPacketHandler());
As such, even if you add a million Spring annotations, they won't do anything. What you want instead is to have Spring create this handler, and to pass the Spring-created handler as the argument to p.addLast.
The IncomingPacketHandler class has been created manually and not by Spring, hence the bean is not available.
Add @Component to the IncomingPacketHandler class:
...
import org.springframework.stereotype.Component;
@Component
public class IncomingPacketHandler extends
...
And then in UDPServer.java:
...
import org.springframework.beans.factory.annotation.Autowired;
@Component
public class UDPServer {
@Autowired
private IncomingPacketHandler incomingPacketHandler;
...
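Then, in the channel initializer, register the injected instance instead of constructing a new one (a minimal sketch; with a Spring-managed handler its @Autowired fields are populated):
.handler(new ChannelInitializer<NioDatagramChannel>() {
    @Override
    public void initChannel(final NioDatagramChannel ch) throws Exception {
        // Use the Spring-created handler rather than new IncomingPacketHandler()
        ch.pipeline().addLast(incomingPacketHandler);
    }
});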

Spring Integration File reading

I am a newbie to Spring Integration. I am working on a solution, but I am stuck on a specific issue while using the inbound file adapter (FileReadingMessageSource).
I have to read files from different directories, process them, and save the files to different directories. As I understand it, the directory name is fixed at the start of the flow.
Can someone help me with changing the directory name for different requests?
I attempted the following. First of all, I am not sure whether this is the correct way to go about it, and it only worked for one directory. I think the Poller was waiting for more files and never came back to read the other directory.
@SpringBootApplication
@EnableIntegration
@IntegrationComponentScan
public class SiSampleFileProcessor {
@Autowired
MyFileProcessor myFileProcessor;
@Value("${si.outdir}")
String outDir;
#Autowired
Environment env;
public static void main(String[] args) throws IOException {
ConfigurableApplicationContext ctx = new SpringApplication(SiSampleFileProcessor.class).run(args);
FileProcessingService gateway = ctx.getBean(FileProcessingService.class);
boolean process = true;
while (process) {
System.out.println("Please enter the input Directory: ");
String inDir = new Scanner(System.in).nextLine();
if ( inDir.isEmpty() || inDir.equals("exit") ) {
process=false;
} else {
System.out.println("Processing... " + inDir);
gateway.processFilesin(inDir);
}
}
ctx.close();
}
@MessagingGateway(defaultRequestChannel="requestChannel")
public interface FileProcessingService {
String processFilesin( String inputDir );
}
@Bean(name = PollerMetadata.DEFAULT_POLLER)
public PollerMetadata poller() {
return Pollers.fixedDelay(1000).get();
}
@Bean
public MessageChannel requestChannel() {
return new DirectChannel();
}
@ServiceActivator(inputChannel = "requestChannel")
@Bean
GenericHandler<String> fileReader() {
return new GenericHandler<String>() {
@Override
public Object handle(String p, Map<String, Object> map) {
FileReadingMessageSource fileSource = new FileReadingMessageSource();
fileSource.setDirectory(new File(p));
Message<File> msg;
while( (msg = fileSource.receive()) != null ) {
fileInChannel().send(msg);
}
return null; // Not sure what to return!
}
};
}
@Bean
public MessageChannel fileInChannel() {
return MessageChannels.queue("fileIn").get();
}
@Bean
public IntegrationFlow fileProcessingFlow() {
return IntegrationFlows.from(fileInChannel())
.handle(myFileProcessor)
.handle(Files.outboundAdapter(new File(outDir)).autoCreateDirectory(true).get())
.get();
}
}
EDIT: Based on Gary's response, I replaced some methods as follows:
@MessagingGateway(defaultRequestChannel="requestChannel")
public interface FileProcessingService {
boolean processFilesin( String inputDir );
}
@ServiceActivator(inputChannel = "requestChannel")
public boolean fileReader(String inDir) {
FileReadingMessageSource fileSource = new FileReadingMessageSource();
fileSource.setDirectory(new File(inDir));
fileSource.afterPropertiesSet();
fileSource.start();
Message<File> msg;
while ((msg = fileSource.receive()) != null) {
fileInChannel().send(msg);
}
fileSource.stop();
System.out.println("Sent all files in directory: " + inDir);
return true;
}
Now it is working as expected.
You can use this code:
FileProcessor.java
import org.springframework.messaging.Message;
import org.springframework.stereotype.Component;
@Component
public class FileProcessor {
private static final String HEADER_FILE_NAME = "file_name";
private static final String MSG = "%s received. Content: %s";
public void process(Message<String> msg) {
String fileName = (String) msg.getHeaders().get(HEADER_FILE_NAME);
String content = msg.getPayload();
//System.out.println(String.format(MSG, fileName, content));
System.out.println(content);
}
}
LastModifiedFileFilter.java
package com.example.demo;
import org.springframework.integration.file.filters.AbstractFileListFilter;
import java.io.File;
import java.util.HashMap;
import java.util.Map;
public class LastModifiedFileFilter extends AbstractFileListFilter<File> {
private final Map<String, Long> files = new HashMap<>();
private final Object monitor = new Object();
@Override
protected boolean accept(File file) {
synchronized (this.monitor) {
Long previousModifiedTime = files.put(file.getName(), file.lastModified());
return previousModifiedTime == null || previousModifiedTime != file.lastModified();
}
}
}
Main Class= DemoApplication.java
package com.example.demo;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.EnableAutoConfiguration;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.autoconfigure.jdbc.DataSourceAutoConfiguration;
import org.springframework.boot.autoconfigure.orm.jpa.HibernateJpaAutoConfiguration;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import org.apache.commons.io.FileUtils;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.integration.annotation.Aggregator;
import org.springframework.integration.annotation.InboundChannelAdapter;
import org.springframework.integration.annotation.Poller;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.channel.QueueChannel;
import org.springframework.integration.core.MessageSource;
import org.springframework.integration.dsl.IntegrationFlow;
import org.springframework.integration.dsl.IntegrationFlows;
import org.springframework.integration.dsl.channel.MessageChannels;
import org.springframework.integration.dsl.core.Pollers;
import org.springframework.integration.file.FileReadingMessageSource;
import org.springframework.integration.file.filters.CompositeFileListFilter;
import org.springframework.integration.file.filters.SimplePatternFileListFilter;
import org.springframework.integration.file.transformer.FileToStringTransformer;
import org.springframework.integration.scheduling.PollerMetadata;
import org.springframework.messaging.MessageChannel;
import org.springframework.messaging.PollableChannel;
import org.springframework.stereotype.Component;
@SpringBootApplication
@Configuration
public class DemoApplication {
private static final String DIRECTORY = "E:/usmandata/logs/input/";
public static void main(String[] args) throws IOException, InterruptedException {
SpringApplication.run(DemoApplication.class, args);
}
@Bean
public IntegrationFlow processFileFlow() {
return IntegrationFlows
.from("fileInputChannel")
.transform(fileToStringTransformer())
.handle("fileProcessor", "process").get();
}
@Bean
public MessageChannel fileInputChannel() {
return new DirectChannel();
}
@Bean
@InboundChannelAdapter(value = "fileInputChannel", poller = @Poller(fixedDelay = "1000"))
public MessageSource<File> fileReadingMessageSource() {
CompositeFileListFilter<File> filters =new CompositeFileListFilter<>();
filters.addFilter(new SimplePatternFileListFilter("*.log"));
filters.addFilter(new LastModifiedFileFilter());
FileReadingMessageSource source = new FileReadingMessageSource();
source.setAutoCreateDirectory(true);
source.setDirectory(new File(DIRECTORY));
source.setFilter(filters);
return source;
}
@Bean
public FileToStringTransformer fileToStringTransformer() {
return new FileToStringTransformer();
}
@Bean
public FileProcessor fileProcessor() {
return new FileProcessor();
}
}
The FileReadingMessageSource uses a DirectoryScanner internally; it is normally set up by Spring after the properties are injected. Since you are managing the object outside of Spring, you need to call the Spring bean initialization and lifecycle methods afterPropertiesSet(), start() and stop() yourself.
Call stop() when receive() returns null.
> return null; // Not sure what to return!
If you return nothing, your calling thread will hang in the gateway waiting for a response. You could change the gateway to return void or, since your gateway is expecting a String, just return some value.
However, your calling code is not looking at the result anyway.
> gateway.processFilesin(inDir);
Also, remove the @Bean from the @ServiceActivator; with that style, the bean type must be MessageHandler.
