Unable to consume Kafka messages using Apache Storm - Java

I have developed an application that consumes Kafka messages using Apache Storm. When I run the topology using a LocalCluster in Eclipse it works fine and messages are consumed properly, but when I run it using the storm command (bin\storm jar ..\kafka-storm-0.0.1-SNAPSHOT.jar com.kafka_storm.util.Topology storm-kafka-topology), the topology starts but is unable to consume any messages. Is there something wrong I am doing, or can you guide me on what I can do to find the problem?
Topology Code
public class Topology {
public Properties configs;
public BoltBuilder boltBuilder;
public SpoutBuilder spoutBuilder;
public Topology(String configFile) throws Exception {
configs = new Properties();
InputStream is = null;
try {
is = this.getClass().getResourceAsStream("/application.properties");
configs.load(is);
//configs.load(Topology.class.getResourceAsStream("/application.properties"));
boltBuilder = new BoltBuilder(configs);
spoutBuilder = new SpoutBuilder(configs);
} catch (Exception ex) {
ex.printStackTrace();
System.exit(0);
}
}
private void submitTopology() throws Exception {
System.out.println("Entered in submitTopology");
TopologyBuilder builder = new TopologyBuilder();
KafkaSpout<?, ?> kafkaSpout = spoutBuilder.buildKafkaSpout();
SinkTypeBolt sinkTypeBolt = boltBuilder.buildSinkTypeBolt();
MongoDBBolt mongoBolt = boltBuilder.buildMongoDBBolt();
//set the kafkaSpout to topology
//parallelism-hint for kafkaSpout - defines number of executors/threads to be spawn per container
int kafkaSpoutCount = Integer.parseInt(configs.getProperty(Keys.KAFKA_SPOUT_COUNT));
builder.setSpout(configs.getProperty(Keys.KAFKA_SPOUT_ID), kafkaSpout, kafkaSpoutCount);
//set the sinktype bolt
int sinkBoltCount = Integer.parseInt(configs.getProperty(Keys.SINK_BOLT_COUNT));
builder.setBolt(configs.getProperty(Keys.SINK_TYPE_BOLT_ID),sinkTypeBolt,sinkBoltCount).shuffleGrouping(configs.getProperty(Keys.KAFKA_SPOUT_ID));
//set the mongodb bolt
int mongoBoltCount = Integer.parseInt(configs.getProperty(Keys.MONGO_BOLT_COUNT));
builder.setBolt(configs.getProperty(Keys.MONGO_BOLT_ID),mongoBolt,mongoBoltCount).shuffleGrouping(configs.getProperty(Keys.SINK_TYPE_BOLT_ID),Keys.MONGODB_STREAM);
String topologyName = configs.getProperty(Keys.TOPOLOGY_NAME);
Config conf = new Config();
//Defines how many worker processes have to be created for the topology in the cluster.
conf.setNumWorkers(1);
System.out.println("Submitting Topology");
//StormSubmitter.submitTopology(topologyName, conf, builder.createTopology());
System.out.println("Topology submitted");
LocalCluster cluster = new LocalCluster();
cluster.submitTopology(topologyName, conf, builder.createTopology());
}
public static void main(String[] args) throws Exception {
String configFile;
if (args.length == 0) {
System.out.println("Missing input : config file location, using default");
configFile = "application.properties";
} else{
configFile = args[0];
}
Topology ingestionTopology = new Topology(configFile);
ingestionTopology.submitTopology();
}
}
Spout Code
public class SpoutBuilder {
public Properties configs = null;
public SpoutBuilder(Properties configs) {
this.configs = configs;
}
public KafkaSpout<?, ?> buildKafkaSpout() {
String servers = configs.getProperty(Keys.KAFKA_BROKER);
String topic = configs.getProperty(Keys.KAFKA_TOPIC);
String group = configs.getProperty(Keys.KAFKA_CONSUMERGROUP);
return new KafkaSpout<>(getKafkaSpoutConfig(servers,topic,group));
}
protected KafkaSpoutConfig<String, String> getKafkaSpoutConfig(String bootstrapServers, String topic, String group) {
return KafkaSpoutConfig.builder(bootstrapServers, new String[]{topic})
.setProp(ConsumerConfig.GROUP_ID_CONFIG, group)
.setRetry(getRetryService())
.setOffsetCommitPeriodMs(10_000)
.setFirstPollOffsetStrategy(FirstPollOffsetStrategy.UNCOMMITTED_LATEST)
.setMaxUncommittedOffsets(250)
.setProcessingGuarantee(ProcessingGuarantee.AT_LEAST_ONCE)
.setTupleTrackingEnforced(true)
.setEmitNullTuples(false)
.setRecordTranslator(new DefaultRecordTranslator<String, String>())
.build();
}
protected KafkaSpoutRetryService getRetryService() {
return new KafkaSpoutRetryExponentialBackoff(TimeInterval.microSeconds(500),
TimeInterval.milliSeconds(2), Integer.MAX_VALUE, TimeInterval.seconds(10));
}
}
Bolt Builder
public class BoltBuilder {
public Properties configs = null;
public BoltBuilder(Properties configs) {
this.configs = configs;
}
public SinkTypeBolt buildSinkTypeBolt() {
return new SinkTypeBolt();
}
public MongoDBBolt buildMongoDBBolt() {
String host = configs.getProperty(Keys.MONGO_HOST);
int port = Integer.parseInt(configs.getProperty(Keys.MONGO_PORT));
String db = configs.getProperty(Keys.MONGO_DATABASE);
String collection = configs.getProperty(Keys.MONGO_COLLECTION);
return new MongoDBBolt(host, port, db, collection);
}
}
SinkTypeBolt Code
public class SinkTypeBolt extends BaseRichBolt {
private static final long serialVersionUID = 1L;
private OutputCollector collector;
public void execute(Tuple tuple) {
String value = tuple.getString(4);
System.out.println("Received in SinkType bolt : "+value);
if (value != null && !value.isEmpty()){
collector.emit(Keys.MONGODB_STREAM,new Values(value));
System.out.println("Emitted : "+value);
}
collector.ack(tuple);
}
public void prepare(Map conf, TopologyContext context, OutputCollector collector) {
this.collector = collector;
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declareStream(Keys.MONGODB_STREAM, new Fields("content"));
}
}
MongoDB Bolt
public class MongoDBBolt extends BaseRichBolt {
private static final long serialVersionUID = 1L;
private OutputCollector collector;
private MongoDatabase mongoDB;
private MongoClient mongoClient;
private String collection;
public String host;
public int port ;
public String db;
protected MongoDBBolt(String host, int port, String db,String collection) {
this.host = host;
this.port = port;
this.db = db;
this.collection = collection;
}
public void prepare(Map stormConf, TopologyContext context, OutputCollector collector) {
this.collector = collector;
this.mongoClient = new MongoClient(host,port);
this.mongoDB = mongoClient.getDatabase(db);
}
public void execute(Tuple input) {
Document mongoDoc = getMongoDocForInput(input);
try{
mongoDB.getCollection(collection).insertOne(mongoDoc);
collector.ack(input);
}catch(Exception e) {
e.printStackTrace();
collector.fail(input);
}
}
@Override
public void cleanup() {
this.mongoClient.close();
}
public void declareOutputFields(OutputFieldsDeclarer declarer) {
// TODO Auto-generated method stub
}
public Document getMongoDocForInput(Tuple input) {
Document doc = new Document();
String content = (String) input.getValueByField("content");
String[] parts = content.trim().split(" ");
System.out.println("Received in MongoDB bolt "+content);
try {
for(String part : parts) {
String[] subParts = part.split(":");
String fieldName = subParts[0];
String value = subParts[1];
doc.append(fieldName, value);
}
} catch(Exception e) {
}
return doc;
}
}
pom.xml code
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.7</maven.compiler.source>
<maven.compiler.target>1.7</maven.compiler.target>
</properties>
<dependencies>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>3.8.1</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-core</artifactId>
<version>1.2.2</version>
<scope>provided</scope>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>1.1.0</version>
<exclusions>
<exclusion>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-log4j12</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.storm</groupId>
<artifactId>storm-kafka-client</artifactId>
<version>1.2.2</version>
</dependency>
<dependency>
<groupId>org.mongodb</groupId>
<artifactId>mongo-java-driver</artifactId>
<version>3.0.4</version>
</dependency>
<dependency>
<groupId>com.googlecode.json-simple</groupId>
<artifactId>json-simple</artifactId>
<version>1.1</version>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>1.4</version>
<configuration>
<createDependencyReducedPom>true</createDependencyReducedPom>
</configuration>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<transformers>
<transformer
implementation="org.apache.maven.plugins.shade.resource.ServicesResourceTransformer" />
<transformer
implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
<mainClass>com.kafka_storm.util.Topology</mainClass>
</transformer>
</transformers>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-resources-plugin</artifactId>
<version>2.4</version>
</plugin>
</plugins>
<resources>
<resource>
<directory>src/main/java</directory>
<includes>
<include>**/*.properties</include>
</includes>
</resource>
</resources>
</build>
Storm UI (screenshot not shown)
Just to be sure: you are remembering to use the StormSubmitter line in Topology, rather than the LocalCluster, when you submit the topology with storm jar, right? (In the code shown above, the StormSubmitter call is commented out and a LocalCluster is used instead.)
Also, please check that you've started all the right daemons: storm nimbus and storm supervisor should be running as a minimum (plus your Zookeeper install).
The next places to look would be in your log files. In the Storm directory, you'll have a logs directory. Look in the logs/worker-artifacts/<your-topology-id>/<your-worker-port>/worker.log files. Those will hopefully get you on the right track to figuring out what's going on. I'd open Storm UI, find your spout and look up which worker ports it's running on, so you can look in the right log files.
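A minimal sketch of how the submit logic could be arranged so the same main class works both in the IDE and with storm jar; the helper class, method name, and the boolean flag are illustrative, not part of the asker's code:
import org.apache.storm.Config;
import org.apache.storm.LocalCluster;
import org.apache.storm.StormSubmitter;
import org.apache.storm.topology.TopologyBuilder;

public class TopologySubmitExample {
    // Hypothetical helper: submit to the real cluster unless local mode is requested.
    public static void submit(String topologyName, Config conf, TopologyBuilder builder, boolean local) throws Exception {
        if (local) {
            // In-process cluster, only for running inside the IDE or in tests
            LocalCluster cluster = new LocalCluster();
            cluster.submitTopology(topologyName, conf, builder.createTopology());
        } else {
            // Required when launching with "storm jar"; a LocalCluster never hands
            // the topology to the running Nimbus/Supervisor daemons
            StormSubmitter.submitTopology(topologyName, conf, builder.createTopology());
        }
    }
}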

Related

LuckPerms: non-static method getUserManager() cannot be referenced from a static context

I'm trying to get user with uuid but an error pops up and says this:
non-static method getUserManager() cannot be referenced from a static context
I tried to place it in a different method and call that method, but that didn't work; whatever I do, the error still pops up. It could be because I wrote LuckPerms instead of luckPerms, but I don't think so.
And here is the code (I deleted some useless stuff)
I tried searching but couldn't find anything; I've also read the API docs about five times and it didn't help.
// BUNGEECORD
import net.md_5.bungee.api.plugin.Plugin;
// BUNGEECORD
//JAVA
import java.util.EnumSet;
//JAVA
//LuckPerms
import net.luckperms.api.LuckPermsProvider;
import net.luckperms.api.LuckPerms;
//LuckPerms
public void onPrivateMessageReceived(final PrivateMessageReceivedEvent event) {
UUID uuid = UUID.nameUUIDFromBytes(("OfflinePlayer:" + messages.get(0).getContentDisplay()).getBytes());
net.luckperms.api.model.user.User user = LuckPerms.getUserManager().getUser(uuid); // AND HERE IS THE ERROR
DataMutateResult result = user.data().add(Node.builder("group.admin").build());
};
And here is my pom.xml
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>area</groupId>
<artifactId>amogus</artifactId>
<version>1.0.0-SNAPSHOT</version>
<properties>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
<repositories>
<repository>
<id>dv8tion</id>
<name>m2-dv8tion</name>
<url>https://m2.dv8tion.net/releases</url>
</repository>
<repository>
<id>spigot-repo</id>
<url>https://hub.spigotmc.org/nexus/content/repositories/snapshots/</url>
</repository>
<repository>
<snapshots>
<enabled>false</enabled>
</snapshots>
<id>bintray-dv8fromtheworld-maven</id>
<name>bintray</name>
<url>http://dl.bintray.com/dv8fromtheworld/maven</url>
</repository>
<repository>
<id>bungeecord-repo</id>
<url>https://oss.sonatype.org/content/repositories/snapshots</url>
</repository>
</repositories>
<dependencies>
<dependency>
<groupId>net.dv8tion</groupId>
<artifactId>JDA</artifactId>
<version>4.3.0_277</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>2.0.0-alpha4</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-simple</artifactId>
<version>2.0.0-alpha4</version>
</dependency>
<dependency>
<groupId>net.luckperms</groupId>
<artifactId>api</artifactId>
<version>5.3</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>org.bukkit</groupId>
<artifactId>bukkit</artifactId>
<version>1.12.2-R0.1-SNAPSHOT</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>net.md-5</groupId>
<artifactId>bungeecord-api</artifactId>
<version>1.16-R0.5-SNAPSHOT</version>
<type>jar</type>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>net.md-5</groupId>
<artifactId>bungeecord-api</artifactId>
<version>1.16-R0.5-SNAPSHOT</version>
<type>javadoc</type>
<scope>provided</scope>
</dependency>
</dependencies>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-jar-plugin</artifactId>
<version>3.1.0</version>
<configuration>
<archive>
<manifest>
<mainClass>area.amogus</mainClass>
</manifest>
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<version>3.5.1</version>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-shade-plugin</artifactId>
<version>2.4.3</version>
<executions>
<execution>
<phase>package</phase>
<goals>
<goal>shade</goal>
</goals>
<configuration>
<artifactSet>
<excludes>
<exclude></exclude>
</excludes>
</artifactSet>
</configuration>
</execution>
</executions>
</plugin>
</plugins>
<resources>
<resource>
<directory>resources</directory>
</resource>
</resources>
</build>
</project>
Imagine we have a class:
public class MyClass {
public static void main(String[] args) {
MyClass.doThis();
}
public void doThis(){
System.out.println("do this");
}
}
In this case, you will get exactly the same error as you're getting in your code. It means we cannot access the doThis() method before an instance of MyClass is created, like:
MyClass myClass = new MyClass();
myClass.doThis(); //this is valid since we have an instance to call this method on.
I would assume you have to get an instance of a LuckPerms object in some way before calling the .getUserManager() method on it.
Like this:
LuckPerms luckPerms = new LuckPerms(); //assuming there's a respective constructor
luckPerms.getUserManager();
You cannot use LuckPerms.getUserManager(), because you have not instantiated an object of LuckPerms. Also, LuckPerms cannot be instantiated directly because it is an interface; you would have to implement the interface first and then instantiate and use it.
your code could be like this:
public void onPrivateMessageReceived(final PrivateMessageReceivedEvent event) {
UUID uuid = UUID.nameUUIDFromBytes(("OfflinePlayer:" + messages.get(0).getContentDisplay()).getBytes());
LuckPerms luckPerms = new LuckPerms() {
@Override
public @org.checkerframework.checker.nullness.qual.NonNull String getServerName() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull UserManager getUserManager() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull GroupManager getGroupManager() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull TrackManager getTrackManager() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull <T> PlayerAdapter<T> getPlayerAdapter(@org.checkerframework.checker.nullness.qual.NonNull Class<T> aClass) {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull Platform getPlatform() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull PluginMetadata getPluginMetadata() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull EventBus getEventBus() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull Optional<MessagingService> getMessagingService() {
return Optional.empty();
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull ActionLogger getActionLogger() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull ContextManager getContextManager() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull MetaStackFactory getMetaStackFactory() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull CompletableFuture<Void> runUpdateTask() {
return null;
}
@Override
public void registerMessengerProvider(@org.checkerframework.checker.nullness.qual.NonNull MessengerProvider messengerProvider) {
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull NodeBuilderRegistry getNodeBuilderRegistry() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull QueryOptionsRegistry getQueryOptionsRegistry() {
return null;
}
@Override
public @org.checkerframework.checker.nullness.qual.NonNull NodeMatcherFactory getNodeMatcherFactory() {
return null;
}
};
net.luckperms.api.model.user.User user = luckPerms.getUserManager().getUser(uuid); // AND HERE IS THE ERROR
DataMutateResult result = user.data().add(Node.builder("group.admin").build());
}
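As a side note, a minimal sketch of how the LuckPerms API object is normally obtained at runtime, using the LuckPermsProvider already imported in the question. This assumes the LuckPerms plugin is installed and loaded on the server and uses the LuckPerms 5.x API; the group node is just an example:
import java.util.UUID;

import net.luckperms.api.LuckPerms;
import net.luckperms.api.LuckPermsProvider;
import net.luckperms.api.model.data.DataMutateResult;
import net.luckperms.api.model.user.User;
import net.luckperms.api.node.Node;

public class LuckPermsExample {
    // Hypothetical helper: add a group node to an already-loaded user.
    public static DataMutateResult addAdminGroup(UUID uuid) {
        LuckPerms api = LuckPermsProvider.get();        // instance registered by the plugin at runtime
        User user = api.getUserManager().getUser(uuid); // null if the user is not loaded in memory
        if (user == null) {
            return DataMutateResult.FAIL;
        }
        return user.data().add(Node.builder("group.admin").build());
    }
}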

Problem connecting Apache Flink to Elasticsearch

I used a piece of code from the Flink site to connect Apache Flink to Elasticsearch. I want to run this code from NetBeans through a Maven project.
public class FlinkElasticCon {
public static void main(String[] args) throws Exception {
final int port;
try {
final ParameterTool params = ParameterTool.fromArgs(args);
port = params.getInt("port");
} catch (Exception e) {
System.err.println("No port specified. Please run 'SocketWindowWordCount --port <port>'");
return;
}
// get the execution environment
final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
// get input data by connecting to the socket
DataStream<String> text = env.socketTextStream("localhost", port, "\n");
// parse the data, group it, window it, and aggregate the counts
DataStream<WordWithCount> windowCounts = text
.flatMap((String value, Collector<WordWithCount> out) -> {
for (String word : value.split("\\s")) {
out.collect(new WordWithCount(word, 1L));
}
})
.keyBy("word")
.timeWindow(Time.seconds(5))
.reduce(new ReduceFunction<WordWithCount>() {
@Override
public WordWithCount reduce(WordWithCount a, WordWithCount b) {
return new WordWithCount(a.word, a.count + b.count);
}
});
// print the results with a single thread, rather than in parallel
//windowCounts.print().setParallelism(1);
env.execute("Socket Window WordCount");
List<HttpHost> httpHosts = new ArrayList<>();
httpHosts.add(new HttpHost("127.0.0.1", 9200, "http"));
httpHosts.add(new HttpHost("10.2.3.1", 9200, "http"));
ElasticsearchSink.Builder<String> esSinkBuilder = new ElasticsearchSink.Builder<>(
httpHosts,
new ElasticsearchSinkFunction<String>() {
public IndexRequest createIndexRequest(String element) {
Map<String, String> json = new HashMap<>();
json.put("data", element);
return Requests
.indexRequest()
.index("my-index")
.type("my-type")
.source(json);
}
@Override
public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
indexer.add(createIndexRequest(element));
}
}
);
windowCounts.addSink((SinkFunction<WordWithCount>) esSinkBuilder);
}
public static class WordWithCount {
public String word;
public long count;
public WordWithCount() {}
public WordWithCount(String word, long count) {
this.word = word;
this.count = count;
}
@Override
public String toString() {
return word + " : " + count;
}
}
}
After adding the dependency, it does not recognize the ElasticsearchSink class. I have added several different dependencies, but the problem is still not resolved. When importing:
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink
the import is underlined in red as unknown in the code.
my pom:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>org.apache.flink</groupId>
<artifactId>mavenproject1</artifactId>
<version>1.0-SNAPSHOT</version>
<packaging>jar</packaging>
<dependencies>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-elasticsearch6_2.11</artifactId>
<version>1.8.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-java_2.11</artifactId>
<version>1.8.1</version>
<scope>provided</scope>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-elasticsearch-base_2.11</artifactId>
<version>1.8.1</version>
</dependency>
<dependency>
<groupId>org.elasticsearch</groupId>
<artifactId>elasticsearch</artifactId>
<version>6.0.0-alpha1</version>
<!--<version>6.0.0-alpha1</version>-->
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-java</artifactId>
<version>1.8.1</version>
<type>jar</type>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-clients_2.11</artifactId>
<version>1.8.1</version>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-streaming-core</artifactId>
<version>0.8.1</version>
</dependency>
</dependencies>
<properties>
<project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
<maven.compiler.source>1.8</maven.compiler.source>
<maven.compiler.target>1.8</maven.compiler.target>
</properties>
Apache Flink version: 1.8.1
Elasticsearch version: 7.4.2
NetBeans version: 8.2
Java version: 8
Please help me.
Flink Elasticsearch Connector 7
Please find a working and detailed answer which I have provided here.
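That linked answer covers the Elasticsearch 7 connector. Purely to illustrate the general wiring with the elasticsearch6 connector used in the question, here is a minimal sketch in which the sink is built with esSinkBuilder.build(), attached with addSink(), and only then env.execute() is called; host, port, index, and type names are placeholders:
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import org.apache.flink.api.common.functions.RuntimeContext;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.streaming.connectors.elasticsearch.ElasticsearchSinkFunction;
import org.apache.flink.streaming.connectors.elasticsearch.RequestIndexer;
import org.apache.flink.streaming.connectors.elasticsearch6.ElasticsearchSink;
import org.apache.http.HttpHost;
import org.elasticsearch.action.index.IndexRequest;
import org.elasticsearch.client.Requests;

public class FlinkEsSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> lines = env.socketTextStream("localhost", 9999, "\n");

        List<HttpHost> httpHosts = new ArrayList<>();
        httpHosts.add(new HttpHost("127.0.0.1", 9200, "http"));

        ElasticsearchSink.Builder<String> esSinkBuilder = new ElasticsearchSink.Builder<>(
                httpHosts,
                new ElasticsearchSinkFunction<String>() {
                    @Override
                    public void process(String element, RuntimeContext ctx, RequestIndexer indexer) {
                        Map<String, String> json = new HashMap<>();
                        json.put("data", element);
                        IndexRequest request = Requests.indexRequest()
                                .index("my-index")
                                .type("my-type") // types are still used by the 6.x connector
                                .source(json);
                        indexer.add(request);
                    }
                });

        // Build the sink and attach it BEFORE execute(); execute() is the last call.
        lines.addSink(esSinkBuilder.build());
        env.execute("Flink to Elasticsearch sketch");
    }
}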

@StreamListener not receiving message from Kafka topic

I am able to send and receive the message using code:
@EnableBinding(Processor.class)
public class KafkaStreamsConfiguration {
@StreamListener(Processor.INPUT)
@SendTo(Processor.OUTPUT)
public String processMessage(String message) {
System.out.println("message = " + message);
return message.replaceAll("my", "your");
}
}
@RunWith(SpringRunner.class)
@SpringBootTest
@DirtiesContext
public class StreamApplicationIT {
private static String topicToPublish = "eventUpdateFromEventModel";
@BeforeClass
public static void setup() {
System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
}
@Autowired
private KafkaMessageSender<String> kafkaMessageSenderToTestErrors;
@Autowired
private KafkaMessageSender<EventNotificationDto> kafkaMessageSender;
@ClassRule
public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, topicToPublish);
@Autowired
private Processor pipe;
@Autowired
private MessageCollector messageCollector;
@Rule
public OutputCapture outputCapture = new OutputCapture();
@Test
public void working() {
pipe.input()
.send(MessageBuilder.withPayload("This is my message")
.build());
Object payload = messageCollector.forChannel(pipe.output())
.poll()
.getPayload();
assertEquals("This is your message", payload.toString());
}
@Test
public void non_working() {
kafkaMessageSenderToTestErrors.send(topicToPublish, "This was my message");
assertTrue(isMessageReceived("This was your message", 50));
}
private boolean isMessageReceived(final String msg, final int maxAttempt) {
return IntStream.rangeClosed(0, maxAttempt)
.peek(a -> {
try {
TimeUnit.MILLISECONDS.sleep(100);
} catch (InterruptedException e) {
fail();
}
}).anyMatch(i -> outputCapture.toString().contains(msg));
}
}
@Service
@Slf4j
public class KafkaMessageSender<T> {
private final KafkaTemplate<String, byte[]> kafkaTemplate;
private final ObjectWriter objectWriter;
public KafkaMessageSender(KafkaTemplate<String, byte[]> kafkaTemplate, ObjectMapper objectMapper) {
this.kafkaTemplate = kafkaTemplate;
this.objectWriter = objectMapper.writer();
}
public void send(String topicName, T payload) {
try {
kafkaTemplate.send(topicName, objectWriter.writeValueAsString(payload).getBytes());
} catch (JsonProcessingException e) {
log.info("error converting object into byte array {}", payload.toString().substring(0, 50));
}
log.info("sent payload to topic='{}'", topicName);
}
}
But when I send the message using kafkaTemplate to any topic, StreamListener doesn't receive the message.
spring.cloud.stream.bindings.input.group=test
spring.cloud.stream.bindings.input.destination=eventUpdateFromEventModel
my pom.xml:
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-stream-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-stream-test-support</artifactId>
<scope>test</scope>
</dependency>
<!-- Spring boot version -->
<spring.boot.version>1.5.7.RELEASE</spring.boot.version>
<spring-cloud.version>Edgware.SR3</spring-cloud.version>
<dependencyManagement>
<dependencies>
<dependency>
<!-- Import dependency management from Spring Boot -->
<groupId>org.springframework.boot</groupId>
<artifactId>spring-boot-dependencies</artifactId>
<version>${spring.boot.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
<dependency>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-dependencies</artifactId>
<version>${spring-cloud.version}</version>
<type>pom</type>
<scope>import</scope>
</dependency>
</dependencies>
</dependencyManagement>
Working:
Object payload = messageCollector.forChannel(pipe.output())
.poll()
.getPayload();
...
Not working:
KafkaTemplate
This is because you are using the TestBinder in your test, not the real Kafka broker and kafka binder.
The message collector is simply fetching it from the channel. If you want to test with a real Kafka broker, see the test-embedded-kafka sample app.
EDIT
I just tested the Ditmars (boot 1.5.x) version of the sample and it works fine...
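For reference, a rough sketch of the kind of test setup that sample uses: the embedded broker is wired into the real Kafka binder, and spring-cloud-stream-test-support (which activates the TestBinder) is kept off the test classpath. The property names below are the usual Kafka binder ones; double-check them against the Edgware-era docs for your binder version:
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.springframework.boot.test.context.SpringBootTest;
import org.springframework.kafka.test.rule.KafkaEmbedded;
import org.springframework.test.context.junit4.SpringRunner;

// Rough sketch: run the test against the real Kafka binder and an embedded broker.
// Requires that spring-cloud-stream-test-support is NOT on the test classpath,
// otherwise the TestBinder replaces the Kafka binder and KafkaTemplate sends
// never reach the @StreamListener.
@RunWith(SpringRunner.class)
@SpringBootTest
public class EmbeddedKafkaBinderIT {

    private static final String TOPIC = "eventUpdateFromEventModel";

    @ClassRule
    public static KafkaEmbedded embeddedKafka = new KafkaEmbedded(1, true, TOPIC);

    @BeforeClass
    public static void setup() {
        // Point both the binder and the KafkaTemplate at the embedded broker.
        System.setProperty("spring.cloud.stream.kafka.binder.brokers", embeddedKafka.getBrokersAsString());
        System.setProperty("spring.cloud.stream.kafka.binder.zkNodes", embeddedKafka.getZookeeperConnectionString());
        System.setProperty("spring.kafka.bootstrap-servers", embeddedKafka.getBrokersAsString());
    }

    @Test
    public void contextLoads() {
        // A real assertion would publish with KafkaTemplate and poll for the
        // transformed message, as in the question's non_working() test.
    }
}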

Groovy Spock test case failing which includes embedded Hazelcast

I am new to the Groovy Spock test framework and am trying to write my first test with it. I have a few questions about the test below, which keeps failing; I am not sure what to change in the code to make it work.
What I am trying to do:
One thing I am not understanding is when I should use a mock and when I should use a spy.
Under the then: section I am trying to call classUnderTest.loadFromFile() in order to test it.
In the CommonDataCache class, the instance variable hazelcastCache always shows null while I debug the test, but at the same time I see that cache gets a non-null object. Because of this I always get a null pointer exception, as shown in the error log below.
Can anybody suggest what I am missing to make this work?
Error log:
Members [1] {
Member [127.0.0.1]:5001 - a43cbef2-ddf7-431b-9300-2b53c3ea9294 this
}
Dec 13, 2017 3:53:53 PM com.hazelcast.core.LifecycleService
INFO: [127.0.0.1]:5001 [dev] [3.7.3] [127.0.0.1]:5001 is STARTED
Dec 13, 2017 3:53:54 PM com.hazelcast.internal.partition.impl.PartitionStateManager
INFO: [127.0.0.1]:5001 [dev] [3.7.3] Initializing cluster partition table arrangement...
Condition not satisfied:
commonDataCache.getFromCache("28ef4a8f-bfbc-4ad5-bc8a-88fd96ad82a8") != null
| | |
| null false
com.realdoc.symphony.common.CommonDataCache@144ab54
at com.realdoc.symphony.common.store.MemoryStoreManagerTest.populate hazlecast cache from symphony dat file(MemoryStoreManagerTest.groovy:65)
Dec 13, 2017 3:53:54 PM com.hazelcast.instance.Node
INFO: [127.0.0.1]:5001 [dev] [3.7.3] Running shutdown hook... Current state: ACTIVE
This is my Groovy test class:
import org.springframework.core.io.ClassPathResource
import spock.lang.Specification
import spock.lang.Subject
import static io.dropwizard.testing.FixtureHelpers.fixture
class MemoryStoreManagerTest extends Specification {
/**
* Mock any config DTOs that carry static configuration data
**/
def dw = Mock(SymphonyConfig)
def cacheConfig = Mock(CacheConfig)
/**
* This has to be spied because there is an actual call happening in the target method which converts
* a JSON string into a MemoryStoreFileData DTO object
*/
def jsonUtils = Spy(JsonUtils)
def hazelcastInstance = TestHazelcastInstanceFactory.newInstance().newHazelcastInstance()
/**
* This class is under test
**/
@Subject
def commonDataCache = new CommonDataCache(hazelcastInstance: hazelcastInstance, hazelcastCache: hazelcastInstance.getMap("default"), config: dw)
/**
* This class is under test
**/
@Subject
def classUnderTest = new MemoryStoreManager(dw:dw, jsonUtils: jsonUtils, commonDataCache: commonDataCache)
/**
* Test whether populating symphony.dat file into hazelcast cache is working
*/
def "populate hazlecast cache from symphony dat file"() {
setup:
def datFile = fixture("symphony.dat")
def resource = new ClassPathResource("symphony.dat")
def file = resource.getFile()
when:
cacheConfig.getStoreLocation() >> ""
cacheConfig.getStoreFileName() >> "symphony.dat"
dw.getUseHazelcastCache() >> true
dw.getCacheConfig() >> cacheConfig
cacheConfig.getFile() >> file
commonDataCache.postConstruct()
then:
classUnderTest.loadFromFile()
expect:
commonDataCache.getFromCache("28ef4a8f-bfbc-4ad5-bc8a-88fd96ad82a8") != null
}
}
This is my target class, on which I am trying to test the loadFromFile() method:
@Component
public class MemoryStoreManager {
private static final Logger LOG = LoggerFactory.getLogger(MemoryStoreManager.class);
@Autowired
SymphonyConfig dw;
@Autowired
JsonUtils jsonUtils;
@Autowired
CommonDataCache commonDataCache;
private final Properties properties = new Properties();
@PostConstruct
public void loadFromFile() {
File file = dw.getCacheConfig().getFile();
LOG.info("Loading Data from file-{}", file.getAbsolutePath());
FileInputStream inStream = null;
try {
if (!file.exists()) {
Files.createFile(file.toPath());
}
inStream = new FileInputStream(file);
properties.load(inStream);
String property = properties.getProperty("data");
MemoryStoreFileData fileData;
if (StringUtils.isNotEmpty(property)) {
fileData = jsonUtils.jsonToObject(property, MemoryStoreFileData.class);
} else {
fileData = new MemoryStoreFileData(Collections.emptyMap(), Collections.emptyMap());
}
Long lastUpdatedTimeInFile = fileData.getLastUpdatedTime();
LOG.info("Last updated time in File-{}", lastUpdatedTimeInFile);
Long lastUpdatedTimeInCache = (Long) commonDataCache.getFromCache("lastUpdatedTime");
LOG.info("Last updated time in Cache-{}", lastUpdatedTimeInCache);
Map<String, DocData> loadedMap = fileData.getDocDataMap();
if (MapUtils.isEmpty(loadedMap)) {
loadedMap = new HashMap<>();
}
Map<String, ProcessStatusDto> processStatusMap = fileData.getProcessStatusMap();
if (MapUtils.isEmpty(processStatusMap)) {
processStatusMap = new HashMap<>();
}
if (lastUpdatedTimeInFile != null && (lastUpdatedTimeInCache == null || lastUpdatedTimeInCache < lastUpdatedTimeInFile)) {
LOG.info("Overwriting data from File");
commonDataCache.addAllToCache(loadedMap, processStatusMap);
} else {
String requestId;
DocData fileDocData;
DocData cacheDocData;
Map<String, String> filePageStatusMap;
Map<String, String> cachePageStatusMap;
String pageId;
String fileStatus;
String cacheStatus;
for (Entry<String, DocData> entry : loadedMap.entrySet()) {
requestId = entry.getKey();
fileDocData = entry.getValue();
cacheDocData = (DocData) commonDataCache.getFromCache(requestId);
filePageStatusMap = fileDocData.getPageStatusMap();
cachePageStatusMap = cacheDocData.getPageStatusMap();
for (Entry<String, String> pageStatus : filePageStatusMap.entrySet()) {
pageId = pageStatus.getKey();
fileStatus = pageStatus.getValue();
cacheStatus = cachePageStatusMap.get(pageId);
if (StringUtils.equals("IN_PROCESS", cacheStatus) && !StringUtils.equals("IN_PROCESS", fileStatus)) {
cachePageStatusMap.put(pageId, fileStatus);
LOG.info("PageId: {} status: {} updated", pageId, fileStatus);
}
}
commonDataCache.addToCache(requestId, cacheDocData);
}
}
} catch (Exception e) {
LOG.error("ErrorCode-{}, Component-{}, Message-{}. Error Loading cache data from file-{}. Exiting system", "OR-51010", "ORCHESTRATION", "Symphony cache loading exception", file.getAbsoluteFile(), e);
System.exit(0);
}
}
}
This is my cache utility class, where the store and retrieve methods are defined.
@Component
public class CommonDataCache {
private static final Logger LOG = LoggerFactory.getLogger(CommonDataCache.class);
@Autowired
HazelcastInstance hazelcastInstance;
@Autowired
SymphonyConfig config;
public static String LAST_UPDATED_TIME = "lastUpdatedTime";
private IMap<String, Object> hazelcastCache = null;
private boolean useHazelcast = false;
private final Map<String, Object> cache = new ConcurrentHashMap<>();
@PostConstruct
public void postConstruct() {
hazelcastCache = hazelcastInstance.getMap("default");
// Enable only if logging level is DEBUG
if (LOG.isDebugEnabled()) {
hazelcastCache.addEntryListener(new HazelcastMapListener(), true);
}
useHazelcast = config.getUseHazelcastCache();
}
public Map<String, Object> getAllDataFromCache() {
return hazelcastCache;
}
public void addToCache(String key, Object value) {
if (useHazelcast) {
hazelcastCache.put(key, value);
hazelcastCache.put(LAST_UPDATED_TIME, System.currentTimeMillis());
} else {
cache.put(key, value);
cache.put(LAST_UPDATED_TIME, System.currentTimeMillis());
}
}
public Object getAndRemoveFromCache(String key) {
if (useHazelcast) {
return hazelcastCache.remove(key);
} else {
return cache.remove(key);
}
}
public Object getFromCache(String key) {
if (useHazelcast) {
return hazelcastCache.get(key);
} else {
return cache.get(key);
}
}
/**
*
* @param cacheDataMap
*/
public void addAllToCache(Map<String, DocData> cacheDataMap, Map<String, ProcessStatusDto> processStatusMap) {
hazelcastCache.putAll(cacheDataMap);
hazelcastCache.putAll(processStatusMap);
hazelcastCache.put(LAST_UPDATED_TIME, System.currentTimeMillis());
}
public void lockKey(String key) {
if (useHazelcast) {
hazelcastCache.lock(key);
}
}
public void unlockKey(String key) {
if (useHazelcast) {
hazelcastCache.unlock(key);
}
}
public Map<String, Object> getByKeyContains(String keyString) {
Map<String, Object> values;
if (useHazelcast) {
Set<String> foundKeys = hazelcastCache.keySet(entry -> ((String)entry.getKey()).contains(keyString));
values = hazelcastCache.getAll(foundKeys);
} else {
values = Maps.filterEntries(cache, entry -> entry.getKey().contains(keyString));
}
return values;
}
}
Here are the Maven dependencies for the Groovy tests.
<!-- Dependencies for GROOVY TEST -->
<dependency>
<groupId>com.hazelcast</groupId>
<artifactId>hazelcast</artifactId>
<version>${hazelcast.version}</version>
<scope>provided</scope>
</dependency>
<dependency>
<groupId>com.hazelcast</groupId>
<artifactId>hazelcast</artifactId>
<version>${hazelcast.version}</version>
<classifier>tests</classifier>
<scope>test</scope>
</dependency>
<!-- GROOVY TEST FRAMEWORK -->
<dependency>
<groupId>org.aspectj</groupId>
<artifactId>aspectjweaver</artifactId>
<version>${aspectj.version}</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>${junit.version}</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-test</artifactId>
<version>${org.springframework.version}</version>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-core</artifactId>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-assets</artifactId>
</dependency>
<dependency>
<groupId>io.dropwizard</groupId>
<artifactId>dropwizard-testing</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>realdoc</groupId>
<artifactId>dropwizard-spring</artifactId>
<version>${realdoc.dropwizard-spring.version}</version>
</dependency>
<dependency>
<groupId>org.codehaus.groovy</groupId>
<artifactId>groovy-all</artifactId>
<!-- any version of Groovy >= 1.5.0 should work here -->
<version>${groovy-all.version}</version>
<!--<scope>test</scope>-->
</dependency>
<dependency>
<groupId>org.spockframework</groupId>
<artifactId>spock-core</artifactId>
<version>${spock.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.spockframework</groupId>
<artifactId>spock-spring</artifactId>
<version>${spock.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>cglib</groupId>
<artifactId>cglib-nodep</artifactId>
<version>${cglib-nodep.version}</version>
<scope>test</scope>
</dependency>
<!-- GROOVY TEST FRAMEWORK -->
<build>
<testSourceDirectory>src/test/groovy</testSourceDirectory>
<testResources>
<testResource>
<directory>src/test/resources</directory>
<filtering>true</filtering>
</testResource>
</testResources>
...
...
</build>

NoNodeAvailableException in Elasticsearch in one IntelliJ project but not in another with the same programmatic and elasticsearch.yml configuration and code

I know questions related to NoNodeAvailableException have already been asked multiple times on this platform. I have followed all the solutions provided by experts previously, yet none of them work for me. The problem with the exception I get is unique.
I am facing NoNodeAvailableException in my Java client. The usual solution is to ensure consistency between the configuration set programmatically and the one present in elasticsearch.yml. I have already ensured this.
Just to be on the safe side, I created another IntelliJ project with the same Maven dependencies, the same Elasticsearch client code and the same configuration, and I didn't get NoNodeAvailableException in it.
I could work with this new IntelliJ project by moving the code over from my previous IntelliJ project, but I would lose all the commits I previously had and the other services written in my old project.
Old client code:
public class ElasticSearchTest {
private static TransportClient client;
public static void main(String[] args) {
try {
Settings elasticsearchSettings = Settings.builder().put("cluster.name", "elasticsearch_cse").put("node.name","elasticsearch_cse_data").build();
client = new PreBuiltTransportClient(elasticsearchSettings)
.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300))
.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300));
String sampleWebsite = "www.samplewebsite.com";
List<PageModel> pages = new ArrayList<PageModel>();
pages.add(new PageModel("www.samplewebsite.com/page1", "darkknight wins a lot", "First page"));
pages.add(new PageModel("www.samplewebsite.com/page1/page2", "superman survives", "Second page"));
pages.add(new PageModel("www.samplewebsite.com/page3", "wonderwoman releases", "third page"));
DomainModel domainModel = new DomainModel(sampleWebsite, pages);
insertIndicesFromDomain(domainModel);
boolean isResultFound = false;
List<PageModel> pageModels = getSearchResults(domainModel, "superman");
for(PageModel pageModel: pageModels){
if(pageModel.getPageContent().equals("superman survives")){
isResultFound = true;
System.out.println(pageModel);
}
}
// closeService();
} catch (UnknownHostException e) {
e.printStackTrace();
}
}
public static void insertIndicesFromDomain(DomainModel domainModel){
int i = 1;
for(PageModel pageModel: domainModel.getPages()){
client.prepareIndex("websiteindex", domainModel.getDomainName(), i + "").setSource(pageModel.getPageModelInMapFormat()).execute().actionGet();
i += 1;
System.out.println(i);
}
}
public static List<PageModel> getSearchResults(DomainModel domainModel, String searchKeyWords){
SearchResponse searchResponse = client.prepareSearch("websiteindex").setTypes(domainModel.getDomainName()).setSearchType(SearchType.QUERY_AND_FETCH)
.setQuery(QueryBuilders.termQuery("content", searchKeyWords)).execute().actionGet();
SearchHit[] results = searchResponse.getHits().getHits();
List<PageModel> pageModels = new ArrayList<PageModel>();
for(SearchHit searchHit: results){
pageModels.add(new PageModel(searchHit.getSource()));
}
return pageModels;
}
public static void getIndices(){
ImmutableOpenMap<String, IndexMetaData> indicies = client.admin().cluster().prepareState().get().getState().getMetaData().getIndices();
for(ObjectObjectCursor<String, IndexMetaData> entry: indicies){
System.out.printf(entry.key + " " + entry.value);
}
}
public static void closeService(){
client.close();
}
}
StackTrace of old client code:
Exception in thread "main" NoNodeAvailableException[None of the configured nodes are available: [{#transport#-1}{H90yHKj8R_aa0MeTciqjvA}{localhost}{127.0.0.1:9300}]]
at org.elasticsearch.client.transport.TransportClientNodesService.ensureNodesAreAvailable(TransportClientNodesService.java:347)
at org.elasticsearch.client.transport.TransportClientNodesService.execute(TransportClientNodesService.java:245)
at org.elasticsearch.client.transport.TransportProxyClient.execute(TransportProxyClient.java:59)
at org.elasticsearch.client.transport.TransportClient.doExecute(TransportClient.java:363)
at org.elasticsearch.client.support.AbstractClient.execute(AbstractClient.java:408)
at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:80)
at org.elasticsearch.action.ActionRequestBuilder.execute(ActionRequestBuilder.java:54)
at net.media.sitesearch.models.ElasticSearchTest.insertIndicesFromDomain(ElasticSearchTest.java:66)
at net.media.sitesearch.models.ElasticSearchTest.main(ElasticSearchTest.java:42)
New client Code:
public class Main {
private static TransportClient client;
public static void main(String[] args) {
try {
Settings elasticsearchSettings = Settings.builder().put("cluster.name", "elasticsearch_cse").put("node.name","elasticsearch_cse_data").build();
client = new PreBuiltTransportClient(elasticsearchSettings)
.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300))
.addTransportAddress(new InetSocketTransportAddress(InetAddress.getByName("localhost"), 9300));
String sampleWebsite = "www.samplewebsite.com";
List<PageModel> pages = new ArrayList<PageModel>();
pages.add(new PageModel("www.samplewebsite.com/page1", "darkknight wins a lot", "First page"));
pages.add(new PageModel("www.samplewebsite.com/page1/page2", "superman survives", "Second page"));
pages.add(new PageModel("www.samplewebsite.com/page3", "wonderwoman releases", "third page"));
DomainModel domainModel = new DomainModel(sampleWebsite, pages);
insertIndicesFromDomain(domainModel);
boolean isResultFound = false;
List<PageModel> pageModels = getSearchResults(domainModel, "superman");
for(PageModel pageModel: pageModels){
if(pageModel.getPageContent().equals("superman survives")){
isResultFound = true;
System.out.println(pageModel);
}
}
// closeService();
} catch (UnknownHostException e) {
e.printStackTrace();
}
}
public static void insertIndicesFromDomain(DomainModel domainModel){
int i = 1;
for(PageModel pageModel: domainModel.getPages()){
client.prepareIndex("websiteindex", domainModel.getDomainName(), i + "").setSource(pageModel.getPageModelInMapFormat()).execute().actionGet();
i += 1;
System.out.println(i);
}
}
public static List<PageModel> getSearchResults(DomainModel domainModel, String searchKeyWords){
SearchResponse searchResponse = client.prepareSearch("websiteindex").setTypes(domainModel.getDomainName()).setSearchType(SearchType.QUERY_AND_FETCH)
.setQuery(QueryBuilders.termQuery("content", searchKeyWords)).execute().actionGet();
SearchHit[] results = searchResponse.getHits().getHits();
List<PageModel> pageModels = new ArrayList<PageModel>();
for(SearchHit searchHit: results){
pageModels.add(new PageModel(searchHit.getSource()));
}
return pageModels;
}
public static void getIndices(){
ImmutableOpenMap<String, IndexMetaData> indicies = client.admin().cluster().prepareState().get().getState().getMetaData().getIndices();
for(ObjectObjectCursor<String, IndexMetaData> entry: indicies){
System.out.printf(entry.key + " " + entry.value);
}
}
public static void closeService(){
client.close();
}
}
Output of new client code:
2
3
4
PageModel{pageUrl='www.samplewebsite.com/page1/page2', pageContent='superman survives', pageTitle='Second page', map={title=Second page, url=www.samplewebsite.com/page1/page2, content=superman survives}}
The only difference between my old code and the new code is that the old code is under a Git working tree and the new one is not.
New edit:
I found another important difference between my two projects. In my new IntelliJ project, I haven't shipped all the other services from the old one and have only kept the necessary Elasticsearch client code. Hence, the new IntelliJ project does not contain all the dependencies of the old project, only the single Elasticsearch dependency.
In my new client code, which contains only the Elasticsearch dependency, I don't get NoNodeAvailableException. Could the interaction between the Elasticsearch dependency and the other Maven dependencies be an indirect reason for that exception?
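One cheap way to check whether the extra dependencies in the old project pull in a conflicting jar (a different Netty or logging version, for example) is to compare the mvn dependency:tree output of both projects, or to print where the relevant classes are actually loaded from at runtime. A small diagnostic sketch, purely illustrative; the classes checked here are assumptions about likely conflict points:
import org.elasticsearch.client.transport.TransportClient;

public class ClasspathCheck {
    public static void main(String[] args) throws ClassNotFoundException {
        // Print the jar each class is loaded from; run this in both projects and compare.
        printOrigin(TransportClient.class);
        printOrigin(Class.forName("io.netty.channel.Channel"));
        printOrigin(Class.forName("org.apache.logging.log4j.Logger"));
    }

    static void printOrigin(Class<?> clazz) {
        System.out.println(clazz.getName() + " -> "
                + clazz.getProtectionDomain().getCodeSource().getLocation());
    }
}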
New pom.xml
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>com.sampleelasticsearch</groupId>
<artifactId>SampleES</artifactId>
<version>1.0-SNAPSHOT</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>5.5.1</version>
</dependency>
</dependencies>
</project>
Old pom.xml:
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
<modelVersion>4.0.0</modelVersion>
<groupId>net.media.siteresearch</groupId>
<artifactId>SiteSearch</artifactId>
<version>1.0-SNAPSHOT</version>
<build>
<plugins>
<plugin>
<groupId>org.apache.maven.plugins</groupId>
<artifactId>maven-compiler-plugin</artifactId>
<configuration>
<source>1.8</source>
<target>1.8</target>
</configuration>
</plugin>
</plugins>
</build>
<dependencies>
<dependency>
<groupId>com.sparkjava</groupId>
<artifactId>spark-core</artifactId>
<version>2.5</version>
</dependency>
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.12</version>
</dependency>
<dependency>
<groupId>net.media.crawler</groupId>
<artifactId>MnetCrawler</artifactId>
<version>0.995</version>
</dependency>
<dependency>
<groupId>org.elasticsearch.client</groupId>
<artifactId>transport</artifactId>
<version>5.5.1</version>
</dependency>
<dependency>
<groupId>org.apache.logging.log4j</groupId>
<artifactId>log4j-core</artifactId>
<version>2.8.2</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
<version>1.7.24</version>
</dependency>
<dependency>
<!-- jsoup HTML parser library # https://jsoup.org/ -->
<groupId>org.jsoup</groupId>
<artifactId>jsoup</artifactId>
<version>1.10.3</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-core</artifactId>
<version>2.8.9</version>
</dependency>
<dependency>
<groupId>com.fasterxml.jackson.core</groupId>
<artifactId>jackson-databind</artifactId>
<version>2.5.3</version>
</dependency>
</dependencies>
</project>
If anyone wants to see the logs from the Elasticsearch server or anything else, please ask (I have not added them because the question is already filled with a lot of relevant snippets from my perspective, and the details would get cluttered).
