RabbitMQ Play Java Akka

I am using Play Framework 2.2.2 and I am implementing a RabbitMQ consumer application with Java Akka (the Akka actor system). I have a MainActor that is initialized when the Play application comes up, via Global.onStart. The MainActor creates a RabbitMQ channel and then starts consuming from a queue. Each message in that queue is the name of another queue that has to be assigned to a child actor, which then starts consuming from the queue named in the message. So essentially I have one MainActor subscribed to ONE RabbitMQ queue, and several child actors created by the MainActor, each subscribed to its own RabbitMQ queue. The problem is that I can't bring up more than 7 child actors for some reason. I suspect it is the while(true) construct in the child actor that waits for messages from RabbitMQ. Here is my implementation:
Main Actor:
import play.Logger;
import com.typesafe.config.ConfigFactory;
import java.io.IOException;
import akka.actor.Props;
import akka.actor.UntypedActor;
import akka.actor.ActorRef;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.QueueingConsumer;
import play.libs.Akka;
import util.RabbitMQConnection;
public class MainActor extends UntypedActor {
@Override
public void onReceive(Object msg) throws Exception {
try{
Connection connection = RabbitMQConnection.getConnection();
Channel channel = connection.createChannel();
String main_queue_name = ConfigFactory.load().getString("rabbitmq.default_queue");
channel.queueDeclare(main_queue_name, false, false, false, null);
QueueingConsumer consumer = new QueueingConsumer(channel);
channel.basicConsume(main_queue_name, true, consumer);
while (true) {
QueueingConsumer.Delivery delivery = consumer.nextDelivery();
String message = new String(delivery.getBody());
System.out.println(" [x] Received '" + message + "'");
ActorRef childActor = getContext().actorOf(Props.create(childActor.class));
childActor.tell(message, getSelf());
}
}catch (Exception e){
System.out.println(e.toString());
}
}
}
Child Actor:
import play.Logger;
import com.typesafe.config.ConfigFactory;
import java.io.IOException;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.ObjectWriter;
import play.libs.Akka;
import play.libs.Json;
import akka.actor.UntypedActor;
import com.rabbitmq.client.ConnectionFactory;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.QueueingConsumer;
import util.RabbitMQConnection;
public class childActor extends UntypedActor {
@Override
public void onReceive(Object msg) throws Exception {
ObjectWriter ow = new ObjectMapper().writer().withDefaultPrettyPrinter();
String queue_name = ow.writeValueAsString(msg);
try{
Connection connection = RabbitMQConnection.getConnection();
Channel channel = connection.createChannel();
channel.queueDeclare(queue_name, false, false, false, null);
QueueingConsumer consumer = new QueueingConsumer(channel);
channel.basicConsume(queue_name, true, consumer);
while (true) {
QueueingConsumer.Delivery delivery = consumer.nextDelivery();
String message = new String(delivery.getBody());
JsonNode jsonMsg = Json.parse(message);
// Call some function to process the message
}
}catch (Exception e){
System.out.println(e.toString());
}
}
}

I think you are not using the actors correctly in this case. In my opinion you should not have a while(true) inside the receive method of an actor. Also, QueueingConsumer has been deprecated, and the RabbitMQ folks recommend implementing your consumer using the Consumer interface or the default no-op implementation DefaultConsumer.
The way I would do it is:
Implement a customized consumer for rabbitmq that will send a message to the actor every time it gets something.
Use that implementation for the main actor. Send the queue name as a message and start a new child actor with the queue name as a constructor field.
Use that implementation for the child actors. Send the message received to the actor and do the JSON parsing in the actor itself.
Some code here: (WARNING: NOT COMPILED OR TESTED)
Custom rabbitmq consumer:
import akka.actor.ActorRef;
import com.rabbitmq.client.AMQP;
import com.rabbitmq.client.Channel;
import com.rabbitmq.client.DefaultConsumer;
import com.rabbitmq.client.Envelope;

public class MyCustomRabbitMQConsumer extends DefaultConsumer {
    private final ActorRef destinationActor;

    public MyCustomRabbitMQConsumer(Channel channel, ActorRef destinationActor) {
        super(channel); // DefaultConsumer needs the channel it consumes from
        this.destinationActor = destinationActor;
    }

    @Override
    public void handleDelivery(String consumerTag, Envelope envelope, AMQP.BasicProperties properties, byte[] body) {
        // Forward the raw message body to the actor as a String
        destinationActor.tell(new String(body), ActorRef.noSender());
    }
}
Main actor:
import com.typesafe.config.ConfigFactory;
import akka.actor.Props;
import akka.actor.UntypedActor;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Channel;
import util.RabbitMQConnection;

public class MainActor extends UntypedActor {

    private MyCustomRabbitMQConsumer rabbitConsumer;

    @Override
    public void preStart() throws Exception {
        Connection connection = RabbitMQConnection.getConnection();
        Channel channel = connection.createChannel();
        String main_queue_name = ConfigFactory.load().getString("rabbitmq.default_queue");
        channel.queueDeclare(main_queue_name, false, false, false, null);
        // Subscribe this actor to the main queue; every delivery becomes a message to self
        rabbitConsumer = new MyCustomRabbitMQConsumer(channel, getSelf());
        channel.basicConsume(main_queue_name, true, rabbitConsumer);
    }

    @Override
    public void onReceive(Object msg) throws Exception {
        if (msg instanceof String) {
            String queueName = (String) msg;
            System.out.println(" [x] Received '" + queueName + "'");
            // Spawn one child actor per queue name received on the main queue
            getContext().actorOf(Props.create(ChildActor.class, queueName));
        } else {
            unhandled(msg);
        }
    }
}
ChildActor:
import akka.actor.UntypedActor;
import com.fasterxml.jackson.databind.JsonNode;
import com.rabbitmq.client.Connection;
import com.rabbitmq.client.Channel;
import play.libs.Json;
import util.RabbitMQConnection;

public class ChildActor extends UntypedActor {

    private MyCustomRabbitMQConsumer rabbitConsumer;
    private final String queueName;

    public ChildActor(String queueName) {
        this.queueName = queueName;
    }

    @Override
    public void preStart() throws Exception {
        Connection connection = RabbitMQConnection.getConnection();
        Channel channel = connection.createChannel();
        channel.queueDeclare(queueName, false, false, false, null);
        // Subscribe this child actor to its own queue
        rabbitConsumer = new MyCustomRabbitMQConsumer(channel, getSelf());
        channel.basicConsume(queueName, true, rabbitConsumer);
    }

    @Override
    public void onReceive(Object msg) throws Exception {
        if (msg instanceof String) {
            String strMsg = (String) msg;
            JsonNode jsonMsg = Json.parse(strMsg);
            // Call some function to process the message
        } else {
            unhandled(msg);
        }
    }
}
This should work for any number of child actors.
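For completeness, a minimal sketch of how the MainActor could be created when the Play application comes up, from Global.onStart as the question describes (assuming Play 2.2's play.libs.Akka helper; the actor name "mainActor" is only an example):
import play.Application;
import play.GlobalSettings;
import play.libs.Akka;
import akka.actor.Props;

public class Global extends GlobalSettings {
    @Override
    public void onStart(Application app) {
        // Create the top-level RabbitMQ consumer actor once, at application startup
        Akka.system().actorOf(Props.create(MainActor.class), "mainActor");
    }
}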

Related

Cannot read email body with Spring: javax.mail.FolderClosedException

I'm trying to listen to my Gmail inbox for incoming mails. Every time a new mail arrives, I want to see its subject and content.
So far, I have this:
import java.io.IOException;
import javax.mail.BodyPart;
import javax.mail.Folder;
import javax.mail.internet.ContentType;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMessage;
import javax.mail.internet.MimeMultipart;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.mail.util.MimeMessageParser;
import org.springframework.context.support.ClassPathXmlApplicationContext;
import org.springframework.integration.channel.DirectChannel;
import org.springframework.integration.mail.transformer.MailToStringTransformer;
import org.springframework.messaging.Message;
import org.springframework.messaging.MessageHandler;
import org.springframework.messaging.MessagingException;
public class GmailInboundImapIdleAdapterTestApp {
private static Log logger = LogFactory.getLog(GmailInboundImapIdleAdapterTestApp.class);
public static void main (String[] args) throws Exception {
@SuppressWarnings("resource")
ClassPathXmlApplicationContext ac = new ClassPathXmlApplicationContext("/META-INF/spring/integration/gmail-imap-idle-config.xml");
DirectChannel inputChannel = ac.getBean("receiveChannel", DirectChannel.class);
inputChannel.subscribe(new MessageHandler() {
public void handleMessage(Message<?> message){
MimeMessage mm = (MimeMessage) message.getPayload();
try {
System.out.println("Subject: "+mm.getSubject());
System.out.println("Body: "+readPlainContent(mm));
}
catch (javax.mail.MessagingException e) {
System.out.println("MessagingException: "+e.getMessage());
e.printStackTrace();
}
catch (Exception e) {
System.out.println("Exception: "+e.getMessage());
e.printStackTrace();
}
}
});
}
private static String readHtmlContent(MimeMessage message) throws Exception {
return new MimeMessageParser(message).parse().getHtmlContent();
}
private static String readPlainContent(MimeMessage message) throws Exception {
return new MimeMessageParser(message).parse().getPlainContent();
}
}
It can read the mail subject correctly, but no luck with the mail body: javax.mail.FolderClosedException hit me. How can I fix this?
As Gary said: use simple-content="true" or, since recently, autoCloseFolder = false: https://docs.spring.io/spring-integration/docs/5.2.0.RELEASE/reference/html/mail.html#mail-inbound
Starting with version 5.2, the autoCloseFolder option is provided on the mail receiver. Setting it to false doesn't close the folder automatically after a fetch; instead, an IntegrationMessageHeaderAccessor.CLOSEABLE_RESOURCE header (see the MessageHeaderAccessor API for more information) is populated into every message produced from the channel adapter. It is the target application's responsibility to call close() on this header whenever it is necessary in the downstream flow:
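For illustration, a rough sketch of what that downstream close could look like inside the handleMessage callback from the question when autoCloseFolder = false is set (assuming Spring Integration 5.2+; only the CLOSEABLE_RESOURCE header named above is relied on, the rest is example code):
// requires: import java.io.Closeable; import org.springframework.integration.IntegrationMessageHeaderAccessor;
Closeable folder = message.getHeaders()
        .get(IntegrationMessageHeaderAccessor.CLOSEABLE_RESOURCE, Closeable.class);
if (folder != null) {
    try {
        folder.close(); // release the IMAP folder once the MimeMessage content has been read
    } catch (IOException e) {
        e.printStackTrace();
    }
}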

IBM MQ listener service - onMessage not getting triggered

I want to read messages from IBM MQ and just print them.
//my main class
import javax.jms.JMSException;
import javax.jms.MessageConsumer;
import javax.jms.Queue;
import javax.jms.Session;
import com.ibm.mq.jms.MQQueue;
import com.ibm.mq.jms.MQQueueConnection;
import com.ibm.mq.jms.MQQueueConnectionFactory;
import com.ibm.mq.jms.MQQueueSession;
import com.ibm.msg.client.wmq.WMQConstants;
public class QueueConnector {
public static void main(String[] args) throws JMSException {
MQQueueConnectionFactory qcf = new MQQueueConnectionFactory();
MQQueueConnection qc;
Queue queue;
MQQueueSession queueSession;
MessageConsumer consumer;
qcf.setHostName ("XYZ");
qcf.setPort (Integer.parseInt("abc"));
qcf.setQueueManager ("ABC");
qcf.setChannel ("IJK");
qcf.setTransportType (WMQConstants.WMQ_CM_CLIENT);
qc = (MQQueueConnection) qcf.createQueueConnection ();
queue = new MQQueue("QUEUE_NAME");
queueSession = (MQQueueSession) qc.createQueueSession (false,Session.AUTO_ACKNOWLEDGE);
consumer = queueSession.createConsumer(queue);
Listener listener = new Listener();
consumer.setMessageListener(listener);
qc.start();
}
}
// listener class
import javax.jms.JMSException;
import javax.jms.Message;
import javax.jms.MessageListener;
import javax.jms.TextMessage;
public class Listener implements MessageListener {
@Override
public void onMessage(Message message) {
// TODO Auto-generated method stub
try {
System.out.println(((TextMessage)message).getText());
} catch (JMSException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
System.out.println("From Listener!");
}
}
The main class is getting terminated without calling onMessage, even if there are messages in the queue. Can anyone help me with this?
You need to keep your program running. When you reach the end of main - the program will exit and hence your listener won't fire.
Try placing new Scanner(System.in).nextLine(); at the end of your method to verify that it works. Of course, you may want to change this exit condition to something more appropriate (or actually close the scanner).
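For example, the end of the main method from the question could look something like this (the Scanner line is only a placeholder exit condition):
consumer.setMessageListener(listener);
qc.start();
// Keep the JVM alive so onMessage can fire for incoming messages
System.out.println("Listening for messages; press Enter to exit.");
new java.util.Scanner(System.in).nextLine();
queueSession.close();
qc.close();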

change client and handler from DatagramPacket to POJO

I want to modify the client handler to use Foo instead of Datagram -- what changes are required in the client itself?
Surely it's not necessary to strictly keep to datagrams to send and receive with Netty? The Factorial example uses BigInteger, so surely it's possible to use POJOs.
Any and all attempts to create a class like:
class FooClientHandler extends SimpleChannelInboundHandler<Foo> are non-starters for me; it literally won't send or receive with a server. (Yes, both client and server use similar handlers, generic classes with Foo.) So I'm coming at this now from working code.
What's the key distinction between the factorial handler and the datagram handler below? Or is the primary distinction in how it's used in the client?
client:
package net.bounceme.dur.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.DatagramPacket;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
import net.bounceme.dur.client.gui.MyProps;
public final class Client {
private static final Logger log = Logger.getLogger(Client.class.getName());
public void connect() throws InterruptedException {
MyProps p = new MyProps();
String host = p.getHost();
int port = p.getServerPort();
pingPongDatagram(host, port);
}
public void pingPongDatagram(String host, int port) throws InterruptedException {
EventLoopGroup group = new NioEventLoopGroup();
try {
Bootstrap b = new Bootstrap();
b.group(group)
.channel(NioDatagramChannel.class)
.option(ChannelOption.SO_BROADCAST, true)
.handler(new DatagramClientHandler());
Channel ch = b.bind(0).sync().channel();
ch.writeAndFlush(new DatagramPacket(
Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
new InetSocketAddress(host, port))).sync();
log.info("wrote packet");
if (!ch.closeFuture().await(5000)) {
log.warning("server timed out");
}
} finally {
group.shutdownGracefully();
}
}
}
handler:
package net.bounceme.dur.netty;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.socket.DatagramPacket;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
public class DatagramClientHandler extends SimpleChannelInboundHandler<DatagramPacket> {
private static final Logger log = Logger.getLogger(DatagramClientHandler.class.getName());
@Override
public void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
String response = msg.content().toString(CharsetUtil.UTF_8);
log.info(response);
DatagramPacket foo = new DatagramPacket(
Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
new InetSocketAddress("localhost", 4454));
ctx.writeAndFlush(foo);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
log.severe(cause.toString());
ctx.close();
}
}
I omitted the server code; it's almost exactly as in the Gandhi quote example.
What changes do I need to make to the client so that the handler can use Foo instead of DatagramPacket?
All I can say with certainty is that this client:
package net.bounceme.dur.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.DatagramPacket;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
import net.bounceme.dur.client.gui.MyProps;
import net.bounceme.dur.client.jdbc.Title;
public final class Client {
private static final Logger log = Logger.getLogger(Client.class.getName());
public void connect() throws InterruptedException {
MyProps p = new MyProps();
String host = p.getHost();
int port = p.getServerPort();
pingPongDatagram(host, port);
}
public void pingPongDatagram(String host, int port) throws InterruptedException {
EventLoopGroup group = new NioEventLoopGroup();
try {
Bootstrap b = new Bootstrap();
b.group(group)
.channel(NioDatagramChannel.class)
.option(ChannelOption.SO_BROADCAST, true)
.handler(new TitleClientHandler());
Channel ch = b.bind(0).sync().channel();
ch.writeAndFlush(new DatagramPacket(
Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
new InetSocketAddress(host, port))).sync();
ch.writeAndFlush(new Title());
log.info("wrote packets");
if (!ch.closeFuture().await(5000)) {
log.warning("server timed out");
}
} finally {
group.shutdownGracefully();
}
}
}
and handler:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.logging.Logger;
import net.bounceme.dur.client.jdbc.Title;
public class TitleClientHandler extends SimpleChannelInboundHandler<Title> {
private static final Logger log = Logger.getLogger(TitleClientHandler.class.getName());
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
log.severe(cause.toString());
ctx.close();
}
@Override
protected void channelRead0(ChannelHandlerContext chc, Title title) throws Exception {
log.info(title.toString());
}
}
do not, seemingly, communicate at all with the server, even when the server has been modified accordingly.

remote akka system port conflict

I am working on a Play Framework 2 system using Java. The Play framework uses its integrated Akka system to connect to a remote Akka system. The remote Akka system is made up of a master node and a worker node. Both systems are on the same computer, in the Eclipse Juno IDE.
I have configured two ports, 2552 for the master node and 2553 for the worker node; the Akka node on the Play 2 side picks its own port. The Akka system in the Play framework is expected to pass a message to the remote master node by remote lookup using the Akka configuration, and the master node in turn passes the message to the remote worker for processing, via a remote lookup as well. The master node and the worker node have their application.conf files in the following format:
src/main/resources/application.conf
However, on startup both the master node and the worker node decide to use port 2552 for their communication. I present the code snippets below:
This is the code for the Play framework application.conf file:
localNode {
akka {
actor {
provider = "akka.remote.RemoteActorRefProvider"
}
remote {
transport = "akka.remote.netty.NettyRemoteTransport"
netty {
hostname = "127.0.0.1"
port = 0
}
}
}
}
This is the code for the Play localNode:
package controllers;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Map;
import org.codehaus.jackson.JsonNode;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
import play.libs.F.Callback;
import play.mvc.WebSocket;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.serialization.Serialization;
import akka.serialization.SerializationExtension;
import com.typesafe.config.ConfigFactory;
import Com.RubineEngine.GesturePoints.*;
public class LocalNode {
ActorSystem csystem;
ActorRef localActor ;
public LocalNode() {
//We create the actor container and a child upon initialization
csystem = ActorSystem.create("LocalNode", ConfigFactory.load().getConfig("localNode"));
localActor = csystem.actorOf(new Props(LocalActor.class),"localActor");
}
public void connectMaster (final String classname)
{
localActor.tell(classname);
}
public void connectMaster ()
{
}
public void connectMaster (final WebSocket.In<JsonNode> in, final WebSocket.Out<JsonNode> out )
{
in.onMessage(new Callback<JsonNode>() {
public void invoke(JsonNode event) throws JsonParseException, JsonMappingException, IOException {
ObjectMapper mapper = new ObjectMapper();
@SuppressWarnings("unchecked")
Map<String,ArrayList<Object>> jsonMap = mapper.readValue(event, Map.class);
GesturePoints gp = new GesturePoints();
gp.setPoints(jsonMap);
localActor.tell(gp);
}
}); }
}
This is the code for the Akka actor in the Play framework:
package controllers;
import Com.RubineEngine.GesturePoints.*;
import akka.actor.ActorRef;
import akka.actor.UntypedActor;
import akka.event.Logging;
import akka.event.LoggingAdapter;
public class LocalActor extends UntypedActor {
/**
*
*/
ActorRef masterActor; // = getContext().actorFor("akka://MasterNode@127.0.0.1:2552/user/masterActor");
LoggingAdapter log = Logging.getLogger(getContext().system(), this);
@Override
public void onReceive(Object arg) throws Exception {
System.out.println(" Local Actor 1");
if(arg instanceof GesturePoints)
{ System.out.println(" local Actor 2");
masterActor.tell(" Welcome home " , getSelf());
System.out.println(" Local Actor 3");}
else
{unhandled(arg);}
}
public void preStart()
{
masterActor = getContext().actorFor("akka://MasterNode@127.0.0.1:2553/user/masterActor");
}
}
This is the code for the master node application.conf:
masterNode {
akka {
actor {
provider = "akka.remote.RemoteActorRefProvider"
}
remote {
transport = "akka.remote.netty.NettyRemoteTransport"
netty {
hostname = "127.0.0.1"
remote.netty.port = 2553
}
}
}
}
This is the code for the master node:
package Rubine_Cluster;
import java.util.Arrays;
import com.typesafe.config.ConfigFactory;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.kernel.Bootable;
/**
* Hello world!
*
*/
public class MasterNode implements Bootable
{
final ActorSystem system;
ActorRef masterActor;
public MasterNode() {
//Create a child actor of this actor upon initialization
system = ActorSystem.create("MasterNode", ConfigFactory.load()
.getConfig("masterNode"));
masterActor = system.actorOf(new Props(MasterActor.class),"masterActor");
}
public void startup() {
}
public void shutdown() {
system.shutdown();
}
}
This is the code for the master Akka actor:
package Rubine_Cluster;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Map;
import Com.RubineEngine.GesturePoints.*;
import akka.actor.*;
import akka.serialization.Serialization;
import akka.serialization.SerializationExtension;
import akka.serialization.Serializer;
public class MasterActor extends UntypedActor {
/**
*
*/
ActorRef worker1;
@Override
public void onReceive(Object message) throws Exception {
System.out.println(" Master Actor 5");
System.out.println(message);
if(message instanceof GesturePoints)
{ //GesturePoints gp = (GesturePoints) message;
System.out.println(" Master Actor 1");
try { worker1.tell(message, getSelf());
System.out.println(" Master Actor 2");
} catch (Exception e) {
getSender().tell(new akka.actor.Status.Failure(e), getSelf());
throw e;
}
}
else{ unhandled(message);}
}
public void preStart()
{
worker1 = getContext().actorFor("akka://WorkerNode@127.0.0.1:2552/user/workerActor");
}
}
This is the code for the worker node application.conf:
workerNode {
akka {
actor {
provider = "akka.remote.RemoteActorRefProvider"
}
remote {
transport = "akka.remote.netty.NettyRemoteTransport"
netty {
hostname = "127.0.0.1"
remote.netty.port = 2552
}
}
}
}
This is the code for the worker node:
package com.theta.gesture;
import com.typesafe.config.ConfigFactory;
import akka.actor.ActorRef;
import akka.actor.ActorSystem;
import akka.actor.Props;
import akka.kernel.Bootable;
public class WorkerNode implements Bootable{
ActorSystem system;
ActorRef worker;
WorkerNode(){
system = ActorSystem.create("WorkerNode", ConfigFactory.load()
.getConfig("workerNode"));
ActorRef workerActor = system.actorOf(new Props(WorkerActor.class),"workerActor");
}
public void shutdown() {
system.shutdown();
}
public void startup() {
}
}
This is the code for the actor in the worker project:
package com.theta.gesture;
import java.util.ArrayList;
import java.util.Map;
import Com.RubineEngine.GesturePoints.GesturePoints;
import akka.actor.*;
public class WorkerActor extends UntypedActor {
private static double DIST_SQ_THRESHOLD = 3 * 3; /* threshold to eliminate mouse jitter */
@Override
public void onReceive(Object msg) throws Exception {
if(msg instanceof GesturePoints)
{ GesturePoints message = (GesturePoints) msg;
initial_Theta(message);}
else {unhandled(msg);}
}
public void initial_Theta(GesturePoints p)
{ System.out.println(" Worker Actor 1");
if(p.getPoints().get("X").size() < 3) //The number of x coordinates as size
{ return;}
System.out.println(" Worker Actor 2");
double magsq,dx,dy, recip;
dx = (double) ((Integer)p.getPoints().get("x").get(2) - (Integer)p.getPoints().get("x").get(0)) ;
dy = ((Double)p.getPoints().get("y").get(2)) - ((Double)p.getPoints().get("y").get(0));
magsq = dx * dx + dy * dy;
if(magsq > DIST_SQ_THRESHOLD)
{
recip = 1/Math.sqrt(magsq);
double initial_cos = dx * recip;
System.out.println(" Worker Actor 3");
double initial_sin = dy * recip;
System.out.println("Feature cos " + initial_cos);
System.out.println("Gesture sin " + initial_sin);
}
} }
This is the console information on the worker node:
[INFO] [10/08/2012 12:12:44.486] [main] [ActorSystem(WorkerNode)] REMOTE:
RemoteServerStarted@akka://WorkerNode@127.0.0.1:2552
This is the console information on the master node:
[INFO] [10/08/2012 12:13:34.633] [main] [ActorSystem(MasterNode)] REMOTE:
RemoteServerStarted@akka://MasterNode@127.0.0.1:2552
Any idea about the possible cause of this situation, and a suggested solution, is dearly sought after.
Your "remote.netty.port = X" inside the nested sections should be "port = X"

Jboss Netty - Fails to send data continuously?

Using JBoss Netty, I'm trying to send data continuously to the connected client. In the example below,
I try to send the time every 5 seconds to the client, as soon as the client gets connected (channelConnected).
But this is not working. It works only if I comment out the while loop.
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.string.StringEncoder;
public class SRNGServer {
public static void main(String[] args) throws Exception {
// Configure the server.
ServerBootstrap bootstrap = new ServerBootstrap(
new NioServerSocketChannelFactory(
Executors.newCachedThreadPool(),
Executors.newCachedThreadPool()));
// Configure the pipeline factory.
bootstrap.setPipelineFactory(new SRNGServerPipelineFactoryP());
// Bind and start to accept incoming connections.
bootstrap.bind(new InetSocketAddress(8080));
}
private static class SRNGServerHandlerP extends SimpleChannelUpstreamHandler {
private static final Logger logger = Logger.getLogger(SRNGServerHandlerP.class.getName());
@Override
public void channelConnected(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
// Send greeting for a new connection.
e.getChannel().write("Welcome to " + InetAddress.getLocalHost().getHostName() + "!\r\n");
while(true){
e.getChannel().write("It is " + new Date() + " now.\r\n");
Thread.sleep(1000*5);
}
}
@Override
public void exceptionCaught(
ChannelHandlerContext ctx, ExceptionEvent e) {
logger.log(
Level.WARNING,
"Unexpected exception from downstream.",
e.getCause());
e.getChannel().close();
}
}
private static class SRNGServerPipelineFactoryP implements ChannelPipelineFactory {
public ChannelPipeline getPipeline() throws Exception {
// Create a default pipeline implementation.
ChannelPipeline pipeline = Channels.pipeline();
pipeline.addLast("encoder", new StringEncoder());
pipeline.addLast("handler", new SRNGServerHandlerP());
return pipeline;
}
}
}
The Netty documentation actually states that you should never make a Handler wait because it might eventually deadlock. The reason is that handler methods are called directly by I/O threads. One I/O thread in Netty performs multiple I/O operations in a sequence, so it's not one thread per operation.
In the channelConnected method you should start a new thread with a reference to the channel and make that thread send the time every 5 seconds. This would spawn one thread per connection.
Alternatively, you can have one single thread looping over a list of clients every 5 seconds and sending the time to each of them in a sequence.
Anyway, it's important to use a different thread for sending than the one that calls the Handler.
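A rough, uncompiled sketch of the thread-per-connection option, based on the handler from the question (also import org.jboss.netty.channel.Channel; error handling kept minimal):
@Override
public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
    // Send the greeting, then hand off the periodic writes to a separate thread
    final Channel channel = e.getChannel();
    channel.write("Welcome to " + InetAddress.getLocalHost().getHostName() + "!\r\n");
    new Thread(new Runnable() {
        public void run() {
            try {
                while (channel.isConnected()) {
                    channel.write("It is " + new Date() + " now.\r\n");
                    Thread.sleep(1000 * 5);
                }
            } catch (InterruptedException ignored) {
                // stop sending when interrupted
            }
        }
    }).start();
}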
For what it's worth, I figured out the solution and here's the working code. After the "write" of the time, I register the future with my ChannelFutureListener. Then from operationComplete I keep registering a new future for every write. This works for what I want to accomplish, without using any extra threads.
import java.net.InetSocketAddress;
import java.nio.channels.ClosedChannelException;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.string.StringEncoder;
public class SRNGServer {
public static void main(String[] args) throws Exception {
// Configure the server.
ServerBootstrap bootstrap = new ServerBootstrap(
new NioServerSocketChannelFactory(
Executors.newCachedThreadPool(),
//Executors.newCachedThreadPool()
Executors.newFixedThreadPool(2),2
));
// Configure the pipeline factory.
bootstrap.setPipelineFactory(new SRNGServerPipelineFactoryP());
// Bind and start to accept incoming connections.
bootstrap.bind(new InetSocketAddress(8080));
}
private static class SRNGServerHandlerP extends SimpleChannelUpstreamHandler {
private static final Logger logger = Logger.getLogger(SRNGServerHandlerP.class.getName());
@Override
public void channelConnected(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
// Send greeting for a new connection.
Channel ch=e.getChannel();
ChannelFuture writeFuture=e.getChannel().write("It is " + new Date() + " now.\r\n");
SRNGChannelFutureListener srngcfl=new SRNGChannelFutureListener();
writeFuture.addListener(srngcfl);
}
@Override
public void exceptionCaught(
ChannelHandlerContext ctx, ExceptionEvent e) {
logger.log(
Level.WARNING,
"Unexpected exception from downstream.",
e.getCause());
if(e.getCause() instanceof ClosedChannelException){
logger.log(Level.INFO, "****** Connection closed by client - Closing Channel");
}
e.getChannel().close();
}
}
private static class SRNGServerPipelineFactoryP implements ChannelPipelineFactory {
public ChannelPipeline getPipeline() throws Exception {
// Create a default pipeline implementation.
ChannelPipeline pipeline = Channels.pipeline();
pipeline.addLast("encoder", new StringEncoder());
pipeline.addLast("handler", new SRNGServerHandlerP());
return pipeline;
}
}
private static class SRNGChannelFutureListener implements ChannelFutureListener{
public void operationComplete(ChannelFuture future) throws InterruptedException{
Thread.sleep(1000*5);
Channel ch=future.getChannel();
if(ch!=null && ch.isConnected()){
ChannelFuture writeFuture=ch.write("It is " + new Date() + " now.\r\n");
//-- Add this instance as listener itself.
writeFuture.addListener(this);
}
}
}
}
Seems that the I/O thread is getting blocked as a result of sleep, so try using 2 worker threads instead:
ServerBootstrap bootstrap = new ServerBootstrap(
new NioServerSocketChannelFactory( Executors.newCachedThreadPool(),
Executors.newCachedThreadPool(), 2 ) );
