Echo server with 1000 concurrent clients (lost messages + connection errors) - java

I am reading "Netty in Action" (MEAP v5). While reading chapters 2.3 and 2.4, I tried the EchoServer and EchoClient examples. With one client connected to the server, everything worked perfectly... then I modified the example so that multiple clients could connect to the server. My goal was to run a stress test: 1000 clients would connect to the server, each client would echo 100 messages to the server, and when all clients finished I would measure the total time of the whole process. The server was deployed on a Linux machine (VPS) and the clients were deployed on a Windows machine.
When I ran the stress test, I hit two problems:
Some clients got this error:
java.io.IOException: An existing connection was forcibly closed by the remote host
at sun.nio.ch.SocketDispatcher.read0(Native Method)
at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:43)
at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
at sun.nio.ch.IOUtil.read(IOUtil.java:192)
at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:379)
at io.netty.buffer.UnpooledUnsafeDirectByteBuf.setBytes(UnpooledUnsafeDirectByteBuf.java:447)
at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:881)
at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes(NioSocketChannel.java:242)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:119)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:110)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
at java.lang.Thread.run(Thread.java:745)
And some clients did not receive messages from the server.
Working environment:
Netty-all-4.0.30.Final
JDK 1.8.0_25
Echo clients were deployed on Windows 7 Ultimate
Echo server was deployed on Linux CentOS 6
Class NettyClient:
public class NettyClient {
    private Bootstrap bootstrap;
    private EventLoopGroup group;

    public NettyClient(final ChannelInboundHandlerAdapter handler) {
        group = new NioEventLoopGroup();
        bootstrap = new Bootstrap();
        bootstrap.group(group);
        bootstrap.channel(NioSocketChannel.class);
        bootstrap.handler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel channel) throws Exception {
                channel.pipeline().addLast(handler);
            }
        });
    }

    public void start(String host, int port) throws Exception {
        bootstrap.remoteAddress(new InetSocketAddress(host, port));
        bootstrap.connect();
    }

    public void stop() {
        try {
            group.shutdownGracefully().sync();
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
Class NettyServer:
public class NettyServer {
    private EventLoopGroup parentGroup;
    private EventLoopGroup childGroup;
    private ServerBootstrap bootstrap;

    public NettyServer(final ChannelInboundHandlerAdapter handler) {
        parentGroup = new NioEventLoopGroup(300);
        childGroup = new NioEventLoopGroup(300);
        bootstrap = new ServerBootstrap();
        bootstrap.group(parentGroup, childGroup);
        bootstrap.channel(NioServerSocketChannel.class);
        bootstrap.childHandler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel channel) throws Exception {
                channel.pipeline().addLast(handler);
            }
        });
    }

    public void start(int port) throws Exception {
        bootstrap.localAddress(new InetSocketAddress(port));
        ChannelFuture future = bootstrap.bind().sync();
        System.err.println("Start Netty server on port " + port);
        future.channel().closeFuture().sync();
    }

    public void stop() throws Exception {
        parentGroup.shutdownGracefully().sync();
        childGroup.shutdownGracefully().sync();
    }
}
Class EchoClient:
public class EchoClient {
    private static final String HOST = "203.12.37.22";
    private static final int PORT = 3344;
    private static final int NUMBER_CONNECTION = 1000;
    private static final int NUMBER_ECHO = 10;
    private static CountDownLatch counter = new CountDownLatch(NUMBER_CONNECTION);

    public static void main(String[] args) throws Exception {
        List<NettyClient> listClients = Collections.synchronizedList(new ArrayList<NettyClient>());
        for (int i = 0; i < NUMBER_CONNECTION; i++) {
            new Thread(new Runnable() {
                @Override
                public void run() {
                    try {
                        NettyClient client = new NettyClient(new EchoClientHandler(NUMBER_ECHO) {
                            @Override
                            protected void onFinishEcho() {
                                counter.countDown();
                                System.err.println((NUMBER_CONNECTION - counter.getCount()) + "/" + NUMBER_CONNECTION);
                            }
                        });
                        client.start(HOST, PORT);
                        listClients.add(client);
                    } catch (Exception ex) {
                        ex.printStackTrace();
                    }
                }
            }).start();
        }
        long t1 = System.currentTimeMillis();
        counter.await();
        long t2 = System.currentTimeMillis();
        System.err.println("Total time: " + (t2 - t1));
        for (NettyClient client : listClients) {
            client.stop();
        }
    }

    private static class EchoClientHandler extends SimpleChannelInboundHandler<ByteBuf> {
        private static final String ECHO_MSG = "Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo Echo";
        private int numberEcho;
        private int curNumberEcho = 0;

        public EchoClientHandler(int numberEcho) {
            this.numberEcho = numberEcho;
        }

        @Override
        public void channelActive(ChannelHandlerContext ctx) throws Exception {
            ctx.writeAndFlush(Unpooled.copiedBuffer(ECHO_MSG, CharsetUtil.UTF_8));
        }

        @Override
        protected void channelRead0(ChannelHandlerContext ctx, ByteBuf in) throws Exception {
            curNumberEcho++;
            if (curNumberEcho >= numberEcho) {
                onFinishEcho();
            } else {
                ctx.writeAndFlush(Unpooled.copiedBuffer(ECHO_MSG, CharsetUtil.UTF_8));
            }
        }

        protected void onFinishEcho() {
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
            cause.printStackTrace();
            ctx.close();
        }
    }
}
Class EchoServer:
public class EchoServer {
    private static final int PORT = 3344;

    public static void main(String[] args) throws Exception {
        NettyServer server = new NettyServer(new EchoServerHandler());
        server.start(PORT);
        System.err.println("Start server on port " + PORT);
    }

    @Sharable
    private static class EchoServerHandler extends ChannelInboundHandlerAdapter {
        @Override
        public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
            ctx.write(msg);
        }

        @Override
        public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
            ctx.flush();
        }

        @Override
        public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
            ctx.close();
        }
    }
}

You might change two things:
Create only one client Bootstrap and reuse it for all your clients instead of creating one per client. Extract the bootstrap construction out of the client part and keep only the connect, as you already do in your start method. This will limit the number of threads created internally; a sketch of both changes follows below.
Close the connection on the client side once the number of ping-pongs is reached. Currently you only call the empty method onFinishEcho, which performs no close at all on the client side, so no client ever stops... and therefore no channel is ever closed either...
You may have reached a limit on the number of threads on the client side.
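A minimal sketch of both changes, reusing the NettyClient/EchoClientHandler names from the question (the shared group, the reused Bootstrap, and the explicit ctx.close() are my additions, not code from the original post):

// One EventLoopGroup and one Bootstrap shared by all connections,
// instead of one NioEventLoopGroup (with its own threads) per client.
final EventLoopGroup sharedGroup = new NioEventLoopGroup();
final Bootstrap sharedBootstrap = new Bootstrap()
        .group(sharedGroup)
        .channel(NioSocketChannel.class)
        .handler(new ChannelInitializer<SocketChannel>() {
            @Override
            protected void initChannel(SocketChannel ch) {
                // Handlers keep per-connection state, so create a fresh one each time.
                ch.pipeline().addLast(new EchoClientHandler(NUMBER_ECHO) {
                    @Override
                    protected void onFinishEcho() {
                        counter.countDown();
                    }
                });
            }
        });

for (int i = 0; i < NUMBER_CONNECTION; i++) {
    sharedBootstrap.connect(HOST, PORT); // reuse the bootstrap; no extra threads per client
}

// And in EchoClientHandler.channelRead0, actually close the channel once done:
// if (curNumberEcho >= numberEcho) {
//     onFinishEcho();
//     ctx.close(); // without this, no client ever stops
// }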
One other element could also be an issue: you don't specify any codec (a string codec or similar), so a partial send from the client or server can be treated as if it were a full response.
For instance, a first packet might contain only the beginning of your buffer ("Echo Echo Echo"), while the remaining "Echo"s are sent in later packets.
To prevent this, you should use a codec to ensure your final handler gets a real, complete message rather than a partial one. Otherwise you can run into further issues, such as the server erroring while trying to send an extra packet after the client has already closed the channel sooner than expected (see the framing sketch below).
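A sketch of the framing idea, under the assumption that every echo message is exactly ECHO_MSG bytes long and that ECHO_MSG is visible to the initializer (my choice of FixedLengthFrameDecoder; a DelimiterBasedFrameDecoder or a length-field prefix would be needed for variable-size messages):

// In the ChannelInitializer, put a frame decoder in front of the echo handler
// so channelRead0 only ever sees complete messages.
@Override
protected void initChannel(SocketChannel channel) throws Exception {
    channel.pipeline()
           .addLast(new FixedLengthFrameDecoder(ECHO_MSG.getBytes(CharsetUtil.UTF_8).length))
           .addLast(handler);
}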

Related

Netty - ChannelRead reports that Object msg is of SimpleLeakAwareByteBuf type

I am making a curl POST (curl -X POST -d "dsds" 10.0.0.211:5201) to my Netty socket server, but in my channelRead, when I try to cast the Object msg to FullHttpRequest, it throws the following exception.
java.lang.ClassCastException: io.netty.buffer.SimpleLeakAwareByteBuf cannot be cast to io.netty.handler.codec.http.FullHttpRequest
at edu.clemson.openflow.sos.host.netty.HostPacketHandler.channelRead(HostPacketHandler.java:42)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:334)
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:326)
at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1320)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:334)
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:905)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:123)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:563)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:504)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:418)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:390)
at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:742)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:145)
at java.lang.Thread.run(Thread.java:748)
Following is my socket handler class:
@ChannelHandler.Sharable
public class HostPacketHandler extends ChannelInboundHandlerAdapter {
    private static final Logger log = LoggerFactory.getLogger(HostPacketHandler.class);
    private RequestParser request;

    public HostPacketHandler(RequestParser request) {
        this.request = request;
        log.info("Expecting Host at IP {} Port {}",
                request.getClientIP(), request.getClientPort());
    }

    public void setRequestObject(RequestParser requestObject) {
        this.request = requestObject;
    }

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        // Discard the received data silently.
        InetSocketAddress socketAddress = (InetSocketAddress) ctx.channel().remoteAddress();
        log.info("Got Message from {} at Port {}",
                socketAddress.getHostName(),
                socketAddress.getPort());
        //FullHttpRequest request = (FullHttpRequest) msg;
        log.info(msg.getClass().getSimpleName());
        //((ByteBuf) msg).release();
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        // Close the connection when an exception is raised.
        cause.printStackTrace();
        ctx.close();
    }
}
Pipeline:
public class NettyHostSocketServer implements IClientSocketServer {
    protected static boolean isClientHandlerRunning = false;
    private static final Logger log = LoggerFactory.getLogger(SocketManager.class);
    private static final int CLIENT_DATA_PORT = 9877;
    private static final int MAX_CLIENTS = 5;
    private HostPacketHandler hostPacketHandler;

    public NettyHostSocketServer(RequestParser request) {
        hostPacketHandler = new HostPacketHandler(request);
    }

    private boolean startSocket(int port) {
        NioEventLoopGroup group = new NioEventLoopGroup();
        try {
            ServerBootstrap b = new ServerBootstrap();
            b.group(group)
             .channel(NioServerSocketChannel.class)
             .localAddress(new InetSocketAddress(port))
             .childHandler(new ChannelInitializer<SocketChannel>() {
                 @Override
                 public void initChannel(SocketChannel ch) throws Exception {
                     ch.pipeline().addLast(hostPacketHandler);
                 }
             });
            ChannelFuture f = b.bind().sync();
            log.info("Started host-side socket server at Port {}", CLIENT_DATA_PORT);
            return true;
            // Need to do socket closing handling. close all the remaining open sockets
            //System.out.println(EchoServer.class.getName() + " started and listen on " + f.channel().localAddress());
            //f.channel().closeFuture().sync();
        } catch (InterruptedException e) {
            log.error("Error starting host-side socket");
            e.printStackTrace();
            return false;
        } finally {
            //group.shutdownGracefully().sync();
        }
    }

    @Override
    public boolean start() {
        if (!isClientHandlerRunning) {
            isClientHandlerRunning = true;
            return startSocket(CLIENT_DATA_PORT);
        }
        return true;
    }

    @Override
    public int getActiveConnections() {
        return 0;
    }
}
I also used Wireshark to check whether I am receiving valid packets (screenshot of the Wireshark dump not reproduced here).
Your problem is that you never decode the ByteBuf into an actual HttpRequest object, which is why you get an error. You can't cast a ByteBuf to a FullHttpRequest object.
You should do something like this:
@Override
public void initChannel(Channel channel) throws Exception {
    channel.pipeline()
           .addLast(new HttpRequestDecoder())          // Decodes the ByteBuf into HttpMessage and HttpContent (1)
           .addLast(new HttpObjectAggregator(1048576)) // Aggregates the HttpMessage with its following HttpContent into a FullHttpRequest
           .addLast(hostPacketHandler);
}
(1) If you also want to send an HttpResponse, use HttpServerCodec instead, which combines HttpRequestDecoder and HttpResponseEncoder.
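With those two handlers in front of HostPacketHandler, the bytes from the curl POST above should arrive as a single aggregated FullHttpRequest, and the cast in channelRead becomes safe.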

cannot connect with ssl in netty [duplicate]

This question already has answers here:
Official reasons for "Software caused connection abort: socket write error"
(14 answers)
Closed 5 years ago.
I'm trying to go through the SSL example and the EchoServer example in Netty, and for some reason, when I add my sslContext on the client side, I keep getting: an established connection was aborted by the software in your host machine.
EchoServerBootstrap
public class EchoServerBootstrap {
    private final int port;

    public EchoServerBootstrap(int port) {
        this.port = port;
    }

    public static void main(String[] args) throws Exception {
        new EchoServerBootstrap(3000).start();
    }

    public void start() throws Exception {
        SelfSignedCertificate ssc = new SelfSignedCertificate();
        final SslContext sslContext = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()).build();
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            ServerBootstrap b = new ServerBootstrap();
            b.group(group)
             .channel(NioServerSocketChannel.class)
             .localAddress(new InetSocketAddress(port))
             .handler(new LoggingHandler(LogLevel.INFO))
             .childHandler(new ChannelInitializer<SocketChannel>() {
                 @Override
                 protected void initChannel(SocketChannel ch) throws Exception {
                     ch.pipeline().addLast(sslContext.newHandler(ch.alloc()));
                     ch.pipeline().addLast(new EchoServerHandler());
                 }
             });
            ChannelFuture f = b.bind().sync();
            f.channel().closeFuture().sync();
        } finally {
            group.shutdownGracefully().sync();
        }
    }
}
EchoServerHandler
public class EchoServerHandler extends ChannelInboundHandlerAdapter {
    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        ByteBuf in = (ByteBuf) msg;
        System.out.println("Received: " + in.toString(CharsetUtil.UTF_8));
        ctx.write(in);
    }

    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) {
        System.out.println("channel read complete");
        ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }
}
EchoClientHandler
public class EchoClientHandler extends SimpleChannelInboundHandler<ByteBuf> {
    @Override
    public void channelActive(ChannelHandlerContext ctx) {
        ctx.writeAndFlush(Unpooled.copiedBuffer("Netty rocks", CharsetUtil.UTF_8));
    }

    @Override
    public void channelRead0(ChannelHandlerContext ctx, ByteBuf in) {
        System.out.println("Client receive: " + in.toString(CharsetUtil.UTF_8));
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }
}
EchoClientBootstrap
public class EchoClientBootstrap {
    private final String host;
    private final int port;

    public EchoClientBootstrap(String host, int port) {
        this.port = port;
        this.host = host;
    }

    public void start() throws Exception {
        EventLoopGroup group = new NioEventLoopGroup();
        final SslContext sslContext = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build();
        try {
            Bootstrap b = new Bootstrap();
            b.group(group)
             .channel(NioSocketChannel.class)
             .remoteAddress(new InetSocketAddress(host, port))
             .handler(new ChannelInitializer<SocketChannel>() {
                 @Override
                 public void initChannel(SocketChannel ch) throws Exception {
                     ch.pipeline().addLast(sslContext.newHandler(ch.alloc(), host, port)); // WHEN I ADD THIS LINE IT FAILS WITH I/O EXCEPTION 'an established connection was aborted...'
                     ch.pipeline().addLast(new EchoClientHandler());
                 }
             });
            ChannelFuture f = b.connect(host, port).sync();
            f.channel().closeFuture().sync();
        } finally {
            group.shutdownGracefully().sync();
        }
    }

    public static void main(String[] args) throws Exception {
        new EchoClientBootstrap("localhost", 3000).start();
    }
}
Is there something obvious I'm missing? I tried following this example and altering it a bit (http://netty.io/4.1/xref/io/netty/example/securechat/package-summary.html), but I keep getting that exception when I add the sslContext to the client channel. Any thoughts?
I added some logging to figure out what was going on. Without encryption, the EchoClient sends the "Netty rocks" message, and the server reads it and closes the channel. But for some reason, if SSL is enabled, the EchoServerHandler calls channelReadComplete before the EchoClient can send "Netty rocks", which is essentially this method:
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
    System.out.println("channel read complete");
    ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
}
which was closing my channel. I am not sure why there is that discrepancy when using SSL.
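No accepted answer is recorded here, but one consistent reading of that behavior, offered as an assumption: with SSL enabled, the first read cycles on the server carry only TLS handshake records, so channelReadComplete fires before any application data has been decrypted, and the handler closes the channel mid-handshake. A sketch of a server handler that only closes after it has actually echoed data:

// Sketch (my assumption, not from the original post): remember whether any
// application data has been read before deciding to close.
public class EchoServerHandler extends ChannelInboundHandlerAdapter {
    private boolean receivedData;

    @Override
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        receivedData = true;
        ctx.write(msg); // echo back
    }

    @Override
    public void channelReadComplete(ChannelHandlerContext ctx) {
        // Handshake-only read cycles produce no inbound messages here,
        // so don't close until real data has been echoed.
        if (receivedData) {
            ctx.writeAndFlush(Unpooled.EMPTY_BUFFER).addListener(ChannelFutureListener.CLOSE);
        }
    }
}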

Netty Channel fails when writing and flushing too many messages too fast

When I wrote a producer to publish messages to my server, I saw this:
java.io.IOException: Connection reset by peer
at sun.nio.ch.FileDispatcherImpl.read0(Native Method)
at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:39)
at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:223)
at sun.nio.ch.IOUtil.read(IOUtil.java:192)
at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:384)
at io.netty.buffer.UnpooledUnsafeDirectByteBuf.setBytes(UnpooledUnsafeDirectByteBuf.java:447)
at io.netty.buffer.AbstractByteBuf.writeBytes(AbstractByteBuf.java:881)
at io.netty.channel.socket.nio.NioSocketChannel.doReadBytes(NioSocketChannel.java:242)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:119)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
at java.lang.Thread.run(Thread.java:745)
I've searched all around and was told that it happens because the channel is closed.
But in my code, I only close a channel when my channel pool destroys it.
Here is my code:
public static class ChannelFactory implements PoolableObjectFactory<Channel> {
    private final Bootstrap bootstrap;
    private String host;
    private int port;

    public ChannelFactory(Bootstrap bootstrap, String host, int port) {
        this.bootstrap = bootstrap;
        this.host = host;
        this.port = port;
    }

    @Override
    public Channel makeObject() throws Exception {
        System.out.println("Create new channel!!!");
        bootstrap.validate();
        return bootstrap.connect(host, port).channel();
    }

    @Override
    public void destroyObject(Channel channel) throws Exception {
        ChannelFuture close = channel.close();
        if (close.isSuccess()) {
            System.out.println(channel + " close successfully");
        }
    }

    @Override
    public boolean validateObject(Channel channel) {
        System.out.println("Validate object");
        return (channel.isOpen());
    }

    @Override
    public void activateObject(Channel channel) throws Exception {
        System.out.println(channel + " is activated");
    }

    @Override
    public void passivateObject(Channel channel) throws Exception {
        System.out.println(channel + " is passivated");
    }

    /**
     * @return the host
     */
    public String getHost() {
        return host;
    }

    /**
     * @param host the host to set
     * @return this factory
     */
    public ChannelFactory setHost(String host) {
        this.host = host;
        return this;
    }

    /**
     * @return the port
     */
    public int getPort() {
        return port;
    }

    /**
     * @param port the port to set
     * @return this factory
     */
    public ChannelFactory setPort(int port) {
        this.port = port;
        return this;
    }
}
And here is my Runner:
public static class Runner implements Runnable {
    private Channel channel;
    private ButtyMessage message;
    private MyChannelPool channelPool;

    public Runner(MyChannelPool channelPool, Channel channel, ButtyMessage message) {
        this.channel = channel;
        this.message = message;
        this.channelPool = channelPool;
    }

    @Override
    public void run() {
        channel.writeAndFlush(message.content()).syncUninterruptibly().addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                channelPool.returnObject(future.channel());
            }
        });
    }
}
And my main:
public static void main(String[] args) throws InterruptedException {
    final String host = "127.0.0.1";
    final int port = 8080;
    int jobSize = 100;
    int jobNumber = 10000;
    final Bootstrap b = func(host, port);
    final MyChannelPool channelPool = new MyChannelPool(new ChannelFactory(b, host, port));
    ExecutorService threadPool = Executors.newFixedThreadPool(1);
    for (int i = 0; i < jobNumber; i++) {
        try {
            threadPool.execute(new Runner(channelPool, channelPool.borrowObject(), new ButtyMessage()));
        } catch (Exception ex) {
            System.out.println("ex = " + ex.getMessage());
        }
    }
}
With ButtyMessage extending ByteBufHolder.
In my Runner class, if I sleep(10) after writeAndFlush, it runs quite OK. But I don't want to rely on sleep, so I use a ChannelFutureListener; still, the result is bad. If I send about 1,000 to 10,000 messages, it crashes and throws the exception above. Is there any way to avoid this?
Thanks all.
Sorry for my bad explanation and my English :)
You have several issues that could explain this. Most of them are related to incorrect usage of asynchronous operations and futures.
I don't know whether it is linked to your issue, but if you really want to print when the channel is actually closed, you have to wait on the future, since close() (like any other operation) returns its future immediately, without waiting for the real close. Therefore your test if (close.isSuccess()) will almost always be false.
public void destroyObject(final Channel channel) throws Exception {
    channel.close().addListener(new ChannelFutureListener() {
        @Override
        public void operationComplete(ChannelFuture close) {
            if (close.isSuccess()) {
                System.out.println(channel + " close successfully");
            }
        }
    });
}
However, as I suppose this is only for debugging purposes, it is not mandatory.
Another one: you hand back to your pool a channel that is not yet connected (which could explain why your sleep(10) helps?). You have to wait for the connect() to complete.
public Channel makeObject() throws Exception {
    System.out.println("Create new channel!!!");
    //bootstrap.validate(); // this is implicitly called in connect()
    ChannelFuture future = bootstrap.connect(host, port).awaitUninterruptibly();
    if (future.isSuccess()) {
        return future.channel();
    } else {
        // do what you need to do when the connection is not done;
        // for example, fail fast so the pool never hands out a dead channel (my addition):
        throw new IllegalStateException("Connect failed", future.cause());
    }
}
Third one: validating a connected channel is better done with isActive(), since isOpen() can be true for a channel that is open but not (or no longer) connected:
@Override
public boolean validateObject(Channel channel) {
    System.out.println("Validate object");
    return channel.isActive(); // instead of isOpen()
}
Fourth one: in your Runner, you wrongly await on the future when you should not. You can remove the syncUninterruptibly() and leave the rest as is.
@Override
public void run() {
    channel.writeAndFlush(message.content()).addListener(new ChannelFutureListener() {
        @Override
        public void operationComplete(ChannelFuture future) throws Exception {
            channelPool.returnObject(future.channel());
        }
    });
}
And finally, I suppose you know your test is completely sequential (one thread in your executor), such that each job will reuse the very same channel over and over?
Could you try changing these four points to see if that corrects your issue?
EDIT: after the requester's comment
For syncUninterruptibly(), I did not read carefully. If you want to block on the write, then you don't need the extra addListener, since the future is done once the sync is over. So you can call channelPool.returnObject directly as the next statement after your sync.
So you should write it this way, which is simpler:
@Override
public void run() {
    ChannelFuture future = channel.writeAndFlush(message.content()).syncUninterruptibly();
    channelPool.returnObject(future.channel());
}
For fireChannelActive, it will be called as soon as the connect finishes (so from makeObject, at some point in the future). Moreover, once disconnected (as you noticed in your exception), the channel is no longer usable and must be recreated from scratch. So I would still suggest using isActive, such that a channel that is not active is removed via destroyObject...
Take a look at the channel state model here.
Finally, I found a solution myself, though I'm still thinking about other approaches. (This solution is copied almost exactly from the Netty 4.0.28 release notes.)
final String host = "127.0.0.1";
final int port = 8080;
int jobNumber = 100000;
final EventLoopGroup group = new NioEventLoopGroup(100);
ChannelPoolMap<InetSocketAddress, MyChannelPool> poolMap = new AbstractChannelPoolMap<InetSocketAddress, MyChannelPool>() {
    @Override
    protected MyChannelPool newPool(InetSocketAddress key) {
        Bootstrap bootstrap = func(group, key.getHostName(), key.getPort());
        return new MyChannelPool(bootstrap, new _AbstractChannelPoolHandler());
    }
};
ChannelPoolMap<InetSocketAddress, FixedChannelPool> poolMap1 = new AbstractChannelPoolMap<InetSocketAddress, FixedChannelPool>() {
    @Override
    protected FixedChannelPool newPool(InetSocketAddress key) {
        Bootstrap bootstrap = func(group, key.getHostName(), key.getPort());
        return new FixedChannelPool(bootstrap, new _AbstractChannelPoolHandler(), 10);
    }
};
final ChannelPool myChannelPool = poolMap.get(new InetSocketAddress(host, port));
final CountDownLatch latch = new CountDownLatch(jobNumber);
for (int i = 0; i < jobNumber; i++) {
    final int counter = i;
    final Future<Channel> future = myChannelPool.acquire();
    future.addListener(new FutureListener<Channel>() {
        @Override
        public void operationComplete(Future<Channel> f) {
            if (f.isSuccess()) {
                Channel ch = f.getNow();
                // Do something
                ch.writeAndFlush(new ButtyMessage().content()).addListener(new ChannelFutureListener() {
                    @Override
                    public void operationComplete(ChannelFuture future) throws Exception {
                        if (future.isSuccess()) {
                            System.out.println("counter = " + counter);
                            System.out.println("future = " + future.channel());
                            latch.countDown();
                        }
                    }
                });
                // Release back to pool
                myChannelPool.release(ch);
            } else {
                System.out.println(f.cause().getMessage());
                f.cause().printStackTrace();
            }
        }
    });
}
try {
    latch.await();
    System.exit(0);
} catch (InterruptedException ex) {
    System.out.println("ex = " + ex.getMessage());
}
As you can see, I use SimpleChannelPool and FixedChannelPool (an implementation of SimpleChannelPool provided by Netty).
What they do:
SimpleChannelPool: opens as many channels as it needs ---> with 100,000 messages this of course fails. Many sockets are opened, and then an IOException: Too many open files occurs. (Is that really a pool? Creating as many channels as possible and then throwing an exception? I wouldn't call that pooling.)
FixedChannelPool: did not work in my case (still studying why =)) sorry for my slowness).
Indeed, I want to use an ObjectPool instead, and I may post it as soon as I finish. Thanks @Frederic Brégier for helping me so much!

Java, Netty, TCP and UDP connection integration: No buffer space available for UDP connection

I have an application which uses both TCP and UDP. The main assumption is that the client connects to the server via TCP, and once the connection is established, UDP datagrams are sent.
I have to support two scenarios of connecting to the server:
- the client connects while the server is running
- the client connects while the server is down and retries until the server starts again
For the first scenario everything works fine: both connections work. The problem is with the second scenario. When the client tries a few times to connect via TCP and finally connects, the UDP connection function throws an exception:
java.net.SocketException: No buffer space available (maximum connections reached?): bind
at sun.nio.ch.Net.bind0(Native Method)
at sun.nio.ch.Net.bind(Net.java:344)
at sun.nio.ch.DatagramChannelImpl.bind(DatagramChannelImpl.java:684)
at sun.nio.ch.DatagramSocketAdaptor.bind(DatagramSocketAdaptor.java:91)
at io.netty.channel.socket.nio.NioDatagramChannel.doBind(NioDatagramChannel.java:192)
at io.netty.channel.AbstractChannel$AbstractUnsafe.bind(AbstractChannel.java:484)
at io.netty.channel.DefaultChannelPipeline$HeadContext.bind(DefaultChannelPipeline.java:1080)
at io.netty.channel.AbstractChannelHandlerContext.invokeBind(AbstractChannelHandlerContext.java:430)
at io.netty.channel.AbstractChannelHandlerContext.bind(AbstractChannelHandlerContext.java:415)
at io.netty.channel.DefaultChannelPipeline.bind(DefaultChannelPipeline.java:903)
at io.netty.channel.AbstractChannel.bind(AbstractChannel.java:197)
at io.netty.bootstrap.AbstractBootstrap$2.run(AbstractBootstrap.java:350)
at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:380)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:357)
at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:116)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
at java.lang.Thread.run(Thread.java:722)
When I restart the client application without touching the server, the client connects without any problems.
What can cause this problem?
Below I attach the source code of the classes. All of it comes from the examples on the official Netty project page. The only thing I have modified is replacing static variables and functions with non-static ones, because in the future I will need many TCP-UDP connections to multiple servers.
public final class UptimeClient {
    static final String HOST = System.getProperty("host", "192.168.2.193");
    static final int PORT = Integer.parseInt(System.getProperty("port", "2011"));
    static final int RECONNECT_DELAY = Integer.parseInt(System.getProperty("reconnectDelay", "5"));
    static final int READ_TIMEOUT = Integer.parseInt(System.getProperty("readTimeout", "10"));
    private static UptimeClientHandler handler;

    public void runClient() throws Exception {
        configureBootstrap(new Bootstrap()).connect();
    }

    private Bootstrap configureBootstrap(Bootstrap b) {
        return configureBootstrap(b, new NioEventLoopGroup());
    }

    @Override
    protected Object clone() throws CloneNotSupportedException {
        return super.clone(); //To change body of generated methods, choose Tools | Templates.
    }

    Bootstrap configureBootstrap(Bootstrap b, EventLoopGroup g) {
        if (handler == null) {
            handler = new UptimeClientHandler(this);
        }
        b.group(g)
         .channel(NioSocketChannel.class)
         .remoteAddress(HOST, PORT)
         .handler(new ChannelInitializer<SocketChannel>() {
             @Override
             public void initChannel(SocketChannel ch) throws Exception {
                 ch.pipeline().addLast(new IdleStateHandler(READ_TIMEOUT, 0, 0), handler);
             }
         });
        return b;
    }

    void connect(Bootstrap b) {
        b.connect().addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                if (future.cause() != null) {
                    handler.startTime = -1;
                    handler.println("Failed to connect: " + future.cause());
                }
            }
        });
    }
}
@Sharable
public class UptimeClientHandler extends SimpleChannelInboundHandler<Object> {
    UptimeClient client;

    public UptimeClientHandler(UptimeClient client) {
        this.client = client;
    }

    long startTime = -1;

    @Override
    public void channelActive(ChannelHandlerContext ctx) {
        try {
            if (startTime < 0) {
                startTime = System.currentTimeMillis();
            }
            println("Connected to: " + ctx.channel().remoteAddress());
            new QuoteOfTheMomentClient(null).run();
        } catch (Exception ex) {
            Logger.getLogger(UptimeClientHandler.class.getName()).log(Level.SEVERE, null, ex);
        }
    }

    @Override
    public void channelRead0(ChannelHandlerContext ctx, Object msg) throws Exception {
    }

    @Override
    public void userEventTriggered(ChannelHandlerContext ctx, Object evt) {
        if (!(evt instanceof IdleStateEvent)) {
            return;
        }
        IdleStateEvent e = (IdleStateEvent) evt;
        if (e.state() == IdleState.READER_IDLE) {
            // The connection was OK but there was no traffic for the last period.
            println("Disconnecting due to no inbound traffic");
            ctx.close();
        }
    }

    @Override
    public void channelInactive(final ChannelHandlerContext ctx) {
        println("Disconnected from: " + ctx.channel().remoteAddress());
    }

    @Override
    public void channelUnregistered(final ChannelHandlerContext ctx) throws Exception {
        println("Sleeping for: " + UptimeClient.RECONNECT_DELAY + 's');
        final EventLoop loop = ctx.channel().eventLoop();
        loop.schedule(new Runnable() {
            @Override
            public void run() {
                println("Reconnecting to: " + UptimeClient.HOST + ':' + UptimeClient.PORT);
                client.connect(client.configureBootstrap(new Bootstrap(), loop));
            }
        }, UptimeClient.RECONNECT_DELAY, TimeUnit.SECONDS);
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }

    void println(String msg) {
        if (startTime < 0) {
            System.err.format("[SERVER IS DOWN] %s%n", msg);
        } else {
            System.err.format("[UPTIME: %5ds] %s%n", (System.currentTimeMillis() - startTime) / 1000, msg);
        }
    }
}
public final class QuoteOfTheMomentClient {
    private ServerData config;

    public QuoteOfTheMomentClient(ServerData config) {
        this.config = config;
    }

    public void run() throws Exception {
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            Bootstrap b = new Bootstrap();
            b.group(group)
             .channel(NioDatagramChannel.class)
             .option(ChannelOption.SO_BROADCAST, true)
             .handler(new QuoteOfTheMomentClientHandler());
            Channel ch = b.bind(0).sync().channel();
            ch.writeAndFlush(new DatagramPacket(
                    Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
                    new InetSocketAddress("192.168.2.193", 8193))).sync();
            if (!ch.closeFuture().await(5000)) {
                System.err.println("QOTM request timed out.");
            }
        } catch (Exception ex) {
            ex.printStackTrace();
        } finally {
            group.shutdownGracefully();
        }
    }
}
public class QuoteOfTheMomentClientHandler extends SimpleChannelInboundHandler<DatagramPacket> {
    @Override
    public void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
        String response = msg.content().toString(CharsetUtil.UTF_8);
        if (response.startsWith("QOTM: ")) {
            System.out.println("Quote of the Moment: " + response.substring(6));
            ctx.close();
        }
    }

    @Override
    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }
}
If your server is Windows Server 2008 (R2 or R2 SP1), this problem is likely described and solved by this Stack Overflow answer, which refers to Microsoft KB article #2577795:
This issue occurs because of a race condition in the Ancillary Function Driver
for WinSock (Afd.sys) that causes sockets to be leaked. With time, the issue
that is described in the "Symptoms" section occurs if all available socket
resources are exhausted.
If your server is Windows Server 2003, this problem is likely described and solved by this Stack Overflow answer, which refers to Microsoft KB article #196271:
The default maximum number of ephemeral TCP ports is 5000 in the products that
are included in the "Applies to" section. A new parameter has been added in
these products. To increase the maximum number of ephemeral ports, follow these
steps...
...which basically means that you have run out of ephemeral ports.
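For the Windows Server 2003 case, the KB fix amounts to raising the MaxUserPort registry value and rebooting (the value below is illustrative; 0xfffe = 65534 is the documented maximum, so verify against the KB article before applying):

Windows Registry Editor Version 5.00

[HKEY_LOCAL_MACHINE\SYSTEM\CurrentControlSet\Services\Tcpip\Parameters]
"MaxUserPort"=dword:0000fffe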

Simple Netty Echo Server/Client not receiving messages

I'm trying to write a simple echo server with Netty. I'm reading Netty in Action (MEAP v8) to get down some theory and learn the core basics of Netty. The client connects successfully, but no messages get through from the client. I am able to telnet a message to the server and receive the response, so I guess the issue is on the client; I just have no idea what is wrong, being new to Netty.
Here is the client:
public class Client {
    private final String host;
    private final int port;

    public Client(String host, int port) {
        this.host = host;
        this.port = port;
    }

    public void start() throws Exception {
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            Bootstrap b = new Bootstrap();
            b.group(group).channel(NioSocketChannel.class)
             .remoteAddress(new InetSocketAddress(host, port))
             .handler(new ChannelInitializer<SocketChannel>() {
                 @Override
                 public void initChannel(SocketChannel ch) throws Exception {
                     ch.pipeline().addLast(new EchoClientHandler());
                 }
             });
            ChannelFuture f = b.connect().sync();
            f.channel().closeFuture().sync();
        } finally {
            group.shutdownGracefully().sync();
        }
    }

    public static void main(String[] args) throws Exception {
        new Client("127.0.0.1", 11235).start();
    }
}
And the Client handler: (I did try appending '\r\n' to the sent message, but that did not make a difference, which I found here: Netty Client to Server message)
@Sharable
public class EchoClientHandler extends SimpleChannelInboundHandler<ByteBuf> {
    public void channelActive(ChannelHandlerContext ctx) {
        System.out.println("Connected");
        ctx.write(Unpooled.copiedBuffer("Netty MAY rock!", CharsetUtil.UTF_8));
    }

    protected void channelRead0(ChannelHandlerContext ctx, ByteBuf in) throws Exception {
        System.out.println("Client received: " + in.toString(CharsetUtil.UTF_8));
    }

    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }
}
The server:
public class EchoServer {
    private final int port;

    public EchoServer(int port) {
        this.port = port;
    }

    public void start() throws Exception {
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            ServerBootstrap b = new ServerBootstrap();
            b.group(group)
             .channel(NioServerSocketChannel.class)
             .localAddress(new InetSocketAddress(port))
             .childHandler(new ChannelInitializer<SocketChannel>() {
                 @Override
                 public void initChannel(SocketChannel ch) throws Exception {
                     System.out.println("New client connected: " + ch.localAddress());
                     ch.pipeline().addLast(new EchoServerHandler());
                 }
             });
            ChannelFuture f = b.bind().sync();
            f.channel().closeFuture().sync();
        } finally {
            group.shutdownGracefully().sync();
        }
    }

    public static void main(String[] args) throws Exception {
        new EchoServer(11235).start();
    }
}
The server handler:
@Sharable
public class EchoServerHandler extends ChannelInboundHandlerAdapter {
    public void channelRead(ChannelHandlerContext ctx, Object msg) {
        ByteBuf in = (ByteBuf) msg;
        System.out.println("Server received: " + in.toString(CharsetUtil.UTF_8));
        ctx.write(in);
    }

    public void channelReadComplete(ChannelHandlerContext ctx) {
        ctx.writeAndFlush(Unpooled.EMPTY_BUFFER)
           .addListener(ChannelFutureListener.CLOSE);
    }

    public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
        cause.printStackTrace();
        ctx.close();
    }
}
It must be something small I'm missing, so any help will preserve my fleeting sanity and will be much appreciated!
Instead of write, use writeAndFlush in your ClientHandler. write(...) only queues the message in the channel's outbound buffer; nothing reaches the socket until a flush happens:
@Override
public void channelActive(ChannelHandlerContext ctx) {
    System.out.println("Connected");
    ctx.writeAndFlush(Unpooled.copiedBuffer("Netty MAY rock!", CharsetUtil.UTF_8));
}
