**ChatClient.java**
package com.examplechat;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.Channel;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.sctp.nio.NioSctpChannel;
public class ChatClient {
public static void main(String[] args) throws InterruptedException, IOException {
new ChatClient("localhost", 8000).run();
}
private final String host;
private final int port;
public ChatClient(String host, int port) {
super();
this.host = host;
this.port = port;
}
public void run() throws InterruptedException, IOException {
EventLoopGroup group = new NioEventLoopGroup();
try {
Bootstrap bootstrap = new Bootstrap()
.group(group)
.channel(NioSctpChannel.class)
.handler(new ChatClientInitilizer());
Channel channel = bootstrap.connect(host, port).sync().channel();
BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
while(true) {
channel.write(in.readLine()+ "\r\n");
}
}
finally {
group.shutdownGracefully();
}
}
}
**ChatServer.java**
package com.examplechat;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
public class ChatServer {
public static void main(String[] args) throws InterruptedException {
new ChatServer(8000).run();
}
private final int port;
public ChatServer(int port) {
super();
this.port = port;
}
public void run() throws InterruptedException {
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap bootstrap = new ServerBootstrap()
.group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.childHandler(new ChatServerInitializer());
bootstrap.bind(port).sync().channel().closeFuture().sync();
}
finally {
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
}
I'm trying to create a chat application using Netty. When I run the chat client after starting the ChatServer application, it shows:
Exception in thread "main" io.netty.channel.ChannelException: Unable
to create Channel from class class
io.netty.channel.sctp.nio.NioSctpChannel at
io.netty.channel.ReflectiveChannelFactory.newChannel
(ReflectiveChannelFactory.java:40)
I've even changed the dependencies in pom.xml, but I'm still receiving the same error every time I run it.
Update
Error 1: The "Unable to create Channel" error has been cleared by myself; I've answered it below. Please help me fix the second issue.
Error 2: I'm now getting a new warning: Failed to initialize a channel. Closing: [id: 0x9e4e05e0] java.lang.ClassCastException: io.netty.channel.sctp.nio.NioSctpChannel cannot be cast to io.netty.channel.socket.SocketChannel
Here's the GitHub link if you need it: https://github.com/cyborgsri/Chat-Application/tree/master/netty-chat-youtube/src/main/java/com/examplechat
I was getting the error
Unable to create Channel from class class io.netty.channel.sctp.nio.NioSctpChannel
which was caused by:
java.lang.UnsupportedOperationException: libsctp.so.1: cannot open shared object file: No such file or directory
This error was cleared once I installed lksctp-tools with:
sudo apt-get install lksctp-tools
Now the channel gets registered, but I'm getting a new error:
WARNING:
Failed to initialize a channel. Closing: [id: 0x9e4e05e0]
java.lang.ClassCastException: io.netty.channel.sctp.nio.NioSctpChannel
cannot be cast to io.netty.channel.socket.SocketChannel
The error happens because you use:
public class ChatServerInitializer extends ChannelInitializer<SocketChannel>
Change it to:
public class ChatServerInitializer extends ChannelInitializer<Channel>
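For reference, a minimal sketch of the adjusted initializer. The original pipeline setup isn't shown in the question, so the body below is only a placeholder, and the handler names in the comment are made up:
package com.examplechat;
import io.netty.channel.Channel;
import io.netty.channel.ChannelInitializer;
public class ChatServerInitializer extends ChannelInitializer<Channel> {
@Override
protected void initChannel(Channel ch) throws Exception {
// Add the same decoder/encoder/handler chain as before, for example:
// ch.pipeline().addLast(new StringDecoder(), new StringEncoder(), new ChatServerHandler());
}
}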
Related
I'm trying to create a TCP server that read data periodically from a database (Redis) and send it to the appropriate client.
However, since I'm pretty new to Netty, I don't know how I could schedule this. I do know that I need to use a ScheduledExecutorService like this:
ScheduledExecutorService e = Executors.newSingleThreadScheduledExecutor();
e.scheduleAtFixedRate(() -> {
System.out.println("Calling...");
// Do something
}, 1, 1, TimeUnit.SECONDS);
However, when I put that in the server code, it only calls the method once. I've tried putting it in different places but still can't seem to get it right. What should I do?
Here's the code of the server:
package com.example.test.app;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class Server {
public static void main(String[] args) throws Exception
{
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
final ServerHandler handler = new ServerHandler();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup);
b.channel(NioServerSocketChannel.class);
b.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
protected void initChannel(SocketChannel ch) throws Exception
{
ch.pipeline().addLast(handler);
}
});
b.option(ChannelOption.SO_BACKLOG, 128);
b.childOption(ChannelOption.SO_KEEPALIVE, true);
ScheduledExecutorService e = Executors.newSingleThreadScheduledExecutor();
e.scheduleAtFixedRate(() -> {
System.out.println("Calling...");
handler.saySomething();
}, 1, 1, TimeUnit.SECONDS);
ChannelFuture f = b.bind(1337).sync();
f.channel().closeFuture().sync();
} finally {
workerGroup.shutdownGracefully();
bossGroup.shutdownGracefully();
}
}
}
And here's the server handler:
package com.example.test.app;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
public class ServerHandler extends ChannelInboundHandlerAdapter {
private ChannelHandlerContext ctx;
@Override
public void channelActive(ChannelHandlerContext ctx)
{
this.ctx = ctx;
System.out.println("Someone's connedted!");
}
public void saySomething()
{
final ChannelFuture f = ctx.writeAndFlush("Sup!");
f.addListener((ChannelFutureListener) (ChannelFuture future) -> {
System.out.println("Something has been said!");
});
}
}
The method saySomething() throws a NullPointerException at final ChannelFuture f = ctx.writeAndFlush("Sup!"); because ctx is still null.
The EventExecutorGroup.scheduleAtFixedRate javadoc says that "If any execution of the task encounters an exception, subsequent executions are suppressed". This is why your task is called only once...
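As a quick standalone illustration of that javadoc behaviour (not part of your code): a task that throws on its first run is silently cancelled, so the message prints only once.
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class SuppressedScheduleDemo {
public static void main(String[] args) throws InterruptedException {
ScheduledExecutorService e = Executors.newSingleThreadScheduledExecutor();
e.scheduleAtFixedRate(() -> {
System.out.println("Calling...");
throw new NullPointerException("ctx is null"); // suppresses every later run
}, 1, 1, TimeUnit.SECONDS);
Thread.sleep(4000); // "Calling..." is printed once, then nothing
e.shutdownNow();
}
}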
Also, Netty allows you to re-use a handler instance across different pipeline instances only if you annotate the handler's class with @Sharable; otherwise it will throw an exception. If your handler is stateless (which is not your case, as yours has the ctx member), annotate it as @Sharable and re-use it for all created pipelines. If it is stateful, create a new instance for every new pipeline (new client connection).
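For example, a stateless handler might look roughly like this (a sketch; the class name is made up):
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
@Sharable // one instance may be added to many pipelines because it keeps no per-channel state
public class LoggingInboundHandler extends ChannelInboundHandlerAdapter {
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
System.out.println("read: " + msg);
ctx.fireChannelRead(msg); // pass the message to the next handler
}
}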
Finally, to schedule your task for each connected client, you can use the executor reachable from the connected client's ctx (by default, as in your case, the channel's EventLoop) in your channelActive() implementation. This executor implements ScheduledExecutorService, so it also provides scheduleAtFixedRate.
Take a look at my version of your code and see if it suits you.
Server:
package com.example.test.app;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
public class Server {
public static void main(String[] args) throws Exception
{
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup);
b.channel(NioServerSocketChannel.class);
b.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
protected void initChannel(SocketChannel ch) throws Exception
{
ch.pipeline().addLast(new ServerHandler());
}
});
b.option(ChannelOption.SO_BACKLOG, 128);
b.childOption(ChannelOption.SO_KEEPALIVE, true);
// ScheduledExecutorService e = Executors.newSingleThreadScheduledExecutor();
// e.scheduleAtFixedRate(() -> {
// System.out.println("Calling...");
// handler.saySomething();
// }, 1, 1, TimeUnit.SECONDS);
ChannelFuture f = b.bind(1337).sync();
f.channel().closeFuture().sync();
} finally {
workerGroup.shutdownGracefully();
bossGroup.shutdownGracefully();
}
}
}
ServerHandler:
package com.example.test.app;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.util.concurrent.ScheduledFuture;
import java.util.concurrent.TimeUnit;
public class ServerHandler extends ChannelInboundHandlerAdapter {
private ScheduledFuture sf;
@Override
public void channelActive(ChannelHandlerContext ctx)
{
System.out.println("Someone's connedted! "+ctx.channel());
sf = ctx.executor().scheduleAtFixedRate(() -> {
System.out.println("Calling...");
saySomething(ctx);
}, 1, 1, TimeUnit.SECONDS);
}
@Override
public void channelInactive(ChannelHandlerContext ctx) {
System.out.println("Someone's disconnected! "+ctx.channel());
sf.cancel(false);
}
private void saySomething(ChannelHandlerContext ctx)
{
final ChannelFuture f = ctx.writeAndFlush("Sup!");
f.addListener((ChannelFutureListener) (ChannelFuture future) -> {
System.out.println("Something has been said!");
});
}
}
I am new to Java and am trying to use Netty to build a sample TCP server. Here is what I have currently:
package http_server;
import java.net.InetSocketAddress;
import java.nio.channels.SocketChannel;
import netty_tutorial.EchoServerHandler;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelInboundHandlerAdapter;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.ServerSocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
class server
{
ServerBootstrap bootstrap;
int port;
server(int port_)
{
port = port_;
bootstrap = new ServerBootstrap();
bootstrap.group(new NioEventLoopGroup());
bootstrap.channel(NioServerSocketChannel.class);
bootstrap.localAddress(new InetSocketAddress(port));
/**
* Add handlers using anonymous class
*/
/****PROBLEMATIC LINE*****/
bootstrap.childHandler(new ChannelInitializer<SocketChannel>()
{
@Override
protected void initChannel(SocketChannel ch) throws Exception {
// TODO Auto-generated method stub
System.out.println("hello");
}
}
);
}
}
public class simple_server
{
public static void main(String args[])
{
server server_obj = new server(8080);
}
}
I was planning to add my handlers inside the initChannel method, but somehow I am not able to compile the current program. As soon as I try to compile this sample program, I get the following error:
Exception in thread "main" java.lang.Error: Unresolved compilation problem:
Bound mismatch: The type SocketChannel is not a valid substitute for the bounded parameter <C extends Channel> of the type ChannelInitializer<C>
at http_server.server.<init>
at http_server.simple_server.main
Any idea on what exactly is going wrong?
You have imported the SocketChannel from the wrong package.
Replace import java.nio.channels.SocketChannel;
with import io.netty.channel.socket.SocketChannel;
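With the Netty import in place, the original anonymous initializer compiles unchanged; a minimal sketch (the commented handler line is just an illustration):
import io.netty.channel.ChannelInitializer;
import io.netty.channel.socket.SocketChannel;
// Netty's SocketChannel extends Channel, so the bound <C extends Channel> is satisfied.
bootstrap.childHandler(new ChannelInitializer<SocketChannel>()
{
@Override
protected void initChannel(SocketChannel ch) throws Exception {
System.out.println("hello");
// e.g. ch.pipeline().addLast(new EchoServerHandler());
}
});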
I want to modify the client handler to use Foo instead of DatagramPacket -- what changes are required in the client itself?
Surely it's not necessary to strictly keep to datagrams to send and receive with Netty? The Factorial example uses BigInteger, so, surely, it's possible to use POJO's.
Any and all attempts to create a class like:
class FooClientHandler extends SimpleChannelInboundHandler<Foo> are non-starters for me; it literally won't send or receive with the server. (Yes, both client and server use similar handlers, generic classes with Foo.) So, I'm coming at this now from working code.
What's the key distinction between the factorial handler and the datagram handler below? Or, is the primary distinction in how it's used in the client?
client:
package net.bounceme.dur.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.DatagramPacket;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
import net.bounceme.dur.client.gui.MyProps;
public final class Client {
private static final Logger log = Logger.getLogger(Client.class.getName());
public void connect() throws InterruptedException {
MyProps p = new MyProps();
String host = p.getHost();
int port = p.getServerPort();
pingPongDatagram(host, port);
}
public void pingPongDatagram(String host, int port) throws InterruptedException {
EventLoopGroup group = new NioEventLoopGroup();
try {
Bootstrap b = new Bootstrap();
b.group(group)
.channel(NioDatagramChannel.class)
.option(ChannelOption.SO_BROADCAST, true)
.handler(new DatagramClientHandler());
Channel ch = b.bind(0).sync().channel();
ch.writeAndFlush(new DatagramPacket(
Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
new InetSocketAddress(host, port))).sync();
log.info("wrote packet");
if (!ch.closeFuture().await(5000)) {
log.warning("server timed out");
}
} finally {
group.shutdownGracefully();
}
}
}
handler:
package net.bounceme.dur.netty;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.socket.DatagramPacket;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
public class DatagramClientHandler extends SimpleChannelInboundHandler<DatagramPacket> {
private static final Logger log = Logger.getLogger(DatagramClientHandler.class.getName());
@Override
public void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
String response = msg.content().toString(CharsetUtil.UTF_8);
log.info(response);
DatagramPacket foo = new DatagramPacket(
Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
new InetSocketAddress("localhost", 4454));
ctx.writeAndFlush(foo);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
log.severe(cause.toString());
ctx.close();
}
}
I omitted the server code; it's almost exactly as in the Gandhi quote example.
What changes do I need to make to the client so that the handler can use Foo instead of DatagramPacket?
All I can say with certainty is that this client:
package net.bounceme.dur.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.DatagramPacket;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
import net.bounceme.dur.client.gui.MyProps;
import net.bounceme.dur.client.jdbc.Title;
public final class Client {
private static final Logger log = Logger.getLogger(Client.class.getName());
public void connect() throws InterruptedException {
MyProps p = new MyProps();
String host = p.getHost();
int port = p.getServerPort();
pingPongDatagram(host, port);
}
public void pingPongDatagram(String host, int port) throws InterruptedException {
EventLoopGroup group = new NioEventLoopGroup();
try {
Bootstrap b = new Bootstrap();
b.group(group)
.channel(NioDatagramChannel.class)
.option(ChannelOption.SO_BROADCAST, true)
.handler(new TitleClientHandler());
Channel ch = b.bind(0).sync().channel();
ch.writeAndFlush(new DatagramPacket(
Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
new InetSocketAddress(host, port))).sync();
ch.writeAndFlush(new Title());
log.info("wrote packets");
if (!ch.closeFuture().await(5000)) {
log.warning("server timed out");
}
} finally {
group.shutdownGracefully();
}
}
}
and handler:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.logging.Logger;
import net.bounceme.dur.client.jdbc.Title;
public class TitleClientHandler extends SimpleChannelInboundHandler<Title> {
private static final Logger log = Logger.getLogger(TitleClientHandler.class.getName());
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
log.severe(cause.toString());
ctx.close();
}
@Override
protected void channelRead0(ChannelHandlerContext chc, Title title) throws Exception {
log.info(title.toString());
}
}
don't seem to communicate with the server at all -- even when the server has been modified accordingly.
Unfortunately, I don't understand this output from the netty server:
BUILD SUCCESSFUL
Total time: 3 seconds
Jul 27, 2014 2:04:44 AM io.netty.handler.logging.LoggingHandler channelRegistered
INFO: [id: 0xcad25a31] REGISTERED
Jul 27, 2014 2:04:44 AM io.netty.handler.logging.LoggingHandler bind
INFO: [id: 0xcad25a31] BIND(0.0.0.0/0.0.0.0:4454)
Jul 27, 2014 2:04:44 AM io.netty.handler.logging.LoggingHandler channelActive
INFO: [id: 0xcad25a31, /0:0:0:0:0:0:0:0:4454] ACTIVE
Jul 27, 2014 2:04:59 AM io.netty.handler.logging.LoggingHandler logMessage
INFO: [id: 0xcad25a31, /0:0:0:0:0:0:0:0:4454] RECEIVED: [id: 0xff40b8a2, /127.0.0.1:37558 => /127.0.0.1:4454]
Jul 27, 2014 2:04:59 AM net.bounceme.dur.netty.ServerHandler <init>
INFO: starting..
Jul 27, 2014 2:04:59 AM io.netty.channel.DefaultChannelPipeline$TailContext exceptionCaught
WARNING: An exceptionCaught() event was fired, and it reached at the tail of the pipeline. It usually means the last handler in the pipeline did not handle the exception.
io.netty.handler.codec.TooLongFrameException: Adjusted frame length exceeds 1048576: 2901213193 - discarded
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.fail(LengthFieldBasedFrameDecoder.java:501)
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.failIfNecessary(LengthFieldBasedFrameDecoder.java:477)
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.decode(LengthFieldBasedFrameDecoder.java:403)
at io.netty.handler.codec.serialization.ObjectDecoder.decode(ObjectDecoder.java:68)
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.decode(LengthFieldBasedFrameDecoder.java:343)
at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:241)
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:149)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:333)
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:319)
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:787)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:125)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:116)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
at java.lang.Thread.run(Thread.java:744)
Presumably the netty-based server is complaining that it's receiving bad data in some respect?
client code:
package net.bounceme.dur.client.gui;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import java.util.logging.FileHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import net.bounceme.dur.client.jdbc.Title;
public final class ApplicationDriver {
private static final Logger log = Logger.getLogger(ApplicationDriver.class.getName());
private TitlesGUI gui = null;
private Handler handler = null;
public ApplicationDriver() throws IOException, ClassNotFoundException {
handler = new FileHandler("application.log");
handler.setFormatter(new SimpleFormatter());
log.setLevel(Level.INFO);
log.addHandler(handler);
log.info("starting log..");
MyProps p = new MyProps();
String host = p.getHost();
int port = p.getServerPort();
guiThread();
readWrite(host, port);
}
private void guiThread() {
Thread g;
g = new Thread() {
@Override
public void run() {
try {
gui = new TitlesGUI();
} catch (IOException ex) {
log.severe(ex.toString());
}
gui.setVisible(true);
}
};
g.start();
}
public static void main(String... args) throws IOException, ClassNotFoundException {
new ApplicationDriver();
}
private void readWrite(final String host, final int port) throws IOException {
Thread inputOutput;
final Socket socket = new Socket(host, port);
inputOutput = new Thread() {
@Override
public void run() {
while (true) {
try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(socket.getOutputStream());
ObjectInputStream objectInputStream = new ObjectInputStream(socket.getInputStream())) {
gui.setTitle((Title) objectInputStream.readObject());
Thread.sleep(1000);
} catch (IOException | ClassNotFoundException | InterruptedException ex) {
log.severe(ex.toString());
}
}
}
};
inputOutput.start();
}
}
Is it a problem that the client is using regular sockets instead of Netty? POJOs are being sent on both the client and server side. (The Title class is serializable and the serialVersionUID values match up.)
Here is a method from the GUI client (which is a bit large; it's a NetBeans Swing JFrame):
public void setTitle(Title title) {
this.title = title;
text.setText(title.toString());
}
The point of the above method is for something to send objects to the GUI, which is then updated accordingly. Similarly, I want to fire updates, or otherwise wire the GUI to socket I/O.
I don't really understand the output from the Netty server. Is it a problem that the server uses Netty while the client uses plain sockets? Both use the same POJO with the same serialVersionUID. Here's the Netty handler code:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.logging.Logger;
import net.bounceme.dur.jdbc.Title;
public class ServerHandler extends SimpleChannelInboundHandler<Title> {
private static final Logger log = Logger.getLogger(ServerHandler.class.getName());
public ServerHandler() {
log.info("starting..");
}
@Override
public boolean acceptInboundMessage(Object msg) throws Exception {
log.info(msg.toString());
return true;
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
log.info(msg.toString());
ctx.write(new Title());
}
@Override
protected void channelRead0(ChannelHandlerContext chc, Title title) throws Exception {
log.info(title.toString());
chc.write(new Title());
}
}
Apparently, none of the server handler code is executed, as everything explodes immediately after the client connects.
server code:
package net.bounceme.dur.netty;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.serialization.ClassResolvers;
import io.netty.handler.codec.serialization.ObjectDecoder;
import io.netty.handler.codec.serialization.ObjectEncoder;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import java.security.cert.CertificateException;
import java.util.logging.Logger;
import javax.net.ssl.SSLException;
public final class Server {
private static final Logger log = Logger.getLogger(Server.class.getName());
public static void main(String[] args) throws Exception {
MyProps p = new MyProps();
int port = p.getServerPort();
new Server().startServer(port, false);
}
private void startServer(int port, boolean ssl) throws CertificateException, SSLException, InterruptedException {
EventLoopGroup bossGroup = new NioEventLoopGroup(1);
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
public void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
p.addLast(
new ObjectEncoder(),
new ObjectDecoder(ClassResolvers.cacheDisabled(null)),
new ServerHandler());
}
});
b.bind(port).sync().channel().closeFuture().sync();
log.info("connected!");
} finally {
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
}
The TooLongFrameException raised by LengthFieldBasedFrameDecoder means one of the following:
1. The remote peer sent a very large message that exceeds the limit. The default maximum length of a message is 1 MiB. If you expect to receive messages larger than that, specify an alternative maximum length when you construct the LengthFieldBasedFrameDecoder (see the sketch below).
2. You passed wrong parameters to LengthFieldBasedFrameDecoder, so it is decoding the wrong place in your message. In this case, re-read the Javadoc of LengthFieldBasedFrameDecoder to determine the correct values for your protocol.
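In the server posted above, the length-prefixed framing comes from ObjectEncoder/ObjectDecoder, so the limit can be raised through ObjectDecoder's (int maxObjectSize, ClassResolver) constructor. A sketch of the changed pipeline line inside initChannel, keeping the rest of the server as posted (the 10 MiB value is only illustrative):
// Same pipeline as above, but with an explicit maximum object size.
ch.pipeline().addLast(
new ObjectEncoder(),
new ObjectDecoder(10 * 1024 * 1024, ClassResolvers.cacheDisabled(null)),
new ServerHandler());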
Using JBoss Netty, I'm trying to send data continuously to the connected client. In the example below, I try to send the time to the client every 5 seconds, as soon as the client gets connected (channelConnected). But this is not working; it works only if I comment out the while loop.
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.string.StringEncoder;
public class SRNGServer {
public static void main(String[] args) throws Exception {
// Configure the server.
ServerBootstrap bootstrap = new ServerBootstrap(
new NioServerSocketChannelFactory(
Executors.newCachedThreadPool(),
Executors.newCachedThreadPool()));
// Configure the pipeline factory.
bootstrap.setPipelineFactory(new SRNGServerPipelineFactoryP());
// Bind and start to accept incoming connections.
bootstrap.bind(new InetSocketAddress(8080));
}
private static class SRNGServerHandlerP extends SimpleChannelUpstreamHandler {
private static final Logger logger = Logger.getLogger(SRNGServerHandlerP.class.getName());
@Override
public void channelConnected(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
// Send greeting for a new connection.
e.getChannel().write("Welcome to " + InetAddress.getLocalHost().getHostName() + "!\r\n");
while(true){
e.getChannel().write("It is " + new Date() + " now.\r\n");
Thread.sleep(1000*5);
}
}
@Override
public void exceptionCaught(
ChannelHandlerContext ctx, ExceptionEvent e) {
logger.log(
Level.WARNING,
"Unexpected exception from downstream.",
e.getCause());
e.getChannel().close();
}
}
private static class SRNGServerPipelineFactoryP implements ChannelPipelineFactory {
public ChannelPipeline getPipeline() throws Exception {
// Create a default pipeline implementation.
ChannelPipeline pipeline = Channels.pipeline();
pipeline.addLast("encoder", new StringEncoder());
pipeline.addLast("handler", new SRNGServerHandlerP());
return pipeline;
}
}
}
The Netty documentation actually states that you should never make a Handler wait because it might eventually deadlock. The reason is that handler methods are called directly by I/O threads. One I/O thread in Netty performs multiple I/O operations in a sequence, so it's not one thread per operation.
In the channelConnected method you should start a new thread with a reference to the channel and make that thread send the time every 5 seconds. This would spawn one thread per connection.
Alternatively, you can have one single thread looping over a list of clients every 5 seconds and sending the time to each of them in a sequence.
Anyway, it's important to use a different thread for sending than the one that calls the Handler.
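A rough sketch of the first option, using the same JBoss Netty 3 API and imports as the question (Channel, InetAddress, Date): channelConnected only starts a writer thread and returns immediately, so the I/O thread is never blocked.
@Override
public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
final Channel channel = e.getChannel();
channel.write("Welcome to " + InetAddress.getLocalHost().getHostName() + "!\r\n");
// Do the periodic writes on a separate thread so the I/O thread returns immediately.
new Thread(new Runnable() {
public void run() {
try {
while (channel.isConnected()) {
channel.write("It is " + new Date() + " now.\r\n");
Thread.sleep(1000 * 5);
}
} catch (InterruptedException ignored) {
// stop sending when interrupted
}
}
}).start();
}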
For what it's worth, I figured out a solution, and here's the working code. After the write of the time, I register the future with my ChannelFutureListener, and then from operationComplete I keep registering the listener on each new write future. This works for what I want to accomplish, without using any extra threads.
import java.net.InetSocketAddress;
import java.nio.channels.ClosedChannelException;
import java.util.Date;
import java.util.concurrent.Executors;
import java.util.logging.Level;
import java.util.logging.Logger;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.ChannelStateEvent;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.string.StringEncoder;
public class SRNGServer {
public static void main(String[] args) throws Exception {
// Configure the server.
ServerBootstrap bootstrap = new ServerBootstrap(
new NioServerSocketChannelFactory(
Executors.newCachedThreadPool(),
//Executors.newCachedThreadPool()
Executors.newFixedThreadPool(2),2
));
// Configure the pipeline factory.
bootstrap.setPipelineFactory(new SRNGServerPipelineFactoryP());
// Bind and start to accept incoming connections.
bootstrap.bind(new InetSocketAddress(8080));
}
private static class SRNGServerHandlerP extends SimpleChannelUpstreamHandler {
private static final Logger logger = Logger.getLogger(SRNGServerHandlerP.class.getName());
@Override
public void channelConnected(
ChannelHandlerContext ctx, ChannelStateEvent e) throws Exception {
// Send greeting for a new connection.
Channel ch=e.getChannel();
ChannelFuture writeFuture=e.getChannel().write("It is " + new Date() + " now.\r\n");
SRNGChannelFutureListener srngcfl=new SRNGChannelFutureListener();
writeFuture.addListener(srngcfl);
}
@Override
public void exceptionCaught(
ChannelHandlerContext ctx, ExceptionEvent e) {
logger.log(
Level.WARNING,
"Unexpected exception from downstream.",
e.getCause());
if(e.getCause() instanceof ClosedChannelException){
logger.log(Level.INFO, "****** Connection closed by client - Closing Channel");
}
e.getChannel().close();
}
}
private static class SRNGServerPipelineFactoryP implements ChannelPipelineFactory {
public ChannelPipeline getPipeline() throws Exception {
// Create a default pipeline implementation.
ChannelPipeline pipeline = Channels.pipeline();
pipeline.addLast("encoder", new StringEncoder());
pipeline.addLast("handler", new SRNGServerHandlerP());
return pipeline;
}
}
private static class SRNGChannelFutureListener implements ChannelFutureListener{
public void operationComplete(ChannelFuture future) throws InterruptedException{
Thread.sleep(1000*5);
Channel ch=future.getChannel();
if(ch!=null && ch.isConnected()){
ChannelFuture writeFuture=ch.write("It is " + new Date() + " now.\r\n");
//-- Add this instance as listener itself.
writeFuture.addListener(this);
}
}
}
}
It seems that the I/O thread is getting blocked as a result of the sleep, so try using 2 worker threads instead:
ServerBootstrap bootstrap = new ServerBootstrap(
new NioServerSocketChannelFactory( Executors.newCachedThreadPool(),
Executors.newCachedThreadPool(), 2 ) );