Netty client not receiving the complete data sent by the Server - java

I am designing a Netty-based solution to transfer a file from a server to a client over TCP. The client specifies the location of the file, and the server then sends the file to the client.
Currently, the solution works fine for small files (< 2 MB).
If the file to be sent is larger than ~5 MB, only partial data is received, and the amount varies from run to run. The server log, however, shows that the complete file was sent.
The issue is that the client does not receive all the data sent by the server. What is wrong in my code below? Or can someone point me in the right direction?
Below are my client, server, and their handlers:
(For brevity I have listed only the methods that are of importance.)
Client:
public class FileClient {
private final static int PORT = 8992;
private final static String HOST = "127.0.0.1";
public class ClientChannelInitializer extends ChannelInitializer<SocketChannel> {
private SslContext sslContext = null;
private String srcFile = "";
private String destFile = "";
public ClientChannelInitializer(String srcFile, String destFile, SslContext sslCtx) {
this.sslContext = sslCtx;
this.srcFile = srcFile;
this.destFile = destFile;
}
@Override
protected void initChannel(SocketChannel socketChannel) throws Exception {
ChannelPipeline pipeline = socketChannel.pipeline();
pipeline.addLast(sslContext.newHandler(socketChannel.alloc(), HOST, PORT));
pipeline.addLast("clientHandler", new FileClientHandler(srcFile, destFile));
}
}
private void startUp(String srcFile, String destFile) throws Exception {
SslContext sslCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build();
EventLoopGroup workerGroup = new NioEventLoopGroup();
Bootstrap clientBootstrap = new Bootstrap();
clientBootstrap.group(workerGroup);
clientBootstrap.channel(NioSocketChannel.class);
clientBootstrap.option(ChannelOption.TCP_NODELAY, true);
clientBootstrap.handler(new LoggingHandler(LogLevel.INFO));
clientBootstrap.handler(new ClientChannelInitializer(srcFile, destFile, sslCtx));
Channel channel = clientBootstrap.connect(new InetSocketAddress(HOST, PORT)).sync().channel();
channel.closeFuture().sync();
}
public static void main(String[] args) throws Exception {
String src = "/Users/home/src/test.mp4";
String dest = "/Users/home/dest/test.mp4";
new FileClient().startUp(src, dest);
}
}
ClientHandler:
public class FileClientHandler extends SimpleChannelInboundHandler<ByteBuf> {
private final String sourceFileName;
private OutputStream outputStream;
private Path destFilePath;
private byte[] buffer = new byte[0];
public FileClientHandler(String SrcFileName, String destFileName) {
this.sourceFileName = SrcFileName;
this.destFilePath = Paths.get(destFileName);
System.out.println("DestFilePath-" + destFilePath);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
ctx.writeAndFlush(ToByteBuff(this.sourceFileName));
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf byteBuff) throws Exception {
if (this.outputStream == null) {
Files.createDirectories(this.destFilePath.getParent());
if (Files.exists(this.destFilePath)) {
Files.delete(this.destFilePath);
}
this.outputStream = Files.newOutputStream(this.destFilePath, StandardOpenOption.CREATE,
StandardOpenOption.APPEND);
}
int size = byteBuff.readableBytes();
if (size > this.buffer.length) {
this.buffer = new byte[size];
}
byteBuff.readBytes(this.buffer, 0, size);
this.outputStream.write(this.buffer, 0, size);
}
}
FileServer:
public class FileServer {
private final int PORT = 8992;
public void run() throws Exception {
SelfSignedCertificate ssc = new SelfSignedCertificate();
final SslContext sslCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()).build();
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).option(ChannelOption.SO_BACKLOG, 100)
.handler(new LoggingHandler(LogLevel.INFO)).childHandler(new ChannelInitializer<SocketChannel>() {
@Override
public void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast(sslCtx.newHandler(ch.alloc()));
pipeline.addLast(new ChunkedWriteHandler());
pipeline.addLast(new FilServerFileHandler());
}
});
ChannelFuture f = b.bind(PORT).sync();
f.channel().closeFuture().sync();
} finally {
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
public static void main(String[] args) throws Exception {
new FileServer().run();
}
}
FileServerHandler:
public class FilServerFileHandler extends SimpleChannelInboundHandler<ByteBuf> {
@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf buff) throws Exception {
String filePathStr = buff.toString(CharsetUtil.UTF_8);
File file = new File(filePathStr);
RandomAccessFile raf = null;
ChannelFuture sendFileFuture;
try {
raf = new RandomAccessFile(file, "r");
sendFileFuture = ctx.writeAndFlush(new ChunkedNioFile(raf.getChannel()),
ctx.newProgressivePromise());
sendFileFuture.addListener(new ChannelProgressiveFutureListener() {
public void operationComplete(ChannelProgressiveFuture future) throws Exception {
System.err.println("Transfer complete.");
}
public void operationProgressed(ChannelProgressiveFuture future, long progress, long total)
throws Exception {
if (total < 0) { // total unknown
System.err.println("Transfer progress: " + progress);
} else {
System.err.println("Transfer progress: " + progress + " / " + total);
}
}
});
} catch (FileNotFoundException fnfe) {
} finally {
if (raf != null)
raf.close();
}
}
}
I have checked SO Q1 and SO Q2

I fixed your problem with a little tweak in FilServerFileHandler:
public class FileServerHandler extends SimpleChannelInboundHandler<ByteBuf> {
@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf buff) throws Exception {
String filePathStr = buff.toString(CharsetUtil.UTF_8);
File file = new File(filePathStr);
try {
// Opened inside the try block so the FileNotFoundException below can actually be caught.
final RandomAccessFile raf = new RandomAccessFile(file, "r");
ChannelFuture sendFileFuture = ctx.writeAndFlush(new ChunkedNioFile(raf.getChannel()), ctx.newProgressivePromise());
sendFileFuture.addListener(new ChannelProgressiveFutureListener() {
public void operationComplete(ChannelProgressiveFuture future) throws Exception {
System.err.println("Transfer complete.");
// Safe to close here: the asynchronous write has finished.
raf.close();
}
public void operationProgressed(ChannelProgressiveFuture future, long progress, long total)
throws Exception {
if (total < 0) { // total unknown
System.err.println("Transfer progress: " + progress);
} else {
System.err.println("Transfer progress: " + progress + " / " + total);
}
}
});
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
}
I moved raf.close() into the operationComplete method.
The partial transfer is caused by closing raf while the write operation is still in progress. Note that ctx.writeAndFlush() is an asynchronous call, so the raf.close() in the finally block may run before the write operation completes, especially when the file is large.
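A further note: the client blocks on channel.closeFuture().sync(), so it only exits once the server closes the connection. If the client should terminate as soon as the transfer is done, one option (a sketch, not part of the fix above) is a second listener on the same future that closes the channel after the file handle is released:

sendFileFuture.addListener(new ChannelFutureListener() {
    @Override
    public void operationComplete(ChannelFuture future) {
        // Closing the channel signals end-of-stream to the client,
        // releasing its closeFuture().sync().
        future.channel().close();
    }
});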

Related

Using Netty with ClamAV Instream

I have been struggling with a configuration using Netty to stream bytes to a ClamAV service. I am running in an Apache Camel route.
Using Netty, I am unable to intercept the "INSTREAM size limit exceeded" message.
INSTREAM
It is mandatory to prefix this command with n or z.
Scan a stream of data. The stream is sent to clamd in chunks, after INSTREAM, on the same socket on which the command was sent. This avoids the overhead of establishing new TCP connections and problems with NAT. The format of the chunk is: '<length><data>' where <length> is the size of the following data in bytes expressed as a 4 byte unsigned integer in network byte order and <data> is the actual chunk. Streaming is terminated by sending a zero-length chunk. Note: do not exceed StreamMaxLength as defined in clamd.conf, otherwise clamd will reply with INSTREAM size limit exceeded and close the connection.
Using a straight synchronous socket connection I have no issues. Can anyone point me in the right direction for how I should be using Netty to do this? Or should I just stick with a synchronous socket connection?
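As a side note, the chunk format quoted above boils down to a 4-byte big-endian (network byte order) length prefix before each data chunk, with a zero-length chunk as terminator. A minimal framing sketch using java.nio.ByteBuffer (the frameChunk helper is illustrative, not part of ClamAV or any library):

// Frames one INSTREAM chunk as <length><data>: a 4-byte unsigned
// integer in network byte order followed by the chunk bytes.
static byte[] frameChunk(byte[] data, int len) {
    return ByteBuffer.allocate(4 + len).putInt(len).put(data, 0, len).array();
}

// Streaming is terminated by a zero-length chunk.
static final byte[] END_OF_STREAM = ByteBuffer.allocate(4).putInt(0).array();

ByteBuffer defaults to big-endian, which is exactly network byte order, so no explicit order() call is needed; both implementations below rely on this.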
Implementation using synchronous sockets. Credit to https://github.com/solita/clamav-java "Antti Virtanen".
private class UseSocket implements Processor{
@Override
public void process(Exchange exchange) throws Exception{
try (BufferedInputStream message = new BufferedInputStream(exchange.getIn().getBody(InputStream.class));
Socket socket = new Socket("localhost", 3310);
BufferedOutputStream socketOutput = new BufferedOutputStream(socket.getOutputStream())){
byte[] command = "zINSTREAM\0".getBytes();
socketOutput.write(command);
socketOutput.flush();
byte[] chunk = new byte[2048];
int chunkSize;
try(BufferedInputStream socketInput = new BufferedInputStream(socket.getInputStream())){
for(chunkSize = message.read(chunk);chunkSize > -1;chunkSize = message.read(chunk)){
socketOutput.write(ByteBuffer.allocate(4).putInt(chunkSize).array());
socketOutput.write(chunk, 0, chunkSize);
socketOutput.flush();
if(processReply(socketInput, exchange)){
return;
}
}
socketOutput.write(ByteBuffer.allocate(4).putInt(0).array());
socketOutput.flush();
processReply(socketInput, exchange);
}
}
}
private boolean processReply(BufferedInputStream in, Exchange exchange) throws Exception{
if(in.available() > 0) {
logger.info("processing reply");
byte[] replyBytes = new byte[256];
int replySize = in.read(replyBytes);
if (replySize > 0) {
String reply = new String(replyBytes, 0, replySize, StandardCharsets.UTF_8);
String avStatus = "infected";
if ("stream: OK\0".equals(reply)) {
avStatus = "clean";
} else if ("INSTREAM size limit exceeded. ERROR\0".equals(reply)) {
avStatus = "overflow";
}
exchange.getIn().setHeader("av-status", avStatus);
return true;
}
}
return false;
}
}
Implementation using Netty with inbound and outbound channel handlers.
private class UseNetty implements Processor{
@Override
public void process(Exchange exchange) throws Exception{
logger.info(CLASS_NAME + ": Creating Netty client");
EventLoopGroup eventLoopGroup = new NioEventLoopGroup();
try{
Bootstrap bootstrap = new Bootstrap();
bootstrap.group(eventLoopGroup);
bootstrap.channel(NioSocketChannel.class);
bootstrap.remoteAddress(new InetSocketAddress("localhost", 3310));
bootstrap.handler(new ClamAvChannelIntializer(exchange));
ChannelFuture channelFuture = bootstrap.connect().sync();
channelFuture.channel().closeFuture().sync();
}catch(Exception ex) {
logger.error(CLASS_NAME + ": ERROR", ex);
}
finally
{
eventLoopGroup.shutdownGracefully();
logger.info(CLASS_NAME + ": Netty client closed");
}
}
}
public class ClamAvChannelIntializer extends ChannelInitializer<SocketChannel> {
private Exchange exchange;
public ClamAvChannelIntializer(Exchange exchange){
this.exchange = exchange;
}
@Override
protected void initChannel(SocketChannel socketChannel) throws Exception {
socketChannel.pipeline().addLast(new ClamAvClientWriter());
socketChannel.pipeline().addLast(new ClamAvClientHandler(exchange));
}
}
public class ClamAvClientHandler extends SimpleChannelInboundHandler<ByteBuf> {
String CLASS_NAME;
Logger logger;
private Exchange exchange;
public static final int MAX_BUFFER = 2048;
public ClamAvClientHandler(Exchange exchange){
super();
CLASS_NAME = this.getClass().getName();
logger = LoggerFactory.getLogger(CLASS_NAME);
this.exchange = exchange;
}
@Override
public void channelActive(ChannelHandlerContext channelHandlerContext) throws Exception{
logger.info(CLASS_NAME + ": Entering channelActive");
channelHandlerContext.write(exchange);
logger.info(CLASS_NAME + ": Exiting channelActive");
}
@Override
public void exceptionCaught(ChannelHandlerContext channelHandlerContext, Throwable cause){
cause.printStackTrace();
channelHandlerContext.close();
}
@Override
protected void channelRead0(ChannelHandlerContext channelHandlerContext, ByteBuf byteBuf) {
logger.info(CLASS_NAME + ": Entering channelRead0");
String reply = byteBuf.toString(CharsetUtil.UTF_8);
logger.info(CLASS_NAME + ": Reply = " + reply);
String avStatus = "infected";
if ("stream: OK\0".equals(reply)) {
avStatus = "clean";
} else if ("INSTREAM size limit exceeded. ERROR\0".equals(reply)) {
avStatus = "overflow";
} else{
logger.warn("Infected or unknown reply = " + reply);
}
exchange.getIn().setHeader("av-status", avStatus);
logger.info(CLASS_NAME + ": Exiting channelRead0");
channelHandlerContext.close();
}
}
public class ClamAvClientWriter extends ChannelOutboundHandlerAdapter {
String CLASS_NAME;
Logger logger;
public static final int MAX_BUFFER = 64000; // ~64 KB
public ClamAvClientWriter(){
CLASS_NAME = this.getClass().getName();
logger = LoggerFactory.getLogger(CLASS_NAME);
}
@Override
public void write(ChannelHandlerContext channelHandlerContext, Object o, ChannelPromise channelPromise) throws Exception{
logger.info(CLASS_NAME + ": Entering write");
Exchange exchange = (Exchange)o;
try(BufferedInputStream message = new BufferedInputStream(exchange.getIn().getBody(InputStream.class))){
channelHandlerContext.writeAndFlush(Unpooled.copiedBuffer("zINSTREAM\0".getBytes()));
byte[] chunk = new byte[MAX_BUFFER];
for(int i=message.read(chunk);i>-1;i=message.read(chunk)){
byte[] chunkSize = ByteBuffer.allocate(4).putInt(i).array();
channelHandlerContext.write(Unpooled.copiedBuffer(chunkSize));
channelHandlerContext.writeAndFlush(Unpooled.copiedBuffer(chunk, 0, i));
}
channelHandlerContext.writeAndFlush(Unpooled.copiedBuffer(ByteBuffer.allocate(4).putInt(0).array()));
}
logger.info(CLASS_NAME + ": Exiting write");
}
}
I finally gave up on trying to use Netty for this. I created a new Camel Processor and packaged the socket stream in it. Code below in case anyone runs into a similar issue.
public class ClamAvInstream implements Processor {
Logger logger;
private final int MAX_BUFFER = 2048;
public ClamAvInstream() {
logger = LoggerFactory.getLogger(this.getClass().getName());
}
@Override
public void process(Exchange exchange) throws Exception {
try (BufferedInputStream message = new BufferedInputStream(exchange.getIn().getBody(InputStream.class));
Socket socket = new Socket("localhost", 3310);
BufferedOutputStream socketOutput = new BufferedOutputStream(socket.getOutputStream())) {
byte[] command = "zINSTREAM\0".getBytes();
socketOutput.write(command);
socketOutput.flush();
byte[] chunk = new byte[MAX_BUFFER];
int chunkSize;
try (BufferedInputStream socketInput = new BufferedInputStream(socket.getInputStream())) {
for (chunkSize = message.read(chunk); chunkSize > -1; chunkSize = message.read(chunk)) {
socketOutput.write(ByteBuffer.allocate(4).putInt(chunkSize).array());
socketOutput.write(chunk, 0, chunkSize);
socketOutput.flush();
receivedReply(socketInput, exchange);
}
socketOutput.write(ByteBuffer.allocate(4).putInt(0).array());
socketOutput.flush();
receivedReply(socketInput, exchange);
} catch(ClamAvException ex){ //close socketInput
logger.warn(ex.getMessage());
}
}//close message, socket, socketOutput
}
private class ClamAvException extends Exception{
private ClamAvException(String error){
super(error);
}
}
private void receivedReply(BufferedInputStream in, Exchange exchange) throws Exception{
if(in.available() > 0){
byte[] replyBytes = new byte[256];
int replySize = in.read(replyBytes);
if (replySize > 0) {
String reply = new String(replyBytes, 0, replySize, StandardCharsets.UTF_8);
logger.info("reply="+reply);
if(reply.contains("OK")){
exchange.getIn().setHeader("av-status", "clean");
}else if(reply.contains("ERROR")){
if(reply.equals("INSTREAM size limit exceeded. ERROR\0")){
exchange.getIn().setHeader("av-status", "overflow");
}else {
exchange.getIn().setHeader("av-status", "error");
}
throw new ClamAvException(reply);
}else if(reply.contains("FOUND")){
exchange.getIn().setHeader("av-status", "infected");
}else{
exchange.getIn().setHeader("av-status", "unknown");
}
}
}
}
}

Netty client fails to read response from non-Netty server

I have a TCP client that connects to an old mainframe (52 years old), sending requests to it and receiving responses from it.
Here is the core connection part of my client:
public class SimpleConnector {
private String carrier;
private SocketChannel socketChannel;
public static final byte END_OF_MESSAGE_BYTE = (byte) 0x2b;
public SimpleConnector(String carrier, InetSocketAddress inetSocketAddress) throws IOException {
this.carrier = carrier;
socketChannel = SocketChannel.open();
socketChannel.socket().connect(inetSocketAddress, 30000);
}
public void shutDown() throws IOException {
this.socketChannel.close();
}
//Send Request
public String sendRequest(String request) throws Exception {
final CharsetEncoder charsetEncoder = Charset.forName("ISO-8859-1").newEncoder();
int requestLength = 12 + request.length() + 1;
ByteBuffer buffer = ByteBuffer.allocate(requestLength);
buffer.order(ByteOrder.BIG_ENDIAN);
buffer.putInt(requestLength);
buffer.put(charsetEncoder.encode(CharBuffer.wrap(carrier)));
buffer.put(charsetEncoder.encode(CharBuffer.wrap(request)));
buffer.put(END_OF_MESSAGE_BYTE);
buffer.flip();
socketChannel.write(buffer);
return readResponse();
}
//Read Response
protected String readResponse() throws Exception {
CharsetDecoder charsetDecoder = Charset.forName("ISO-8859-1").newDecoder();
int responseHeaderLength = 12;
ByteBuffer responseHeaderBuf = ByteBuffer.allocate(responseHeaderLength);
responseHeaderBuf.order(ByteOrder.BIG_ENDIAN);
int bytesRead = 0;
do {
bytesRead = socketChannel.read(responseHeaderBuf);
} while (bytesRead!=-1 && responseHeaderBuf.position()<responseHeaderLength);
if (bytesRead==-1) {
throw new IOException(carrier + " : Remote connection closed unexpectedly");
}
responseHeaderBuf.flip();
int lengthField = responseHeaderBuf.getInt();
int responseLength = lengthField - responseHeaderLength;
responseHeaderBuf.clear();
ByteBuffer responseBuf = ByteBuffer.allocate(responseLength);
bytesRead = socketChannel.read(responseBuf);
if (bytesRead>responseBuf.limit() || bytesRead ==-1) {
throw new IOException(carrier + " : Remote connection closed unexpectedly");
}
responseBuf.flip();
if (responseBuf.get(responseBuf.limit()-1)==END_OF_MESSAGE_BYTE) {
responseBuf.limit(responseBuf.limit()-1);
}
responseBuf.clear();
String response = charsetDecoder.decode(responseBuf).toString();
return response;
}
public static void main(String[] args) throws Exception{
SimpleConnector simpleConnector = new SimpleConnector("carrier",new InetSocketAddress("localhost",9999));
String response=simpleConnector.sendRequest("Request");
System.out.println(response);
}
}
I'm trying to rewrite this piece using Netty, using the following tutorials as reference:
http://tutorials.jenkov.com/netty/netty-tcp-client.html
https://www.baeldung.com/netty
https://github.com/deepanprabhu/netty-twoway-tcp-client-server
The problem I'm facing is that I am able to connect to the server but can't read from or write to it. I'm using a ChannelInboundHandlerAdapter to do the read and write operations.
Here is my Netty Client
public class NettyClient {
int port;
Channel channel;
EventLoopGroup workGroup = new NioEventLoopGroup();
public NettyClient(int port){
this.port = port;
}
public ChannelFuture connectLoop() throws Exception {
try{
Bootstrap b = new Bootstrap();
b.group(workGroup);
b.channel(NioSocketChannel.class);
b.option(ChannelOption.SO_KEEPALIVE, true);
b.handler(new ChannelInitializer<SocketChannel>() {
protected void initChannel(SocketChannel socketChannel) throws Exception {
socketChannel.pipeline().addLast(new NettyClientHandler());
}
});
ChannelFuture channelFuture = b.connect("remote-ip", this.port).sync();
this.channel = channelFuture.channel();
return channelFuture;
}finally{
}
}
public void shutdown(){
workGroup.shutdownGracefully();
}
public static void main(String[] args) throws Exception{
try {
NettyClient nettyClient = new NettyClient(12000);
ChannelFuture channelFuture = nettyClient.connectLoop();
System.out.println("Sleep 2sec");
Thread.sleep(2000);
String command ="username";
final Charset charset = Charset.forName("ISO-8859-1");
int length = 13 + command.length();
if (channelFuture.isSuccess()) {
ByteBuf byteBuf = Unpooled.buffer(1024);
byteBuf.writeInt(length);
byteBuf.writeCharSequence("Some Info",charset);
byteBuf.writeCharSequence(command,charset);
channelFuture.channel().writeAndFlush(byteBuf).addListener(new ListenerImpl());
}
}
catch(Exception e){
System.out.println(e.getMessage());
System.out.println("Try Starting Server First !!");
}
finally {
}
}
private static final class ListenerImpl implements ChannelFutureListener{
public void operationComplete(ChannelFuture channelFuture) throws Exception {
if (channelFuture.isSuccess()){
System.out.println("Success"); //I can see success in Listener after write, but couldn't read response
}else {
System.out.println("Failed");
}
}
}
}
Handler
public class NettyClientHandler extends ChannelInboundHandlerAdapter {
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
super.channelReadComplete(ctx);
}
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception {
System.out.println("NettyClientHandler : channelRead" );
ByteBuf byteBuf = (ByteBuf) msg;
String message = byteBuf.toString(Charset.defaultCharset());
System.out.println("Received Message : " + message);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
super.channelActive(ctx);
System.out.println("NettyClientHandler : channelActive" );
}
}
I initially thought Netty would work only with Netty servers, but this answer cleared up my doubt about that:
Does a Netty client work with a netty server only?
Can someone guide me on what I'm doing wrong?
I think the problem is with your ClientHandler. You should writeAndFlush() in the channelActive method, which is invoked once a connection has been established between the TCP server and client. Please try the updated code below and see whether it fixes the problem.
@Sharable
public class NettyClientHandler extends SimpleChannelInboundHandler<ByteBuf> {
@Override
public void channelRead0(ChannelHandlerContext ctx, ByteBuf byteBuf) throws Exception {
String message = byteBuf.toString(Charset.defaultCharset());
System.out.println("Received Message : " + message);
}
@Override
public void channelActive(ChannelHandlerContext channelHandlerContext){
channelHandlerContext.writeAndFlush(Unpooled.copiedBuffer("Netty Rocks!", CharsetUtil.UTF_8));
}
}
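Beyond that, one more thing may matter here, judging from the blocking readResponse() in the question: the mainframe protocol is length-prefixed, so without a frame decoder channelRead0 may fire with arbitrary TCP fragments rather than complete responses. Assuming the 4-byte length field at offset 0 counts the entire message including itself, as the synchronous code implies, a LengthFieldBasedFrameDecoder could be added in front of the handler (a sketch under that assumption):

socketChannel.pipeline().addLast(
        // Length field at offset 0, 4 bytes long; adjust by -4 because
        // the field's value includes the length field itself.
        new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, 4, -4, 0),
        new NettyClientHandler());

With initialBytesToStrip left at 0, each inbound ByteBuf then carries one whole message, header included, mirroring what readResponse() reconstructs by hand.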

Why is file transfer very slow on netty (4.1.5-Final)?

In my socket-based server implementation using Netty 4.1.5-Final, when I transfer a video/image file in chunks (20K chunk size), I see a delay of around 350+ ms between two chunks, and I am not sure how to reduce that.
Here is my main server code :
public class MultimediaServer extends Thread implements IMultimediaServer, BeanFactoryAware {
/**
* Logger Instance
*/
protected Logger logger = Logger.getLogger(this.getClass());
@Autowired
private Properties props;
private RequestHandler requestHandler;
private BeanFactory beanFactory;
private int port;
private int maxConnection;
private int timeout = 30000;
private EventLoopGroup bossGroup = null;
private EventLoopGroup workerGroup = null;
@Override
public void run() {
try {
bossGroup = new NioEventLoopGroup();
workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class)
.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
public void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast("frameDecoder", new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, Const.PACKET_HEADER_LENGTH, 0, Const.PACKET_HEADER_LENGTH));
ch.pipeline().addLast("messageDecoder", new MessageDecoder());
ch.pipeline().addLast("frameEncoder", new ResponseHandler(Const.PACKET_HEADER_LENGTH));
ch.pipeline().addLast("bytesEncoder", new ByteArrayEncoder());
ch.pipeline().addLast(getHandler());
}
}).option(ChannelOption.SO_BACKLOG, maxConnection)
.option(ChannelOption.SO_KEEPALIVE, true)
.option(ChannelOption.TCP_NODELAY, true)
.option(ChannelOption.SO_REUSEADDR, true)
.option(ChannelOption.MAX_MESSAGES_PER_READ, Integer.MAX_VALUE)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, timeout)
.option(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 32 * 1024)
.option(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 128 * 1024)
.option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
.childOption(ChannelOption.SO_KEEPALIVE, true)
.childOption(ChannelOption.TCP_NODELAY, true)
.childOption(ChannelOption.SO_REUSEADDR, true)
.childOption(ChannelOption.CONNECT_TIMEOUT_MILLIS, timeout)
.childOption(ChannelOption.MAX_MESSAGES_PER_READ, Integer.MAX_VALUE)
.childOption(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
.childOption(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 32 * 1024)
.childOption(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 128 * 1024);
// Bind and start to accept incoming connections.
ChannelFuture f = serverBootstrap.bind(this.port).sync();
// Wait until the server socket is closed.
// In this example, this does not happen, but you can do that to
// gracefully shut down your server.
f.channel().closeFuture().sync();
} finally {
workerGroup.shutdownGracefully();
bossGroup.shutdownGracefully();
}
} catch (Throwable e) {
logger.error("ERROR : While starting the Konvx service ", e);
}
}
@Override
public void startServer(int port) {
super.setName("KonvxMultimediaServer : " + port);
this.port = port;
this.start();
}
@Override
public void stopServer() {
workerGroup.shutdownGracefully();
bossGroup.shutdownGracefully();
}
public RequestHandler getRequestHandler() {
return requestHandler;
}
public void setRequestHandler(RequestHandler requestHandler) {
this.requestHandler = requestHandler;
}
/**
* Return Request Handler
* #return RequestHandler
*/
private RequestHandler getHandler() {
return (RequestHandler) beanFactory.getBean("requestHandler", RequestHandler.class);
}
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
this.beanFactory = beanFactory;
}
@Value("${konvx.maxConnection}")
public void setMaxConnection(String maxConnection) {
this.maxConnection = Integer.parseInt(maxConnection);
}
@Value("${konvx.socket.timeout}")
public void setTimeout(String timeout) {
this.timeout = Integer.parseInt(timeout);
}
}
Here is the channel handler
public class RequestHandler extends SimpleChannelInboundHandler<KonvxMessage> {
/**
* Logger Instance
*/
private Logger logger = Logger.getLogger(this.getClass());
@Autowired
private Router router;
@Autowired
protected UserPool userPool;
@Override
public void channelRead0(ChannelHandlerContext ctx, KonvxMessage message) throws Exception {
Packet packet = new Packet();
packet.setCtx(ctx);
try {
if (message == null) {
logger.warn("Warning - message is empty");
return;
}
// Throw the exception if in-bound message does not magic cookie
if (!message.hasCookie()) {
logger.error("ERROR: Bad Cookie :" + message);
return;
}
// Checking if user is a valid/registered to our application
if (!userPool.isValidUser(message.getUserId())) {
packet.writeMessage(KonvxMessageFactory.getInvalidUserMessage(message));
return;
}
packet.setInMessage(message);
router.route(packet);
} catch (Exception e) {
logger.error("ERROR : Whie receiving/processing the in-bound message ", e);
packet.writeMessage(KonvxMessageFactory.getErrorMessage(message, KonvxError.UNKNOWN_ERROR));
}
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.warn("WARN : Connection problem - " + cause.getMessage() + " Client address :" + ctx.channel().remoteAddress());
ctx.close();
return;
}
}
Here is the decoder for the packet -
public class MessageDecoder extends ByteToMessageDecoder {
/**
* Logger Instance
*/
protected Logger logger = Logger.getLogger(this.getClass());
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
// Parsing the object
String msg = null;
try {
byte[] bytes = new byte[in.readableBytes()];
if (bytes.length <= 0) {
logger.debug("Total readable bytes :" + in.readableBytes() + " exiting...");
return;
}
in.readBytes(bytes);
msg = new String(bytes, CharsetUtil.UTF_8);
// Return if message is empty
if (msg.isEmpty()) {
logger.warn("Message is empty...exiting...");
return;
}
KonvxMessage konvxMessage = JsonUtil.parseMessage(msg);
// Logging the incoming message
StringBuilder logMessage = new StringBuilder();
logMessage.append("Incoming message :").append(System.lineSeparator())
.append(konvxMessage)
.append(System.lineSeparator());
logger.info(logMessage.toString());
out.add(konvxMessage);
} catch (Throwable e) {
logger.error("ERROR : While receiving/parsing/decoding the message " + msg, e);
new Packet(ctx).writeMessage(KonvxMessageFactory.getParseFailedErrorMessage(msg));
}
}
}
Please help: how can I fine-tune Netty to improve file transfer performance over the socket between the mobile device and my Java server?

netty4: How to listen on multiple ports in a java process

I'm trying to listen for connections on two different ports.
I start two threads in a Java main method; each thread binds a port with netty4, but the listening doesn't succeed!
Here is my code. Port 3333 is OK, but 1234 is not; it looks like 3333 is blocking!
public class ObjectServer
{
private static final Logger logger = LoggerFactory.getLogger(ObjectServer.class);
private String ip;
private int port;
public ObjectServer(int port)
{
this.port = port;
}
public void run(final ChannelInboundHandlerAdapter handler) throws Exception
{
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try
{
ServerBootstrap server = new ServerBootstrap();
server.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).childHandler(new ChannelInitializer<SocketChannel>()
{
@Override
public void initChannel(SocketChannel ch) throws Exception
{
ch.pipeline().addLast(new ObjectEncoder(), new ObjectDecoder(ClassResolvers.cacheDisabled(null)), handler);
}
});
server.bind(port).sync().channel().closeFuture().sync();
}
catch (Exception e)
{
logger.error("开启监听失败!端口[" + port + "]", e);
throw e;
}
finally
{
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
}
public class SocketServer
{
private static final Logger logger = LoggerFactory.getLogger(SocketServer.class);
private static final StringDecoder DECODER = new StringDecoder();
private static final StringEncoder ENCODER = new StringEncoder();
private int port;
public SocketServer(int port)
{
this.port = port;
}
public void run(final ChannelInboundHandlerAdapter handler) throws Exception
{
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try
{
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).childHandler(new ChannelInitializer<SocketChannel>()
{
@Override
public void initChannel(SocketChannel ch) throws Exception
{
ChannelPipeline pipeline = ch.pipeline();
// Add the text line codec combination first,
pipeline.addLast("framer", new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
// the encoder and decoder are static as these are
// sharable
pipeline.addLast("encoder", ENCODER);
pipeline.addLast("decoder", DECODER);
// and then business logic.
pipeline.addLast("handler", handler);
}
});
b.bind(port).sync().channel().closeFuture().sync();
}
catch (Exception e)
{
logger.error("开启监听失败!端口[" + port + "]", e);
throw e;
}
finally
{
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
}
public class Test
{
public static void main(String[] args) throws Exception
{
Thread1 thread1 = new Thread1();
Thread2 thread2 = new Thread2();
thread2.start();
thread1.start();
new SocketClient("192.168.16.52", 3333).run(new TestHandler4("test4"));
new ObjectClient("192.168.16.52", 1234).run(new TestHandler3("test3"));
}
@Sharable
static class TestHandler1 extends ChannelInboundHandlerAdapter
{
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception
{
System.out.println("1234" + msg);
}
}
static class Thread1 extends Thread
{
@Override
public void run()
{
try
{
new ObjectServer(1234).run(new TestHandler1());
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
static class Thread2 extends Thread
{
@Override
public void run()
{
try
{
new SocketServer(3333).run(new TestHandler2());
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
@Sharable
static class TestHandler2 extends SimpleChannelInboundHandler<String>
{
@Override
public void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception
{
System.out.println("3333" + msg);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
System.out.println("sssssssssssssssss");
}
}
@Sharable
static class TestHandler3 extends ChannelInboundHandlerAdapter
{
private String msg;
public TestHandler3(String msg)
{
this.msg = msg;
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.writeAndFlush(msg);
}
}
@Sharable
static class TestHandler4 extends SimpleChannelInboundHandler<String>
{
private String msg;
public TestHandler4(String msg)
{
this.msg = msg;
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.writeAndFlush(msg);
}
@Override
protected void channelRead0(ChannelHandlerContext arg0, String arg1)throws Exception
{
}
}
}
In your run() implementation, you do this:
server.bind(port).sync().channel().closeFuture().sync();
...which will block until the server socket is closed. Because you never close the server socket, it will never return. Therefore, only the first server socket is ever bound.
What you probably want is to just bind and return, rather than waiting for the server sockets to close.
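A minimal sketch of that change (variable names are illustrative; it assumes both bootstraps are set up in one place instead of blocking inside each run()):

// sync() here waits only for the bind, not for the channel to close.
Channel objectChannel = objectServer.bind(1234).sync().channel();
Channel socketChannel = socketServer.bind(3333).sync().channel();
// ... later, block until both servers are closed before shutting down
// the event loop groups:
objectChannel.closeFuture().sync();
socketChannel.closeFuture().sync();

Note that the shutdownGracefully() calls in the finally blocks would also have to move after this point; as written, each run() tears down its event loop groups as soon as its blocking call returns.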

How to manipulate Message coming from Netty server/client

I am prototyping Netty client/server transfer of strings; now I want to write these strings to a file when they arrive on the server side.
Client:
private ClientBootstrap bootstrap;
private Channel connector;
private MyHandler handler=new MyHandler();
public boolean start() {
// Standard netty bootstrapping stuff.
Executor bossPool = Executors.newCachedThreadPool();
Executor workerPool = Executors.newCachedThreadPool();
ChannelFactory factory =
new NioClientSocketChannelFactory(bossPool, workerPool);
this.bootstrap = new ClientBootstrap(factory);
// Declared outside to fit under 80 char limit
final DelimiterBasedFrameDecoder frameDecoder =
new DelimiterBasedFrameDecoder(Integer.MAX_VALUE,
Delimiters.lineDelimiter());
this.bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
public ChannelPipeline getPipeline() throws Exception {
return Channels.pipeline(
handler,
frameDecoder,
new StringDecoder(),
new StringEncoder());
}
});
ChannelFuture future = this.bootstrap
.connect(new InetSocketAddress("localhost", 12345));
if (!future.awaitUninterruptibly().isSuccess()) {
System.out.println("--- CLIENT - Failed to connect to server at " +
"localhost:12345.");
this.bootstrap.releaseExternalResources();
return false;
}
this.connector = future.getChannel();
return this.connector.isConnected();
}
public void stop() {
if (this.connector != null) {
this.connector.close().awaitUninterruptibly();
}
this.bootstrap.releaseExternalResources();
System.out.println("--- CLIENT - Stopped.");
}
public boolean sendMessage(String message) {
if (this.connector.isConnected()) {
// Append \n if it's not present, because of the frame delimiter
if (!message.endsWith("\n")) {
this.connector.write(message + '\n');
} else {
this.connector.write(message);
}
System.out.print(message);
return true;
}
return false;
}
Server:
private final String id;
private ServerBootstrap bootstrap;
private ChannelGroup channelGroup;
private MyHandler handler= new MyHandler();
public Server(String id) {
this.id = id;
}
// public methods ---------------------------------------------------------
public boolean start() {
// Pretty standard Netty startup stuff...
// boss/worker executors, channel factory, channel group, pipeline, ...
Executor bossPool = Executors.newCachedThreadPool();
Executor workerPool = Executors.newCachedThreadPool();
ChannelFactory factory =
new NioServerSocketChannelFactory(bossPool, workerPool);
this.bootstrap = new ServerBootstrap(factory);
this.channelGroup = new DefaultChannelGroup(this.id + "-all-channels");
// declared here to fit under the 80 char limit
final ChannelHandler delimiter =
new DelimiterBasedFrameDecoder(Integer.MAX_VALUE,
Delimiters.lineDelimiter());
this.bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
@Override
public ChannelPipeline getPipeline() throws Exception {
SimpleChannelHandler handshakeHandler =
new SimpleChannelHandler();
return Channels.pipeline(
handler,
delimiter,
new StringDecoder(),
new StringEncoder(),
handshakeHandler);
}
});
Channel acceptor = this.bootstrap.bind(new InetSocketAddress(12345));
if (acceptor.isBound()) {
System.out.println("+++ SERVER - bound to *:12345");
this.channelGroup.add(acceptor);
return true;
} else {
System.err.println("+++ SERVER - Failed to bind to *:12345");
this.bootstrap.releaseExternalResources();
return false;
}
}
public void stop() {
this.channelGroup.close().awaitUninterruptibly();
this.bootstrap.releaseExternalResources();
System.err.println("+++ SERVER - Stopped.");
}
Handlers used:
Client handler:
public class MyHandler extends SimpleChannelUpstreamHandler{
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
throws Exception {
if(e.getMessage() instanceof String){
System.out.println((String)e.getMessage());
}
System.out.println(e.getMessage().toString());
}
}
Server handler:
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent e)
throws Exception {
Channel channel= ctx.getChannel();
channel.write(e.getMessage());
if(e.getMessage() instanceof String){
System.out.println((String)e.getMessage());
}
System.out.println(e.getMessage().toString());
}
client runner:
public static void main(String[] args) throws InterruptedException {
final int nMessages = 5;
try {
Client c = new Client();
if (!c.start()) {
return;
}
for (int i = 0; i < nMessages; i++) {
Thread.sleep(1L);
c.sendMessage((i + 1) + "\n");
}
c.stop();
} catch (InterruptedException e) {
e.printStackTrace();
}
}
Server Runner:
public static void main(String[] args) {
final Server s = new Server("server1");
if (!s.start()) {
return;
}
Runtime.getRuntime().addShutdownHook(new Thread() {
@Override
public void run() {
s.stop();
}
});
}
Now what I really need is to print the message that I wrote to the channel on both the client and server side, and I am really puzzled by this.
Your pipeline creation seems to be wrong at first look. On the server side, when decoding, the DelimiterBasedFrameDecoder needs to come first, then the StringDecoder, and then the business handler. You could probably track this down by putting breakpoints in these decoders and encoders. Also take a look at this link for very good documentation on how this works.
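For illustration, the server's getPipeline() with that ordering would look something like this (same Netty 3.x API as the question; the no-op handshakeHandler is omitted):

public ChannelPipeline getPipeline() throws Exception {
    return Channels.pipeline(
            new DelimiterBasedFrameDecoder(Integer.MAX_VALUE, Delimiters.lineDelimiter()),
            new StringDecoder(),   // upstream: framed bytes -> String
            new StringEncoder(),   // downstream: String -> bytes
            handler);              // business handler now sees complete decoded lines
}

Creating a new DelimiterBasedFrameDecoder per pipeline also matters: the frame decoder keeps per-connection state, so a single instance must not be shared across channels. The client pipeline needs the same reordering, since it also places the handler first.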
