StreamCorruptedException occurs during object decoding - java

I am writing a Netty client and server application that will write JVM GC stats from around 300 servers to a time-series database for analysis. However, my pipeline is throwing lots of these exceptions:
10/02/2012 10:14:23.415 New I/O server worker #2-2 ERROR GCEventsCollector - netty error
java.io.StreamCorruptedException: invalid type code: 6E
at java.io.ObjectInputStream.readObject0(Unknown Source)
at java.io.ObjectInputStream.defaultReadFields(Unknown Source)
at java.io.ObjectInputStream.readSerialData(Unknown Source)
at java.io.ObjectInputStream.readOrdinaryObject(Unknown Source)
at java.io.ObjectInputStream.readObject0(Unknown Source)
at java.io.ObjectInputStream.readObject(Unknown Source)
at org.jboss.netty.handler.codec.serialization.ObjectDecoder.decode(ObjectDecoder.java:94)
at org.jboss.netty.handler.codec.frame.FrameDecoder.callDecode(FrameDecoder.java:282)
at org.jboss.netty.handler.codec.frame.FrameDecoder.messageReceived(FrameDecoder.java:216)
at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:274)
at org.jboss.netty.channel.Channels.fireMessageReceived(Channels.java:261)
at org.jboss.netty.channel.socket.nio.NioWorker.read(NioWorker.java:349)
at org.jboss.netty.channel.socket.nio.NioWorker.processSelectedKeys(NioWorker.java:280)
at org.jboss.netty.channel.socket.nio.NioWorker.run(NioWorker.java:200)
at java.util.concurrent.ThreadPoolExecutor$Worker.runTask(Unknown Source)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source)
at java.lang.Thread.run(Unknown Source)
It looks like it is creating an OutputStream when one already exists, so it throws that specific exception. It appears in my AIT environment, where >300 servers are connecting, but not in DEV, where only one agent is connecting.
I suspect it is either a bug in the object decoder or incorrect code somewhere on my side. Could anyone please explain why this happens?
Here is the collector:
public class GCEventsCollector extends AbstractDataCollector {
protected static final Logger logger = Logger.getLogger(GCEventsCollector.class);
private static final ExecutorService WORKERS = Executors.newCachedThreadPool();
private static final ChannelGroup GROUP = new DefaultChannelGroup("gc-events");
private final int port;
private final ServerBootstrap bootstrap;
public GCEventsCollector(final int port) {
logger.info("Creating GC Events collector on port " + port);
this.port = port;
this.bootstrap = newServerBootstrap(port);
}
/**
* Creates a bootstrap for creating bindings to sockets. Each channel has a pipeline, which contains the
* logic for handling a message such as encoding, decoding, buffering, etc.
*
* @param port port of socket
* @return configured bootstrap
*/
private ServerBootstrap newServerBootstrap(int port) {
ExecutorService bossExecutor = Executors.newCachedThreadPool();
ExecutorService workerExecutor = Executors.newCachedThreadPool();
NioServerSocketChannelFactory channelFactory =
new NioServerSocketChannelFactory(bossExecutor, workerExecutor);
ServerBootstrap bootstrap = new ServerBootstrap(channelFactory);
ChannelHandler collectorHandler = new CollectorHandler();
bootstrap.setPipelineFactory(new CollectorPipelineFactory(collectorHandler));
bootstrap.setOption("localAddress", new InetSocketAddress(port));
return bootstrap;
}
protected KDBCategory[] getKDBCategories() {
return new KDBCategory[] { KDBCategory.GC_EVENTS };
}
/**
* Bind to a socket to accept messages
*
* @throws Exception
*/
public void doStart() throws Exception {
Channel channel = bootstrap.bind();
GROUP.add(channel);
}
/**
* Disconnect the channel to stop accepting messages and wait until disconnected
*
* @throws Exception
*/
public void doStop() throws Exception {
logger.info("disconnecting");
GROUP.close().awaitUninterruptibly();
}
class CollectorHandler extends SimpleChannelHandler {
@Override
public void channelOpen(ChannelHandlerContext ctx, ChannelStateEvent e)
throws Exception {
super.channelOpen(ctx, e);
GROUP.add(e.getChannel());
}
@Override
public void channelConnected(ChannelHandlerContext ctx,
ChannelStateEvent e) throws Exception {
super.channelConnected(ctx, e);
logger.info("channel connected");
}
@Override
public void channelDisconnected(ChannelHandlerContext ctx,
ChannelStateEvent e) throws Exception {
super.channelDisconnected(ctx, e);
logger.info("channel disconnected");
}
@Override
public void messageReceived(ChannelHandlerContext ctx, final MessageEvent e) throws Exception {
if (logger.isDebugEnabled()) {
logger.debug("Received GcStats event: " + e.toString());
}
WORKERS.execute(new Runnable() {
public void run() {
saveData(KDBCategory.GC_EVENTS, (GcEventsPersister) e.getMessage());
}
});
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e) throws Exception {
logger.error("netty error", e.getCause());
}
}
private static class CollectorPipelineFactory implements ChannelPipelineFactory {
private final ChannelHandler handler;
private CollectorPipelineFactory(ChannelHandler handler) {
this.handler = handler;
}
@Override
public ChannelPipeline getPipeline() throws Exception {
return Channels.pipeline(new ObjectDecoder(), handler);
}
}
}
Here is the agent:
public class GCEventsAgent {
private final static Logger logger = Logger.getLogger(GCEventsAgent.class);
private static final ExecutorService bosses = Executors.newCachedThreadPool();
private static final ExecutorService workers = Executors.newCachedThreadPool();
private static final Timer timer = new HashedWheelTimer();
private static final String localHostName;
private static final ParseExceptionListener exceptionListener = new ExceptionListener();
static {
String name = "";
try {
name = InetAddress.getLocalHost().getHostName();
} catch (UnknownHostException e) {
logger.error("cannot retrieve local host name", e);
}
localHostName = name;
}
public static void main(final String[] args) {
checkArgument(args.length >= 3, "Usage: GCEventsAgent [log4j cfg] [mba cfg] [server cfg] [process 1] ... [process n]");
System.setProperty("log4j.configuration", "file:log4j.properties");
final String log4jConfig = args[0];
DOMConfigurator.configure(log4jConfig);
final String mbaConfig = args[1];
final String serverConfigPath = args[2];
final ServerConfig serverCfg = new ServerConfig(serverConfigPath);
setup(serverCfg, args);
}
private static void setup(ServerConfig cfg, String[] args) {
final String host = cfg.getParameter(String.class, "host");
final int port = cfg.getParameter(Integer.class, "port");
if (args.length == 3)
configurePolling(cfg, host, port);
else
configureProcesses(cfg, args, host, port);
}
private static void configureProcesses(final ServerConfig cfg,
final String[] args,
final String host,
final int port) {
final List<File> logFiles = logFiles(cfg, args);
logger.info("Initializing GC Agent for [" + host + ":" + port + "]");
final NioClientSocketChannelFactory channelFactory =
new NioClientSocketChannelFactory(bosses, workers);
final ClientBootstrap bootstrap = new ClientBootstrap(channelFactory);
bootstrap.setOption("remoteAddress", new InetSocketAddress(host, port));
final GCParserFactory parserFactory = new DefaultParserFactory();
final AgentProcessHandler agentHandler =
new AgentProcessHandler(bootstrap, logFiles, parserFactory);
bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
@Override
public ChannelPipeline getPipeline() throws Exception {
return Channels.pipeline(new ObjectEncoder(), agentHandler);
}
});
bootstrap.connect().awaitUninterruptibly();
}
private static void configurePolling(ServerConfig cfg, String host, int port) {
final int frequency = cfg.getParameter(Integer.class, "frequency");
final NioClientSocketChannelFactory channelFactory =
new NioClientSocketChannelFactory(newCachedThreadPool(), newCachedThreadPool());
final ClientBootstrap bootstrap = new ClientBootstrap(channelFactory);
bootstrap.setOption("remoteAddress", new InetSocketAddress(host, port));
final GcParserSupplier parserSupplier = new GcParserSupplier();
final ConcurrentMap<File, Tailer> tailerMap = Maps.newConcurrentMap();
final ParseExceptionListener exceptionListener = new ExceptionListener();
final Set<File> discoveredSet = Sets.newHashSet();
final File directory = new File(cfg.getParameter(String.class, "logDirectory"));
final TailManager tailManager =
new TailManager(discoveredSet, tailerMap, parserSupplier, exceptionListener, localHostName);
final DetectionTask detectionTask = new DetectionTask(directory, discoveredSet, tailManager);
final FileWatcher fileWatcher =
new FileWatcher(Executors.newScheduledThreadPool(1), detectionTask, frequency);
final Timer timer = new HashedWheelTimer();
final EfxAgentHandler agentHandler =
new EfxAgentHandler(bootstrap, tailManager, fileWatcher, timer);
bootstrap.setPipelineFactory(new ChannelPipelineFactory() {
@Override public ChannelPipeline getPipeline() throws Exception {
return Channels.pipeline(new ObjectEncoder(), agentHandler);
}
});
bootstrap.connect().awaitUninterruptibly();
}
private static List<File> logFiles(ServerConfig cfg, String[] args) {
String logDir = cfg.getParameter(String.class, "logDirectory");
final int n = args.length;
List<File> logFiles = new ArrayList<File>(n-3);
for (int i = 3; i < n; i++) {
String filePath = logDir + args[i] + ".gc.log";
try {
File file = new File(filePath);
if (!file.exists()) {
logger.info("missing log file so creating empty file at path: " + filePath);
File dir = file.getParentFile();
dir.mkdirs();
if (file.createNewFile())
logger.info("successfully created empty file at path: " + filePath);
}
logFiles.add(file);
} catch (IOException e) {
logger.error("error creating log file at path: " + filePath);
}
}
return logFiles;
}
static final class AgentPauseListener implements GCEventListener<CMSType, CMSHeap> {
private final Channel channel;
private final GcEventsPersister.Builder builder;
private byte state;
private AgentPauseListener(Channel channel,
GcEventsPersister.Builder builder) {
this.channel = channel;
this.builder = builder;
}
@Override
public void onPause(PauseDetail<CMSType> pauseDetail) {
logger.info("onPause");
checkState(state == 0x00 || state == 0x01);
builder
.collectionType(pauseDetail.getType() == null
? null : pauseDetail.getType().toString())
.start(new Instant(pauseDetail.getStartTimestamp()))
.end(new Instant(pauseDetail.getEndTimestamp()))
.pause(pauseDetail.getType() == null
? false : pauseDetail.getType().isPause())
.duration(pauseDetail.getPauseMillis());
if (state == 0x00)
channel.write(builder.build());
else
state |= 0x02;
}
@Override
public void onHeapBefore(HeapDetail<CMSHeap> details) {
logger.info("onHeapBefore");
checkState(state == 0x00);
builder.heapBefore(used(details)).heapBeforeTotal(total(details));
state |= 0x01;
}
@Override
public void onHeapAfter(HeapDetail<CMSHeap> details) {
logger.info("onHeapAfter");
checkState(state == 0x03);
builder.heapAfter(used(details)).heapAfterTotal(total(details));
channel.write(builder.build());
state = 0x00;
}
private final long used(HeapDetail<CMSHeap> details) {
return used(details, CMSHeap.PAR_NEW_GENERATION)
+ used(details, CMSHeap.CMS_GENERATION)
+ used(details, CMSHeap.CMS_PERM_GENERATION);
}
private final long used(HeapDetail<CMSHeap> heapDetail,
CMSHeap gen) {
final Map<CMSHeap, HeapDetail.HeapMetric> sizes = heapDetail.getSizes();
final long used = sizes.get(gen).getUsed();
logger.info("used = " + used);
return used;
}
private final long total(HeapDetail<CMSHeap> details) {
return total(details, CMSHeap.PAR_NEW_GENERATION)
+ total(details, CMSHeap.CMS_GENERATION)
+ total(details, CMSHeap.CMS_PERM_GENERATION);
}
private final long total(HeapDetail<CMSHeap> heapDetail,
CMSHeap gen) {
final Map<CMSHeap, HeapDetail.HeapMetric> sizes = heapDetail.getSizes();
return sizes.get(gen).getTotal();
}
}
static final class ExceptionListener implements ParseExceptionListener {
@Override
public void onParseError(int lineNo, String input, String error) {
logger.error("error parsing: " + lineNo + " - " + input + " - " + error);
}
}
static final class ReconnectTask implements TimerTask {
private final ClientBootstrap bootstrap;
ReconnectTask(ClientBootstrap bootstrap) {
this.bootstrap = bootstrap;
}
@Override
public void run(Timeout timeout) throws Exception {
bootstrap.connect();
}
}
static class AgentProcessHandler extends SimpleChannelHandler {
private final ClientBootstrap bootstrap;
private final List<File> logFiles;
private final GCParserFactory parserFactory;
private final Set<Tailer> tailers = new HashSet<Tailer>(4);
public AgentProcessHandler(ClientBootstrap bootstrap,
List<File> logFiles,
GCParserFactory parserFactory) {
this.bootstrap = bootstrap;
this.logFiles = logFiles;
this.parserFactory = parserFactory;
}
@Override
public void channelConnected(ChannelHandlerContext ctx, ChannelStateEvent e)
throws Exception {
logger.info("channel connected");
for (File logFile : logFiles) {
logger.info("setting up scraper for logfile: " + logFile);
GCParser parser = parserFactory.getParser();
GcEventsPersister.Builder builder =
new GcEventsPersister.Builder(logFile.getName(), localHostName);
GCEventListener gcEventListener =
new AgentPauseListener(e.getChannel(), builder);
TailerListener listener =
new LogLineListener(parser, gcEventListener, exceptionListener);
Tailer tailer = Tailer.create(logFile, listener, 1000L, true);
tailers.add(tailer);
}
}
@Override
public void channelDisconnected(ChannelHandlerContext ctx, ChannelStateEvent e)
throws Exception {
logger.error("channel disconnected");
stopTailers();
scheduleReconnect();
}
private void scheduleReconnect() {
timer.newTimeout(new ReconnectTask(bootstrap), 5L, TimeUnit.SECONDS);
}
private final void stopTailers() {
for (Tailer tailer : tailers) {
tailer.stop();
}
tailers.clear();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e)
throws Exception {
final Throwable cause = e.getCause();
logger.error(cause);
if (cause instanceof ConnectException) {
stopTailers();
scheduleReconnect();
}
}
};
private static class LogLineListener extends TailerListenerAdapter {
private final GCParser parser;
private final GCEventListener pauseListener;
private final ParseExceptionListener exceptionLister;
public LogLineListener(GCParser parser,
GCEventListener listener,
ParseExceptionListener exceptionLister) {
this.parser = parser;
this.pauseListener = listener;
this.exceptionLister = exceptionLister;
}
@Override
public void handle(String line) {
logger.info("handle(String line)");
parser.matchLine(line, pauseListener, exceptionLister);
}
}
private static final class GcParserSupplier
implements Supplier<GCParser<CMSType, CMSHeap>> {
@Override public GCParser<CMSType, CMSHeap> get() {
return new CMSParser();
}
}
private static final class TailManager implements FileHandler {
private final Set<File> discoveredSet;
private final ConcurrentMap<File, Tailer> tailerMap;
private final Supplier<GCParser<CMSType, CMSHeap>> parserSupplier;
private final ParseExceptionListener exceptionListener;
private final String host;
private volatile boolean go;
private TailManager(final Set<File> discoveredSet,
final ConcurrentMap<File, Tailer> tailerMap,
final Supplier<GCParser<CMSType, CMSHeap>> parserSupplier,
final ParseExceptionListener exceptionListener,
final String host) {
this.discoveredSet = discoveredSet;
this.tailerMap = tailerMap;
this.parserSupplier = parserSupplier;
this.exceptionListener = exceptionListener;
this.host = host;
}
public void stop() {
go = false;
for (Tailer tailer : tailerMap.values())
tailer.stop();
tailerMap.clear();
}
public void start() {
go = true;
}
@Override public void onNew(final File file,
final Channel channel) {
checkState(go);
GCParser<CMSType, CMSHeap> parser = parserSupplier.get();
String fileName = file.getName();
GcEventsPersister.Builder builder =
new GcEventsPersister.Builder(fileName, host);
AgentPauseListener eventListener =
new AgentPauseListener(channel, builder);
Function<Void, Void> removeTail = new Function<Void, Void>() {
@Override
public Void apply(@Nullable final Void input) {
final Tailer tailer = tailerMap.remove(file);
tailer.stop();
discoveredSet.remove(file);
return null;
}
};
GcTailAdaptor listener =
new GcTailAdaptor(logger, parser, eventListener, exceptionListener, removeTail);
tailerMap.put(file, Tailer.create(file, listener, 1000L, true));
}
@Override public void onDelete(File file, Channel channel) {
checkState(go);
final Tailer tailer = tailerMap.remove(file);
tailer.stop();
}
}
static class EfxAgentHandler extends SimpleChannelHandler {
private final ClientBootstrap bootstrap;
private final TailManager tailManager;
private final FileWatcher fileWatcher;
private final Timer timer;
public EfxAgentHandler(ClientBootstrap bootstrap,
TailManager tailManager,
FileWatcher fileWatcher,
Timer timer) {
this.bootstrap = bootstrap;
this.tailManager = tailManager;
this.fileWatcher = fileWatcher;
this.timer = timer;
}
@Override public void channelConnected(ChannelHandlerContext ctx,
ChannelStateEvent e) throws Exception {
logger.info("channel connected");
tailManager.start();
fileWatcher.start(e.getChannel());
}
@Override public void channelDisconnected(ChannelHandlerContext ctx,
ChannelStateEvent e) throws Exception {
logger.error("channel disconnected");
tailManager.stop();
fileWatcher.stop();
scheduleReconnect();
}
private void scheduleReconnect() {
timer.newTimeout(new ReconnectTask(bootstrap), 5L, TimeUnit.SECONDS);
}
@Override public void exceptionCaught(ChannelHandlerContext ctx,
ExceptionEvent e) throws Exception {
final Throwable cause = e.getCause();
logger.error(cause);
if (cause instanceof ConnectException) {
tailManager.stop();
scheduleReconnect();
}
}
}
static final class GcTailAdaptor extends TailerListenerAdapter {
private final Logger logger;
private final GCParser parser;
private final GCEventListener eventListener;
private final ParseExceptionListener exceptionListener;
private final Function<Void, Void> removeTail;
private volatile long lastTail;
GcTailAdaptor(final Logger logger,
final GCParser parser,
final GCEventListener eventListener,
final ParseExceptionListener exceptionListener,
final Function<Void, Void> removeTail) {
this.logger = logger;
this.parser = parser;
this.eventListener = eventListener;
this.exceptionListener = exceptionListener;
this.removeTail = removeTail;
}
@Override public void handle(String line) {
lastTail();
parser.matchLine(line, eventListener, exceptionListener);
}
private final void lastTail() {
final long t = System.currentTimeMillis();
if (lastTail == 0L) {
lastTail = t;
return;
}
if ((t-lastTail)>=1800000L)
removeTail.apply(null);
}
@Override public void handle(Exception ex) {
logger.error(ex);
}
}
@VisibleForTesting
final static class DetectionTask implements Runnable {
private final File directory;
private final Set<File> discovered;
private final FileHandler handler;
private volatile Channel channel;
DetectionTask(final File directory,
final Set<File> discovered,
final FileHandler handler) {
this.discovered = discovered;
this.handler = handler;
this.directory = directory;
}
public void setChannel(Channel channel) {
this.channel = channel;
}
public boolean removeStaleFile(File file) {
checkArgument(discovered.contains(file),
"file is not discovered so cannot be stale");
return discovered.remove(file);
}
public void run() {
final File[] files = directory.listFiles();
for (int i=0, n=files.length; i<n; i++) {
final File file = files[i];
synchronized (discovered) {
if (!discovered.contains(file)) {
discovered.add(file);
handler.onNew(file, channel);
}
}
}
final ImmutableSet<File> logFiles = ImmutableSet.copyOf(files);
final ImmutableSet<File> diff = Sets.difference(discovered, logFiles).immutableCopy();
for (File file : diff) {
discovered.remove(file);
handler.onDelete(file, channel);
}
}
}
@VisibleForTesting static interface FileHandler {
void onNew(File file, Channel channel);
void onDelete(File file, Channel channel);
}
@VisibleForTesting
final static class FileWatcher {
private final ScheduledExecutorService executor;
private final DetectionTask detectionTask;
private final int frequency;
private volatile ScheduledFuture<?> task;
@VisibleForTesting
FileWatcher(ScheduledExecutorService executor,
DetectionTask detectionTask,
int frequency) {
this.executor = executor;
this.detectionTask = detectionTask;
this.frequency = frequency;
}
public void start(Channel chanel) {
task = executor.scheduleAtFixedRate(detectionTask, 0L, frequency, TimeUnit.SECONDS);
detectionTask.setChannel(chanel);
}
public void stop() {
task.cancel(true);
detectionTask.setChannel(null);
}
public static FileWatcher on(File directory,
FileHandler handler,
int frequency) {
checkNotNull(directory);
checkNotNull(handler);
checkArgument(directory.isDirectory(), "file is not a directory");
checkArgument(directory.canRead(), "no read access to directory");
checkArgument(0 < frequency, "frequency must be > 0");
final HashSet<File> objects = Sets.newHashSet();
final DetectionTask task = new DetectionTask(directory, objects, handler);
final ScheduledExecutorService executorService = Executors.newScheduledThreadPool(1);
return new FileWatcher(executorService, task, frequency);
}
}
}

I have incorrect code somewhere.
Correct. Specifically, you almost certainly have different ObjectInput/OutputStream lifetimes at server and client. Use the same streams for the life of the socket, and don't do any I/O over those sockets via any other means.
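As an illustration, here is a minimal sketch (plain java.io, with a hypothetical StatsSender class) of that rule: one ObjectOutputStream is created per socket and reused for every write, never re-wrapped per message. The receiving side keeps a single ObjectInputStream for the same socket in the same way.
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.net.Socket;

// Hypothetical sender: one ObjectOutputStream per socket, kept for the
// socket's whole lifetime.
final class StatsSender implements AutoCloseable {
    private final Socket socket;
    private final ObjectOutputStream out;

    StatsSender(String host, int port) throws IOException {
        this.socket = new Socket(host, port);
        this.out = new ObjectOutputStream(socket.getOutputStream());
    }

    void send(Serializable event) throws IOException {
        out.writeObject(event); // same stream, same serialization context
        out.flush();
    }

    @Override
    public void close() throws IOException {
        out.close(); // also closes the underlying socket stream
        socket.close();
    }
}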

I discovered why this happens. I am using a deprecated ObjectDecoder that's not compatible with my client's ObjectEncoder. I am just sending a ByteBuffer instead and it's fine now.
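For anyone who prefers to keep Java serialization rather than switching to raw buffers, a minimal sketch (Netty 3.x, assuming the handlers from the code above) of the matched pair keeps ObjectEncoder on the agent and the ClassResolver-based ObjectDecoder on the collector, so both sides use the same length-prefixed wire format:
import org.jboss.netty.channel.ChannelHandler;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.handler.codec.serialization.ClassResolvers;
import org.jboss.netty.handler.codec.serialization.ObjectDecoder;
import org.jboss.netty.handler.codec.serialization.ObjectEncoder;

final class SerializationPipelines {
    // Collector (server) side: decode length-prefixed serialized objects.
    static ChannelPipelineFactory server(final ChannelHandler collectorHandler) {
        return new ChannelPipelineFactory() {
            public ChannelPipeline getPipeline() throws Exception {
                return Channels.pipeline(
                        new ObjectDecoder(ClassResolvers.cacheDisabled(null)),
                        collectorHandler);
            }
        };
    }

    // Agent (client) side: encode objects with the matching length prefix.
    static ChannelPipelineFactory client(final ChannelHandler agentHandler) {
        return new ChannelPipelineFactory() {
            public ChannelPipeline getPipeline() throws Exception {
                return Channels.pipeline(new ObjectEncoder(), agentHandler);
            }
        };
    }
}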

Related

Netty client not receiving the complete data sent by the Server

I am designing a Netty based solution to transfer a file from Server to the Client over TCP. The client specifies the location of the file and then the server sends the file to the client.
Currently, the solution works fine for small files (< 2MB of data).
If the file to be sent is larger than ~5MB, only partial data is received, and the amount varies (it is not the same amount of data every time). The server log, however, shows that the server has sent the complete file.
The issue is that the client is not receiving the complete data sent by the server. What is wrong in my code below? Can someone point me in the right direction?
Below is my client, server and their handlers:
(For brevity, I have listed only the methods that are of importance.)
Client:
public class FileClient {
private final static int PORT = 8992;
private final static String HOST = "127.0.0.1";
public class ClientChannelInitializer extends ChannelInitializer<SocketChannel> {
private SslContext sslContext = null;
private String srcFile = "";
private String destFile = "";
public ClientChannelInitializer(String srcFile, String destFile, SslContext sslCtx) {
this.sslContext = sslCtx;
this.srcFile = srcFile;
this.destFile = destFile;
}
@Override
protected void initChannel(SocketChannel socketChannel) throws Exception {
ChannelPipeline pipeline = socketChannel.pipeline();
pipeline.addLast(sslContext.newHandler(socketChannel.alloc(), HOST, PORT));
pipeline.addLast("clientHandler", new FileClientHandler(srcFile, destFile));
}
}
private void startUp(String srcFile, String destFile) throws Exception {
SslContext sslCtx = SslContextBuilder.forClient().trustManager(InsecureTrustManagerFactory.INSTANCE).build();
EventLoopGroup workerGroup = new NioEventLoopGroup();
Bootstrap clientBootstrap = new Bootstrap();
clientBootstrap.group(workerGroup);
clientBootstrap.channel(NioSocketChannel.class);
clientBootstrap.option(ChannelOption.TCP_NODELAY, true);
clientBootstrap.handler(new LoggingHandler(LogLevel.INFO));
clientBootstrap.handler(new ClientChannelInitializer(srcFile, destFile, sslCtx));
Channel channel = clientBootstrap.connect(new InetSocketAddress(HOST, PORT)).sync().channel();
channel.closeFuture().sync();
}
public static void main(String[] args) throws Exception {
String src = "/Users/home/src/test.mp4";
String dest = "/Users/home/dest/test.mp4";
new FileClient().startUp(src, dest);
}
}
ClientHandler:
public class FileClientHandler extends SimpleChannelInboundHandler<ByteBuf> {
private final String sourceFileName;
private OutputStream outputStream;
private Path destFilePath;
private byte[] buffer = new byte[0];
public FileClientHandler(String SrcFileName, String destFileName) {
this.sourceFileName = SrcFileName;
this.destFilePath = Paths.get(destFileName);
System.out.println("DestFilePath-" + destFilePath);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception {
ctx.writeAndFlush(ToByteBuff(this.sourceFileName));
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf byteBuff) throws Exception {
if (this.outputStream == null) {
Files.createDirectories(this.destFilePath.getParent());
if (Files.exists(this.destFilePath)) {
Files.delete(this.destFilePath);
}
this.outputStream = Files.newOutputStream(this.destFilePath, StandardOpenOption.CREATE,
StandardOpenOption.APPEND);
}
int size = byteBuff.readableBytes();
if (size > this.buffer.length) {
this.buffer = new byte[size];
}
byteBuff.readBytes(this.buffer, 0, size);
this.outputStream.write(this.buffer, 0, size);
}
}
FileServer:
public class FileServer {
private final int PORT = 8992;
public void run() throws Exception {
SelfSignedCertificate ssc = new SelfSignedCertificate();
final SslContext sslCtx = SslContextBuilder.forServer(ssc.certificate(), ssc.privateKey()).build();
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).option(ChannelOption.SO_BACKLOG, 100)
.handler(new LoggingHandler(LogLevel.INFO)).childHandler(new ChannelInitializer<SocketChannel>() {
@Override
public void initChannel(SocketChannel ch) throws Exception {
ChannelPipeline pipeline = ch.pipeline();
pipeline.addLast(sslCtx.newHandler(ch.alloc()));
pipeline.addLast(new ChunkedWriteHandler());
pipeline.addLast(new FilServerFileHandler());
}
});
ChannelFuture f = b.bind(PORT).sync();
f.channel().closeFuture().sync();
} finally {
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
public static void main(String[] args) throws Exception {
new FileServer().run();
}
}
FileServerHandler:
public class FilServerFileHandler extends SimpleChannelInboundHandler<ByteBuf> {
@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf buff) throws Exception {
String filePathStr = buff.toString(CharsetUtil.UTF_8);
File file = new File(filePathStr);
RandomAccessFile raf = null;
ChannelFuture sendFileFuture;
try {
raf = new RandomAccessFile(file, "r");
sendFileFuture = ctx.writeAndFlush(new ChunkedNioFile(raf.getChannel()),
ctx.newProgressivePromise());
sendFileFuture.addListener(new ChannelProgressiveFutureListener() {
public void operationComplete(ChannelProgressiveFuture future) throws Exception {
System.err.println("Transfer complete.");
}
public void operationProgressed(ChannelProgressiveFuture future, long progress, long total)
throws Exception {
if (total < 0) { // total unknown
System.err.println("Transfer progress: " + progress);
} else {
System.err.println("Transfer progress: " + progress + " / " + total);
}
}
});
} catch (FileNotFoundException fnfe) {
} finally {
if (raf != null)
raf.close();
}
}
}
I have checked SO Q1 and SO Q2
I fixed your problem with a little tweak in FilServerFileHandler:
public class FileServerHandler extends SimpleChannelInboundHandler<ByteBuf> {
@Override
protected void channelRead0(ChannelHandlerContext ctx, ByteBuf buff) throws Exception {
String filePathStr = buff.toString(CharsetUtil.UTF_8);
File file = new File(filePathStr);
RandomAccessFile raf = new RandomAccessFile(file, "r");
ChannelFuture sendFileFuture;
try {
sendFileFuture = ctx.writeAndFlush(new ChunkedNioFile(raf.getChannel()), ctx.newProgressivePromise());
sendFileFuture.addListener(new ChannelProgressiveFutureListener() {
public void operationComplete(ChannelProgressiveFuture future) throws Exception {
System.err.println("Transfer complete.");
if (raf != null) {
raf.close();
}
}
public void operationProgressed(ChannelProgressiveFuture future, long progress, long total)
throws Exception {
if (total < 0) { // total unknown
System.err.println("Transfer progress: " + progress);
} else {
System.err.println("Transfer progress: " + progress + " / " + total);
}
}
});
} catch (FileNotFoundException e) {
e.printStackTrace();
}
}
}
I moved raf.close() into the operationComplete method.
The partial transfer is caused by closing raf while the write operation is still in progress. Note that ctx.writeAndFlush is an asynchronous call, so the raf.close() in the finally block may run before the write operation completes, especially when the file is large.
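In other words, the file must stay open until Netty reports that the write has finished. Here is a minimal sketch (Netty 4, hypothetical helper method, assuming a ChunkedWriteHandler is in the pipeline as in the question) of the same close-on-complete pattern with a plain ChannelFutureListener:
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelFutureListener;
import io.netty.channel.ChannelHandlerContext;
import io.netty.handler.stream.ChunkedNioFile;

import java.io.RandomAccessFile;

final class SendFileThenClose {
    // Write the whole file, then close it from the completion callback
    // instead of a finally block.
    static void send(ChannelHandlerContext ctx, final RandomAccessFile raf) throws Exception {
        ChannelFuture f = ctx.writeAndFlush(new ChunkedNioFile(raf.getChannel()));
        f.addListener(new ChannelFutureListener() {
            @Override
            public void operationComplete(ChannelFuture future) throws Exception {
                raf.close(); // runs only after the write has succeeded or failed
                if (!future.isSuccess()) {
                    future.cause().printStackTrace();
                }
            }
        });
    }
}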

Publisher MQTT client stops working in Subscriber's Callback (Paho Java)

I'm trying to implement a fake broker (actually an MQTT publisher client inside an MQTT subscriber's callback). There are three separate publisher clients, each publishing random numbers between 0 and 1. The fake broker just sums these random numbers and publishes the result to another topic. (Maybe not the right way, but it is OK for now.) This solution works, but after a few incoming messages the broker stops working. I tried to debug it, but I only found ClassNotFound exceptions... Here are my FakeBroker and its Callback implementations.
public class FakeBroker implements Runnable{
public static final String BROKER_URL = "";
public static final String TOPIC_FAKE_A = "";
public static final String TOPIC_FAKE_B = "";
public static final String TOPIC_FAKE_C = "";
public static final String USER_NAME = "";
public static final char[] USER_PSW = "".toCharArray();
private MqttClient client;
private MqttConnectOptions options;
private SubscriberCallback callback;
public FakeBroker() {
options = new MqttConnectOptions();
options.setUserName(USER_NAME);
options.setPassword(USER_PSW);
options.setCleanSession(false);
callback = new SubscriberCallback();
try {
client = new MqttClient(BROKER_URL, MqttClient.generateClientId()+"-sub");
client.setCallback(callback);
} catch (MqttException e) {
e.printStackTrace();
System.exit(1);
}
}
public void start() {
try {
client.connect(options);
System.out.println("Fake Broker are connected to the cloud.");
client.subscribe(TOPIC_FAKE_A);
client.subscribe(TOPIC_FAKE_B);
client.subscribe(TOPIC_FAKE_C);
} catch (MqttException e) {
e.printStackTrace();
}
}
@Override
public void run() {
start();
}
}
And here is it's Callback
public class SubscriberCallback implements MqttCallback {
public static final String BROKER_URL = "";
public static final String TOPIC_FAKE_BROKER = "";
public static final String USER_NAME = "";
public static final char[] USER_PSW = "".toCharArray();
private MqttClient client;
private MqttConnectOptions options;
private int counter = 1;
private int result = 0;
public SubscriberCallback() {
try {
client = new MqttClient(BROKER_URL, "4-pub");
options = new MqttConnectOptions();
options.setPassword(USER_PSW);
options.setUserName(USER_NAME);
} catch (MqttException e) {
e.printStackTrace();
}
}
@Override
public void connectionLost(Throwable throwable) {
}
@Override
public void messageArrived(String topic, MqttMessage mqttMessage) throws Exception {
System.out.println("Message Arrived. Topic " + topic + " message: " +mqttMessage + " ---- Message Counter: " + counter);
int number = Integer.parseInt(mqttMessage.toString());
result += number;
if (counter%3 == 0) {
publishAway(new MqttMessage(Integer.toString(result).getBytes()));
result = 0;
}
incrementCounter();
}
private void publishAway(MqttMessage mqttMessage) throws MqttException {
client.connect(options);
final MqttTopic topicFakeBroker = client.getTopic(TOPIC_FAKE_BROKER);
topicFakeBroker.publish(mqttMessage);
client.disconnect();
System.out.println("Fake broker got the message " + mqttMessage + " and published away to" + topicFakeBroker.getName());
}
@Override
public void deliveryComplete(IMqttDeliveryToken iMqttDeliveryToken) {
}
private void incrementCounter() {
counter++;
}
}
Of course I use a valid BROKER_URL and valid topics, but this information is confidential. Thanks for your answers! :)

Why file transfer is very very slow on netty (4.1.5-Final)

In my socket-based server implementation using Netty 4.1.5-Final, when I transfer a video/image file in chunks (20K chunk size), I see a gap of around 350+ ms between two chunks, and I am not sure how to reduce that.
Here is my main server code:
public class MultimediaServer extends Thread implements IMultimediaServer, BeanFactoryAware {
/**
* Logger Instance
*/
protected Logger logger = Logger.getLogger(this.getClass());
@Autowired
private Properties props;
private RequestHandler requestHandler;
private BeanFactory beanFactory;
private int port;
private int maxConnection;
private int timeout = 30000;
private EventLoopGroup bossGroup = null;
private EventLoopGroup workerGroup = null;
@Override
public void run() {
try {
bossGroup = new NioEventLoopGroup();
workerGroup = new NioEventLoopGroup();
try {
ServerBootstrap serverBootstrap = new ServerBootstrap();
serverBootstrap.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class)
.childHandler(new ChannelInitializer<SocketChannel>() {
@Override
public void initChannel(SocketChannel ch) throws Exception {
ch.pipeline().addLast("frameDecoder", new LengthFieldBasedFrameDecoder(Integer.MAX_VALUE, 0, Const.PACKET_HEADER_LENGTH, 0, Const.PACKET_HEADER_LENGTH));
ch.pipeline().addLast("messageDecoder", new MessageDecoder());
ch.pipeline().addLast("frameEncoder", new ResponseHandler(Const.PACKET_HEADER_LENGTH));
ch.pipeline().addLast("bytesEncoder", new ByteArrayEncoder());
ch.pipeline().addLast(getHandler());
}
}).option(ChannelOption.SO_BACKLOG, maxConnection)
.option(ChannelOption.SO_KEEPALIVE, true)
.option(ChannelOption.TCP_NODELAY, true)
.option(ChannelOption.SO_REUSEADDR, true)
.option(ChannelOption.MAX_MESSAGES_PER_READ, Integer.MAX_VALUE)
.option(ChannelOption.CONNECT_TIMEOUT_MILLIS, timeout)
.option(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 32 * 1024)
.option(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 128 * 1024)
.option(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
.childOption(ChannelOption.SO_KEEPALIVE, true)
.childOption(ChannelOption.TCP_NODELAY, true)
.childOption(ChannelOption.SO_REUSEADDR, true)
.childOption(ChannelOption.CONNECT_TIMEOUT_MILLIS, timeout)
.childOption(ChannelOption.MAX_MESSAGES_PER_READ, Integer.MAX_VALUE)
.childOption(ChannelOption.ALLOCATOR, PooledByteBufAllocator.DEFAULT)
.childOption(ChannelOption.WRITE_BUFFER_LOW_WATER_MARK, 32 * 1024)
.childOption(ChannelOption.WRITE_BUFFER_HIGH_WATER_MARK, 128 * 1024);
// Bind and start to accept incoming connections.
ChannelFuture f = serverBootstrap.bind(this.port).sync();
// Wait until the server socket is closed.
// In this example, this does not happen, but you can do that to
// gracefully shut down your server.
f.channel().closeFuture().sync();
} finally {
workerGroup.shutdownGracefully();
bossGroup.shutdownGracefully();
}
} catch (Throwable e) {
logger.error("ERROR : While starting the Konvx service ", e);
}
}
@Override
public void startServer(int port) {
super.setName("KonvxMultimediaServer : " + port);
this.port = port;
this.start();
}
@Override
public void stopServer() {
workerGroup.shutdownGracefully();
bossGroup.shutdownGracefully();
}
public RequestHandler getRequestHandler() {
return requestHandler;
}
public void setRequestHandler(RequestHandler requestHandler) {
this.requestHandler = requestHandler;
}
/**
* Return Request Handler
* @return RequestHandler
*/
private RequestHandler getHandler() {
return (RequestHandler) beanFactory.getBean("requestHandler", RequestHandler.class);
}
@Override
public void setBeanFactory(BeanFactory beanFactory) throws BeansException {
this.beanFactory = beanFactory;
}
@Value("${konvx.maxConnection}")
public void setMaxConnection(String maxConnection) {
this.maxConnection = Integer.parseInt(maxConnection);
}
@Value("${konvx.socket.timeout}")
public void setTimeout(String timeout) {
this.timeout = Integer.parseInt(timeout);
}
}
Here is the channel handler
public class RequestHandler extends SimpleChannelInboundHandler<KonvxMessage> {
/**
* Logger Instance
*/
private Logger logger = Logger.getLogger(this.getClass());
@Autowired
private Router router;
@Autowired
protected UserPool userPool;
@Override
public void channelRead0(ChannelHandlerContext ctx, KonvxMessage message) throws Exception {
Packet packet = new Packet();
packet.setCtx(ctx);
try {
if (message == null) {
logger.warn("Warning - message is empty");
return;
}
// Reject the in-bound message if it does not carry the magic cookie
if (!message.hasCookie()) {
logger.error("ERROR: Bad Cookie :" + message);
return;
}
// Checking if user is a valid/registered to our application
if (!userPool.isValidUser(message.getUserId())) {
packet.writeMessage(KonvxMessageFactory.getInvalidUserMessage(message));
return;
}
packet.setInMessage(message);
router.route(packet);
} catch (Exception e) {
logger.error("ERROR : While receiving/processing the in-bound message ", e);
packet.writeMessage(KonvxMessageFactory.getErrorMessage(message, KonvxError.UNKNOWN_ERROR));
}
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) throws Exception {
ctx.flush();
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) throws Exception {
logger.warn("WARN : Connection problem - " + cause.getMessage() + " Client address :" + ctx.channel().remoteAddress());
ctx.close();
return;
}
}
Here is the decoder for the packet -
public class MessageDecoder extends ByteToMessageDecoder {
/**
* Logger Instance
*/
protected Logger logger = Logger.getLogger(this.getClass());
@Override
protected void decode(ChannelHandlerContext ctx, ByteBuf in, List<Object> out) throws Exception {
// Parsing the object
String msg = null;
try {
byte[] bytes = new byte[in.readableBytes()];
if (bytes.length <= 0) {
logger.debug("Total readable bytes :" + in.readableBytes() + " exiting...");
return;
}
in.readBytes(bytes);
msg = new String(bytes, CharsetUtil.UTF_8);
// Return if message is empty
if (msg.isEmpty()) {
logger.warn("Message is empty...exiting...");
return;
}
KonvxMessage konvxMessage = JsonUtil.parseMessage(msg);
// Logging the incoming message
StringBuilder logMessage = new StringBuilder();
logMessage.append("Incoming message :").append(System.lineSeparator())
.append(konvxMessage)
.append(System.lineSeparator());
logger.info(logMessage.toString());
out.add(konvxMessage);
} catch (Throwable e) {
logger.error("ERROR : While receiving/parsing/decoding the message " + msg, e);
new Packet(ctx).writeMessage(KonvxMessageFactory.getParseFailedErrorMessage(msg));
}
}
}
Please help: how can I fine-tune Netty to improve file transfer performance over the socket between the mobile device and my Java server?

UDP Broadcasting not working

My requirement is to broadcast a UDP message from the server to all connected clients.
I am using Netty 4.x for the client-server implementation, but I am not able to broadcast the message to all connected clients.
Server code
public class UDPServer {
private static final Logger logger = LogManager.getLogger(UDPServer.class.getName());
private final ServerConfig conf;
/*
* Constructor of Server class
*/
public UDPServer(ServerConfig conf) {
this.conf = conf;
}
public void run() throws Exception {
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try {
EventLoopGroup udpGroup = new NioEventLoopGroup();
try {
Bootstrap b = new Bootstrap();
b.group(udpGroup)
.channel(NioDatagramChannel.class)
.handler(new ChannelInitializer<DatagramChannel>() {
@Override
protected void initChannel(DatagramChannel ch) throws Exception {
ChannelPipeline p = ch.pipeline();
p.addLast("udpDecoder", new MessageToMessageDecoder<DatagramPacket>() {
@Override
protected void decode(ChannelHandlerContext ctx, DatagramPacket msg, List<Object> out) throws Exception {
out.add(msg.content());
msg.retain();
}
});
p.addLast("handler", new UDServerPHandler());
p.addLast("frameDecoder", new LengthFieldBasedFrameDecoder(65535, 2, 2));
}
})
.option(ChannelOption.SO_BROADCAST, true);
// b.bind(9998).sync().channel().closeFuture().sync();
b.bind(9998).channel().closeFuture().sync();
} finally {
udpGroup.shutdownGracefully();
}
} finally {
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
/**
* Main method to run the class
*
* @param args
* @throws Exception
*/
public static void main(String[] args) throws Exception {
new UDPServer(new ServerConfig()).run();
}
}
Server Handler
package com.quantotrade.shamse.udp;
import io.netty.buffer.Unpooled;
import io.netty.channel.ChannelHandlerContext;
import io.netty.channel.SimpleChannelInboundHandler;
import io.netty.channel.socket.DatagramPacket;
import io.netty.util.CharsetUtil;
import java.util.Random;
public class UDServerPHandler extends SimpleChannelInboundHandler<DatagramPacket> {
private static final Random random = new Random();
private static final String[] quotes = {
"ONE",
"TWO",
"THREE",
"FOUR",
};
private static String nextQuote() {
int quoteId;
synchronized (random) {
quoteId = random.nextInt(quotes.length);
}
return quotes[quoteId];
}
@Override
protected void channelRead0(ChannelHandlerContext ctx, DatagramPacket packet) throws Exception {
//System.err.println(packet.content().toString(CharsetUtil.UTF_8));
//if ("Quote".equals(packet.content().toString(CharsetUtil.UTF_8))) {
// ctx.write(new DatagramPacket(Unpooled.copiedBuffer("Quote" + nextQuote(), CharsetUtil.UTF_8), packet.sender()));
ctx.channel().writeAndFlush(new DatagramPacket(Unpooled.copiedBuffer("Quote" + nextQuote(), CharsetUtil.UTF_8), packet.sender()));
//}
}
@Override
public void channelReadComplete(ChannelHandlerContext ctx) {
ctx.flush();
}
}
Client Code
public class UDPClientHandler extends SimpleChannelInboundHandler<DatagramPacket> {
//private final EventExecutor executor = new NioEventLoopGroup();
//private final ChannelGroup channels = new DefaultChannelGroup();
@Override
protected void channelRead0(ChannelHandlerContext ctx, DatagramPacket msg) throws Exception {
String response = msg.content().toString(CharsetUtil.UTF_8);
if (response.startsWith("Quote")) {
System.out.println("Quote:" + response.substring(5));
//ctx.writeAndFlush(new DatagramPacket(Unpooled.copiedBuffer("Quote", CharsetUtil.UTF_8), msg.sender()));
// channels.add(ctx.channel());
}
}
Please Help

netty4: How to listen on multiple ports in a Java process

I'm trying to listen for connections on two different ports.
I start two threads in a Java main method, and each thread binds one port with Netty 4, but listening does not succeed on both ports.
This is my code. Port 3333 is OK, but 1234 is not; it looks like 3333 is blocking!
public class ObjectServer
{
private static final Logger logger = LoggerFactory.getLogger(ObjectServer.class);
private String ip;
private int port;
public ObjectServer(int port)
{
this.port = port;
}
public void run(final ChannelInboundHandlerAdapter handler) throws Exception
{
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try
{
ServerBootstrap server = new ServerBootstrap();
server.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).childHandler(new ChannelInitializer<SocketChannel>()
{
@Override
public void initChannel(SocketChannel ch) throws Exception
{
ch.pipeline().addLast(new ObjectEncoder(), new ObjectDecoder(ClassResolvers.cacheDisabled(null)), handler);
}
});
server.bind(port).sync().channel().closeFuture().sync();
}
catch (Exception e)
{
logger.error("Failed to start listening! Port [" + port + "]", e);
throw e;
}
finally
{
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
}
public class SocketServer
{
private static final Logger logger = LoggerFactory.getLogger(SocketServer.class);
private static final StringDecoder DECODER = new StringDecoder();
private static final StringEncoder ENCODER = new StringEncoder();
private int port;
public SocketServer(int port)
{
this.port = port;
}
public void run(final ChannelInboundHandlerAdapter handler) throws Exception
{
EventLoopGroup bossGroup = new NioEventLoopGroup();
EventLoopGroup workerGroup = new NioEventLoopGroup();
try
{
ServerBootstrap b = new ServerBootstrap();
b.group(bossGroup, workerGroup).channel(NioServerSocketChannel.class).childHandler(new ChannelInitializer<SocketChannel>()
{
@Override
public void initChannel(SocketChannel ch) throws Exception
{
ChannelPipeline pipeline = ch.pipeline();
// Add the text line codec combination first,
pipeline.addLast("framer", new DelimiterBasedFrameDecoder(8192, Delimiters.lineDelimiter()));
// the encoder and decoder are static as these are
// sharable
pipeline.addLast("encoder", ENCODER);
pipeline.addLast("decoder", DECODER);
// and then business logic.
pipeline.addLast("handler", handler);
}
});
b.bind(port).sync().channel().closeFuture().sync();
}
catch (Exception e)
{
logger.error("Failed to start listening! Port [" + port + "]", e);
throw e;
}
finally
{
bossGroup.shutdownGracefully();
workerGroup.shutdownGracefully();
}
}
}
public class Test
{
public static void main(String[] args) throws Exception
{
Thread1 thread1 = new Thread1();
Thread2 thread2 = new Thread2();
thread2.start();
thread1.start();
new SocketClient("192.168.16.52", 3333).run(new TestHandler4("test4"));
new ObjectClient("192.168.16.52", 1234).run(new TestHandler3("test3"));
}
@Sharable
static class TestHandler1 extends ChannelInboundHandlerAdapter
{
@Override
public void channelRead(ChannelHandlerContext ctx, Object msg) throws Exception
{
System.out.println("1234" + msg);
}
}
static class Thread1 extends Thread
{
@Override
public void run()
{
try
{
new ObjectServer(1234).run(new TestHandler1());
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
static class Thread2 extends Thread
{
@Override
public void run()
{
try
{
new SocketServer(3333).run(new TestHandler2());
}
catch (Exception e)
{
e.printStackTrace();
}
}
}
@Sharable
static class TestHandler2 extends SimpleChannelInboundHandler<String>
{
@Override
public void channelRead0(ChannelHandlerContext ctx, String msg) throws Exception
{
System.out.println("3333" + msg);
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
System.out.println("sssssssssssssssss");
}
}
@Sharable
static class TestHandler3 extends ChannelInboundHandlerAdapter
{
private String msg;
public TestHandler3(String msg)
{
this.msg = msg;
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.writeAndFlush(msg);
}
}
@Sharable
static class TestHandler4 extends SimpleChannelInboundHandler<String>
{
private String msg;
public TestHandler4(String msg)
{
this.msg = msg;
}
@Override
public void channelActive(ChannelHandlerContext ctx) throws Exception
{
ctx.writeAndFlush(msg);
}
@Override
protected void channelRead0(ChannelHandlerContext arg0, String arg1)throws Exception
{
}
}
}
In your run() implementation, you do this:
server.bind(port).sync().channel().closeFuture().sync();
.. which will block until the server socket is closed. Because you do not close the server socket, it will never return. Therefore, only the first server socket will be bound.
What you probably want is to just bind and return, rather than waiting for the server socket to be closed.
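For example, here is a minimal sketch (Netty 4, with a placeholder initializer) that binds both ports first and only then blocks on the close futures; in the real code you would keep the two bootstraps with their different pipelines, since the point is only the order of the sync() calls:
import io.netty.bootstrap.ServerBootstrap;
import io.netty.channel.ChannelFuture;
import io.netty.channel.ChannelInitializer;
import io.netty.channel.EventLoopGroup;
import io.netty.channel.nio.NioEventLoopGroup;
import io.netty.channel.socket.SocketChannel;
import io.netty.channel.socket.nio.NioServerSocketChannel;

public final class TwoPortServer {
    public static void main(String[] args) throws Exception {
        EventLoopGroup bossGroup = new NioEventLoopGroup();
        EventLoopGroup workerGroup = new NioEventLoopGroup();
        try {
            ServerBootstrap b = new ServerBootstrap()
                    .group(bossGroup, workerGroup)
                    .channel(NioServerSocketChannel.class)
                    .childHandler(new ChannelInitializer<SocketChannel>() {
                        @Override
                        protected void initChannel(SocketChannel ch) {
                            // add the real codecs and handlers here,
                            // as in ObjectServer/SocketServer above
                        }
                    });
            ChannelFuture f1 = b.bind(1234).sync(); // returns once the port is bound
            ChannelFuture f2 = b.bind(3333).sync(); // the second port binds as well
            f1.channel().closeFuture().sync();      // only now block until closed
            f2.channel().closeFuture().sync();
        } finally {
            bossGroup.shutdownGracefully();
            workerGroup.shutdownGracefully();
        }
    }
}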
