How can I get Spring's WebFlux to emit a response?
When I create a Spring WebFlux WebClient, I can't get it to enter the subscribe or doOnNext callback. I keep getting this error: java.lang.IllegalStateException: The underlying HTTP client completed without emitting a response.
I've tried changing the base URL when creating the WebClient, and I've tried a different URI after creating it.
WebClient build = WebClient.builder().baseUrl("https://www.google.com").build();

Mono<String> t = build.get().uri("/{hi}", "hi").retrieve().bodyToMono(String.class).doOnNext(r -> {
    System.out.println("aoeuaotnseuhte");
});

build.get().uri("https://www.google.com").exchange().subscribe(r -> {
    System.out.println("aotnsehu this was not a triumph");
});

client.get().uri("/{test}", "test").retrieve().bodyToMono(String.class).subscribe(resp -> {
    System.out.println("hai");
});

client.get().uri("https://github.com/square/okhttp/tree/master/mockwebserver").retrieve().bodyToMono(String.class).subscribe(resp -> {
    System.out.println("hi");
});
I'm expecting a breakpoint to stop at any of those println statements, but I've never been able to get one to stop there.
Environment details:
Java 1.8.0_144
Spring Boot 2.1.5
Console output:
"C:\Program Files\Java\jdk1.8.0_144\bin\java.exe" -agentlib:jdwp=transport=dt_socket,address=127.0.0.1:55458,suspend=y,server=n -ea -Didea.test.cyclic.buffer.size=1048576 -javaagent:C:\Users\Sam\.IdeaIC2019.1\system\captureAgent\debugger-agent.jar -Dfile.encoding=UTF-8 -classpath "C:\Program Files\JetBrains\IntelliJ IDEA Community Edition 2019.1\lib\idea_rt.jar;C:\Program Files\JetBrains\IntelliJ IDEA Community Edition 2019.1\plugins\junit\lib\junit-rt.jar;C:\Program Files\JetBrains\IntelliJ IDEA Community Edition 2019.1\plugins\junit\lib\junit5-rt.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\cldrdata.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\jfxrt.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\nashorn.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\sunpkcs11.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\ext\zipfs.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\jfxswt.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.8.0_144\jre\lib\resources.jar;C:\Program 
Files\Java\jdk1.8.0_144\jre\lib\rt.jar;C:\Users\Sam\IdeaProjects\wondrous-magic\target\test-classes;C:\Users\Sam\IdeaProjects\wondrous-magic\target\classes;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-starter-webflux\2.1.5.RELEASE\spring-boot-starter-webflux-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-starter\2.1.5.RELEASE\spring-boot-starter-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot\2.1.5.RELEASE\spring-boot-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-context\5.1.7.RELEASE\spring-context-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-aop\5.1.7.RELEASE\spring-aop-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-expression\5.1.7.RELEASE\spring-expression-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-autoconfigure\2.1.5.RELEASE\spring-boot-autoconfigure-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-starter-logging\2.1.5.RELEASE\spring-boot-starter-logging-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\ch\qos\logback\logback-classic\1.2.3\logback-classic-1.2.3.jar;C:\Users\Sam\.m2\repository\ch\qos\logback\logback-core\1.2.3\logback-core-1.2.3.jar;C:\Users\Sam\.m2\repository\org\apache\logging\log4j\log4j-to-slf4j\2.11.2\log4j-to-slf4j-2.11.2.jar;C:\Users\Sam\.m2\repository\org\apache\logging\log4j\log4j-api\2.11.2\log4j-api-2.11.2.jar;C:\Users\Sam\.m2\repository\org\slf4j\jul-to-slf4j\1.7.26\jul-to-slf4j-1.7.26.jar;C:\Users\Sam\.m2\repository\javax\annotation\javax.annotation-api\1.3.2\javax.annotation-api-1.3.2.jar;C:\Users\Sam\.m2\repository\org\yaml\snakeyaml\1.23\snakeyaml-1.23.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-starter-json\2.1.5.RELEASE\spring-boot-starter-json-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\com\fasterxml\jackson\core\jackson-databind\2.9.8\jackson-databind-2.9.8.jar;C:\Users\Sam\.m2\repository\com\fasterxml\jackson\core\jackson-annotations\2.9.0\jackson-annotations-2.9.0.jar;C:\Users\Sam\.m2\repository\com\fasterxml\jackson\core\jackson-core\2.9.8\jackson-core-2.9.8.jar;C:\Users\Sam\.m2\repository\com\fasterxml\jackson\datatype\jackson-datatype-jdk8\2.9.8\jackson-datatype-jdk8-2.9.8.jar;C:\Users\Sam\.m2\repository\com\fasterxml\jackson\datatype\jackson-datatype-jsr310\2.9.8\jackson-datatype-jsr310-2.9.8.jar;C:\Users\Sam\.m2\repository\com\fasterxml\jackson\module\jackson-module-parameter-names\2.9.8\jackson-module-parameter-names-2.9.8.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-starter-reactor-netty\2.1.5.RELEASE\spring-boot-starter-reactor-netty-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\io\projectreactor\netty\reactor-netty\0.8.8.RELEASE\reactor-netty-0.8.8.RELEASE.jar;C:\Users\Sam\.m2\repository\io\netty\netty-codec-http\4.1.36.Final\netty-codec-http-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-common\4.1.36.Final\netty-common-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-buffer\4.1.36.Final\netty-buffer-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-transport\4.1.36.Final\netty-transport-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-resolver\4.1.36.Final\netty-resolver-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-codec\4.1.36.Final\netty-codec-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-codec-http2\4.1.36.Final\netty-codec-http2-4.1.36.Final.jar;C:\Users\Sam\.m2\repository
\io\netty\netty-handler\4.1.36.Final\netty-handler-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-handler-proxy\4.1.36.Final\netty-handler-proxy-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-codec-socks\4.1.36.Final\netty-codec-socks-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\io\netty\netty-transport-native-epoll\4.1.36.Final\netty-transport-native-epoll-4.1.36.Final-linux-x86_64.jar;C:\Users\Sam\.m2\repository\io\netty\netty-transport-native-unix-common\4.1.36.Final\netty-transport-native-unix-common-4.1.36.Final.jar;C:\Users\Sam\.m2\repository\org\glassfish\javax.el\3.0.0\javax.el-3.0.0.jar;C:\Users\Sam\.m2\repository\org\hibernate\validator\hibernate-validator\6.0.16.Final\hibernate-validator-6.0.16.Final.jar;C:\Users\Sam\.m2\repository\javax\validation\validation-api\2.0.1.Final\validation-api-2.0.1.Final.jar;C:\Users\Sam\.m2\repository\org\jboss\logging\jboss-logging\3.3.2.Final\jboss-logging-3.3.2.Final.jar;C:\Users\Sam\.m2\repository\com\fasterxml\classmate\1.4.0\classmate-1.4.0.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-web\5.1.7.RELEASE\spring-web-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-beans\5.1.7.RELEASE\spring-beans-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-webflux\5.1.7.RELEASE\spring-webflux-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\io\projectreactor\reactor-core\3.2.9.RELEASE\reactor-core-3.2.9.RELEASE.jar;C:\Users\Sam\.m2\repository\org\reactivestreams\reactive-streams\1.0.2\reactive-streams-1.0.2.jar;C:\Users\Sam\.m2\repository\org\synchronoss\cloud\nio-multipart-parser\1.1.0\nio-multipart-parser-1.1.0.jar;C:\Users\Sam\.m2\repository\org\synchronoss\cloud\nio-stream-storage\1.1.3\nio-stream-storage-1.1.3.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-starter-test\2.1.5.RELEASE\spring-boot-starter-test-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-test\2.1.5.RELEASE\spring-boot-test-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\boot\spring-boot-test-autoconfigure\2.1.5.RELEASE\spring-boot-test-autoconfigure-2.1.5.RELEASE.jar;C:\Users\Sam\.m2\repository\junit\junit\4.12\junit-4.12.jar;C:\Users\Sam\.m2\repository\org\assertj\assertj-core\3.11.1\assertj-core-3.11.1.jar;C:\Users\Sam\.m2\repository\org\mockito\mockito-core\2.23.4\mockito-core-2.23.4.jar;C:\Users\Sam\.m2\repository\net\bytebuddy\byte-buddy\1.9.12\byte-buddy-1.9.12.jar;C:\Users\Sam\.m2\repository\net\bytebuddy\byte-buddy-agent\1.9.12\byte-buddy-agent-1.9.12.jar;C:\Users\Sam\.m2\repository\org\objenesis\objenesis\2.6\objenesis-2.6.jar;C:\Users\Sam\.m2\repository\org\hamcrest\hamcrest-core\1.3\hamcrest-core-1.3.jar;C:\Users\Sam\.m2\repository\org\hamcrest\hamcrest-library\1.3\hamcrest-library-1.3.jar;C:\Users\Sam\.m2\repository\org\skyscreamer\jsonassert\1.5.0\jsonassert-1.5.0.jar;C:\Users\Sam\.m2\repository\com\vaadin\external\google\android-json\0.0.20131108.vaadin1\android-json-0.0.20131108.vaadin1.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-core\5.1.7.RELEASE\spring-core-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-jcl\5.1.7.RELEASE\spring-jcl-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\springframework\spring-test\5.1.7.RELEASE\spring-test-5.1.7.RELEASE.jar;C:\Users\Sam\.m2\repository\org\xmlunit\xmlunit-core\2.6.2\xmlunit-core-2.6.2.jar;C:\Users\Sam\.m2\repository\com\jayway\jsonpath\json-path\2.4.0\json-path-2.4.0.jar;C:\Users\Sam\.m2\repository\net\minidev\json-smart\2.3\js
on-smart-2.3.jar;C:\Users\Sam\.m2\repository\net\minidev\accessors-smart\1.2\accessors-smart-1.2.jar;C:\Users\Sam\.m2\repository\org\ow2\asm\asm\5.0.4\asm-5.0.4.jar;C:\Users\Sam\.m2\repository\org\slf4j\slf4j-api\1.7.26\slf4j-api-1.7.26.jar;C:\Users\Sam\.m2\repository\com\squareup\okhttp3\mockwebserver\3.14.1\mockwebserver-3.14.1.jar;C:\Users\Sam\.m2\repository\com\squareup\okhttp3\okhttp\3.14.1\okhttp-3.14.1.jar;C:\Users\Sam\.m2\repository\com\squareup\okio\okio\1.17.2\okio-1.17.2.jar;C:\Users\Sam\.m2\repository\org\jsoup\jsoup\1.11.3\jsoup-1.11.3.jar;C:\Users\Sam\.m2\repository\commons-io\commons-io\2.6\commons-io-2.6.jar" com.intellij.rt.execution.junit.JUnitStarter -ideVersion5 -junit4 com.spry.magic.service.ApiServiceTest,test
Connected to the target VM, address: '127.0.0.1:55458', transport: 'socket'
23:18:08.515 [main] DEBUG io.netty.util.internal.logging.InternalLoggerFactory - Using SLF4J as the default logging framework
23:18:08.532 [main] DEBUG io.netty.util.internal.PlatformDependent - Platform: Windows
23:18:08.534 [main] DEBUG io.netty.util.internal.PlatformDependent0 - -Dio.netty.noUnsafe: false
23:18:08.534 [main] DEBUG io.netty.util.internal.PlatformDependent0 - Java version: 8
23:18:08.535 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.theUnsafe: available
23:18:08.536 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.copyMemory: available
23:18:08.536 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Buffer.address: available
23:18:08.536 [main] DEBUG io.netty.util.internal.PlatformDependent0 - direct buffer constructor: available
23:18:08.537 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Bits.unaligned: available, true
23:18:08.537 [main] DEBUG io.netty.util.internal.PlatformDependent0 - jdk.internal.misc.Unsafe.allocateUninitializedArray(int): unavailable prior to Java9
23:18:08.537 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.DirectByteBuffer.<init>(long, int): available
23:18:08.537 [main] DEBUG io.netty.util.internal.PlatformDependent - sun.misc.Unsafe: available
23:18:08.537 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.tmpdir: C:\Users\Sam\AppData\Local\Temp (java.io.tmpdir)
23:18:08.537 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.bitMode: 64 (sun.arch.data.model)
23:18:08.539 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.maxDirectMemory: 7621050368 bytes
23:18:08.539 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.uninitializedArrayAllocationThreshold: -1
23:18:08.539 [main] DEBUG io.netty.util.internal.CleanerJava6 - java.nio.ByteBuffer.cleaner(): available
23:18:08.540 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.noPreferDirect: false
23:18:08.541 [main] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv4Stack: false
23:18:08.541 [main] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv6Addresses: false
23:18:08.620 [main] DEBUG io.netty.util.NetUtil - Loopback interface: lo (Software Loopback Interface 1, 127.0.0.1)
23:18:08.620 [main] DEBUG io.netty.util.NetUtil - Failed to get SOMAXCONN from sysctl and file \proc\sys\net\core\somaxconn. Default: 200
23:18:08.623 [main] DEBUG reactor.util.Loggers$LoggerFactory - Using Slf4j logging framework
23:18:08.644 [main] DEBUG reactor.netty.tcp.TcpResources - [http] resources will use the default LoopResources: DefaultLoopResources {prefix=reactor-http, daemon=true, selectCount=8, workerCount=8}
23:18:08.644 [main] DEBUG reactor.netty.tcp.TcpResources - [http] resources will use the default ConnectionProvider: PooledConnectionProvider {name=http, poolFactory=reactor.netty.resources.ConnectionProvider$$Lambda$69/511832416#1acaf3d}
23:18:08.758 [main] DEBUG reactor.netty.resources.DefaultLoopEpoll - Default Epoll support : false
23:18:08.758 [main] DEBUG reactor.netty.resources.DefaultLoopKQueue - Default KQueue support : false
23:18:08.763 [main] DEBUG io.netty.channel.MultithreadEventLoopGroup - -Dio.netty.eventLoopThreads: 16
23:18:08.807 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024
23:18:08.807 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096
23:18:08.818 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.noKeySetOptimization: false
23:18:08.818 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.selectorAutoRebuildThreshold: 512
23:18:08.830 [main] DEBUG io.netty.util.internal.PlatformDependent - org.jctools-core.MpscChunkedArrayQueue: available
23:18:51.526 [main] DEBUG org.springframework.web.reactive.function.client.ExchangeFunctions - [4de41af9] HTTP GET https://www.google.com
23:18:51.538 [main] DEBUG io.netty.handler.ssl.OpenSsl - netty-tcnative not in the classpath; OpenSslEngine will be unavailable.
23:18:51.784 [main] DEBUG io.netty.handler.ssl.JdkSslContext - Default protocols (JDK): [TLSv1.2, TLSv1.1, TLSv1]
23:18:51.784 [main] DEBUG io.netty.handler.ssl.JdkSslContext - Default cipher suites (JDK): [TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_128_GCM_SHA256, TLS_RSA_WITH_AES_128_CBC_SHA]
23:18:51.814 [main] DEBUG reactor.netty.resources.PooledConnectionProvider - Creating new client pool [http] for www.google.com:443
23:18:51.825 [main] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.processId: 7664 (auto-detected)
23:18:51.908 [main] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.machineId: 30:5a:3a:ff:fe:02:74:f6 (auto-detected)
23:18:51.918 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.level: simple
23:18:51.918 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.targetRecords: 4
23:18:51.968 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numHeapArenas: 16
23:18:51.968 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numDirectArenas: 16
23:18:51.968 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.pageSize: 8192
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxOrder: 11
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.chunkSize: 16777216
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.tinyCacheSize: 512
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.smallCacheSize: 256
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.normalCacheSize: 64
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedBufferCapacity: 32768
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimInterval: 8192
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimIntervalMillis: 0
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.useCacheForAllThreads: true
23:18:51.969 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedByteBuffersPerChunk: 1023
23:18:51.977 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.allocator.type: pooled
23:18:51.977 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.threadLocalDirectBufferSize: 0
23:18:51.977 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.maxThreadLocalCharBufferSize: 16384
Disconnected from the target VM, address: '127.0.0.1:55458', transport: 'socket'
Process finished with exit code 0
Stack trace for java.lang.IllegalStateException: The underlying HTTP client completed without emitting a response.
org.springframework.web.reactive.function.client.DefaultWebClient.<clinit>(DefaultWebClient.java:70)
org.springframework.web.reactive.function.client.DefaultWebClientBuilder.build(DefaultWebClientBuilder.java:212)
Edit: I was using the WebClient incorrectly. The lambdas weren't being executed because the program was exiting before a response came back. Adding a block(), as in chas spenlau's answer, fixed this for me; the code examples above also worked once I added the block().
Edit 2: I also needed to subscribe to the response. Using block() both subscribes and blocks; alternatively, the statements above can be fixed by adding .subscribe() to the end of each one, as long as the program stays alive until the response arrives. See Thomas's comment for a better explanation.
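For reference, here is a minimal sketch of the subscribe-based variant (the class name, the URI path, and the CountDownLatch are just illustrative, not from my original test); the latch only exists to keep the JVM alive until the asynchronous response has been handled:

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import org.springframework.web.reactive.function.client.WebClient;

public class SubscribeExample {
    public static void main(String[] args) throws InterruptedException {
        WebClient client = WebClient.builder().baseUrl("https://www.google.com").build();
        CountDownLatch latch = new CountDownLatch(1);

        client.get()
                .uri("/")
                .retrieve()
                .bodyToMono(String.class)
                // doOnNext only runs after something subscribes and a response arrives
                .doOnNext(body -> System.out.println("received " + body.length() + " chars"))
                // release the latch on complete, error, or cancel
                .doFinally(signal -> latch.countDown())
                .subscribe();

        // Without this wait the process can exit before the response is emitted,
        // which is why the println statements above never fired.
        latch.await(10, TimeUnit.SECONDS);
    }
}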
I'm not sure exactly what you're trying to do, so sorry if this isn't helpful. But when you click to set your breakpoint, the IDE should ask whether you want it on the line or on the lambda. Is it asking you that? If so, choose the lambda.
I was able to place my breakpoint on that line in the following code and step inside the doOnNext:
webClient.get()
    .retrieve()
    .bodyToMono(String.class)
    .doOnNext(myString -> { System.out.println(myString); })
    .block();
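In case it's useful, here is a self-contained version of the same idea (the base URL is just an example). block() subscribes and waits for the response, which is fine in a test or main method but should not be called on a reactive event-loop thread:

import org.springframework.web.reactive.function.client.WebClient;

public class BlockExample {
    public static void main(String[] args) {
        WebClient webClient = WebClient.builder()
                .baseUrl("https://www.google.com")
                .build();

        // block() subscribes to the Mono and waits for it to complete,
        // so doOnNext runs before the program moves on (or exits).
        String body = webClient.get()
                .uri("/")
                .retrieve()
                .bodyToMono(String.class)
                .doOnNext(myString -> System.out.println("doOnNext saw " + myString.length() + " chars"))
                .block();

        System.out.println("body length: " + (body == null ? 0 : body.length()));
    }
}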
Related
Quarkus upgrade vertx dns resolver ignoring kubernetes dns
I have upgraded the version of quarkus to 2.8.0.CR1 My quarkus apps are using a mix of camel, mutiny, resteasy, mongo, pubsub and it seems to have been reproduced on all. It seems like there was an upgrade to vertx in the mean time that introduced an alternative dns resolver. The issue however is that when I try to now deploy my application through my kubernetes cluster I am getting dns resolution issues. If i try to launch it as is, I get this failure when trying to connect to my spring-cloud-config server (on init). The domain would not resolve since it' susing the google public dns. May 06, 2022 7:42:53 AM io.netty.resolver.dns.DefaultDnsServerAddressStreamProvider WARN: Default DNS servers: [/8.8.8.8:53, /8.8.4.4:53] (Google Public DNS as a fallback) May 06, 2022 7:42:55 AM io.quarkus.runtime.ApplicationLifecycleManager run ERROR: Failed to start application (with profile cloud) java.net.UnknownHostException: Failed to resolve 'config-server.servers.svc.cluster.local'. Exceeded max queries per resolve 4 at io.netty.resolver.dns.DnsResolveContext.finishResolve(DnsResolveContext.java:1047) at io.netty.resolver.dns.DnsResolveContext.tryToFinishResolve(DnsResolveContext.java:1000) at io.netty.resolver.dns.DnsResolveContext.query(DnsResolveContext.java:418) at io.netty.resolver.dns.DnsResolveContext.onResponse(DnsResolveContext.java:629) at io.netty.resolver.dns.DnsResolveContext.access$400(DnsResolveContext.java:66) at io.netty.resolver.dns.DnsResolveContext$2.operationComplete(DnsResolveContext.java:462) at io.netty.util.concurrent.DefaultPromise.notifyListener0(DefaultPromise.java:578) at io.netty.util.concurrent.DefaultPromise.notifyListeners0(DefaultPromise.java:571) at io.netty.util.concurrent.DefaultPromise.notifyListenersNow(DefaultPromise.java:550) at io.netty.util.concurrent.DefaultPromise.notifyListeners(DefaultPromise.java:491) at io.netty.util.concurrent.DefaultPromise.setValue0(DefaultPromise.java:616) at io.netty.util.concurrent.DefaultPromise.setSuccess0(DefaultPromise.java:605) at io.netty.util.concurrent.DefaultPromise.trySuccess(DefaultPromise.java:104) at io.netty.resolver.dns.DnsQueryContext.trySuccess(DnsQueryContext.java:216) at io.netty.resolver.dns.DnsQueryContext.finish(DnsQueryContext.java:208) at io.netty.resolver.dns.DnsNameResolver$DnsResponseHandler.channelRead(DnsNameResolver.java:1314) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:357) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1410) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:379) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:365) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:919) at 
io.netty.channel.nio.AbstractNioMessageChannel$NioMessageUnsafe.read(AbstractNioMessageChannel.java:97) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:722) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:658) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:584) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:496) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:986) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:833) If i set -Dvertx.disableDnsResolver=true however it seems like it doesn't even connect to my server instead (no request received on server side and no config being read) 2022-05-06 06:54:24,160 DEBUG [io.net.uti.ResourceLeakDetector] (main) -Dio.netty.leakDetection.level: simple 2022-05-06 06:54:24,161 DEBUG [io.net.uti.ResourceLeakDetector] (main) -Dio.netty.leakDetection.targetRecords: 4 2022-05-06 06:54:24,172 DEBUG [io.net.cha.MultithreadEventLoopGroup] (main) -Dio.netty.eventLoopThreads: 2 2022-05-06 06:54:24,181 DEBUG [io.net.uti.int.InternalThreadLocalMap] (main) -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024 2022-05-06 06:54:24,181 DEBUG [io.net.uti.int.InternalThreadLocalMap] (main) -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096 2022-05-06 06:54:24,184 DEBUG [io.net.cha.nio.NioEventLoop] (main) -Dio.netty.noKeySetOptimization: false 2022-05-06 06:54:24,184 DEBUG [io.net.cha.nio.NioEventLoop] (main) -Dio.netty.selectorAutoRebuildThreshold: 512 2022-05-06 06:54:24,188 DEBUG [io.net.uti.int.PlatformDependent] (main) org.jctools-core.MpscChunkedArrayQueue: available 2022-05-06 06:54:24,478 DEBUG [io.qua.spr.clo.con.cli.run.VertxSpringCloudConfigGateway] (main) Attempting to read configuration from 'http://config-server.servers.svc.cluster.local:8888/my-camel-service/cloud'. 
2022-05-06 06:54:24,560 DEBUG [io.net.uti.NetUtil] (main) -Djava.net.preferIPv4Stack: false 2022-05-06 06:54:24,560 DEBUG [io.net.uti.NetUtil] (main) -Djava.net.preferIPv6Addresses: false 2022-05-06 06:54:24,561 DEBUG [io.net.uti.NetUtilInitializations] (main) Loopback interface: lo (lo, 127.0.0.1) 2022-05-06 06:54:24,562 DEBUG [io.net.uti.NetUtil] (main) /proc/sys/net/core/somaxconn: 1024 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.numHeapArenas: 2 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.numDirectArenas: 2 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.pageSize: 8192 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.maxOrder: 3 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.chunkSize: 65536 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.smallCacheSize: 256 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.normalCacheSize: 64 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.maxCachedBufferCapacity: 32768 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.cacheTrimInterval: 8192 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.cacheTrimIntervalMillis: 0 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.useCacheForAllThreads: true 2022-05-06 06:54:24,779 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-1) -Dio.netty.allocator.maxCachedByteBuffersPerChunk: 1023 2022-05-06 06:54:24,859 DEBUG [io.net.buf.ByteBufUtil] (vert.x-eventloop-thread-1) -Dio.netty.allocator.type: pooled 2022-05-06 06:54:24,859 DEBUG [io.net.buf.ByteBufUtil] (vert.x-eventloop-thread-1) -Dio.netty.threadLocalDirectBufferSize: 0 2022-05-06 06:54:24,859 DEBUG [io.net.buf.ByteBufUtil] (vert.x-eventloop-thread-1) -Dio.netty.maxThreadLocalCharBufferSize: 16384 2022-05-06 06:54:25,056 DEBUG [io.net.buf.AbstractByteBuf] (vert.x-eventloop-thread-1) -Dio.netty.buffer.checkAccessible: true 2022-05-06 06:54:25,056 DEBUG [io.net.buf.AbstractByteBuf] (vert.x-eventloop-thread-1) -Dio.netty.buffer.checkBounds: true 2022-05-06 06:54:25,056 DEBUG [io.net.uti.ResourceLeakDetectorFactory] (vert.x-eventloop-thread-1) Loaded default ResourceLeakDetector: io.netty.util.ResourceLeakDetector#7e719269 2022-05-06 06:54:25,180 DEBUG [io.net.uti.Recycler] (vert.x-eventloop-thread-1) -Dio.netty.recycler.maxCapacityPerThread: 4096 2022-05-06 06:54:25,181 DEBUG [io.net.uti.Recycler] (vert.x-eventloop-thread-1) -Dio.netty.recycler.ratio: 8 2022-05-06 06:54:25,181 DEBUG [io.net.uti.Recycler] (vert.x-eventloop-thread-1) -Dio.netty.recycler.chunkSize: 32 2022-05-06 06:54:25,181 DEBUG [io.net.uti.Recycler] (vert.x-eventloop-thread-1) -Dio.netty.recycler.blocking: false 2022-05-06 06:54:26,563 DEBUG [io.net.buf.PoolThreadCache] (vert.x-eventloop-thread-1) Freed 2 thread-local buffer(s) from thread: vert.x-eventloop-thread-1 If i startup an application with the previous version of quarkus I get: 2022-05-06 
07:21:16,937 DEBUG [io.net.uti.int.PlatformDependent] (main) org.jctools-core.MpscChunkedArrayQueue: available 2022-05-06 07:21:16,953 DEBUG [io.net.res.dns.DefaultDnsServerAddressStreamProvider] (main) Default DNS servers: [/10.24.0.10:53] (sun.net.dns.ResolverConfiguration) 2022-05-06 07:21:16,956 DEBUG [io.net.uti.NetUtil] (main) -Djava.net.preferIPv4Stack: false 2022-05-06 07:21:16,956 DEBUG [io.net.uti.NetUtil] (main) -Djava.net.preferIPv6Addresses: false 2022-05-06 07:21:16,957 DEBUG [io.net.uti.NetUtilInitializations] (main) Loopback interface: lo (lo, 127.0.0.1) 2022-05-06 07:21:16,957 DEBUG [io.net.uti.NetUtil] (main) /proc/sys/net/core/somaxconn: 1024 2022-05-06 07:21:17,248 DEBUG [io.qua.spr.clo.con.cli.run.VertxSpringCloudConfigGateway] (main) Attempting to read configuration from 'http://config-server.servers.svc.cluster.local:8888/my-camel-gateway/cloud'. 2022-05-06 07:21:17,541 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-0) -Dio.netty.allocator.numHeapArenas: 2 2022-05-06 07:21:17,541 DEBUG [io.net.buf.PooledByteBufAllocator] (vert.x-eventloop-thread-0) -Dio.netty.allocator.numDirectArenas: 2 Is there a way to make it use my local dns configuration properly?
I set quarkus.naming.enableJndi=true and didn't use -Dvertx.disableDnsResolver and my app started using the intended dns again
Visualizing training progresss in deeplearning4j throws error when starting server
I was following this tutorial trying to visualize my models training progress: https://deeplearning4j.konduit.ai/tuning-and-training/visualization The simple code for the server setup is: UIServer uiServer = UIServer.getInstance(); StatsStorage statsStorage = new InMemoryStatsStorage(); uiServer.attach(statsStorage); model.setListeners(new StatsListener(statsStorage)); Then you should be able to see it on http://localhost:9000/train/overview However I get following error when the server is starting and am unsure what to make of it: 12:39:05.063 [main] DEBUG org.deeplearning4j.ui.VertxUIServer - Deeplearning4j UI server is starting. 12:39:05.084 [main] DEBUG io.netty.util.internal.logging.InternalLoggerFactory - Using SLF4J as the default logging framework 12:39:05.085 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.level: simple 12:39:05.085 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.targetRecords: 4 12:39:05.093 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024 12:39:05.093 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096 12:39:05.098 [main] DEBUG io.netty.channel.MultithreadEventLoopGroup - -Dio.netty.eventLoopThreads: 16 12:39:05.114 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.noKeySetOptimization: false 12:39:05.114 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.selectorAutoRebuildThreshold: 512 12:39:05.123 [main] DEBUG io.netty.util.internal.PlatformDependent - Platform: Windows 12:39:05.124 [main] DEBUG io.netty.util.internal.PlatformDependent0 - -Dio.netty.noUnsafe: false 12:39:05.124 [main] DEBUG io.netty.util.internal.PlatformDependent0 - Java version: 14 12:39:05.125 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.theUnsafe: available 12:39:05.125 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.copyMemory: available 12:39:05.125 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Buffer.address: available 12:39:05.128 [main] DEBUG io.netty.util.internal.PlatformDependent0 - direct buffer constructor: unavailable java.lang.UnsupportedOperationException: Reflective setAccessible(true) disabled at io.netty.util.internal.ReflectionUtil.trySetAccessible(ReflectionUtil.java:31) at io.netty.util.internal.PlatformDependent0$4.run(PlatformDependent0.java:225) at java.base/java.security.AccessController.doPrivileged(AccessController.java:312) at io.netty.util.internal.PlatformDependent0.<clinit>(PlatformDependent0.java:219) at io.netty.util.internal.PlatformDependent.isAndroid(PlatformDependent.java:289) at io.netty.util.internal.PlatformDependent.<clinit>(PlatformDependent.java:92) at io.netty.channel.nio.NioEventLoop.newTaskQueue0(NioEventLoop.java:279) at io.netty.channel.nio.NioEventLoop.newTaskQueue(NioEventLoop.java:150) at io.netty.channel.nio.NioEventLoop.<init>(NioEventLoop.java:138) at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:146) at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:37) at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:84) at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:58) at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:47) at 
io.netty.channel.MultithreadEventLoopGroup.<init>(MultithreadEventLoopGroup.java:59) at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:86) at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:81) at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:68) at io.vertx.core.net.impl.transport.Transport.eventLoopGroup(Transport.java:153) at io.vertx.core.impl.VertxImpl.<init>(VertxImpl.java:143) at io.vertx.core.impl.VertxImpl.vertx(VertxImpl.java:92) at io.vertx.core.impl.VertxFactoryImpl.vertx(VertxFactoryImpl.java:40) at io.vertx.core.impl.VertxFactoryImpl.vertx(VertxFactoryImpl.java:32) at io.vertx.core.impl.VertxFactoryImpl.vertx(VertxFactoryImpl.java:27) at io.vertx.core.Vertx.vertx(Vertx.java:75) at org.deeplearning4j.ui.VertxUIServer.deploy(VertxUIServer.java:188) at org.deeplearning4j.ui.VertxUIServer.deploy(VertxUIServer.java:159) at org.deeplearning4j.ui.VertxUIServer.getInstance(VertxUIServer.java:130) at org.deeplearning4j.ui.VertxUIServer.getInstance(VertxUIServer.java:95) at org.deeplearning4j.ui.api.UIServer.getInstance(UIServer.java:70) at org.deeplearning4j.ui.api.UIServer.getInstance(UIServer.java:50) at Environment.<init>(Environment.java:106) at LSTMMain.main(LSTMMain.java:40) 12:39:05.129 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Bits.unaligned: available, true 12:39:05.129 [main] DEBUG io.netty.util.internal.PlatformDependent0 - jdk.internal.misc.Unsafe.allocateUninitializedArray(int): unavailable java.lang.IllegalAccessException: class io.netty.util.internal.PlatformDependent0$6 cannot access class jdk.internal.misc.Unsafe (in module java.base) because module java.base does not export jdk.internal.misc to unnamed module #6134ac4a at java.base/jdk.internal.reflect.Reflection.newIllegalAccessException(Reflection.java:376) at java.base/java.lang.reflect.AccessibleObject.checkAccess(AccessibleObject.java:647) at java.base/java.lang.reflect.Method.invoke(Method.java:556) at io.netty.util.internal.PlatformDependent0$6.run(PlatformDependent0.java:335) at java.base/java.security.AccessController.doPrivileged(AccessController.java:312) at io.netty.util.internal.PlatformDependent0.<clinit>(PlatformDependent0.java:326) at io.netty.util.internal.PlatformDependent.isAndroid(PlatformDependent.java:289) at io.netty.util.internal.PlatformDependent.<clinit>(PlatformDependent.java:92) at io.netty.channel.nio.NioEventLoop.newTaskQueue0(NioEventLoop.java:279) at io.netty.channel.nio.NioEventLoop.newTaskQueue(NioEventLoop.java:150) at io.netty.channel.nio.NioEventLoop.<init>(NioEventLoop.java:138) at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:146) at io.netty.channel.nio.NioEventLoopGroup.newChild(NioEventLoopGroup.java:37) at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:84) at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:58) at io.netty.util.concurrent.MultithreadEventExecutorGroup.<init>(MultithreadEventExecutorGroup.java:47) at io.netty.channel.MultithreadEventLoopGroup.<init>(MultithreadEventLoopGroup.java:59) at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:86) at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:81) at io.netty.channel.nio.NioEventLoopGroup.<init>(NioEventLoopGroup.java:68) at io.vertx.core.net.impl.transport.Transport.eventLoopGroup(Transport.java:153) at io.vertx.core.impl.VertxImpl.<init>(VertxImpl.java:143) at 
io.vertx.core.impl.VertxImpl.vertx(VertxImpl.java:92) at io.vertx.core.impl.VertxFactoryImpl.vertx(VertxFactoryImpl.java:40) at io.vertx.core.impl.VertxFactoryImpl.vertx(VertxFactoryImpl.java:32) at io.vertx.core.impl.VertxFactoryImpl.vertx(VertxFactoryImpl.java:27) at io.vertx.core.Vertx.vertx(Vertx.java:75) at org.deeplearning4j.ui.VertxUIServer.deploy(VertxUIServer.java:188) at org.deeplearning4j.ui.VertxUIServer.deploy(VertxUIServer.java:159) at org.deeplearning4j.ui.VertxUIServer.getInstance(VertxUIServer.java:130) at org.deeplearning4j.ui.VertxUIServer.getInstance(VertxUIServer.java:95) at org.deeplearning4j.ui.api.UIServer.getInstance(UIServer.java:70) at org.deeplearning4j.ui.api.UIServer.getInstance(UIServer.java:50) at Environment.<init>(Environment.java:106) at LSTMMain.main(LSTMMain.java:40) 12:39:05.130 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.DirectByteBuffer.<init>(long, int): unavailable 12:39:05.130 [main] DEBUG io.netty.util.internal.PlatformDependent - sun.misc.Unsafe: available 12:39:05.143 [main] DEBUG io.netty.util.internal.PlatformDependent - maxDirectMemory: 4294967296 bytes (maybe) 12:39:05.143 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.tmpdir: C:\Users\David\AppData\Local\Temp (java.io.tmpdir) 12:39:05.144 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.bitMode: 64 (sun.arch.data.model) 12:39:05.144 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.maxDirectMemory: -1 bytes 12:39:05.144 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.uninitializedArrayAllocationThreshold: -1 12:39:05.145 [main] DEBUG io.netty.util.internal.CleanerJava9 - java.nio.ByteBuffer.cleaner(): available 12:39:05.145 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.noPreferDirect: false 12:39:05.150 [main] DEBUG io.netty.util.internal.PlatformDependent - org.jctools-core.MpscChunkedArrayQueue: available 12:39:05.241 [main] DEBUG io.netty.resolver.dns.DefaultDnsServerAddressStreamProvider - Default DNS servers: [/192.168.0.1:53, /0.0.0.0:53, /192.168.2.1:53] (sun.net.dns.ResolverConfiguration) 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numHeapArenas: 16 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numDirectArenas: 16 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.pageSize: 8192 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxOrder: 11 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.chunkSize: 16777216 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.tinyCacheSize: 512 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.smallCacheSize: 256 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.normalCacheSize: 64 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedBufferCapacity: 32768 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimInterval: 8192 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - 
-Dio.netty.allocator.cacheTrimIntervalMillis: 0 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.useCacheForAllThreads: true 12:39:05.381 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedByteBuffersPerChunk: 1023 12:39:05.403 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv4Stack: false 12:39:05.403 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv6Addresses: false 12:39:05.410 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - Loopback interface: lo (Software Loopback Interface 1, 127.0.0.1) 12:39:05.411 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - Failed to get SOMAXCONN from sysctl and file \proc\sys\net\core\somaxconn. Default: 200 12:39:05.444 [vert.x-eventloop-thread-0] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.processId: 9660 (auto-detected) 12:39:05.459 [vert.x-eventloop-thread-0] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.machineId: d0:37:45:ff:fe:22:ee:b3 (auto-detected) 12:39:05.472 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.allocator.type: pooled 12:39:05.473 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.threadLocalDirectBufferSize: 0 12:39:05.473 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.maxThreadLocalCharBufferSize: 16384 12:39:05.487 [vert.x-eventloop-thread-0] INFO org.deeplearning4j.ui.VertxUIServer - Deeplearning4j UI server started at: http://localhost:9000 12:39:05.490 [main] INFO org.deeplearning4j.ui.VertxUIServer - StatsStorage instance attached to UI: InMemoryStatsStorage(uid=bd548909) 12:39:05.803 [Thread-5] INFO org.deeplearning4j.ui.VertxUIServer - Deeplearning4j UI server is auto-stopping after thread (name: main) died. 12:39:05.812 [vert.x-eventloop-thread-0] INFO org.deeplearning4j.ui.VertxUIServer - Deeplearning4j UI server stopped. Following this post: java.lang.UnsupportedOperationException: Reflective setAccessible(true) disabled I tried to set the Java version to 8 but it didn't help Any ideas what I could try?
12:39:05.487 [vert.x-eventloop-thread-0] INFO org.deeplearning4j.ui.VertxUIServer - Deeplearning4j UI server started at: http://localhost:9000` 12:39:05.490 [main] INFO org.deeplearning4j.ui.VertxUIServer - StatsStorage instance attached to UI: InMemoryStatsStorage(uid=bd548909) 12:39:05.803 [Thread-5] INFO org.deeplearning4j.ui.VertxUIServer - Deeplearning4j UI server is auto-stopping after thread (name: main) died. The server has started, ran for about 500ms and then shut down, because the main thread of your application has ended. So if you want to keep it running, you need to keep your main thread alive. You can do that for example with a Thread.sleep(60000) to keep it running for another minute.
Blocked vertx threads
I must learn to develop microservices using: Java 8 Vertx 3.9 Maven 3.6 MongoDB over Docker 19.03.1 And as an IDE I am using Intellij The code I'm working on is this: import io.vertx.core.AbstractVerticle; import io.vertx.core.Future; import io.vertx.core.Promise; import io.vertx.core.Vertx; import io.vertx.core.http.HttpServer; import io.vertx.core.json.Json; import io.vertx.core.json.JsonObject; import io.vertx.ext.mongo.MongoClient; import io.vertx.ext.web.Router; import io.vertx.ext.web.RoutingContext; import io.vertx.ext.web.handler.BodyHandler; public class MainVerticle extends AbstractVerticle { MongoClient mongoSharedClient = null; private Future<Void> prepareDatabase(){ Promise<Void> promises = Promise.promise(); JsonObject config = Vertx.currentContext().config(); String uri = config.getString("mongo_uri"); if(uri == null){ uri = "mongodb://localhost:27017"; } String database = config.getString("mongo_db"); if (database == null){ database = "test"; } JsonObject configMongo = new JsonObject(); configMongo.put("connection_string", uri); configMongo.put("db_name", database); mongoSharedClient = MongoClient.create(vertx, configMongo); if(mongoSharedClient != null){ promises.complete(); }else { promises.fail("Error in Database"); } return promises.future(); } #Override public void start(Promise<Void> startFuture) throws Exception { prepareDatabase().compose(as-> HttpServer()).onComplete(asyn->{ if(asyn.succeeded()){ startFuture.complete(); }else { startFuture.fail("Error"); } }); } private Future<Void> HttpServer(){ Promise<Void> promises = Promise.promise(); HttpServer server = vertx.createHttpServer(); Router router = Router.router(vertx); router.get("/test/").handler(this::pruebaRuta); router.post("/create/").handler(this::createPrueba); router.post().handler(BodyHandler.create()); server.requestHandler(router).listen(9090, ar -> { if(ar.succeeded()){ promises.complete(); }else { promises.fail(ar.cause()); } }); return promises.future(); } private void createPrueba(RoutingContext routingContext) { JsonObject data = routingContext.getBodyAsJson(); mongoSharedClient.insert("User", data, result -> { if(result.succeeded()){ routingContext.response().setStatusCode(200).putHeader("Content-Type", "text/html").end("Operation Successful"); }else { routingContext.response().setStatusCode(400).putHeader("Content-Type", "text/html").end(result.cause().getMessage()); } }); routingContext.response().setStatusCode(200).putHeader("Content-Type", "Application/Json; charset=utf-8").end(Json.encodePrettily(data)); } private void pruebaRuta(RoutingContext routingContext) { routingContext.response().setStatusCode(200).putHeader("Content-Type", "text/html").end("Success Execute!"); } } At the moment I only want to do small tests with postman, the problem is that when I run the project I get the following: Connected to the target VM, address: '127.0.0.1:51951', transport: 'socket' 18:36:02.095 [main] DEBUG io.netty.util.internal.logging.InternalLoggerFactory - Using SLF4J as the default logging framework 18:36:02.108 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.level: simple 18:36:02.108 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.targetRecords: 4 18:36:02.393 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024 18:36:02.393 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096 18:36:02.578 [main] DEBUG 
io.netty.channel.MultithreadEventLoopGroup - -Dio.netty.eventLoopThreads: 8 18:36:02.824 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.noKeySetOptimization: false 18:36:02.824 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.selectorAutoRebuildThreshold: 512 18:36:02.874 [main] DEBUG io.netty.util.internal.PlatformDependent - Platform: Windows 18:36:02.878 [main] DEBUG io.netty.util.internal.PlatformDependent0 - -Dio.netty.noUnsafe: false 18:36:02.880 [main] DEBUG io.netty.util.internal.PlatformDependent0 - Java version: 8 18:36:02.883 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.theUnsafe: available 18:36:02.886 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.copyMemory: available 18:36:02.888 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Buffer.address: available 18:36:02.889 [main] DEBUG io.netty.util.internal.PlatformDependent0 - direct buffer constructor: available 18:36:02.892 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Bits.unaligned: available, true 18:36:02.892 [main] DEBUG io.netty.util.internal.PlatformDependent0 - jdk.internal.misc.Unsafe.allocateUninitializedArray(int): unavailable prior to Java9 18:36:02.892 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.DirectByteBuffer.<init>(long, int): available 18:36:02.892 [main] DEBUG io.netty.util.internal.PlatformDependent - sun.misc.Unsafe: available 18:36:02.894 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.tmpdir: C:\Users\kathy\AppData\Local\Temp (java.io.tmpdir) 18:36:02.894 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.bitMode: 64 (sun.arch.data.model) 18:36:02.899 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.maxDirectMemory: 934281216 bytes 18:36:02.899 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.uninitializedArrayAllocationThreshold: -1 18:36:02.902 [main] DEBUG io.netty.util.internal.CleanerJava6 - java.nio.ByteBuffer.cleaner(): available 18:36:02.902 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.noPreferDirect: false 18:36:02.924 [main] DEBUG io.netty.util.internal.PlatformDependent - org.jctools-core.MpscChunkedArrayQueue: available 18:36:06.345 [main] DEBUG io.netty.resolver.dns.DefaultDnsServerAddressStreamProvider - Default DNS servers: [/1.1.1.1:53, /8.8.8.8:53] (sun.net.dns.ResolverConfiguration) 18:36:08.487 [vert.x-eventloop-thread-0] INFO org.mongodb.driver.cluster - Cluster created with settings {hosts=[localhost:27017], mode=SINGLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=500} 18:36:08.619 [vert.x-eventloop-thread-0] DEBUG org.mongodb.driver.cluster - Updating cluster description to {type=UNKNOWN, servers=[{address=localhost:27017, type=UNKNOWN, state=CONNECTING}] jul 07, 2020 6:36:09 PM io.vertx.core.impl.BlockedThreadChecker ADVERTENCIA: Thread Thread[vert.x-eventloop-thread-0,5,main]=Thread[vert.x-eventloop-thread-0,5,main] has been blocked for 2459 ms, time limit is 2000 ms 18:36:10.012 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numHeapArenas: 8 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numDirectArenas: 8 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.pageSize: 8192 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG 
io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxOrder: 11 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.chunkSize: 16777216 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.tinyCacheSize: 512 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.smallCacheSize: 256 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.normalCacheSize: 64 18:36:10.013 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedBufferCapacity: 32768 18:36:10.014 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimInterval: 8192 18:36:10.014 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimIntervalMillis: 0 18:36:10.014 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.useCacheForAllThreads: true 18:36:10.014 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedByteBuffersPerChunk: 1023 18:36:10.327 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv4Stack: false 18:36:10.328 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv6Addresses: false jul 07, 2020 6:36:10 PM io.vertx.core.impl.BlockedThreadChecker ADVERTENCIA: Thread Thread[vert.x-eventloop-thread-0,5,main]=Thread[vert.x-eventloop-thread-0,5,main] has been blocked for 3485 ms, time limit is 2000 ms 18:36:11.188 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - Loopback interface: lo (Software Loopback Interface 1, 127.0.0.1) 18:36:11.189 [vert.x-eventloop-thread-0] DEBUG io.netty.util.NetUtil - Failed to get SOMAXCONN from sysctl and file \proc\sys\net\core\somaxconn. 
Default: 200 jul 07, 2020 6:36:11 PM io.vertx.core.impl.BlockedThreadChecker ADVERTENCIA: Thread Thread[vert.x-eventloop-thread-0,5,main]=Thread[vert.x-eventloop-thread-0,5,main] has been blocked for 4489 ms, time limit is 2000 ms 18:36:12.550 [vert.x-eventloop-thread-0] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.processId: 6252 (auto-detected) jul 07, 2020 6:36:12 PM io.vertx.core.impl.BlockedThreadChecker ADVERTENCIA: Thread Thread[vert.x-eventloop-thread-0,5,main]=Thread[vert.x-eventloop-thread-0,5,main] has been blocked for 5489 ms, time limit is 2000 ms io.vertx.core.VertxException: Thread blocked at java.net.NetworkInterface.getAll(Native Method) at java.net.NetworkInterface.getNetworkInterfaces(NetworkInterface.java:355) at io.netty.util.internal.MacAddressUtil.bestAvailableMac(MacAddressUtil.java:55) at io.netty.util.internal.MacAddressUtil.defaultMachineId(MacAddressUtil.java:138) at io.netty.channel.DefaultChannelId.<clinit>(DefaultChannelId.java:99) at io.netty.channel.AbstractChannel.newId(AbstractChannel.java:101) at io.netty.channel.AbstractChannel.<init>(AbstractChannel.java:73) at io.netty.channel.nio.AbstractNioChannel.<init>(AbstractNioChannel.java:80) at io.netty.channel.nio.AbstractNioMessageChannel.<init>(AbstractNioMessageChannel.java:42) at io.netty.channel.socket.nio.NioDatagramChannel.<init>(NioDatagramChannel.java:150) at io.netty.channel.socket.nio.NioDatagramChannel.<init>(NioDatagramChannel.java:118) at io.vertx.core.net.impl.transport.Transport.datagramChannel(Transport.java:162) at io.vertx.core.impl.resolver.DnsResolverProvider$1.lambda$newResolver$0(DnsResolverProvider.java:136) at io.vertx.core.impl.resolver.DnsResolverProvider$1$$Lambda$43/1292567456.newChannel(Unknown Source) at io.netty.bootstrap.AbstractBootstrap.initAndRegister(AbstractBootstrap.java:310) at io.netty.bootstrap.AbstractBootstrap.register(AbstractBootstrap.java:227) at io.netty.resolver.dns.DnsNameResolver.<init>(DnsNameResolver.java:451) at io.netty.resolver.dns.DnsNameResolverBuilder.build(DnsNameResolverBuilder.java:473) at io.vertx.core.impl.resolver.DnsResolverProvider$1$1.newNameResolver(DnsResolverProvider.java:186) at io.netty.resolver.dns.DnsAddressResolverGroup.newResolver(DnsAddressResolverGroup.java:91) at io.netty.resolver.dns.DnsAddressResolverGroup.newResolver(DnsAddressResolverGroup.java:76) at io.netty.resolver.AddressResolverGroup.getResolver(AddressResolverGroup.java:70) at io.vertx.core.impl.resolver.DnsResolverProvider$1.newResolver(DnsResolverProvider.java:190) at io.netty.resolver.AddressResolverGroup.getResolver(AddressResolverGroup.java:70) at io.vertx.core.impl.AddressResolver.resolveHostname(AddressResolver.java:82) at io.vertx.core.impl.VertxImpl.resolveAddress(VertxImpl.java:810) at io.vertx.core.net.impl.AsyncResolveConnectHelper.doBind(AsyncResolveConnectHelper.java:56) at io.vertx.core.http.impl.HttpServerImpl.listen(HttpServerImpl.java:253) at io.vertx.core.http.impl.HttpServerImpl.listen(HttpServerImpl.java:188) at io.vertx.core.http.impl.HttpServerImpl.listen(HttpServerImpl.java:184) at com.lakatuna.com.MainVerticle.HttpServer(MainVerticle.java:75) at com.lakatuna.com.MainVerticle.lambda$start$0(MainVerticle.java:56) at com.lakatuna.com.MainVerticle$$Lambda$30/752448968.apply(Unknown Source) at io.vertx.core.Future.lambda$compose$3(Future.java:363) at io.vertx.core.Future$$Lambda$32/767632927.handle(Unknown Source) at io.vertx.core.impl.FutureImpl.dispatch(FutureImpl.java:105) at io.vertx.core.impl.FutureImpl.onComplete(FutureImpl.java:83) at 
io.vertx.core.Future.compose(Future.java:359) at io.vertx.core.Future.compose(Future.java:331) at com.lakatuna.com.MainVerticle.start(MainVerticle.java:56) at io.vertx.core.impl.DeploymentManager.lambda$doDeploy$9(DeploymentManager.java:556) at io.vertx.core.impl.DeploymentManager$$Lambda$9/726379593.handle(Unknown Source) at io.vertx.core.impl.ContextImpl.executeTask(ContextImpl.java:369) at io.vertx.core.impl.EventLoopContext.lambda$executeAsync$0(EventLoopContext.java:38) at io.vertx.core.impl.EventLoopContext$$Lambda$10/1212772528.run(Unknown Source) at io.netty.util.concurrent.AbstractEventExecutor.safeExecute$$$capture(AbstractEventExecutor.java:164) at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java) at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:472) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:500) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.lang.Thread.run(Thread.java:748) 18:36:13.149 [vert.x-eventloop-thread-0] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.machineId: 9c:ad:97:ff:fe:8b:00:df (auto-detected) 18:36:13.342 [cluster-ClusterId{value='5f0506e849515074214c3f60', description='null'}-localhost:27017] DEBUG org.mongodb.driver.connection - Closing connection connectionId{localValue:1} 18:36:13.602 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.allocator.type: pooled 18:36:13.602 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.threadLocalDirectBufferSize: 0 18:36:13.602 [vert.x-eventloop-thread-0] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.maxThreadLocalCharBufferSize: 16384 18:36:13.635 [cluster-ClusterId{value='5f0506e849515074214c3f60', description='null'}-localhost:27017] INFO org.mongodb.driver.cluster - Exception in monitor thread while connecting to server localhost:27017 com.mongodb.MongoSocketOpenException: Exception opening socket at com.mongodb.internal.connection.AsynchronousSocketChannelStream$OpenCompletionHandler.failed(AsynchronousSocketChannelStream.java:117) at sun.nio.ch.Invoker.invokeUnchecked(Invoker.java:128) at sun.nio.ch.Invoker.invokeDirect(Invoker.java:157) at sun.nio.ch.Invoker.invoke(Invoker.java:185) at sun.nio.ch.Invoker.invoke(Invoker.java:297) at sun.nio.ch.WindowsAsynchronousSocketChannelImpl$ConnectTask.failed(WindowsAsynchronousSocketChannelImpl.java:302) at sun.nio.ch.Iocp$EventHandlerTask.run(Iocp.java:399) at sun.nio.ch.AsynchronousChannelGroupImpl$1.run(AsynchronousChannelGroupImpl.java:112) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) at java.lang.Thread.run(Thread.java:748) Caused by: java.io.IOException: El equipo remoto rechazó la conexión de red. at sun.nio.ch.Iocp.translateErrorToIOException(Iocp.java:309) at sun.nio.ch.Iocp.access$700(Iocp.java:46) ... 
5 common frames omitted jul 07, 2020 6:36:13 PM io.vertx.core.impl.BlockedThreadChecker ADVERTENCIA: Thread Thread[vert.x-eventloop-thread-0,5,main]=Thread[vert.x-eventloop-thread-0,5,main] has been blocked for 6490 ms, time limit is 2000 ms io.vertx.core.VertxException: Thread blocked at java.net.DatagramSocket$1.run(DatagramSocket.java:312) at java.net.DatagramSocket$1.run(DatagramSocket.java:309) at java.security.AccessController.doPrivileged(Native Method) at java.net.DatagramSocket.checkOldImpl(DatagramSocket.java:308) at java.net.DatagramSocket.<init>(DatagramSocket.java:211) at sun.nio.ch.DatagramSocketAdaptor.<init>(DatagramSocketAdaptor.java:57) at sun.nio.ch.DatagramSocketAdaptor.create(DatagramSocketAdaptor.java:63) at sun.nio.ch.DatagramChannelImpl.socket(DatagramChannelImpl.java:173) at io.netty.channel.socket.nio.NioDatagramChannelConfig.<init>(NioDatagramChannelConfig.java:117) at io.netty.channel.socket.nio.NioDatagramChannel.<init>(NioDatagramChannel.java:151) at io.netty.channel.socket.nio.NioDatagramChannel.<init>(NioDatagramChannel.java:118) at io.vertx.core.net.impl.transport.Transport.datagramChannel(Transport.java:162) at io.vertx.core.impl.resolver.DnsResolverProvider$1.lambda$newResolver$0(DnsResolverProvider.java:136) at io.vertx.core.impl.resolver.DnsResolverProvider$1$$Lambda$43/1292567456.newChannel(Unknown Source) at io.netty.bootstrap.AbstractBootstrap.initAndRegister(AbstractBootstrap.java:310) at io.netty.bootstrap.AbstractBootstrap.register(AbstractBootstrap.java:227) at io.netty.resolver.dns.DnsNameResolver.<init>(DnsNameResolver.java:451) at io.netty.resolver.dns.DnsNameResolverBuilder.build(DnsNameResolverBuilder.java:473) at io.vertx.core.impl.resolver.DnsResolverProvider$1$1.newNameResolver(DnsResolverProvider.java:186) at io.netty.resolver.dns.DnsAddressResolverGroup.newResolver(DnsAddressResolverGroup.java:91) at io.netty.resolver.dns.DnsAddressResolverGroup.newResolver(DnsAddressResolverGroup.java:76) at io.netty.resolver.AddressResolverGroup.getResolver(AddressResolverGroup.java:70) at io.vertx.core.impl.resolver.DnsResolverProvider$1.newResolver(DnsResolverProvider.java:190) at io.netty.resolver.AddressResolverGroup.getResolver(AddressResolverGroup.java:70) at io.vertx.core.impl.AddressResolver.resolveHostname(AddressResolver.java:82) at io.vertx.core.impl.VertxImpl.resolveAddress(VertxImpl.java:810) at io.vertx.core.net.impl.AsyncResolveConnectHelper.doBind(AsyncResolveConnectHelper.java:56) at io.vertx.core.http.impl.HttpServerImpl.listen(HttpServerImpl.java:253) at io.vertx.core.http.impl.HttpServerImpl.listen(HttpServerImpl.java:188) at io.vertx.core.http.impl.HttpServerImpl.listen(HttpServerImpl.java:184) at com.lakatuna.com.MainVerticle.HttpServer(MainVerticle.java:75) at com.lakatuna.com.MainVerticle.lambda$start$0(MainVerticle.java:56) at com.lakatuna.com.MainVerticle$$Lambda$30/752448968.apply(Unknown Source) at io.vertx.core.Future.lambda$compose$3(Future.java:363) at io.vertx.core.Future$$Lambda$32/767632927.handle(Unknown Source) at io.vertx.core.impl.FutureImpl.dispatch(FutureImpl.java:105) at io.vertx.core.impl.FutureImpl.onComplete(FutureImpl.java:83) at io.vertx.core.Future.compose(Future.java:359) at io.vertx.core.Future.compose(Future.java:331) at com.lakatuna.com.MainVerticle.start(MainVerticle.java:56) at io.vertx.core.impl.DeploymentManager.lambda$doDeploy$9(DeploymentManager.java:556) at io.vertx.core.impl.DeploymentManager$$Lambda$9/726379593.handle(Unknown Source) at 
io.vertx.core.impl.ContextImpl.executeTask(ContextImpl.java:369) at io.vertx.core.impl.EventLoopContext.lambda$executeAsync$0(EventLoopContext.java:38) at io.vertx.core.impl.EventLoopContext$$Lambda$10/1212772528.run(Unknown Source) at io.netty.util.concurrent.AbstractEventExecutor.safeExecute$$$capture(AbstractEventExecutor.java:164) at io.netty.util.concurrent.AbstractEventExecutor.safeExecute(AbstractEventExecutor.java) at io.netty.util.concurrent.SingleThreadEventExecutor.runAllTasks(SingleThreadEventExecutor.java:472) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:500) at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:989) at io.netty.util.internal.ThreadExecutorMap$2.run(ThreadExecutorMap.java:74) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.lang.Thread.run(Thread.java:748) 18:36:13.660 [cluster-ClusterId{value='5f0506e849515074214c3f60', description='null'}-localhost:27017] DEBUG org.mongodb.driver.cluster - Updating cluster description to {type=UNKNOWN, servers=[{address=localhost:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.io.IOException: El equipo remoto rechazó la conexión de red. }}]
The Vert.x threads are blocked. When I test the route localhost:9090/create/ in Postman, it returns a 500 error. I know the question is long, but I really don't know what to do: I have looked for a solution, and the truth is I don't understand what my mistake is or what is happening. I need your help. Thank you very much.
If you look at the middle stack trace, it is reporting that the host it is trying to reach for the Mongo database rejected the connection ("El equipo remoto rechazó la conexión de red." translates to "The remote machine refused the network connection."), so make sure MongoDB is actually running and reachable at localhost:27017. I'll bet the 500 error you're seeing is a NullPointerException caused by you trying to use the Mongo client even though it failed to initialize and the variable was never assigned. The first and third stack traces are complaining that binding the ports while starting up the server is taking too long and is blocking the event loop; that part might be an issue in the underlying Netty library.
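Not knowing the rest of your code, here is a minimal sketch of the kind of guard I mean. The class name MainVerticle comes from your stack trace; the field, handler and collection names are assumptions, and the point is only to fail fast with a 503 instead of a NullPointerException-driven 500 when the Mongo client never came up:
import io.vertx.core.AbstractVerticle;
import io.vertx.ext.mongo.MongoClient;
import io.vertx.ext.web.RoutingContext;

public class MainVerticle extends AbstractVerticle {

    private MongoClient mongoClient; // stays null if the connection to MongoDB failed

    // Hypothetical handler behind the /create/ route from the question.
    private void handleCreate(RoutingContext ctx) {
        if (mongoClient == null) {
            // MongoDB never came up; report it clearly instead of blowing up with an NPE.
            ctx.response().setStatusCode(503).end("MongoDB is not reachable");
            return;
        }
        mongoClient.insert("documents", ctx.getBodyAsJson(), res -> {
            if (res.succeeded()) {
                ctx.response().setStatusCode(201).end(res.result());
            } else {
                ctx.response().setStatusCode(500).end(res.cause().getMessage());
            }
        });
    }
}
Either way, the real fix is to get MongoDB listening on localhost:27017; the guard just makes the failure obvious instead of hiding it behind a generic 500.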
Why doesn't AsyncHttpClient close the thread after throwing an exception?
I'm using the Zendesk Java Client. When I supply the correct credentials, it works as expected. However, I stumbled on a scenario where if the wrong credentials are passed in, the thread just hangs and it throws an error. Whether I run the code with or without the debugger, the code just stays running. It doesn't exit or return control to the debugger. Is this an issue with the library, or am I misunderstanding how the AsyncHttpClient works? My code is below: fun zdtestWrongCredentials() { val client = asyncHttpClient( config() .setRequestTimeout(5000) .setReadTimeout(5000) .setShutdownTimeout(3000) .setPooledConnectionIdleTimeout(5000) .setKeepAlive(false) ) var zd = Zendesk.Builder("https://website.zendesk.com") .setClient(client) .setUsername("john.doe#website.com") .setPassword("abcd") .build() var ticket = Ticket(123, "a", Comment("abc")) // The code hangs here. It's unclear why it exhibits this behavior. var test = zd.createTicket(ticket) // The code does not reach this line. client.close() return } Although I'm using Kotlin, I tried replicating this issue in a simple Java project and the issue persists. The stack trace is below. There is an exception at the bottom, but even after this exception, the program does not exit/give control back to the debugger. "C:\Program Files\Java\jdk-11.0.4\bin\java.exe" 09:47:01.463 [main] DEBUG io.netty.util.internal.logging.InternalLoggerFactory - Using SLF4J as the default logging framework 09:47:01.480 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.level: simple 09:47:01.480 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.targetRecords: 4 09:47:01.550 [main] DEBUG io.netty.util.internal.PlatformDependent - Platform: Windows 09:47:01.562 [main] DEBUG io.netty.util.internal.PlatformDependent0 - -Dio.netty.noUnsafe: false 09:47:01.562 [main] DEBUG io.netty.util.internal.PlatformDependent0 - Java version: 11 09:47:01.566 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.theUnsafe: available 09:47:01.567 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.copyMemory: available 09:47:01.568 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Buffer.address: available 09:47:01.575 [main] DEBUG io.netty.util.internal.PlatformDependent0 - direct buffer constructor: unavailable java.lang.UnsupportedOperationException: Reflective setAccessible(true) disabled at io.netty.util.internal.ReflectionUtil.trySetAccessible(ReflectionUtil.java:31) at io.netty.util.internal.PlatformDependent0$4.run(PlatformDependent0.java:224) at java.base/java.security.AccessController.doPrivileged(Native Method) at io.netty.util.internal.PlatformDependent0.<clinit>(PlatformDependent0.java:218) at io.netty.util.internal.PlatformDependent.isAndroid(PlatformDependent.java:212) at io.netty.util.internal.PlatformDependent.<clinit>(PlatformDependent.java:80) at io.netty.util.ResourceLeakDetector.<init>(ResourceLeakDetector.java:171) at io.netty.util.ResourceLeakDetector.<init>(ResourceLeakDetector.java:213) at io.netty.util.ResourceLeakDetectorFactory$DefaultResourceLeakDetectorFactory.newResourceLeakDetector(ResourceLeakDetectorFactory.java:201) at io.netty.util.HashedWheelTimer.<clinit>(HashedWheelTimer.java:89) at org.asynchttpclient.DefaultAsyncHttpClient.newNettyTimer(DefaultAsyncHttpClient.java:96) at org.asynchttpclient.DefaultAsyncHttpClient.<init>(DefaultAsyncHttpClient.java:87) at org.asynchttpclient.Dsl.asyncHttpClient(Dsl.java:32) at 
com.website.MainKt.test123(Main.kt:321) at com.website.MainKt.main(Main.kt:288) at com.website.MainKt.main(Main.kt) 09:47:01.577 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Bits.unaligned: available, true 09:47:01.579 [main] DEBUG io.netty.util.internal.PlatformDependent0 - jdk.internal.misc.Unsafe.allocateUninitializedArray(int): unavailable java.lang.IllegalAccessException: class io.netty.util.internal.PlatformDependent0$6 cannot access class jdk.internal.misc.Unsafe (in module java.base) because module java.base does not export jdk.internal.misc to unnamed module #4de5031f at java.base/jdk.internal.reflect.Reflection.newIllegalAccessException(Reflection.java:361) at java.base/java.lang.reflect.AccessibleObject.checkAccess(AccessibleObject.java:591) at java.base/java.lang.reflect.Method.invoke(Method.java:558) at io.netty.util.internal.PlatformDependent0$6.run(PlatformDependent0.java:334) at java.base/java.security.AccessController.doPrivileged(Native Method) at io.netty.util.internal.PlatformDependent0.<clinit>(PlatformDependent0.java:325) at io.netty.util.internal.PlatformDependent.isAndroid(PlatformDependent.java:212) at io.netty.util.internal.PlatformDependent.<clinit>(PlatformDependent.java:80) at io.netty.util.ResourceLeakDetector.<init>(ResourceLeakDetector.java:171) at io.netty.util.ResourceLeakDetector.<init>(ResourceLeakDetector.java:213) at io.netty.util.ResourceLeakDetectorFactory$DefaultResourceLeakDetectorFactory.newResourceLeakDetector(ResourceLeakDetectorFactory.java:201) at io.netty.util.HashedWheelTimer.<clinit>(HashedWheelTimer.java:89) at org.asynchttpclient.DefaultAsyncHttpClient.newNettyTimer(DefaultAsyncHttpClient.java:96) at org.asynchttpclient.DefaultAsyncHttpClient.<init>(DefaultAsyncHttpClient.java:87) at org.asynchttpclient.Dsl.asyncHttpClient(Dsl.java:32) at com.website.MainKt.test123(Main.kt:321) at com.website.MainKt.main(Main.kt:288) at com.website.MainKt.main(Main.kt) 09:47:01.579 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.DirectByteBuffer.<init>(long, int): unavailable 09:47:01.579 [main] DEBUG io.netty.util.internal.PlatformDependent - sun.misc.Unsafe: available 09:47:01.634 [main] DEBUG io.netty.util.internal.PlatformDependent - maxDirectMemory: 6404702208 bytes (maybe) 09:47:01.635 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.tmpdir: C:\Users\john~1.doe\AppData\Local\Temp (java.io.tmpdir) 09:47:01.635 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.bitMode: 64 (sun.arch.data.model) 09:47:01.639 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.maxDirectMemory: -1 bytes 09:47:01.639 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.uninitializedArrayAllocationThreshold: -1 09:47:01.649 [main] DEBUG io.netty.util.internal.CleanerJava9 - java.nio.ByteBuffer.cleaner(): available 09:47:01.649 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.noPreferDirect: false 09:47:01.649 [main] DEBUG io.netty.util.ResourceLeakDetectorFactory - Loaded default ResourceLeakDetector: io.netty.util.ResourceLeakDetector#13acb0d1 09:47:01.705 [main] DEBUG io.netty.util.internal.PlatformDependent - org.jctools-core.MpscChunkedArrayQueue: available 09:47:02.621 [main] DEBUG io.netty.handler.ssl.JdkSslContext - Default protocols (JDK): [TLSv1.2, TLSv1.1, TLSv1] 09:47:02.622 [main] DEBUG io.netty.handler.ssl.JdkSslContext - Default cipher suites (JDK): [TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384, TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, 
TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA, TLS_RSA_WITH_AES_128_GCM_SHA256, TLS_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_256_CBC_SHA, TLS_AES_128_GCM_SHA256, TLS_AES_256_GCM_SHA384] 09:47:02.651 [main] DEBUG io.netty.channel.MultithreadEventLoopGroup - -Dio.netty.eventLoopThreads: 8 09:47:02.674 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.noKeySetOptimization: false 09:47:02.675 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.selectorAutoRebuildThreshold: 512 09:47:02.714 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024 09:47:02.714 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numHeapArenas: 8 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numDirectArenas: 8 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.pageSize: 8192 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxOrder: 11 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.chunkSize: 16777216 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.tinyCacheSize: 512 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.smallCacheSize: 256 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.normalCacheSize: 64 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedBufferCapacity: 32768 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimInterval: 8192 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.useCacheForAllThreads: true 09:47:02.728 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedByteBuffersPerChunk: 1023 09:47:02.738 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.allocator.type: pooled 09:47:02.738 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.threadLocalDirectBufferSize: 0 09:47:02.738 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.maxThreadLocalCharBufferSize: 16384 09:47:03.522 [main] DEBUG org.zendesk.client.v2.Zendesk - Request POST https://website.zendesk.com/api/v2/tickets.json application/json; charset=UTF-8 92 bytes 09:47:03.568 [main] DEBUG io.netty.buffer.AbstractByteBuf - -Dio.netty.buffer.checkAccessible: true 09:47:03.568 [main] DEBUG io.netty.buffer.AbstractByteBuf - -Dio.netty.buffer.checkBounds: true 09:47:03.569 [main] DEBUG io.netty.util.ResourceLeakDetectorFactory - Loaded default ResourceLeakDetector: io.netty.util.ResourceLeakDetector#604c5de8 09:47:03.650 [main] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.processId: 12648 (auto-detected) 09:47:03.653 [main] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv4Stack: false 09:47:03.653 [main] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv6Addresses: false 09:47:04.352 [main] DEBUG io.netty.util.NetUtil - Loopback interface: [lo, Software Loopback Interface 1, 127.0.0.1] ([lo, Software Loopback Interface 1, 127.0.0.1], {}) 09:47:04.353 [main] DEBUG io.netty.util.NetUtil - Failed to get SOMAXCONN from sysctl and file \proc\sys\net\core\somaxconn. 
Default: 200 09:47:05.043 [main] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.machineId: 00:50:b6:ff:fe:ae:6e:01 (auto-detected) 09:47:05.172 [AsyncHttpClient-3-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.maxCapacityPerThread: 4096 09:47:05.172 [AsyncHttpClient-3-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.maxSharedCapacityFactor: 2 09:47:05.173 [AsyncHttpClient-3-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.linkCapacity: 16 09:47:05.173 [AsyncHttpClient-3-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.ratio: 8 09:47:05.614 [AsyncHttpClient-3-1] DEBUG org.asynchttpclient.netty.channel.NettyConnectListener - Using new Channel '[id: 0xf398d3e1, L:/192.168.108.56:64305 - R:website.zendesk.com/104.16.54.111:443]' for 'POST' to '/api/v2/tickets.json' 09:47:05.630 [AsyncHttpClient-3-1] DEBUG io.netty.handler.ssl.SslHandler - [id: 0xf398d3e1, L:/192.168.108.56:64305 - R:website.zendesk.com/104.16.54.111:443] HANDSHAKEN: TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 09:47:05.810 [AsyncHttpClient-3-1] DEBUG org.asynchttpclient.netty.handler.HttpHandler - Request DefaultFullHttpRequest(decodeResult: success, version: HTTP/1.1, content: UnpooledHeapByteBuf(freed)) POST /api/v2/tickets.json HTTP/1.1 Content-type: application/json; charset=UTF-8 content-length: 92 connection: close host: website.zendesk.com authorization: Basic RGVyZWsudG93cmlzc0BkZWxhd2FyZWxpZmUuY29tOlRlczE= accept: */* user-agent: AHC/2.1 Response DefaultHttpResponse(decodeResult: success, version: HTTP/1.1) HTTP/1.1 401 Unauthorized Date: Tue, 20 Aug 2019 13:47:06 GMT Content-Type: application/json; charset=UTF-8 Content-Length: 37 Connection: close Set-Cookie: __cfduid=d807076f1918856a9ecbded67e619ee901566308826; expires=Wed, 19-Aug-20 13:47:06 GMT; path=/; domain=.website.zendesk.com; HttpOnly WWW-Authenticate: Basic realm="Web Password" Strict-Transport-Security: max-age=31536000; Cache-Control: no-cache X-Zendesk-Origin-Server: app23.pod20.usw2.zdsys.com X-Request-Id: 5094d137ce35e1fa-SEA X-Runtime: 0.060433 X-Zendesk-Request-Id: 7d89cc0062f1b7e6f05c Set-Cookie: __cfruid=b0a77a0d73109c7862b0ab39be944601c81b0353-1566308826; path=/; domain=.website.zendesk.com; HttpOnly Expect-CT: max-age=604800, report-uri="https://report-uri.cloudflare.com/cdn-cgi/beacon/expect-ct" Server: cloudflare CF-RAY: 5094d137ce35e1fa-ORD 09:47:05.829 [AsyncHttpClient-3-1] INFO org.asynchttpclient.netty.handler.intercept.Unauthorized401Interceptor - Can't handle 401 as auth was already performed 09:47:05.839 [AsyncHttpClient-3-1] DEBUG org.asynchttpclient.netty.channel.ChannelManager - Closing Channel [id: 0xf398d3e1, L:/192.168.108.56:64305 - R:website.zendesk.com/104.16.54.111:443] 09:47:05.844 [AsyncHttpClient-3-1] DEBUG org.zendesk.client.v2.Zendesk - Response HTTP/401 Unauthorized {"error":"Couldn't authenticate you"} 09:47:05.846 [AsyncHttpClient-3-1] DEBUG org.asynchttpclient.AsyncCompletionHandler - HTTP/401: Unauthorized - {"error":"Couldn't authenticate you"} org.zendesk.client.v2.ZendeskResponseException: HTTP/401: Unauthorized - {"error":"Couldn't authenticate you"} at org.zendesk.client.v2.Zendesk$BasicAsyncCompletionHandler.onCompleted(Zendesk.java:1997) at org.asynchttpclient.AsyncCompletionHandler.onCompleted(AsyncCompletionHandler.java:66) at org.asynchttpclient.netty.NettyResponseFuture.loadContent(NettyResponseFuture.java:223) at org.asynchttpclient.netty.NettyResponseFuture.done(NettyResponseFuture.java:258) at 
org.asynchttpclient.netty.handler.AsyncHttpClientHandler.finishUpdate(AsyncHttpClientHandler.java:239) at org.asynchttpclient.netty.handler.HttpHandler.handleChunk(HttpHandler.java:113) at org.asynchttpclient.netty.handler.HttpHandler.handleRead(HttpHandler.java:142) at org.asynchttpclient.netty.handler.AsyncHttpClientHandler.channelRead(AsyncHttpClientHandler.java:76) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:438) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:323) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:297) at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:253) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1436) at io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1203) at io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1247) at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:502) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:441) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:278) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1408) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:930) at 
io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:677) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:612) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:529) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:491) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:905) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:834) Exception in thread "main" org.zendesk.client.v2.ZendeskResponseException: HTTP/401: Unauthorized - {"error":"Couldn't authenticate you"} at org.zendesk.client.v2.Zendesk.complete(Zendesk.java:2252) at org.zendesk.client.v2.Zendesk.createTicket(Zendesk.java:307) at com.website.MainKt.test123(Main.kt:349) at com.website.MainKt.main(Main.kt:288)09:47:05.849 [AsyncHttpClient-3-1] DEBUG org.asynchttpclient.netty.handler.HttpHandler - Channel Closed: [id: 0xf398d3e1, L:/192.168.108.56:64305 ! R:website.zendesk.com/104.16.54.111:443] with attribute DISCARD at com.website.MainKt.main(Main.kt) Caused by: org.zendesk.client.v2.ZendeskResponseException: HTTP/401: Unauthorized - {"error":"Couldn't authenticate you"} at org.zendesk.client.v2.Zendesk$BasicAsyncCompletionHandler.onCompleted(Zendesk.java:1997) at org.asynchttpclient.AsyncCompletionHandler.onCompleted(AsyncCompletionHandler.java:66) at org.asynchttpclient.netty.NettyResponseFuture.loadContent(NettyResponseFuture.java:223) at org.asynchttpclient.netty.NettyResponseFuture.done(NettyResponseFuture.java:258) at org.asynchttpclient.netty.handler.AsyncHttpClientHandler.finishUpdate(AsyncHttpClientHandler.java:239) at org.asynchttpclient.netty.handler.HttpHandler.handleChunk(HttpHandler.java:113) at org.asynchttpclient.netty.handler.HttpHandler.handleRead(HttpHandler.java:142) at org.asynchttpclient.netty.handler.AsyncHttpClientHandler.channelRead(AsyncHttpClientHandler.java:76) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.ChannelInboundHandlerAdapter.channelRead(ChannelInboundHandlerAdapter.java:86) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:102) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.CombinedChannelDuplexHandler$DelegatingChannelHandlerContext.fireChannelRead(CombinedChannelDuplexHandler.java:438) at io.netty.handler.codec.ByteToMessageDecoder.fireChannelRead(ByteToMessageDecoder.java:323) at 
io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:297) at io.netty.channel.CombinedChannelDuplexHandler.channelRead(CombinedChannelDuplexHandler.java:253) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.handler.ssl.SslHandler.unwrap(SslHandler.java:1436) at io.netty.handler.ssl.SslHandler.decodeJdkCompatible(SslHandler.java:1203) at io.netty.handler.ssl.SslHandler.decode(SslHandler.java:1247) at io.netty.handler.codec.ByteToMessageDecoder.decodeRemovalReentryProtection(ByteToMessageDecoder.java:502) at io.netty.handler.codec.ByteToMessageDecoder.callDecode(ByteToMessageDecoder.java:441) at io.netty.handler.codec.ByteToMessageDecoder.channelRead(ByteToMessageDecoder.java:278) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:340) at io.netty.channel.DefaultChannelPipeline$HeadContext.channelRead(DefaultChannelPipeline.java:1408) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:362) at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:348) at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:930) at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:163) at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:677) at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:612) at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:529) at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:491) at io.netty.util.concurrent.SingleThreadEventExecutor$5.run(SingleThreadEventExecutor.java:905) at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30) at java.base/java.lang.Thread.run(Thread.java:834)
Depending on how the async client is implemented, it can throw an error on one thread or coroutine while another part of its code keeps waiting for a completion message or trigger. Because the error was thrown, that trigger is never sent, so the waiting thread hangs indefinitely. I have accidentally built a problem exactly like this once, so yes, it is possible. Obviously, I can't say for sure that this is the issue at hand. Cheers and all the best!
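For what it's worth, here is a minimal Java sketch of the same call with the cleanup forced in a finally block. It mirrors the snippet from the question (host, credentials and the Ticket values are placeholders), and I'm not claiming it fixes a hang inside the library itself; it only guarantees that the AsyncHttpClient's non-daemon Netty threads are released when createTicket throws, so the process can still exit:
import org.asynchttpclient.AsyncHttpClient;
import org.asynchttpclient.Dsl;
import org.zendesk.client.v2.Zendesk;
import org.zendesk.client.v2.ZendeskResponseException;
import org.zendesk.client.v2.model.Comment;
import org.zendesk.client.v2.model.Ticket;

public class ZendeskWrongCredentials {
    public static void main(String[] args) throws Exception {
        AsyncHttpClient client = Dsl.asyncHttpClient(Dsl.config()
                .setRequestTimeout(5000)
                .setReadTimeout(5000));
        Zendesk zd = new Zendesk.Builder("https://website.zendesk.com")
                .setClient(client)
                .setUsername("john.doe@website.com")
                .setPassword("abcd")
                .build();
        try {
            zd.createTicket(new Ticket(123L, "a", new Comment("abc")));
        } catch (ZendeskResponseException e) {
            // The 401 from the wrong credentials surfaces here instead of escaping from main.
            System.err.println("Zendesk rejected the request: " + e.getMessage());
        } finally {
            client.close(); // shut down the client's event loop threads so the JVM can exit
        }
    }
}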
AsyncHttpClient won't let program exit
I'm using org.asynchttpclient.async-http-client version 2.2.0 in the following code: public class Application { public static void main(String[] args) throws InterruptedException, ExecutionException, IOException { AsyncHttpClient httpClient = null; AsyncHttpClientConfig config = Dsl.config() .setConnectTimeout(1000) .setRequestTimeout(5000) .setKeepAlive(true) .build(); try { httpClient = Dsl.asyncHttpClient(config); String url ="https://www.google.com"; Response r = httpClient.prepareGet(url).execute().get(); String result = r.getResponseBody(Charset.forName("UTF-8")); System.out.println(result.toString()); } finally { ClientStats stats = httpClient.getClientStats(); System.out.println(stats); httpClient.close(); stats = httpClient.getClientStats(); System.out.println(stats); } } } The program works perfectly, retrieves Google HTML but doesn't exit. It hangs up indefinitely until someone kills it. Somehow, it seems that httpClient.close(); doesn't release all the resources so the vm stays alive. this is the log of the execution: SLF4J: Class path contains multiple SLF4J bindings. SLF4J: Found binding in [jar:file:/C:/home/u096285/.m2/repository/org/slf4j/slf4j-api/1.7.25/slf4j-api-1.7.25.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: Found binding in [jar:file:/C:/home/u096285/.m2/repository/ch/qos/logback/logback-classic/1.1.11/logback-classic-1.1.11.jar!/org/slf4j/impl/StaticLoggerBinder.class] SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation. SLF4J: Actual binding is of type [ch.qos.logback.classic.util.ContextSelectorStaticBinder] 19:27:42.837 [main] DEBUG io.netty.util.internal.logging.InternalLoggerFactory - Using SLF4J as the default logging framework 19:27:42.849 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.level: simple 19:27:42.851 [main] DEBUG io.netty.util.ResourceLeakDetector - -Dio.netty.leakDetection.targetRecords: 4 19:27:42.921 [main] DEBUG io.netty.util.internal.PlatformDependent - Platform: Windows 19:27:42.924 [main] DEBUG io.netty.util.internal.PlatformDependent0 - -Dio.netty.noUnsafe: false 19:27:42.925 [main] DEBUG io.netty.util.internal.PlatformDependent0 - Java version: 8 19:27:42.927 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.theUnsafe: available 19:27:42.928 [main] DEBUG io.netty.util.internal.PlatformDependent0 - sun.misc.Unsafe.copyMemory: available 19:27:42.929 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Buffer.address: available 19:27:42.930 [main] DEBUG io.netty.util.internal.PlatformDependent0 - direct buffer constructor: available 19:27:42.932 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.Bits.unaligned: available, true 19:27:42.932 [main] DEBUG io.netty.util.internal.PlatformDependent0 - jdk.internal.misc.Unsafe.allocateUninitializedArray(int): unavailable prior to Java9 19:27:42.932 [main] DEBUG io.netty.util.internal.PlatformDependent0 - java.nio.DirectByteBuffer.<init>(long, int): available 19:27:42.932 [main] DEBUG io.netty.util.internal.PlatformDependent - sun.misc.Unsafe: available 19:27:42.933 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.tmpdir: C:\Users\u096285\AppData\Local\Temp (java.io.tmpdir) 19:27:42.933 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.bitMode: 64 (sun.arch.data.model) 19:27:42.935 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.noPreferDirect: false 19:27:42.936 [main] DEBUG io.netty.util.internal.PlatformDependent - 
-Dio.netty.maxDirectMemory: 1879048192 bytes 19:27:42.936 [main] DEBUG io.netty.util.internal.PlatformDependent - -Dio.netty.uninitializedArrayAllocationThreshold: -1 19:27:42.937 [main] DEBUG io.netty.util.internal.CleanerJava6 - java.nio.ByteBuffer.cleaner(): available 19:27:42.938 [main] DEBUG io.netty.util.ResourceLeakDetectorFactory - Loaded default ResourceLeakDetector: io.netty.util.ResourceLeakDetector#71c7db30 19:27:42.962 [main] DEBUG io.netty.util.internal.PlatformDependent - org.jctools-core.MpscChunkedArrayQueue: available 19:27:43.721 [main] DEBUG io.netty.handler.ssl.JdkSslContext - Default protocols (JDK): [TLSv1.2, TLSv1.1, TLSv1] 19:27:43.721 [main] DEBUG io.netty.handler.ssl.JdkSslContext - Default cipher suites (JDK): [TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256, TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256, TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA, TLS_RSA_WITH_AES_128_GCM_SHA256, TLS_RSA_WITH_AES_128_CBC_SHA] 19:27:43.781 [main] DEBUG io.netty.channel.MultithreadEventLoopGroup - -Dio.netty.eventLoopThreads: 8 19:27:43.791 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.noKeySetOptimization: false 19:27:43.791 [main] DEBUG io.netty.channel.nio.NioEventLoop - -Dio.netty.selectorAutoRebuildThreshold: 512 19:27:43.837 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.initialSize: 1024 19:27:43.837 [main] DEBUG io.netty.util.internal.InternalThreadLocalMap - -Dio.netty.threadLocalMap.stringBuilder.maxSize: 4096 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numHeapArenas: 8 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.numDirectArenas: 8 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.pageSize: 8192 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxOrder: 11 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.chunkSize: 16777216 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.tinyCacheSize: 512 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.smallCacheSize: 256 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.normalCacheSize: 64 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.maxCachedBufferCapacity: 32768 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.cacheTrimInterval: 8192 19:27:43.847 [main] DEBUG io.netty.buffer.PooledByteBufAllocator - -Dio.netty.allocator.useCacheForAllThreads: true 19:27:43.855 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.allocator.type: pooled 19:27:43.855 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.threadLocalDirectBufferSize: 65536 19:27:43.855 [main] DEBUG io.netty.buffer.ByteBufUtil - -Dio.netty.maxThreadLocalCharBufferSize: 16384 19:27:44.015 [main] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.processId: 13112 (auto-detected) 19:27:44.017 [main] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv4Stack: false 19:27:44.017 [main] DEBUG io.netty.util.NetUtil - -Djava.net.preferIPv6Addresses: false 19:27:44.213 [main] DEBUG io.netty.util.NetUtil - Loopback interface: lo (Software Loopback Interface 1, 127.0.0.1) 19:27:44.214 [main] DEBUG io.netty.util.NetUtil - Failed to get SOMAXCONN from sysctl and file \proc\sys\net\core\somaxconn. 
Default: 200 19:27:44.426 [main] DEBUG io.netty.channel.DefaultChannelId - -Dio.netty.machineId: e8:2a:ea:ff:fe:2c:cf:9a (auto-detected) 19:27:44.475 [AsyncHttpClient-2-1] DEBUG io.netty.buffer.AbstractByteBuf - -Dio.netty.buffer.bytebuf.checkAccessible: true 19:27:44.475 [AsyncHttpClient-2-1] DEBUG io.netty.util.ResourceLeakDetectorFactory - Loaded default ResourceLeakDetector: io.netty.util.ResourceLeakDetector#58ef4cc 19:27:44.533 [AsyncHttpClient-2-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.maxCapacityPerThread: 32768 19:27:44.533 [AsyncHttpClient-2-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.maxSharedCapacityFactor: 2 19:27:44.533 [AsyncHttpClient-2-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.linkCapacity: 16 19:27:44.533 [AsyncHttpClient-2-1] DEBUG io.netty.util.Recycler - -Dio.netty.recycler.ratio: 8 19:27:44.620 [AsyncHttpClient-2-1] DEBUG org.asynchttpclient.netty.channel.NettyConnectListener - Using new Channel '[id: 0x04f0b68b, L:/10.23.78.87:65044 - R:www.google.com/172.217.17.4:443]' for 'GET' to '/' 19:27:44.640 [AsyncHttpClient-2-1] DEBUG io.netty.handler.ssl.SslHandler - [id: 0x04f0b68b, L:/10.23.78.87:65044 - R:www.google.com/172.217.17.4:443] HANDSHAKEN: TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 19:27:44.700 [AsyncHttpClient-2-1] DEBUG org.asynchttpclient.netty.handler.HttpHandler - Request DefaultFullHttpRequest(decodeResult: success, version: HTTP/1.1, content: EmptyByteBufBE) GET / HTTP/1.1 host: www.google.com accept: */* user-agent: AHC/2.1 Response DefaultHttpResponse(decodeResult: success, version: HTTP/1.1) HTTP/1.1 200 OK Date: Wed, 19 Jun 2019 17:27:44 GMT Expires: -1 Cache-Control: private, max-age=0 Content-Type: text/html; charset=ISO-8859-1 P3P: CP="This is not a P3P policy! See g.co/p3phelp for more info." Server: gws X-XSS-Protection: 0 X-Frame-Options: SAMEORIGIN Set-Cookie: 1P_JAR=2019-06-19-17; expires=Fri, 19-Jul-2019 17:27:44 GMT; path=/; domain=.google.com Set-Cookie: NID=186=f79HtEZwEwn8ivi3Y0dp6reZ6S6cA6wILrGF9RfPB1yxf5M8LDWVS4ahdsmHzVh3nAAmjXTPJSrMeQCNSlR7xYirMDJJ8Dz2PIBSrCPdjwF1UIRwK1VyfKTqQZO5ke-HyldO4-BIvU1QFJ5ZdzdruCti4TeqQVgCpXexuJ9mAwI; expires=Thu, 19-Dec-2019 17:27:44 GMT; path=/; domain=.google.com; HttpOnly Alt-Svc: quic=":443"; ma=2592000; v="46,44,43,39" Accept-Ranges: none Vary: Accept-Encoding Transfer-Encoding: chunked 19:27:44.729 [AsyncHttpClient-2-1] DEBUG org.asynchttpclient.netty.channel.ChannelManager - Adding key: https://www.google.com:443 for channel [id: 0x04f0b68b, L:/10.23.78.87:65044 - R:www.google.com/172.217.17.4:443] <!doctype html><html itemscope="" itemtype="http://schema.org/WebPage" lang="es"><head><meta content="Google.es permite acceder a la informaci�n mundial en castellano, catal�n, gallego, euskara e ingl�s." 
name="description"><meta content="noodp" name="robots"><meta content="text/html; charset=UTF-8" http-equiv="Content-Type"><meta content="/images/branding/googleg/1x/googleg_standard_color_128dp.png" itemprop="image"><title>Google</title><script nonce="9zpfk6cQAFKT2oKUnz+PKg==">(function(){window.google={kEI:'kHAKXfLOJteLjLsPnNyjwAk',kEXPI:'0,1353804,1958,1641,781,1225,590,141,223,1575,3152,56,152,170,206,1017,175,947,293,69,220,20,21,2331894,329485,1294,12383,4855,32692,2074,13173,867,12163,16520,365,3319,1262,4243,2442,260,5107,575,835,284,2,204,375,727,2431,1362,283,4040,4968,773,2252,4736,6,7,3111,2595,1929,1672,669,1050,1808,1397,81,7,491,620,29,1395,8910,4603,693,796,1220,38,920,754,119,1217,1364,1611,2736,3061,2,632,3239,44,4148,634,1161,1447,634,1137,1089,656,18,320,1118,447,415,42,101,86,2,1159,777,1,368,1319,703,758,96,36,2,354,30,399,992,509,598,10,168,8,109,187,831,1042,453,1063,78,48,459,94,11,14,10,1269,1831,381,25,177,323,5,1245,7,298,542,324,193,531,371,564,24,32,820,1661,340,619,818,109,151,1189,2,7,7,1348,148,553,606,778,355,373,1,324,8,159,25,366,229,943,239,520,21,339,698,18,268,472,121,257,306,194,29,192,24,72,112,228,115,5,798,319,9,31,1253,40,60,135,37,164,81,507,606,250,427,99,16,397,124,147,47,569,155,79,404,29,347,811,132,506,3,108,85,349,22,2,4,760,229,228,121,8,268,433,5922925,2963,8797377,4,1572,549,333,444,1,2,80,1,900,583,9,304,1,8,1,2,2132,1,1,1,1,1,414,1,748,141,59,726,3,7,563,1,2346,241,30,33,13,33,33,25,71',authuser:0,kscs:'c9c918f0_kHAKXfLOJteLjLsPnNyjwAk',kGL:'ES'};google.sn='webhp';google.kHL='es';})();(function(){google.lc=[];google.li=0;google.getEI=function(a){for(var b;a&&(!a.getAttribute||!(b=a.getAttribute("eid")));)a=a.parentNode;return b||google.kEI};google.getLEI=function(a){for(var b=null;a&&(!a.getAttribute||!(b=a.getAttribute("leid")));)a=a.parentNode;return b};google.https=function(){return"https:"==window.location.protocol};google.ml=function(){return null};google.time=function(){return(new Date).getTime()};google.log=function(a,b,e,c,g){if(a=google.logUrl(a,b,e,c,g)){b=new Image;var d=google.lc,f=google.li;d[f]=b;b.onerror=b.onload=b.onabort=function(){delete d[f]};google.vel&&google.vel.lu&&google.vel.lu(a);b.src=a;google.li=f+1}};google.logUrl=function(a,b,e,c,g){var d="",f=google.ls||"";e||-1!=b.search("&ei=")||(d="&ei="+google.getEI(c),-1==b.search("&lei=")&&(c=google.getLEI(c))&&(d+="&lei="+c));c="";!e&&google.cshid&&-1==b.search("&cshid=")&&"slh"!=a&&(c="&cshid="+google.cshid);a=e||"/"+(g||"gen_204")+"?atyp=i&ct="+a+"&cad="+b+d+f+"&zx="+google.time()+c;/^http:/i.test(a)&&google.https()&&(google.ml(Error("a"),!1,{src:a,glmm:1}),a="");return a};}).call(this);(function(){google.y={};google.x=function(a,b){if(a)var c=a.id;else{do c=Math.random();while(google.y[c])}google.y[c]=[a,b];return!1};google.lm=[];google.plm=function(a){google.lm.push.apply(google.lm,a)};google.lq=[];google.load=function(a,b,c){google.lq.push([[a],b,c])};google.loadAll=function(a,b){google.lq.push([a,b])};}).call(this);google.f={};var a=window.location,b=a.href.indexOf("#");if(0<=b){var c=a.href.substring(b+1);/(^|&)q=/.test(c)&&-1==c.indexOf("#")&&a.replace("/search?"+c.replace(/(^|&)fp=[^&]*/g,"")+"&cad=h")};</script><style>#gbar,#guser{font-size:13px;padding-top:1px !important;}#gbar{height:22px}#guser{padding-bottom:7px !important;text-align:right}.gbh,.gbd{border-top:1px solid #c9d7f1;font-size:1px}.gbh{height:0;position:absolute;top:24px;width:100%}#media 
all{.gb1{height:22px;margin-right:.5em;vertical-align:top}#gbar{float:left}}a.gb1,a.gb4{text-decoration:underline !important}a.gb1,a.gb4{color:#00c !important}.gbi .gb4{color:#dd8e27 !important}.gbf .gb4{color:#900 !important} </style><style>body,td,a,p,.h{font-family:arial,sans-serif}body{margin:0;overflow-y:scroll}#gog{padding:3px 8px 0}td{line-height:.8em}.gac_m td{line-height:17px}form{margin-bottom:20px}.h{color:#36c}.q{color:#00c}.ts td{padding:0}.ts{border-collapse:collapse}em{font-weight:bold;font-style:normal}.lst{height:25px;width:496px}.gsfi,.lst{font:18px arial,sans-serif}.gsfs{font:17px arial,sans-serif}.ds{display:inline-box;display:inline-block;margin:3px 0 4px;margin-left:4px}input{font-family:inherit}a.gb1,a.gb2,a.gb3,a.gb4{color:#11c !important}body{background:#fff;color:black}a{color:#11c;text-decoration:none}a:hover,a:active{text-decoration:underline}.fl a{color:#36c}a:visited{color:#551a8b}a.gb1,a.gb4{text-decoration:underline}a.gb3:hover{text-decoration:none}#ghead a.gb2:hover{color:#fff !important}.sblc{padding-top:5px}.sblc a{display:block;margin:2px 0;margin-left:13px;font-size:11px}.lsbb{background:#eee;border:solid 1px;border-color:#ccc #999 #999 #ccc;height:30px}.lsbb{display:block}.ftl,#fll a{display:inline-block;margin:0 12px}.lsb{background:url(/images/nav_logo229.png) 0 -261px repeat-x;border:none;color:#000;cursor:pointer;height:30px;margin:0;outline:0;font:15px arial,sans-serif;vertical-align:top}.lsb:active{background:#ccc}.lst:focus{outline:none}</style><script nonce="9zpfk6cQAFKT2oKUnz+PKg=="></script></head><body bgcolor="#fff"><script nonce="9zpfk6cQAFKT2oKUnz+PKg==">(function(){var src='/images/nav_logo229.png';var iesg=false;document.body.onload = function(){window.n && window.n();if (document.images){new Image().src=src;} if (!iesg){document.f&&document.f.q.focus();document.gbqf&&document.gbqf.q.focus();} } })();</script><div id="mngb"> <div id=gbar><nobr><b class=gb1>B�squeda</b> <a class=gb1 href="https://www.google.es/imghp?hl=es&tab=wi">Im�genes</a> <a class=gb1 href="https://maps.google.es/maps?hl=es&tab=wl">Maps</a> <a class=gb1 href="https://play.google.com/?hl=es&tab=w8">Play</a> <a class=gb1 href="https://www.youtube.com/?gl=ES&tab=w1">YouTube</a> <a class=gb1 href="https://news.google.es/nwshp?hl=es&tab=wn">Noticias</a> <a class=gb1 href="https://mail.google.com/mail/?tab=wm">Gmail</a> <a class=gb1 href="https://drive.google.com/?tab=wo">Drive</a> <a class=gb1 style="text-decoration:none" href="https://www.google.es/intl/es/about/products?tab=wh"><u>M�s</u> »</a></nobr></div><div id=guser width=100%><nobr><span id=gbn class=gbi></span><span id=gbf class=gbf></span><span id=gbe></span><div id="lga"><img alt="Google" height="92" src="/images/branding/googlelogo/1x/googlelogo_white_background_color_272x92dp.png" style="padding:28px 0 14px" width="272" id="hplogo" onload="window.lol&&lol()"><br><br></div><form action="/search" name="f"><table cellpadding="0" cellspacing="0"><tr valign="top"><td width="25%"> </td><td align="center" nowrap=""><input name="ie" value="ISO-8859-1" type="hidden"><input value="es" name="hl" type="hidden"><input name="source" type="hidden" value="hp"><input name="biw" type="hidden"><input name="bih" type="hidden"><div class="ds" style="height:32px;margin:4px 0"><input style="color:#000;margin:0;padding:5px 8px 0 6px;vertical-align:top" autocomplete="off" class="lst" value="" title="Buscar con Google" maxlength="2048" name="q" size="57"></div><br style="line-height:0"><span class="ds"><span class="lsbb"><input 
class="lsb" value="Buscar con Google" name="btnG" type="submit"></span></span><span class="ds"><span class="lsbb"><input class="lsb" value="Voy a tener suerte" name="btnI" onclick="if(this.form.q.value)this.checked=1; else top.location='/doodles/'" type="submit"></span></span></td><td class="fl sblc" align="left" nowrap="" width="25%"><a href="/advanced_search?hl=es&authuser=0">B�squeda avanzadaHerramientas del idioma</td></tr></table><input id="gbv" name="gbv" type="hidden" value="1"><script nonce="9zpfk6cQAFKT2oKUnz+PKg==">(function(){var a,b="1";if(document&&document.getElementById)if("undefined"!=typeof XMLHttpRequest)b="2";else if("undefined"!=typeof ActiveXObject){var c,d,e=["MSXML2.XMLHTTP.6.0","MSXML2.XMLHTTP.3.0","MSXML2.XMLHTTP","Microsoft.XMLHTTP"];for(c=0;d=e[c++];)try{new ActiveXObject(d),b="2"}catch(h){}}a=b;if("2"==a&&-1==location.search.indexOf("&gbv=2")){var f=google.gbvu,g=document.getElementById("gbv");g&&(g.value=a);f&&window.setTimeout(function(){location.href=f},0)};}).call(this);</script></form><div id="gac_scont"></div><div style="font-size:83%;min-height:3.5em"><br><div id="gws-output-pages-elements-homepage_additional_languages__als"><style>#gws-output-pages-elements-homepage_additional_languages__als{font-size:small;margin-bottom:24px}#SIvCob{display:inline-block;line-height:28px;}#SIvCob a{padding:0 3px;}.H6sW5{display:inline-block;margin:0 2px;white-space:nowrap}.z4hgWe{display:inline-block;margin:0 2px}</style><div id="SIvCob">Ofrecido por Google en: catal� galego euskara </div></div></div><span id="footer"><div style="font-size:10pt"><div style="margin:19px auto;text-align:center" id="fll">Programas de publicidadSoluciones EmpresarialesTodo acerca de GoogleGoogle.es</div></div><p style="color:#767676;font-size:8pt">© 2019 - Privacidad - Condiciones</p></span></center><script nonce="9zpfk6cQAFKT2oKUnz+PKg==">(function(){window.google.cdo={height:0,width:0};(function(){var a=window.innerWidth,b=window.innerHeight;if(!a||!b){var c=window.document,d="CSS1Compat"==c.compatMode?c.documentElement:c.body;a=d.clientWidth;b=d.clientHeight}a&&b&&(a!=google.cdo.width||b!=google.cdo.height)&&google.log("","","/client_204?&atyp=i&biw="+a+"&bih="+b+"&ei="+google.kEI);}).call(this);})();(function(){var u='/xjs/_/js/k\x3dxjs.hp.en.ubKvDBDxdWw.O/m\x3dsb_he,d/am\x3d4KAW/d\x3d1/rs\x3dACT90oEwGwcda780WoUEhug97Gz_Jjn42g';setTimeout(function(){var a=document.createElement("script");a.src=u;google.timers&&google.timers.load&&google.tick&&google.tick("load","xjsls");document.body.appendChild(a)},0);})();(function(){window.google.xjsu='/xjs/_/js/k\x3dxjs.hp.en.ubKvDBDxdWw.O/m\x3dsb_he,d/am\x3d4KAW/d\x3d1/rs\x3dACT90oEwGwcda780WoUEhug97Gz_Jjn42g';})();function _DumpException(e){throw e;} function _F_installCss(c){} (function(){google.spjs=false;google.snet=true;google.em=[];google.emw=false;})();google.sm=1;(function(){var pmc='{\x22Qnk92g\x22:{},\x22RWGcrA\x22:{},\x22U5B21g\x22:{},\x22YFCs/g\x22:{},\x22ZI/YVQ\x22:{},\x22d\x22:{},\x22mVopag\x22:{},\x22sb_he\x22:{\x22agen\x22:true,\x22cgen\x22:true,\x22client\x22:\x22heirloom-hp\x22,\x22dh\x22:true,\x22dhqt\x22:true,\x22ds\x22:\x22\x22,\x22ffql\x22:\x22es\x22,\x22fl\x22:true,\x22host\x22:\x22google.com\x22,\x22isbh\x22:28,\x22jsonp\x22:true,\x22lm\x22:true,\x22msgs\x22:{\x22cibl\x22:\x22Borrar b�squeda\x22,\x22dym\x22:\x22Quiz�s quisiste decir:\x22,\x22lcky\x22:\x22Voy a tener suerte\x22,\x22lml\x22:\x22M�s informaci�n\x22,\x22oskt\x22:\x22Herramientas de introducci�n de texto\x22,\x22psrc\x22:\x22Esta b�squeda se ha eliminado de tu 
\\u003Ca href\x3d\\\x22/history\\\x22\\u003Ehistorial web\\u003C/a\\u003E.\x22,\x22psrl\x22:\x22Eliminar\x22,\x22sbit\x22:\x22Buscar por imagen\x22,\x22srch\x22:\x22Buscar con Google\x22},\x22ovr\x22:{},\x22pq\x22:\x22\x22,\x22refpd\x22:true,\x22rfs\x22:[],\x22sbpl\x22:24,\x22sbpr\x22:24,\x22scd\x22:10,\x22sce\x22:5,\x22stok\x22:\x22jQgBJxeAAuUyjdH7C8biGO51e-o\x22,\x22uhde\x22:false}}';google.pmc=JSON.parse(pmc);})();</script> </body></html> There are 1 total connections, 0 are active and 1 are idle. There are 0 total connections, 0 are active and 0 are idle. 19:27:46.869 [AsyncHttpClient-2-1] DEBUG io.netty.buffer.PoolThreadCache - Freed 14 thread-local buffer(s) from thread: AsyncHttpClient-2-1
This is the simplest version of the code; I originally started with the asynchronous version of it, with the same results. As you can see, after closing the httpClient, the stats report no connections at all, neither active nor idle. What am I doing wrong?
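One thing that might help narrow this down (a diagnostic sketch, not a fix): right after httpClient.close() returns, list which non-daemon threads are still alive. Anything left besides main is what is keeping the JVM from exiting:
// Drop this at the end of main(), after httpClient.close().
for (Thread t : Thread.getAllStackTraces().keySet()) {
    if (t.isAlive() && !t.isDaemon()) {
        System.out.println("Non-daemon thread still alive: " + t.getName());
    }
}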