NullPointerException in Storm when deserializing tuples - java

I've encountered the following NPE when Storm tries to deserialize a tuple. I do not use the OutputCollector concurrently in my code. The only objects we pass between bolts are Thrift objects, and we have written a serializer for them. I've attached the serializer code below; please help check whether there are any potential bugs in it.
2016-03-04 17:17:43.583 b.s.util [ERROR] Async loop died!
java.lang.RuntimeException: java.lang.NullPointerException
at backtype.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:135) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:106) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:80) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.daemon.executor$fn__5694$fn__5707$fn__5758.invoke(executor.clj:819) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.util$async_loop$fn__545.invoke(util.clj:479) [storm-core-0.10.0.jar:0.10.0]
at clojure.lang.AFn.run(AFn.java:22) [clojure-1.6.0.jar:?]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_60]
Caused by: java.lang.NullPointerException
at com.esotericsoftware.kryo.io.Input.setBuffer(Input.java:57) ~[kryo-2.21.jar:?]
at backtype.storm.serialization.KryoTupleDeserializer.deserialize(KryoTupleDeserializer.java:47) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.daemon.executor$mk_task_receiver$fn__5615.invoke(executor.clj:433) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.disruptor$clojure_handler$reify__5189.onEvent(disruptor.clj:58) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:132) ~[storm-core-0.10.0.jar:0.10.0]
... 6 more
2016-03-04 17:17:43.584 b.s.d.executor [ERROR]
java.lang.RuntimeException: java.lang.NullPointerException
at backtype.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:135) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:106) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:80) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.daemon.executor$fn__5694$fn__5707$fn__5758.invoke(executor.clj:819) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.util$async_loop$fn__545.invoke(util.clj:479) [storm-core-0.10.0.jar:0.10.0]
at clojure.lang.AFn.run(AFn.java:22) [clojure-1.6.0.jar:?]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_60]
Caused by: java.lang.NullPointerException
at com.esotericsoftware.kryo.io.Input.setBuffer(Input.java:57) ~[kryo-2.21.jar:?]
at backtype.storm.serialization.KryoTupleDeserializer.deserialize(KryoTupleDeserializer.java:47) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.daemon.executor$mk_task_receiver$fn__5615.invoke(executor.clj:433) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.disruptor$clojure_handler$reify__5189.onEvent(disruptor.clj:58) ~[storm-core-0.10.0.jar:0.10.0]
at backtype.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:132) ~[storm-core-0.10.0.jar:0.10.0]
... 6 more
2016-03-04 17:17:43.648 b.s.util [ERROR] Halting process: ("Worker died")
java.lang.RuntimeException: ("Worker died")
at backtype.storm.util$exit_process_BANG_.doInvoke(util.clj:336) [storm-core-0.10.0.jar:0.10.0]
at clojure.lang.RestFn.invoke(RestFn.java:423) [clojure-1.6.0.jar:?]
at backtype.storm.daemon.worker$fn__7188$fn__7189.invoke(worker.clj:536) [storm-core-0.10.0.jar:0.10.0]
at backtype.storm.daemon.executor$mk_executor_data$fn__5523$fn__5524.invoke(executor.clj:261) [storm-core-0.10.0.jar:0.10.0]
at backtype.storm.util$async_loop$fn__545.invoke(util.clj:489) [storm-core-0.10.0.jar:0.10.0]
at clojure.lang.AFn.run(AFn.java:22) [clojure-1.6.0.jar:?]
at java.lang.Thread.run(Thread.java:745) [?:1.8.0_60]
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import org.apache.thrift.TBase;
import org.apache.thrift.TDeserializer;
import org.apache.thrift.TException;
import org.apache.thrift.TSerializer;
import org.apache.thrift.protocol.TCompactProtocol;
import org.slf4j.Logger;
/**
* @author zhangyf
* @version 1.0 2016-02-19
*/
public class GZipThriftSerializer<T extends TBase> extends Serializer<T> {
private static final int EMPTY = 0;
// add DELTA to make length positive due to kryo internal optimization
private static final int DELTA = 1;
private static final Logger LOG = com.mediav.utils.Loggers.get();
private static final ThreadLocal<TSerializer> COMPACT_T_SERIALIZER = new ThreadLocal<TSerializer>() {
@Override
protected TSerializer initialValue() {
return new TSerializer(new TCompactProtocol.Factory());
}
};
private static final ThreadLocal<TDeserializer> COMPACT_T_DESERIALIZER = new ThreadLocal<TDeserializer>() {
@Override
protected TDeserializer initialValue() {
return new TDeserializer(new TCompactProtocol.Factory());
}
};
{
setAcceptsNull(true);
}
@Override
public void write(Kryo kryo, Output output, T t) {
if (t == null) {
output.writeInt(EMPTY + DELTA, true);
return;
}
try {
byte[] data = COMPACT_T_SERIALIZER.get().serialize(t);
byte[] compressed = GZipUtils.zip(data);
output.writeInt(compressed.length + DELTA, true);
output.writeBytes(compressed);
} catch (TException e) {
LOG.error("TException during serialization", e);
output.writeInt(EMPTY + DELTA, true);
}
}
@Override
public T read(Kryo kryo, Input input, Class<T> aClass) {
int length = input.readInt(true) - DELTA;
if (length == 0) {
return null;
}
try {
T t = aClass.newInstance();
COMPACT_T_DESERIALIZER.get().deserialize(t, GZipUtils.unzip(input.readBytes(length)));
return t;
} catch (InstantiationException e) {
throw new RuntimeException(e);
} catch (IllegalAccessException e) {
throw new RuntimeException(e);
} catch (TException e) {
LOG.error("TException during deserialization", e);
}
return null;
}
}
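A minimal round-trip check with plain Kryo (a sketch only; MyThriftEvent is a hypothetical stand-in for the actual generated Thrift class) can help rule out bugs in the serializer itself, independent of Storm:
import java.io.ByteArrayOutputStream;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
public class GZipThriftSerializerCheck {
public static void main(String[] args) {
Kryo kryo = new Kryo();
// MyThriftEvent is a hypothetical placeholder for the generated Thrift class
kryo.register(MyThriftEvent.class, new GZipThriftSerializer<MyThriftEvent>());
MyThriftEvent original = new MyThriftEvent();
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
Output output = new Output(bytes);
kryo.writeObjectOrNull(output, original, MyThriftEvent.class);
// the serializer accepts null, so the null case is worth exercising too
kryo.writeObjectOrNull(output, null, MyThriftEvent.class);
output.close();
Input input = new Input(bytes.toByteArray());
MyThriftEvent copy = kryo.readObjectOrNull(input, MyThriftEvent.class);
MyThriftEvent empty = kryo.readObjectOrNull(input, MyThriftEvent.class);
System.out.println("round-trip equal: " + original.equals(copy) + ", null restored: " + (empty == null));
}
}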

I'm answering a 3-year-old question because I spent quite some time figuring out the same issue myself. We also encountered a NullPointerException when deserializing tuples in Storm, and we weren't doing anything multi-threaded in our bolts in the way Darpan27 suggests.
It turned out that we were using the same object in two separate bolts that were running concurrently in the same JVM, and one of the bolts modified the object while the other bolt emitted it to the OutputCollector. Cloning the object before modifying it resolved our problem. Our final solution was to write a custom Kryo serializer that ignored the properties that were being modified (as we didn't need them further downstream).
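For context on that last fix, here is a minimal sketch of the "ignore the mutable properties" approach (SharedEvent, its fields, and SharedEventSerializer are hypothetical stand-ins, not the actual classes): the field that the other bolt keeps mutating is simply never written, so concurrent modification can no longer corrupt the serialized bytes.
import backtype.storm.Config;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.Serializer;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
// Hypothetical object shared by two bolts; "hits" is the field one bolt keeps mutating.
class SharedEvent {
String id;
long timestamp;
int hits;
}
public class SharedEventSerializer extends Serializer<SharedEvent> {
@Override
public void write(Kryo kryo, Output output, SharedEvent event) {
// only the stable fields are written; the mutable "hits" field is deliberately skipped
output.writeString(event.id);
output.writeLong(event.timestamp);
}
@Override
public SharedEvent read(Kryo kryo, Input input, Class<SharedEvent> aClass) {
SharedEvent event = new SharedEvent();
event.id = input.readString();
event.timestamp = input.readLong();
return event;
}
// register the serializer when building the topology
public static Config topologyConfig() {
Config conf = new Config();
conf.registerSerialization(SharedEvent.class, SharedEventSerializer.class);
return conf;
}
}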

Related

RedirectingEurekaHttpClient Request execution error

I have a gateway application with a customized load-balancing rule; here is the code, following the Spring Cloud official docs:
@RibbonClients(defaultConfiguration = CustomizedRibbonConfig.class)
public class RibbonClientConfiguration {
public static class BazServiceList extends ConfigurationBasedServerList {
public BazServiceList(IClientConfig config) {
super.initWithNiwsConfig(config);
}
}
}
@Configuration
class CustomizedRibbonConfig {
@Bean
public IRule ribbonRule() {
return new MetadataAwareRule();
}
@Bean
public ServerListUpdater ribbonServerListUpdater() {
return new EurekaNotificationServerListUpdater();
}
}
public class MetadataAwarePredicate extends AbstractDiscoveryEnabledPredicate {
/**
* {@inheritDoc}
*/
@Override
protected boolean apply(DiscoveryEnabledServer server) {
return true;
}
}
@Slf4j
public class MetadataAwareRule extends AbstractDiscoveryEnabledRule {
public static final ThreadLocal<String> CURRENT_LOAD_BALANCED_SERVICE_IP = new ThreadLocal<>();
/**
* Creates new instance of {@link MetadataAwareRule}.
*/
public MetadataAwareRule() {
this(new MetadataAwarePredicate());
}
/**
* Creates new instance of {@link MetadataAwareRule} with specific predicate.
*
* @param predicate the predicate, can't be {@code null}
* @throws IllegalArgumentException if predicate is {@code null}
*/
public MetadataAwareRule(AbstractDiscoveryEnabledPredicate predicate) {
super(predicate);
}
@Override
public Server choose(Object key) {
....my customized choose policy....
}
And here is the thing: I need to refresh the application by firing a RefreshEvent, but this leads to a rather strange problem which may be due to the Eureka or Zuul client versions from the parent:
<parent>
<groupId>org.springframework.cloud</groupId>
<artifactId>spring-cloud-starter-netflix</artifactId>
<version>2.2.5.RELEASE</version>
</parent>
To reproduce the problem easily, the function was simplified to the simple request shown below:
@GetMapping("/test/event")
public CommonResult testRaiseRefreshEvent() {
ApplicationContextHolder.getApplicationContext().publishEvent(new RefreshEvent(this, null, "test to trigger the problem"));
return CommonResult.succeed();
}
Once this API is requested, the application performs a refresh.
But sometimes the application throws this exception:
2022-10-19 11:29:32.947 [app:web-gateway,traceId:,spanId:,parentId:] [DiscoveryClient-CacheRefreshExecutor-0] ERROR | RedirectingEurekaHttpClient.java:83 | c.n.d.s.t.d.RedirectingEurekaHttpClient | Request execution error. endpoint=DefaultEndpoint{ serviceUrl='http://localhost:8000/eureka/}
javax.ws.rs.WebApplicationException: com.fasterxml.jackson.core.JsonParseException: processing aborted
at [Source: (GZIPInputStream); line: 1, column: 18]
at com.netflix.discovery.provider.DiscoveryJerseyProvider.readFrom(DiscoveryJerseyProvider.java:110)
at com.sun.jersey.api.client.ClientResponse.getEntity(ClientResponse.java:634)
at com.sun.jersey.api.client.ClientResponse.getEntity(ClientResponse.java:586)
at com.netflix.discovery.shared.transport.jersey.AbstractJerseyEurekaHttpClient.getApplicationsInternal(AbstractJerseyEurekaHttpClient.java:200)
at com.netflix.discovery.shared.transport.jersey.AbstractJerseyEurekaHttpClient.getApplications(AbstractJerseyEurekaHttpClient.java:167)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator$6.execute(EurekaHttpClientDecorator.java:137)
at com.netflix.discovery.shared.transport.decorator.MetricsCollectingEurekaHttpClient.execute(MetricsCollectingEurekaHttpClient.java:73)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator.getApplications(EurekaHttpClientDecorator.java:134)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator$6.execute(EurekaHttpClientDecorator.java:137)
at com.netflix.discovery.shared.transport.decorator.RedirectingEurekaHttpClient.executeOnNewServer(RedirectingEurekaHttpClient.java:118)
at com.netflix.discovery.shared.transport.decorator.RedirectingEurekaHttpClient.execute(RedirectingEurekaHttpClient.java:79)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator.getApplications(EurekaHttpClientDecorator.java:134)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator$6.execute(EurekaHttpClientDecorator.java:137)
at com.netflix.discovery.shared.transport.decorator.RetryableEurekaHttpClient.execute(RetryableEurekaHttpClient.java:120)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator.getApplications(EurekaHttpClientDecorator.java:134)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator$6.execute(EurekaHttpClientDecorator.java:137)
at com.netflix.discovery.shared.transport.decorator.SessionedEurekaHttpClient.execute(SessionedEurekaHttpClient.java:77)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator.getApplications(EurekaHttpClientDecorator.java:134)
at com.netflix.discovery.DiscoveryClient.getAndStoreFullRegistry(DiscoveryClient.java:1097)
at com.netflix.discovery.DiscoveryClient.fetchRegistry(DiscoveryClient.java:1011)
at com.netflix.discovery.DiscoveryClient.<init>(DiscoveryClient.java:440)
at com.netflix.discovery.DiscoveryClient.<init>(DiscoveryClient.java:282)
at com.netflix.discovery.DiscoveryClient.<init>(DiscoveryClient.java:278)
at org.springframework.cloud.netflix.eureka.CloudEurekaClient.<init>(CloudEurekaClient.java:67)
at org.springframework.cloud.netflix.eureka.EurekaClientAutoConfiguration$RefreshableEurekaClientConfiguration.eurekaClient(EurekaClientAutoConfiguration.java:316)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:154)
at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:650)
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:635)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1336)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1176)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:556)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:516)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$1(AbstractBeanFactory.java:363)
at org.springframework.cloud.context.scope.GenericScope$BeanLifecycleWrapper.getBean(GenericScope.java:389)
at org.springframework.cloud.context.scope.GenericScope.get(GenericScope.java:186)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:360)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:202)
at org.springframework.aop.target.SimpleBeanTargetSource.getTarget(SimpleBeanTargetSource.java:35)
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:192)
at com.sun.proxy.$Proxy169.getApplications(Unknown Source)
at org.springframework.cloud.netflix.eureka.EurekaDiscoveryClient.getServices(EurekaDiscoveryClient.java:80)
at org.springframework.cloud.client.discovery.composite.CompositeDiscoveryClient.getServices(CompositeDiscoveryClient.java:67)
at org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.locateRoutes(DiscoveryClientRouteLocator.java:121)
at org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.locateRoutes(DiscoveryClientRouteLocator.java:44)
at org.springframework.cloud.netflix.zuul.filters.SimpleRouteLocator.doRefresh(SimpleRouteLocator.java:186)
at org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.refresh(DiscoveryClientRouteLocator.java:171)
at org.springframework.cloud.netflix.zuul.filters.CompositeRouteLocator.refresh(CompositeRouteLocator.java:78)
at org.springframework.cloud.netflix.zuul.web.ZuulHandlerMapping.setDirty(ZuulHandlerMapping.java:79)
at org.springframework.cloud.netflix.zuul.ZuulServerAutoConfiguration$ZuulRefreshListener.reset(ZuulServerAutoConfiguration.java:315)
at org.springframework.cloud.netflix.zuul.ZuulServerAutoConfiguration$ZuulRefreshListener.resetIfNeeded(ZuulServerAutoConfiguration.java:310)
at org.springframework.cloud.netflix.zuul.ZuulServerAutoConfiguration$ZuulRefreshListener.onApplicationEvent(ZuulServerAutoConfiguration.java:304)
at org.springframework.context.event.SimpleApplicationEventMulticaster.doInvokeListener(SimpleApplicationEventMulticaster.java:172)
at org.springframework.context.event.SimpleApplicationEventMulticaster.invokeListener(SimpleApplicationEventMulticaster.java:165)
at org.springframework.context.event.SimpleApplicationEventMulticaster.multicastEvent(SimpleApplicationEventMulticaster.java:139)
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:404)
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:361)
at org.springframework.cloud.netflix.eureka.CloudEurekaClient.onCacheRefreshed(CloudEurekaClient.java:123)
at com.netflix.discovery.DiscoveryClient.fetchRegistry(DiscoveryClient.java:1027)
at com.netflix.discovery.DiscoveryClient.refreshRegistry(DiscoveryClient.java:1533)
at com.netflix.discovery.DiscoveryClient$CacheRefreshThread.run(DiscoveryClient.java:1500)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266)
at java.util.concurrent.FutureTask.run(FutureTask.java)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
Caused by: com.fasterxml.jackson.core.JsonParseException: processing aborted
at [Source: (GZIPInputStream); line: 1, column: 18]
at com.netflix.discovery.converters.EurekaJacksonCodec$ApplicationsDeserializer.deserialize(EurekaJacksonCodec.java:805)
at com.netflix.discovery.converters.EurekaJacksonCodec$ApplicationsDeserializer.deserialize(EurekaJacksonCodec.java:791)
at com.fasterxml.jackson.databind.ObjectReader._unwrapAndDeserialize(ObjectReader.java:2196)
at com.fasterxml.jackson.databind.ObjectReader._bindAndClose(ObjectReader.java:2054)
at com.fasterxml.jackson.databind.ObjectReader.readValue(ObjectReader.java:1431)
at com.netflix.discovery.converters.EurekaJacksonCodec.readValue(EurekaJacksonCodec.java:213)
at com.netflix.discovery.converters.wrappers.CodecWrappers$LegacyJacksonJson.decode(CodecWrappers.java:314)
at com.netflix.discovery.provider.DiscoveryJerseyProvider.readFrom(DiscoveryJerseyProvider.java:103)
... 69 common frames omitted
and
2022-10-19 11:29:32.956 [app:web-gateway,traceId:,spanId:,parentId:] [DiscoveryClient-CacheRefreshExecutor-0] ERROR | DiscoveryClient.java:1018 | c.netflix.discovery.DiscoveryClient | DiscoveryClient_WEB-GATEWAY/192.168.56.1:web-gateway:8004:NEW_GATEWAY_DEFAULT_GROUP - was unable to refresh its cache! status = Cannot execute request on any known server
com.netflix.discovery.shared.transport.TransportException: Cannot execute request on any known server
at com.netflix.discovery.shared.transport.decorator.RetryableEurekaHttpClient.execute(RetryableEurekaHttpClient.java:112)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator.getApplications(EurekaHttpClientDecorator.java:134)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator$6.execute(EurekaHttpClientDecorator.java:137)
at com.netflix.discovery.shared.transport.decorator.SessionedEurekaHttpClient.execute(SessionedEurekaHttpClient.java:77)
at com.netflix.discovery.shared.transport.decorator.EurekaHttpClientDecorator.getApplications(EurekaHttpClientDecorator.java:134)
at com.netflix.discovery.DiscoveryClient.getAndStoreFullRegistry(DiscoveryClient.java:1097)
at com.netflix.discovery.DiscoveryClient.fetchRegistry(DiscoveryClient.java:1011)
at com.netflix.discovery.DiscoveryClient.<init>(DiscoveryClient.java:440)
at com.netflix.discovery.DiscoveryClient.<init>(DiscoveryClient.java:282)
at com.netflix.discovery.DiscoveryClient.<init>(DiscoveryClient.java:278)
at org.springframework.cloud.netflix.eureka.CloudEurekaClient.<init>(CloudEurekaClient.java:67)
at org.springframework.cloud.netflix.eureka.EurekaClientAutoConfiguration$RefreshableEurekaClientConfiguration.eurekaClient(EurekaClientAutoConfiguration.java:316)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.springframework.beans.factory.support.SimpleInstantiationStrategy.instantiate(SimpleInstantiationStrategy.java:154)
at org.springframework.beans.factory.support.ConstructorResolver.instantiate(ConstructorResolver.java:650)
at org.springframework.beans.factory.support.ConstructorResolver.instantiateUsingFactoryMethod(ConstructorResolver.java:635)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.instantiateUsingFactoryMethod(AbstractAutowireCapableBeanFactory.java:1336)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBeanInstance(AbstractAutowireCapableBeanFactory.java:1176)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.doCreateBean(AbstractAutowireCapableBeanFactory.java:556)
at org.springframework.beans.factory.support.AbstractAutowireCapableBeanFactory.createBean(AbstractAutowireCapableBeanFactory.java:516)
at org.springframework.beans.factory.support.AbstractBeanFactory.lambda$doGetBean$1(AbstractBeanFactory.java:363)
at org.springframework.cloud.context.scope.GenericScope$BeanLifecycleWrapper.getBean(GenericScope.java:389)
at org.springframework.cloud.context.scope.GenericScope.get(GenericScope.java:186)
at org.springframework.beans.factory.support.AbstractBeanFactory.doGetBean(AbstractBeanFactory.java:360)
at org.springframework.beans.factory.support.AbstractBeanFactory.getBean(AbstractBeanFactory.java:202)
at org.springframework.aop.target.SimpleBeanTargetSource.getTarget(SimpleBeanTargetSource.java:35)
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:192)
at com.sun.proxy.$Proxy169.getApplications(Unknown Source)
at org.springframework.cloud.netflix.eureka.EurekaDiscoveryClient.getServices(EurekaDiscoveryClient.java:80)
at org.springframework.cloud.client.discovery.composite.CompositeDiscoveryClient.getServices(CompositeDiscoveryClient.java:67)
at org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.locateRoutes(DiscoveryClientRouteLocator.java:121)
at org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.locateRoutes(DiscoveryClientRouteLocator.java:44)
at org.springframework.cloud.netflix.zuul.filters.SimpleRouteLocator.doRefresh(SimpleRouteLocator.java:186)
at org.springframework.cloud.netflix.zuul.filters.discovery.DiscoveryClientRouteLocator.refresh(DiscoveryClientRouteLocator.java:171)
at org.springframework.cloud.netflix.zuul.filters.CompositeRouteLocator.refresh(CompositeRouteLocator.java:78)
at org.springframework.cloud.netflix.zuul.web.ZuulHandlerMapping.setDirty(ZuulHandlerMapping.java:79)
at org.springframework.cloud.netflix.zuul.ZuulServerAutoConfiguration$ZuulRefreshListener.reset(ZuulServerAutoConfiguration.java:315)
at org.springframework.cloud.netflix.zuul.ZuulServerAutoConfiguration$ZuulRefreshListener.resetIfNeeded(ZuulServerAutoConfiguration.java:310)
at org.springframework.cloud.netflix.zuul.ZuulServerAutoConfiguration$ZuulRefreshListener.onApplicationEvent(ZuulServerAutoConfiguration.java:304)
at org.springframework.context.event.SimpleApplicationEventMulticaster.doInvokeListener(SimpleApplicationEventMulticaster.java:172)
at org.springframework.context.event.SimpleApplicationEventMulticaster.invokeListener(SimpleApplicationEventMulticaster.java:165)
at org.springframework.context.event.SimpleApplicationEventMulticaster.multicastEvent(SimpleApplicationEventMulticaster.java:139)
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:404)
at org.springframework.context.support.AbstractApplicationContext.publishEvent(AbstractApplicationContext.java:361)
at org.springframework.cloud.netflix.eureka.CloudEurekaClient.onCacheRefreshed(CloudEurekaClient.java:123)
at com.netflix.discovery.DiscoveryClient.fetchRegistry(DiscoveryClient.java:1027)
at com.netflix.discovery.DiscoveryClient.refreshRegistry(DiscoveryClient.java:1533)
at com.netflix.discovery.DiscoveryClient$CacheRefreshThread.run(DiscoveryClient.java:1500)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266)
at java.util.concurrent.FutureTask.run(FutureTask.java)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at java.lang.Thread.run(Thread.java:748)
And no matter how many times I refresh the application again, the gateway never gets a chance to correct its load balancer; no request goes through the gateway because of exceptions like this:
java.lang.RuntimeException: com.netflix.client.ClientException: Load balancer does not have available server for client: web-message-center
at org.springframework.cloud.openfeign.ribbon.LoadBalancerFeignClient.execute(LoadBalancerFeignClient.java:90)
at org.springframework.cloud.sleuth.instrument.web.client.feign.TraceLoadBalancerFeignClient.execute(TraceLoadBalancerFeignClient.java:78)
at feign.SynchronousMethodHandler.executeAndDecode(SynchronousMethodHandler.java:119)
at feign.SynchronousMethodHandler.invoke(SynchronousMethodHandler.java:89)
at feign.ReflectiveFeign$FeignInvocationHandler.invoke(ReflectiveFeign.java:100)
at com.sun.proxy.$Proxy261.sendMessage(Unknown Source)
at com.wwstation.webgateway.components.GatewayUrlCountProcessor.sendAccessLogWithMq(GatewayUrlCountProcessor.java:221)
at com.wwstation.webgateway.components.GatewayUrlCountProcessor.run(GatewayUrlCountProcessor.java:82)
at org.springframework.cloud.sleuth.instrument.async.TraceRunnable.run(TraceRunnable.java:68)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run$$$capture(FutureTask.java:266)
at java.util.concurrent.FutureTask.run(FutureTask.java)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
at io.netty.util.concurrent.FastThreadLocalRunnable.run(FastThreadLocalRunnable.java:30)
at java.lang.Thread.run(Thread.java:748)
Caused by: com.netflix.client.ClientException: Load balancer does not have available server for client: web-message-center
at com.netflix.loadbalancer.LoadBalancerContext.getServerFromLoadBalancer(LoadBalancerContext.java:483)
at com.netflix.loadbalancer.reactive.LoadBalancerCommand$1.call(LoadBalancerCommand.java:184)
at com.netflix.loadbalancer.reactive.LoadBalancerCommand$1.call(LoadBalancerCommand.java:180)
at rx.Observable.unsafeSubscribe(Observable.java:10327)
at rx.internal.operators.OnSubscribeConcatMap.call(OnSubscribeConcatMap.java:94)
at rx.internal.operators.OnSubscribeConcatMap.call(OnSubscribeConcatMap.java:42)
at rx.internal.operators.OnSubscribeLift.call(OnSubscribeLift.java:48)
at rx.internal.operators.OnSubscribeLift.call(OnSubscribeLift.java:30)
at rx.internal.operators.OnSubscribeLift.call(OnSubscribeLift.java:48)
at rx.internal.operators.OnSubscribeLift.call(OnSubscribeLift.java:30)
at rx.Observable.subscribe(Observable.java:10423)
at rx.Observable.subscribe(Observable.java:10390)
at rx.observables.BlockingObservable.blockForSingle(BlockingObservable.java:443)
at rx.observables.BlockingObservable.single(BlockingObservable.java:340)
at com.netflix.client.AbstractLoadBalancerAwareClient.executeWithLoadBalancer(AbstractLoadBalancerAwareClient.java:112)
at org.springframework.cloud.openfeign.ribbon.LoadBalancerFeignClient.execute(LoadBalancerFeignClient.java:83)
... 15 common frames omitted
From EurekaNotificationServerListUpdater it can be seen that, at each fetch interval, a thread refreshes the server list. But once I fire a RefreshEvent, that refreshing thread is shut down by the environment refresh (or something similar), and nothing is triggered when Eureka's fetch interval is reached again, so my application no longer gets the latest server info from Eureka.
Because of that, there is a second problem that can take place when firing a RefreshEvent:
The gateway can still route requests to the target service, but it never gets the latest server list from Eureka. Once the target service goes down, the gateway fails my request instead of telling me the target service is not online (I have an exception handler for "Load balancer does not have available server for client").
These two problems never take place at the same time: when problem A occurs, problem B does not, and vice versa. Both only occur once a RefreshEvent has been fired.
I have no idea what's going on. Can anyone help me with this or give me some tips on where the cause might be?
After two days of work the problem is solved and I have found the cause.
Raw use of com.netflix.niws.loadbalancer.EurekaNotificationServerListUpdater in a Zuul application will always run into this kind of situation:
At the very beginning, when a EurekaEvent is fired by the DiscoveryClient, the listeners registered through EurekaNotificationServerListUpdater receive it and update the server list; this is the normal case.
But when a RefreshEvent is fired, the application instance re-registers, which causes the DiscoveryClient to be replaced by a new instance. That means the new DiscoveryClient no longer holds the listeners.
Also, the default EurekaNotificationServerListUpdater uses a singleton DiscoveryClient that is never replaced by the RefreshEvent, so the listeners remain held by the old DiscoveryClient, which is no longer managed by Eureka after the RefreshEvent.
Because of this, after a RefreshEvent, Eureka's fetch heartbeat no longer triggers the listeners' refresh function, and my gateway fails if some applications go down.
What I did to fix this problem is to keep track of those listeners and re-register them with the new DiscoveryClient once the refresh is done.
Here is my code:
@Configuration
@Slf4j
public class RibbonDiscoveryClientListenerManager implements SmartApplicationListener {
private static EurekaClient discoveryClient;
/**
* markdown alive listeners in current EurekaClient
*/
private static final CopyOnWriteArraySet<EurekaEventListener> EUREKA_EVENT_LISTENER_SET = new CopyOnWriteArraySet<>();
/**
* judge whether to try a re-register
*
* @param listener
*/
static void register(EurekaEventListener listener) {
if (discoveryClient != null) {
registerEurekaListener(listener);
log.debug("discoveryClient update succeed");
} else {
log.warn("discoveryClient was not found waiting for scheduling...");
}
}
public static void registerEurekaListener(EurekaEventListener listener) {
if (!EUREKA_EVENT_LISTENER_SET.contains(listener)) {
EUREKA_EVENT_LISTENER_SET.add(listener);
discoveryClient.registerEventListener(listener);
}
}
@Override
public boolean supportsEventType(Class<? extends ApplicationEvent> eventType) {
return InstanceRegisteredEvent.class.isAssignableFrom(eventType);
}
@Override
public void onApplicationEvent(ApplicationEvent event) {
//clear cache
discoveryClient = null;
EUREKA_EVENT_LISTENER_SET.clear();
//try to get CloudEurekaClient
for (EurekaClient bean : ApplicationContextHolder.getBeans(EurekaClient.class)) {
if (CloudEurekaClient.class.isAssignableFrom(bean.getClass())) {
discoveryClient = bean;
}
}
}
}
Here is the customized ServerListUpdater:
@Slf4j
public class RibbonClientEurekaAutoCompensateServerListUpdater implements ServerListUpdater {
private static class LazyHolder {
private final static String CORE_THREAD = "EurekaNotificationServerListUpdater.ThreadPoolSize";
private final static String QUEUE_SIZE = "EurekaNotificationServerListUpdater.queueSize";
private final static LazyHolder SINGLETON = new LazyHolder();
private final DynamicIntProperty poolSizeProp = new DynamicIntProperty(CORE_THREAD, 2);
private final DynamicIntProperty queueSizeProp = new DynamicIntProperty(QUEUE_SIZE, 1000);
private final ThreadPoolExecutor defaultServerListUpdateExecutor;
private final Thread shutdownThread;
private LazyHolder() {
int corePoolSize = getCorePoolSize();
defaultServerListUpdateExecutor = new ThreadPoolExecutor(
corePoolSize,
corePoolSize * 5,
0,
TimeUnit.NANOSECONDS,
new ArrayBlockingQueue<Runnable>(queueSizeProp.get()),
new ThreadFactoryBuilder()
.setNameFormat("EurekaNotificationServerListUpdater-%d")
.setDaemon(true)
.build()
);
poolSizeProp.addCallback(new Runnable() {
@Override
public void run() {
int corePoolSize = getCorePoolSize();
defaultServerListUpdateExecutor.setCorePoolSize(corePoolSize);
defaultServerListUpdateExecutor.setMaximumPoolSize(corePoolSize * 5);
}
});
shutdownThread = new Thread(new Runnable() {
@Override
public void run() {
log.info("Shutting down the Executor for EurekaNotificationServerListUpdater");
try {
defaultServerListUpdateExecutor.shutdown();
Runtime.getRuntime().removeShutdownHook(shutdownThread);
} catch (Exception e) {
// this can happen in the middle of a real shutdown, and that's ok.
}
}
});
Runtime.getRuntime().addShutdownHook(shutdownThread);
}
private int getCorePoolSize() {
int propSize = poolSizeProp.get();
if (propSize > 0) {
return propSize;
}
return 2; // default
}
}
public static ExecutorService getDefaultRefreshExecutor() {
return LazyHolder.SINGLETON.defaultServerListUpdateExecutor;
}
/* visible for testing */ final AtomicBoolean updateQueued = new AtomicBoolean(false);
private final AtomicBoolean isActive = new AtomicBoolean(false);
private final AtomicLong lastUpdated = new AtomicLong(System.currentTimeMillis());
private final Provider<EurekaClient> eurekaClientProvider;
private final ExecutorService refreshExecutor;
private volatile EurekaEventListener updateListener;
private volatile EurekaClient eurekaClient;
public RibbonClientEurekaAutoCompensateServerListUpdater() {
this(new LegacyEurekaClientProvider());
}
public RibbonClientEurekaAutoCompensateServerListUpdater(final Provider<EurekaClient> eurekaClientProvider) {
this(eurekaClientProvider, getDefaultRefreshExecutor());
}
public RibbonClientEurekaAutoCompensateServerListUpdater(final Provider<EurekaClient> eurekaClientProvider, ExecutorService refreshExecutor) {
this.eurekaClientProvider = eurekaClientProvider;
this.refreshExecutor = refreshExecutor;
}
@Override
public synchronized void start(final UpdateAction updateAction) {
if (isActive.compareAndSet(false, true)) {
this.updateListener = new EurekaEventListener() {
@Override
public void onEvent(EurekaEvent event) {
if (event instanceof CacheRefreshedEvent) {
if (!updateQueued.compareAndSet(false, true)) { // if an update is already queued
log.info("an update action is already queued, returning as no-op");
return;
}
if (!refreshExecutor.isShutdown()) {
try {
refreshExecutor.submit(new Runnable() {
@Override
public void run() {
try {
updateAction.doUpdate();
lastUpdated.set(System.currentTimeMillis());
} catch (Exception e) {
log.warn("Failed to update serverList", e);
} finally {
updateQueued.set(false);
}
}
}); // fire and forget
} catch (Exception e) {
log.warn("Error submitting update task to executor, skipping one round of updates", e);
updateQueued.set(false); // if submit fails, need to reset updateQueued to false
}
} else {
log.debug("stopping EurekaNotificationServerListUpdater, as refreshExecutor has been shut down");
stop();
}
}
}
};
if (eurekaClient == null) {
eurekaClient = eurekaClientProvider.get();
}
if (eurekaClient != null) {
RibbonDiscoveryClientListenerManager.register(updateListener);
} else {
log.error("Failed to register an updateListener to eureka client, eureka client is null");
throw new IllegalStateException("Failed to start the updater, unable to register the update listener due to eureka client being null.");
}
//start a scheduled pool to check the new DiscoveryClient's listeners
new ScheduledThreadPoolExecutor(1,
new ThreadFactoryBuilder()
.setNameFormat("refreshListenerPool-%d")
.build())
.scheduleWithFixedDelay(() -> {
//schedule to invoke register defined in RibbonDiscoveryClientListenerManager
RibbonDiscoveryClientListenerManager.register(updateListener);
}, 10, 10, TimeUnit.SECONDS);
} else {
log.info("Update listener already registered, no-op");
}
}
@Override
public synchronized void stop() {
if (isActive.compareAndSet(true, false)) {
if (eurekaClient != null) {
eurekaClient.unregisterEventListener(updateListener);
}
} else {
log.info("Not currently active, no-op");
}
}
@Override
public String getLastUpdate() {
return new Date(lastUpdated.get()).toString();
}
@Override
public long getDurationSinceLastUpdateMs() {
return System.currentTimeMillis() - lastUpdated.get();
}
@Override
public int getNumberMissedCycles() {
return 0;
}
@Override
public int getCoreThreads() {
if (isActive.get()) {
if (refreshExecutor != null && refreshExecutor instanceof ThreadPoolExecutor) {
return ((ThreadPoolExecutor) refreshExecutor).getCorePoolSize();
}
}
return 0;
}
}
Config Class:
@RibbonClients(defaultConfiguration = CustomizedRibbonConfig.class)
public class RibbonClientConfiguration {
public static class BazServiceList extends ConfigurationBasedServerList {
public BazServiceList(IClientConfig config) {
super.initWithNiwsConfig(config);
}
}
}
@Configuration
class CustomizedRibbonConfig {
static final AtomicBoolean justRefreshed = new AtomicBoolean(false);
@Bean
public IRule ribbonRule() {
return new MetadataAwareRule();
}
@Bean
public ServerListUpdater ribbonServerListUpdater() {
return new RibbonClientEurekaAutoCompensateServerListUpdater();
}
}

Error "Exception in thread "main" java.lang.ExceptionInInitializerError" while working with log4j

The error is:
Exception in thread "main" java.lang.ExceptionInInitializerError
at com.agile.pc.cmserver.base.CMLogger.setLogClass(CMLogger.java:39)
at com.agile.util.log.CMLogFactory.getLogger(CMLogFactory.java:77)
at com.agile.util.exception.AppException.<clinit>(AppException.java:28)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Unknown Source)
at com.sun.proxy.$Proxy23.<clinit>(Unknown Source)
at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
at sun.reflect.NativeConstructorAccessorImpl.newInstance(Unknown Source)
at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(Unknown Source)
at java.lang.reflect.Constructor.newInstance(Unknown Source)
at java.lang.reflect.Proxy.newProxyInstance(Unknown Source)
at com.agile.api.pc.EJBRemoteProxy.createRemoteProxy(EJBRemoteProxy.java:60)
at com.agile.api.pc.EJBLookup.getRemoteInterface(EJBLookup.java:1012)
at com.agile.api.pc.EJBLookup.getRemoteInterface(EJBLookup.java:959)
at com.agile.api.pc.EJBLookup.getChangeSession(EJBLookup.java:309)
at com.agile.api.pc.change.Change.getBean(Change.java:106)
at com.agile.api.pc.RouteObject$GetStatusAction.doSdkAction(RouteObject.java:2926)
at com.agile.api.common.SDKAction.run(SDKAction.java:23)
at weblogic.security.acl.internal.AuthenticatedSubject.doAs(AuthenticatedSubject.java:368)
at weblogic.security.service.SecurityManager.runAs(SecurityManager.java:163)
at weblogic.security.Security.runAs(Security.java:61)
at com.agile.api.common.WebLogicAuthenticator.doAs(WebLogicAuthenticator.java:111)
at com.agile.api.common.Security.doAs(Security.java:54)
at com.agile.api.common.Security.doAs(Security.java:109)
at com.agile.api.pc.RouteObject.getStatus(RouteObject.java:1206)
at com.gehc.extensions.px.CreateChildSCN.doAction(CreateChildSCN.java:39)
at com.gehc.extensions.px.CreateChildSCN.main(CreateChildSCN.java:124)
Caused by: java.lang.NullPointerException
at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142)
at org.apache.logging.log4j.util.ProviderUtil.loadProvider(ProviderUtil.java:80)
at org.apache.logging.log4j.util.ProviderUtil.<init>(ProviderUtil.java:66)
at org.apache.logging.log4j.util.ProviderUtil.lazyInit(ProviderUtil.java:124)
at org.apache.logging.log4j.util.ProviderUtil.hasProviders(ProviderUtil.java:108)
at org.apache.logging.log4j.LogManager.<clinit>(LogManager.java:89)
... 27 more
Java file:
import java.util.ResourceBundle;
import org.apache.log4j.Logger;
import com.agile.api.APIException;
import com.agile.api.IAgileSession;
import com.agile.api.IChange;
import com.agile.api.IDataObject;
import com.agile.api.INode;
import com.agile.px.ActionResult;
import com.agile.px.ICustomAction;
import com.gehc.common.core.CreateChildSCN_BO;
import com.gehc.common.pxconstants.GEHC_SCNConstants;
import com.gehc.common.pxutil.SDKUtil;
import com.gehc.common.pxutil.Util;
public class CreateChildSCN implements ICustomAction {
private static ResourceBundle objResourceBundle = ResourceBundle.getBundle("GEHCCreateChildSCN");
private static Logger objLogger = Logger.getLogger(CreateChildSCN.class);
public ActionResult doAction(IAgileSession aSession, INode actionNode,
IDataObject currentObject){
Util.initAppLogger(CreateChildSCN.class, Util.getLogFileName());
StringBuffer pxMessage = new StringBuffer();
try
{
IChange objChange = (IChange)currentObject;
String strChangeStatus = objChange.getStatus().toString();
IAgileSession objAgileSession = null;
String strUser = aSession.getCurrentUser().toString();
objLogger.info("Session with Logged on User..::"+aSession.getCurrentUser().toString());
objAgileSession = SDKUtil.getAgileSession(objResourceBundle
.getString("AGILE_USER"), objResourceBundle
.getString("AGILE_PASSWORD"), objResourceBundle
.getString("AGILE_URL"));
if(!(strUser.equals(objResourceBundle.getString("CURRENT_USER")))){
if(strChangeStatus.equals(objResourceBundle.getString("ECO_WORKFLOW_STATUS"))
|| strChangeStatus.equals(objResourceBundle.getString(("ECR_WORKFLOW_STATUS")))){
String suppliers = objChange.getValue(GEHC_SCNConstants.SCN_SUPPLIER_NAME).toString();
String supplierGroup = objChange.getValue(GEHC_SCNConstants.SCN_SUPPLIER_USER_GROUPS).toString();
//Checking for the suppliers presence
if("".equals(suppliers) && "".equals(supplierGroup)){
CreateChildSCN_BO objChildSCNBO = new CreateChildSCN_BO();
pxMessage.append(objChildSCNBO.createSCNs(objAgileSession, objChange));
}else{
pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_SUPPLIER_NAME_VALUE"));
objLogger.info(objResourceBundle.getString("MESSAGE_ERROR_SUPPLIER_NAME_VALUE"));
System.out.println(objResourceBundle.getString("MESSAGE_ERROR_SUPPLIER_NAME_VALUE"));
}
}else{
//System.out.println(objResourceBundle.getString("MESSAGE_ERROR_WORKFLOW_CRITERIA"));
pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_WORKFLOW_CRITERIA"));
} objLogger.info(objResourceBundle.getString("MESSAGE_ERROR_WORKFLOW_CRITERIA"));
}else{
//System.out.println(objResourceBundle.getString("MESSAGE_ERROR_INVALID_USER_LOGIN"));
pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_INVALID_USER_LOGIN"));
objLogger.info(objResourceBundle.getString("MESSAGE_ERROR_INVALID_USER_LOGIN"));
}
}catch (Exception apiEx){
apiEx.printStackTrace();
System.out.println(objResourceBundle.getString("MESSAGE_ERROR_UNABLE_TO_DO ") + apiEx);
pxMessage.append(objResourceBundle.getString("MESSAGE_ERROR_UNABLE_TO_DO"));
objLogger.error(objResourceBundle.getString("MESSAGE_ERROR_UNABLE_TO_DO"));
}
return new ActionResult(ActionResult.STRING, pxMessage.toString());
}
/**
* For Stand alone Only
* Invokes the doAction method
*/
public static void main(String[] args) {
CreateChildSCN objSCRValidation = null;
String strNumber = null;
IAgileSession objAgileSession = null;
IChange objChange = null;
ResourceBundle objResources = ResourceBundle.getBundle("GEHCCreateChildSCN");
Logger objLogger = Logger.getLogger(CreateChildSCN.class);
try {
objSCRValidation = new CreateChildSCN();
Util.initAppLogger(CreateChildSCN.class, Util.getLogFileName());
strNumber = "SCN-0043018";
// Establish session
/*objAgileSession = SDKUtil.getAgileSession(objResources
.getString("AGILE_USER"), objResources
.getString("AGILE_PASSWORD"), objResources
.getString("AGILE_URL"));*/
objAgileSession = SDKUtil.getAgileSession("xxx","xxxx","xxxxxxxxx");
System.out.println("created session");
// Load the objChange
//System.out.println(" Object:: " + objChange.getName());
objChange = (IChange) objAgileSession
.getObject(IChange.OBJECT_TYPE, strNumber);
objSCRValidation
.doAction(objAgileSession, null, objChange);
} catch (APIException e) {
System.out.println("Error log from main thread ::: " + e);
objLogger.error(objResources.getString("SESSION_FAILED") + Util.exception2String(e));
}
}
}
The project also contains properties files where we define logger details such as the path and file name of the log file, and logger initialization like log4j.category.com.xxx.common.util.SDKUtil = debug, XLogger.
Here we are using the Log4j jar API, initialized on the classpath.
Any help here is highly appreciated.
Thanks,
Himachandra.
If you look at the code at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142), it looks like a bug in Log4j:
private static boolean validVersion(final String version) {
for (final String v : COMPATIBLE_API_VERSIONS) {
if (version.startsWith(v)) {
return true;
}
}
return false;
}
In that library, if (version.startsWith(v)) should be if (v.startsWith(version)) since version is nullable but v is never null.
Try using a newer version of log4j that does not have this bug.
I did not see this issue in version:
2.11.2
I saw this issue in versions:
2.6.2 (java.lang.NullPointerException at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142))
2.8.2 (java.lang.NullPointerException at org.apache.logging.log4j.util.ProviderUtil.validVersion(ProviderUtil.java:142))
2.10.0 (java.lang.NoClassDefFoundError: Could not initialize class org.apache.logging.log4j.util.PropertiesUtil)
Changing the version was a quick fix for me. However, it does not fix the underlying root cause. In my case, I think it was some strange interaction between JUnit, Log4j, and JMockit that allowed the version to be null. If I ran the tests in a different order, I had no issue.
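Not part of the fix above, but when the root cause is suspected to be classpath related (for example, multiple log4j-api versions pulled in by different dependencies), a small check like this sketch can show which jar the Log4j API classes are actually loaded from. The class literal does not run LogManager's static initializer, so it works even with the broken versions on the classpath:
import java.net.URL;
import java.security.CodeSource;
import org.apache.logging.log4j.LogManager;
public class Log4jLocationCheck {
public static void main(String[] args) {
// Referencing LogManager.class loads the class but does not trigger its static initializer,
// so this does not reproduce the ExceptionInInitializerError itself.
CodeSource source = LogManager.class.getProtectionDomain().getCodeSource();
URL location = (source != null) ? source.getLocation() : null;
System.out.println("log4j-api loaded from: " + location);
}
}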

Storm Kafka-Spout not working properly

General: I'm a student who wants to run some performance tests (WordCount) on Storm / Kafka / Flink / MS Azure SA / Spark. I want to use the Kafka broker as an input source.
I used the WordCount Example from the Storm-Starter project and added Kafka as a spout:
public class WordCountKafkaTopology {
public static class SplitSentence extends ShellBolt implements IRichBolt {
public SplitSentence() {
super("python", "splitsentence.py");
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("word"));
}
@Override
public Map<String, Object> getComponentConfiguration() {
return null;
}
}
public static class WordCount extends BaseBasicBolt {
Map<String, Integer> counts = new HashMap<String, Integer>();
@Override
public void execute(Tuple tuple, BasicOutputCollector collector) {
String word = tuple.getString(0);
Integer count = counts.get(word);
if (count == null)
count = 0;
count++;
counts.put(word, count);
collector.emit(new Values(word, count));
}
@Override
public void declareOutputFields(OutputFieldsDeclarer declarer) {
declarer.declare(new Fields("word", "count"));
}
}
public static void main(String[] args) {
String zkIp = "localhost";
String topicName = "perfTest";
List<String> nimbus_seeds = new ArrayList<String>();
nimbus_seeds.add("localhost");
String zookeeperHost = zkIp +":2181";
ZkHosts zkHosts = new ZkHosts(zookeeperHost);
SpoutConfig kafkaConfig = new SpoutConfig(zkHosts, topicName, "/" + topicName, topicName);
kafkaConfig.scheme = new SchemeAsMultiScheme(new StringScheme());
KafkaSpout kafkaSpout = new KafkaSpout(kafkaConfig);
TopologyBuilder builder = new TopologyBuilder();
builder.setSpout("kafkaPerfTestSpout", kafkaSpout, 8);
builder.setBolt("split", new SplitSentence(), 8).shuffleGrouping("kafkaPerfTestSpout");
builder.setBolt("count", new WordCount(), 12).fieldsGrouping("split", new Fields("word"));
Config config = new Config();
config.setMaxTaskParallelism(5);
config.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 2);
config.put(Config.NIMBUS_SEEDS, nimbus_seeds);
config.put(Config.NIMBUS_THRIFT_PORT, 6627);
config.put(Config.STORM_ZOOKEEPER_PORT, 2181);
config.put(Config.STORM_ZOOKEEPER_SERVERS, Arrays.asList(zkIp));
try {
StormSubmitter.submitTopology("my-kafka-topology", config, builder.createTopology());
} catch (Exception e) {
throw new IllegalStateException("Couldn't initialize the topology", e);
}
}
}
When running the topology I get several error messages. The spout says:
java.lang.ExceptionInInitializerError
at kafka.metrics.KafkaMetricsGroup$class.newTimer(KafkaMetricsGroup.scala:89)
at kafka.consumer.FetchRequestAndResponseMetrics.newTimer(FetchRequestAndResponseStats.scala:26)
at kafka.consumer.FetchRequestAndResponseMetrics.<init>(FetchRequestAndResponseStats.scala:35)
at kafka.consumer.FetchRequestAndResponseStats.<init>(FetchRequestAndResponseStats.scala:47)
at kafka.consumer.FetchRequestAndResponseStatsRegistry$$anonfun$2.apply(FetchRequestAndResponseStats.scala:60)
at kafka.consumer.FetchRequestAndResponseStatsRegistry$$anonfun$2.apply(FetchRequestAndResponseStats.scala:60)
at kafka.utils.Pool.getAndMaybePut(Pool.scala:59)
at kafka.consumer.FetchRequestAndResponseStatsRegistry$.getFetchRequestAndResponseStats(FetchRequestAndResponseStats.scala:64)
at kafka.consumer.SimpleConsumer.<init>(SimpleConsumer.scala:44)
at kafka.javaapi.consumer.SimpleConsumer.<init>(SimpleConsumer.scala:34)
at org.apache.storm.kafka.DynamicPartitionConnections.register(DynamicPartitionConnections.java:60)
at org.apache.storm.kafka.PartitionManager.<init>(PartitionManager.java:74)
at org.apache.storm.kafka.ZkCoordinator.refresh(ZkCoordinator.java:98)
at org.apache.storm.kafka.ZkCoordinator.getMyManagedPartitions(ZkCoordinator.java:69)
at org.apache.storm.kafka.KafkaSpout.nextTuple(KafkaSpout.java:129)
at org.apache.storm.daemon.executor$fn__7990$fn__8005$fn__8036.invoke(executor.clj:648)
at org.apache.storm.util$async_loop$fn__624.invoke(util.clj:484)
at clojure.lang.AFn.run(AFn.java:22)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IllegalStateException: Shutdown in progress
at java.lang.ApplicationShutdownHooks.add(ApplicationShutdownHooks.java:66)
at java.lang.Runtime.addShutdownHook(Runtime.java:211)
at com.yammer.metrics.Metrics.<clinit>(Metrics.java:21)
... 19 more
At the split bolt:
java.lang.RuntimeException: java.lang.RuntimeException: java.lang.RuntimeException: pid:3973, name:split exitCode:0, errorString:
at org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:464)
at org.apache.storm.utils.DisruptorQueue.consumeBatchWhenAvailable(DisruptorQueue.java:430)
at org.apache.storm.disruptor$consume_batch_when_available.invoke(disruptor.clj:73)
at org.apache.storm.daemon.executor$fn__8058$fn__8071$fn__8124.invoke(executor.clj:850)
at org.apache.storm.util$async_loop$fn__624.invoke(util.clj:484)
at clojure.lang.AFn.run(AFn.java:22)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.RuntimeException: java.lang.RuntimeException: pid:3973, name:split exitCode:0, errorString:
at org.apache.storm.task.ShellBolt.execute(ShellBolt.java:150)
at org.apache.storm.daemon.executor$fn__8058$tuple_action_fn__8060.invoke(executor.clj:731)
at org.apache.storm.daemon.executor$mk_task_receiver$fn__7979.invoke(executor.clj:464)
at org.apache.storm.disruptor$clojure_handler$reify__7492.onEvent(disruptor.clj:40)
at org.apache.storm.utils.DisruptorQueue.consumeBatchToCursor(DisruptorQueue.java:451)
... 6 more
Caused by: java.lang.RuntimeException: pid:3973, name:split exitCode:0, errorString:
at org.apache.storm.task.ShellBolt.die(ShellBolt.java:295)
at org.apache.storm.task.ShellBolt.access$400(ShellBolt.java:70)
at org.apache.storm.task.ShellBolt$BoltWriterRunnable.run(ShellBolt.java:398)
... 1 more
Caused by: java.io.IOException: Broken pipe
at java.io.FileOutputStream.writeBytes(Native Method)
at java.io.FileOutputStream.write(FileOutputStream.java:326)
at java.io.BufferedOutputStream.flushBuffer(BufferedOutputStream.java:82)
at java.io.BufferedOutputStream.flush(BufferedOutputStream.java:140)
at sun.nio.cs.StreamEncoder.implFlush(StreamEncoder.java:297)
at sun.nio.cs.StreamEncoder.flush(StreamEncoder.java:141)
at java.io.OutputStreamWriter.flush(OutputStreamWriter.java:229)
at java.io.BufferedWriter.flush(BufferedWriter.java:254)
at org.apache.storm.multilang.JsonSerializer.writeString(JsonSerializer.java:99)
at org.apache.storm.multilang.JsonSerializer.writeMessage(JsonSerializer.java:93)
at org.apache.storm.multilang.JsonSerializer.writeBoltMsg(JsonSerializer.java:78)
at org.apache.storm.utils.ShellProcess.writeBoltMsg(ShellProcess.java:127)
at org.apache.storm.task.ShellBolt$BoltWriterRunnable.run(ShellBolt.java:387)
... 1 more
I use the kafka-console-producer to generate some messages. I hope someone can help me; I'm a rookie at programming Storm...
Deleting "config.put(Config.TOPOLOGY_TICK_TUPLE_FREQ_SECS, 2);" did the job!

Catch user exception in remote service at caller level

I am running multiple services in an Ignite cluster that depend on each other.
I'd like to catch (user-defined) exceptions at the caller level when I call a remote service function. See the example below, based on the Service example in the docs for Ignite 1.7.
MyUserException.java
package com.example.testing;
public class MyUserException extends Throwable {}
MyCounterService.java
package com.example.testing;
public interface MyCounterService {
int increment() throws MyUserException;
}
MyCounterServiceImpl.java (Error condition is ignite.cluster().forYoungest())
package com.example.testing;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteServices;
import org.apache.ignite.Ignition;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.services.Service;
import org.apache.ignite.services.ServiceContext;
public class MyCounterServiceImpl implements MyCounterService, Service {
@IgniteInstanceResource
private Ignite ignite;
private int value = 0;
public int increment() throws MyUserException {
if ((value % 2) == 0) {
throw new MyUserException();
} else {
value++;
}
return value;
}
public static void main(String [] args) {
Ignite ignite = Ignition.start();
IgniteServices svcs = ignite.services(ignite.cluster().forYoungest());
svcs.deployNodeSingleton("MyCounterService", new MyCounterServiceImpl());
}
@Override
public void cancel(ServiceContext ctx) {
System.out.println("Service cancelled");
}
@Override
public void init(ServiceContext ctx) throws Exception {
System.out.println("Service initialized");
}
@Override
public void execute(ServiceContext ctx) throws Exception {
System.out.println("Service running");
}
}
MyCallerService.java
package com.example.testing;
import org.apache.ignite.Ignite;
import org.apache.ignite.IgniteException;
import org.apache.ignite.Ignition;
import org.apache.ignite.resources.IgniteInstanceResource;
import org.apache.ignite.services.Service;
import org.apache.ignite.services.ServiceContext;
public class MyCallerService implements Service {
@IgniteInstanceResource
private Ignite ignite;
private Boolean stopped;
public void run() {
stopped = false;
MyCounterService service = ignite.services().serviceProxy("MyCounterService", MyCounterService.class, false);
while (!stopped)
{
try {
Thread.sleep(500);
service.increment();
} catch (MyUserException e) {
System.out.println("Got exception");
//e.printStackTrace();
} catch (InterruptedException e) {
//e.printStackTrace();
}
catch (IgniteException e) {
System.out.println("Got critial exception");
// would print the actual user exception
//e.getCause().getCause().getCause().printStackTrace();
break;
}
}
}
public static void main(String [] args) {
Ignite ignite = Ignition.start();
ignite.services(ignite.cluster().forYoungest()).deployNodeSingleton("MyCallerService", new MyCallerService());
}
@Override
public void cancel(ServiceContext ctx) {
stopped = true;
}
@Override
public void init(ServiceContext ctx) throws Exception {
}
@Override
public void execute(ServiceContext ctx) throws Exception {
run();
}
}
The exception is not being caught at the caller level. Instead, these exceptions show up in the console. How do I catch and handle the exceptions properly when a service function is called?
Output of MyCounterServiceImpl
[18:23:23] Ignite node started OK (id=c82df19c)
[18:23:23] Topology snapshot [ver=1, servers=1, clients=0, CPUs=4, heap=3.5GB]
Service initialized
Service running
[18:23:27] Topology snapshot [ver=2, servers=2, clients=0, CPUs=4, heap=7.0GB]
Nov 17, 2016 6:23:28 PM org.apache.ignite.logger.java.JavaLogger error
SCHWERWIEGEND: Failed to execute job [jobId=82580537851-3c0a354f-69b5-496c-af10-ee789a5387c3, ses=GridJobSessionImpl [ses=GridTaskSessionImpl [taskName=o.a.i.i.processors.service.GridServiceProxy$ServiceProxyCallable, dep=LocalDeployment [super=GridDeployment [ts=1479403401422, depMode=SHARED, clsLdr=sun.misc.Launcher$AppClassLoader#1d44bcfa, clsLdrId=4fe60537851-c82df19c-cdff-43ef-b7b6-e8485231629a, userVer=0, loc=true, sampleClsName=java.lang.String, pendingUndeploy=false, undeployed=false, usage=0]], taskClsName=o.a.i.i.processors.service.GridServiceProxy$ServiceProxyCallable, sesId=72580537851-3c0a354f-69b5-496c-af10-ee789a5387c3, startTime=1479403408961, endTime=9223372036854775807, taskNodeId=3c0a354f-69b5-496c-af10-ee789a5387c3, clsLdr=sun.misc.Launcher$AppClassLoader#1d44bcfa, closed=false, cpSpi=null, failSpi=null, loadSpi=null, usage=1, fullSup=false, subjId=3c0a354f-69b5-496c-af10-ee789a5387c3, mapFut=IgniteFuture [orig=GridFutureAdapter [resFlag=0, res=null, startTime=1479403408960, endTime=0, ignoreInterrupts=false, state=INIT]]], jobId=82580537851-3c0a354f-69b5-496c-af10-ee789a5387c3]]
class org.apache.ignite.IgniteException: null
at org.apache.ignite.internal.processors.closure.GridClosureProcessor$C2V2.execute(GridClosureProcessor.java:2009)
at org.apache.ignite.internal.processors.job.GridJobWorker$2.call(GridJobWorker.java:509)
at org.apache.ignite.internal.util.IgniteUtils.wrapThreadLoader(IgniteUtils.java:6521)
at org.apache.ignite.internal.processors.job.GridJobWorker.execute0(GridJobWorker.java:503)
at org.apache.ignite.internal.processors.job.GridJobWorker.body(GridJobWorker.java:456)
at org.apache.ignite.internal.util.worker.GridWorker.run(GridWorker.java:110)
at org.apache.ignite.internal.processors.job.GridJobProcessor.processJobExecuteRequest(GridJobProcessor.java:1161)
at org.apache.ignite.internal.processors.job.GridJobProcessor$JobExecutionListener.onMessage(GridJobProcessor.java:1766)
at org.apache.ignite.internal.managers.communication.GridIoManager.invokeListener(GridIoManager.java:1238)
at org.apache.ignite.internal.managers.communication.GridIoManager.processRegularMessage0(GridIoManager.java:866)
at org.apache.ignite.internal.managers.communication.GridIoManager.access$1700(GridIoManager.java:106)
at org.apache.ignite.internal.managers.communication.GridIoManager$5.run(GridIoManager.java:829)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.ignite.internal.processors.service.GridServiceProxy$ServiceProxyCallable.call(GridServiceProxy.java:392)
at org.apache.ignite.internal.processors.closure.GridClosureProcessor$C2V2.execute(GridClosureProcessor.java:2006)
... 14 more
Caused by: com.example.testing.MyUserException
at com.example.testing.MyCounterServiceImpl.increment(MyCounterServiceImpl.java:19)
... 20 more
Output of MyCallerService
[18:23:28] Ignite node started OK (id=3c0a354f)
[18:23:28] Topology snapshot [ver=2, servers=2, clients=0, CPUs=4, heap=7.0GB]
Nov 17, 2016 6:23:28 PM org.apache.ignite.logger.java.JavaLogger error
SCHWERWIEGEND: Failed to obtain remote job result policy for result from ComputeTask.result(..) method (will fail the whole task): GridJobResultImpl [job=C2V2 [c=ServiceProxyCallable [mtdName=increment, svcName=MyCounterService, ignite=null]], sib=GridJobSiblingImpl [sesId=72580537851-3c0a354f-69b5-496c-af10-ee789a5387c3, jobId=82580537851-3c0a354f-69b5-496c-af10-ee789a5387c3, nodeId=c82df19c-cdff-43ef-b7b6-e8485231629a, isJobDone=false], jobCtx=GridJobContextImpl [jobId=82580537851-3c0a354f-69b5-496c-af10-ee789a5387c3, timeoutObj=null, attrs={}], node=TcpDiscoveryNode [id=c82df19c-cdff-43ef-b7b6-e8485231629a, addrs=[0:0:0:0:0:0:0:1%lo, 127.0.0.1, 172.18.22.52], sockAddrs=[/0:0:0:0:0:0:0:1%lo:47500, /127.0.0.1:47500, /172.18.22.52:47500], discPort=47500, order=1, intOrder=1, lastExchangeTime=1479403407847, loc=false, ver=1.7.0#20160801-sha1:383273e3, isClient=false], ex=class o.a.i.IgniteException: null, hasRes=true, isCancelled=false, isOccupied=true]
class org.apache.ignite.IgniteException: Remote job threw user exception (override or implement ComputeTask.result(..) method if you would like to have automatic failover for this exception).
at org.apache.ignite.compute.ComputeTaskAdapter.result(ComputeTaskAdapter.java:101)
at org.apache.ignite.internal.processors.task.GridTaskWorker$4.apply(GridTaskWorker.java:946)
at org.apache.ignite.internal.processors.task.GridTaskWorker$4.apply(GridTaskWorker.java:939)
at org.apache.ignite.internal.util.IgniteUtils.wrapThreadLoader(IgniteUtils.java:6553)
at org.apache.ignite.internal.processors.task.GridTaskWorker.result(GridTaskWorker.java:939)
at org.apache.ignite.internal.processors.task.GridTaskWorker.onResponse(GridTaskWorker.java:810)
at org.apache.ignite.internal.processors.task.GridTaskProcessor.processJobExecuteResponse(GridTaskProcessor.java:995)
at org.apache.ignite.internal.processors.task.GridTaskProcessor$JobMessageListener.onMessage(GridTaskProcessor.java:1220)
at org.apache.ignite.internal.managers.communication.GridIoManager.invokeListener(GridIoManager.java:1238)
at org.apache.ignite.internal.managers.communication.GridIoManager.processRegularMessage0(GridIoManager.java:866)
at org.apache.ignite.internal.managers.communication.GridIoManager.access$1700(GridIoManager.java:106)
at org.apache.ignite.internal.managers.communication.GridIoManager$5.run(GridIoManager.java:829)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Caused by: class org.apache.ignite.IgniteException: null
at org.apache.ignite.internal.processors.closure.GridClosureProcessor$C2V2.execute(GridClosureProcessor.java:2009)
at org.apache.ignite.internal.processors.job.GridJobWorker$2.call(GridJobWorker.java:509)
at org.apache.ignite.internal.util.IgniteUtils.wrapThreadLoader(IgniteUtils.java:6521)
at org.apache.ignite.internal.processors.job.GridJobWorker.execute0(GridJobWorker.java:503)
at org.apache.ignite.internal.processors.job.GridJobWorker.body(GridJobWorker.java:456)
at org.apache.ignite.internal.util.worker.GridWorker.run(GridWorker.java:110)
at org.apache.ignite.internal.processors.job.GridJobProcessor.processJobExecuteRequest(GridJobProcessor.java:1161)
at org.apache.ignite.internal.processors.job.GridJobProcessor$JobExecutionListener.onMessage(GridJobProcessor.java:1766)
... 7 more
Caused by: java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.ignite.internal.processors.service.GridServiceProxy$ServiceProxyCallable.call(GridServiceProxy.java:392)
at org.apache.ignite.internal.processors.closure.GridClosureProcessor$C2V2.execute(GridClosureProcessor.java:2006)
... 14 more
Caused by: com.example.testing.MyUserException
at com.example.testing.MyCounterServiceImpl.increment(MyCounterServiceImpl.java:19)
... 20 more
Got critical exception
Apparently this is a bug that has yet to be resolved:
https://issues.apache.org/jira/browse/IGNITE-4298
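Until that issue is fixed, one possible workaround on the caller side (just a sketch based on the nesting shown in the trace above, not an official Ignite API) is to scan the cause chain of the IgniteException for the original user exception instead of hard-coding a getCause() depth:
    // Workaround sketch: the nesting depth depends on the Ignite version, so walking
    // the whole cause chain is more robust than e.getCause().getCause().getCause().
    private static MyUserException findUserException(Throwable t) {
        for (Throwable cause = t; cause != null; cause = cause.getCause()) {
            if (cause instanceof MyUserException) {
                return (MyUserException) cause;
            }
        }
        return null;
    }
In MyCallerService.run(), the catch (IgniteException e) branch could then call findUserException(e) and, if it returns non-null, handle it the same way as the direct MyUserException case instead of breaking out of the loop.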
I think the exception should be thrown over to the caller node. Could you please provide a full code example? It is also strange that the node hosting the service logged an exception with a null value.
UPD.
Could you please also add the log, because for me everything works as expected: I caught MyUserException and got the "Got exception" message in the log.

Java library (GSON) : NoClassDefFoundError

For a project, I need to import the GSON library. Despite following these steps found in various topics, I still get a NoClassDefFoundError:
Step 1: in the Build Path, I added the library as an external JAR
Step 2: in Order and Export, I checked the library
So, if anyone has the solution, thank you in advance for your answers! ;)
More information:
package ummisco.gama.webgl;

import com.google.gson.Gson;

public class SceneReceiver {

    private final static SceneReceiver instance = new SceneReceiver();
    private boolean canReceive = true;

    public static SceneReceiver getInstance() {
        return instance;
    }

    private SceneReceiver() {
    }

    public void receive(final SimpleScene scene) {
        reception(false);
        try {
            Gson gson = new Gson();
            String sceneSend = gson.toJson(scene);
            System.out.println(sceneSend);
        } catch (Exception e) {
            System.out.println(e.getMessage());
        }
        reception(true);
    }

    private void reception(boolean canReceive) {
        this.canReceive = canReceive;
    }

    public boolean canReceive() {
        return canReceive;
    }
}
Here is the stack trace:
Exception in thread "Thread-14" java.lang.NoClassDefFoundError: com/google/gson/Gson
at ummisco.gama.webgl.SceneReceiver.receive(SceneReceiver.java:28)
at ummisco.gama.opengl.scene.ModelScene.endDrawingLayers(ModelScene.java:232)
at ummisco.gama.opengl.scene.SceneBuffer.endUpdatingScene(SceneBuffer.java:74)
at ummisco.gama.opengl.JOGLRenderer.endDrawingLayers(JOGLRenderer.java:713)
at msi.gama.outputs.display.LayerManager.drawLayersOn(LayerManager.java:182)
at ummisco.gama.opengl.SWTOpenGLDisplaySurface.updateDisplay(SWTOpenGLDisplaySurface.java:168)
at ummisco.gama.ui.views.displays.LayeredDisplayView$11.run(LayeredDisplayView.java:673)
at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.ClassNotFoundException: com.google.gson.Gson cannot be found by ummisco.gama.opengl_1.7.0.qualifier
at org.eclipse.osgi.internal.loader.BundleLoader.findClassInternal(BundleLoader.java:439)
at org.eclipse.osgi.internal.loader.BundleLoader.findClass(BundleLoader.java:352)
at org.eclipse.osgi.internal.loader.BundleLoader.findClass(BundleLoader.java:344)
at org.eclipse.osgi.internal.loader.ModuleClassLoader.loadClass(ModuleClassLoader.java:160)
at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
... 8 more
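The ClassNotFoundException above is raised by the OSGi bundle loader of ummisco.gama.opengl, so Gson has to be visible to that bundle's class loader at runtime, not only to the Eclipse project's build path. A minimal diagnostic sketch (hypothetical, only to confirm where class resolution fails) is to probe that class loader before constructing Gson:
    // Hypothetical diagnostic: check whether the class loader that loaded SceneReceiver
    // can see com.google.gson.Gson, so a missing entry shows up as a clear message
    // instead of a NoClassDefFoundError deep inside the render loop.
    try {
        Class.forName("com.google.gson.Gson", false, SceneReceiver.class.getClassLoader());
    } catch (ClassNotFoundException e) {
        System.err.println("Gson is not visible to this bundle: " + e.getMessage());
    }
If the probe fails even though the project compiles, the class is not on the bundle's runtime classpath, which matches the BundleLoader entries in the trace above.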
