I tried to run the Declarative Pipeline below:
pipeline {
    agent any
    stages {
        stage('Hello') {
            steps {
                echo 'Hello World'
            }
        }
    }
}
I got the error below:
hudson.remoting.ProxyException: org.codehaus.groovy.control.MultipleCompilationErrorsException: startup failed:
jar:file:/home/durgaprasad/.jenkins/plugins/pipeline-model-definition/WEB-INF/lib/pipeline-model-definition.jar!/org/jenkinsci/plugins/pipeline/modeldefinition/ModelInterpreter.groovy: 29: unable to resolve class edu.umd.cs.findbugs.annotations.SuppressFBWarnings
@ line 29, column 1.
import edu.umd.cs.findbugs.annotations.SuppressFBWarnings
^
Below is the exception stack trace:
1 error
at org.codehaus.groovy.control.ErrorCollector.failIfErrors(ErrorCollector.java:310)
at org.codehaus.groovy.control.CompilationUnit.applyToSourceUnits(CompilationUnit.java:958)
at org.codehaus.groovy.control.CompilationUnit.doPhaseOperation(CompilationUnit.java:605)
at org.codehaus.groovy.control.CompilationUnit.compile(CompilationUnit.java:554)
at groovy.lang.GroovyClassLoader.doParseClass(GroovyClassLoader.java:298)
at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:268)
at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:254)
at groovy.lang.GroovyClassLoader.parseClass(GroovyClassLoader.java:250)
at groovy.lang.GroovyClassLoader.recompile(GroovyClassLoader.java:766)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:718)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
at java.lang.ClassLoader.loadClass(ClassLoader.java:411)
at org.jenkinsci.plugins.workflow.cps.CpsGroovyShell$TimingLoader.loadClass(CpsGroovyShell.java:170)
at java.lang.ClassLoader.loadClass(ClassLoader.java:411)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:677)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:787)
at groovy.lang.GroovyClassLoader.loadClass(GroovyClassLoader.java:775)
at org.jenkinsci.plugins.pipeline.modeldefinition.ModelStepLoader.getValue(ModelStepLoader.java:60)
at org.jenkinsci.plugins.workflow.cps.CpsScript.invokeMethod(CpsScript.java:113)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.codehaus.groovy.reflection.CachedMethod.invoke(CachedMethod.java:93)
at groovy.lang.MetaMethod.doMethodInvoke(MetaMethod.java:325)
at groovy.lang.MetaClassImpl.invokeMethod(MetaClassImpl.java:1213)
at groovy.lang.MetaClassImpl.invokeMethod(MetaClassImpl.java:1022)
at org.codehaus.groovy.runtime.callsite.PogoMetaClassSite.call(PogoMetaClassSite.java:42)
at org.codehaus.groovy.runtime.callsite.CallSiteArray.defaultCall(CallSiteArray.java:48)
at org.codehaus.groovy.runtime.callsite.AbstractCallSite.call(AbstractCallSite.java:113)
at org.kohsuke.groovy.sandbox.impl.Checker$1.call(Checker.java:160)
at org.kohsuke.groovy.sandbox.GroovyInterceptor.onMethodCall(GroovyInterceptor.java:23)
at org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.SandboxInterceptor.onMethodCall(SandboxInterceptor.java:157)
at org.jenkinsci.plugins.scriptsecurity.sandbox.groovy.SandboxInterceptor.onMethodCall(SandboxInterceptor.java:142)
at org.kohsuke.groovy.sandbox.impl.Checker$1.call(Checker.java:158)
at org.kohsuke.groovy.sandbox.impl.Checker.checkedCall(Checker.java:162)
at com.cloudbees.groovy.cps.sandbox.SandboxInvoker.methodCall(SandboxInvoker.java:17)
at WorkflowScript.run(WorkflowScript:1)
at ___cps.transform___(Native Method)
at com.cloudbees.groovy.cps.impl.ContinuationGroup.methodCall(ContinuationGroup.java:86)
at com.cloudbees.groovy.cps.impl.FunctionCallBlock$ContinuationImpl.dispatchOrArg(FunctionCallBlock.java:113)
at com.cloudbees.groovy.cps.impl.FunctionCallBlock$ContinuationImpl.fixArg(FunctionCallBlock.java:83)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.cloudbees.groovy.cps.impl.ContinuationPtr$ContinuationImpl.receive(ContinuationPtr.java:72)
at com.cloudbees.groovy.cps.impl.ClosureBlock.eval(ClosureBlock.java:46)
at com.cloudbees.groovy.cps.Next.step(Next.java:83)
at com.cloudbees.groovy.cps.Continuable$1.call(Continuable.java:174)
at com.cloudbees.groovy.cps.Continuable$1.call(Continuable.java:163)
at org.codehaus.groovy.runtime.GroovyCategorySupport$ThreadCategoryInfo.use(GroovyCategorySupport.java:129)
at org.codehaus.groovy.runtime.GroovyCategorySupport.use(GroovyCategorySupport.java:268)
at com.cloudbees.groovy.cps.Continuable.run0(Continuable.java:163)
at org.jenkinsci.plugins.workflow.cps.SandboxContinuable.access$001(SandboxContinuable.java:18)
at org.jenkinsci.plugins.workflow.cps.SandboxContinuable.run0(SandboxContinuable.java:51)
at org.jenkinsci.plugins.workflow.cps.CpsThread.runNextChunk(CpsThread.java:185)
at org.jenkinsci.plugins.workflow.cps.CpsThreadGroup.run(CpsThreadGroup.java:405)
at org.jenkinsci.plugins.workflow.cps.CpsThreadGroup.access$400(CpsThreadGroup.java:96)
at org.jenkinsci.plugins.workflow.cps.CpsThreadGroup$2.call(CpsThreadGroup.java:317)
at org.jenkinsci.plugins.workflow.cps.CpsThreadGroup$2.call(CpsThreadGroup.java:281)
at org.jenkinsci.plugins.workflow.cps.CpsVmExecutorService$2.call(CpsVmExecutorService.java:67)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at hudson.remoting.SingleLaneExecutorService$1.run(SingleLaneExecutorService.java:131)
at jenkins.util.ContextResettingExecutorService$1.run(ContextResettingExecutorService.java:28)
at jenkins.security.ImpersonatingExecutorService$1.run(ImpersonatingExecutorService.java:59)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
at java.util.concurrent.FutureTask.run(FutureTask.java:266)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
at java.lang.Thread.run(Thread.java:745)
Note:
I am using the following versions: Jenkins ver. 2.163, Pipeline 2.6, Pipeline: Declarative plugin 1.4.0.
According to the documentation, this Declarative Pipeline should work with Pipeline 2.5:
https://jenkins.io/doc/book/pipeline/jenkinsfile/
Please help me to resolve this error.
In storm.yaml (version 1.0.0), I have the following configuration:
nimbus.host: "%nimbus%"
When upgrading, I changed it to:
nimbus.seeds: ["%nimbus%"]
In my script to deploy all topologies, I check the topology list with the following command (in version 1.0.0):
storm list -c nimbus.host=localhost
I got the following error:
Running: /usr/lib/jvm/java-8-oracle/bin/java -client -Ddaemon.name= -Dstorm.options=nimbus.host%3Dlocalhost -Dstorm.home=/usr/share/storm-1.2.3 -Dstorm.log.dir=/usr/share/storm-1.2.3/logs -Djava.library.path=/usr/local/lib:/opt/local/lib:/usr/lib -Dstorm.conf.file= -cp /usr/share/storm-1.2.3/*:/usr/share/storm-1.2.3/lib/*:/usr/share/storm-1.2.3/extlib/*:/usr/share/storm-1.2.3/extlib-daemon/*:/usr/share/storm-1.2.3/conf:/usr/share/storm-1.2.3/bin org.apache.storm.command.list
2952 [main] WARN o.a.s.u.NimbusClient - Using deprecated config nimbus.host for backward compatibility. Please update your storm.yaml so it only has config nimbus.seeds
3033 [main] WARN o.a.s.u.NimbusClient - Ignoring exception while trying to get leader nimbus info from localhost. will retry with a different seed host.
org.apache.storm.thrift.TApplicationException: Internal error processing getLeader
at org.apache.storm.thrift.TServiceClient.receiveBase(TServiceClient.java:79) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.generated.Nimbus$Client.recv_getLeader(Nimbus.java:1166) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.generated.Nimbus$Client.getLeader(Nimbus.java:1154) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.utils.NimbusClient.getConfiguredClientAs(NimbusClient.java:93) [storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.utils.NimbusClient.getConfiguredClientAs(NimbusClient.java:66) [storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.command.list$_main.invoke(list.clj:22) [storm-core-1.2.3.jar:1.2.3]
at clojure.lang.AFn.applyToHelper(AFn.java:152) [clojure-1.7.0.jar:?]
at clojure.lang.AFn.applyTo(AFn.java:144) [clojure-1.7.0.jar:?]
at org.apache.storm.command.list.main(Unknown Source) [storm-core-1.2.3.jar:1.2.3]
Exception in thread "main" org.apache.storm.utils.NimbusLeaderNotFoundException: Could not find leader nimbus from seed hosts [localhost]. Did you specify a valid list of nimbus hosts for config nimbus.seeds?
at org.apache.storm.utils.NimbusClient.getConfiguredClientAs(NimbusClient.java:120)
at org.apache.storm.utils.NimbusClient.getConfiguredClientAs(NimbusClient.java:66)
at org.apache.storm.command.list$_main.invoke(list.clj:22)
at clojure.lang.AFn.applyToHelper(AFn.java:152)
at clojure.lang.AFn.applyTo(AFn.java:144)
at org.apache.storm.command.list.main(Unknown Source)
After upgrading, I changed the command to:
storm list -c nimbus.seeds=[localhost]
But I got the following error:
Exception in thread "main" java.lang.ExceptionInInitializerError
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at clojure.lang.RT.classForName(RT.java:2154)
at clojure.lang.RT.classForName(RT.java:2163)
at clojure.lang.RT.loadClassForName(RT.java:2182)
at clojure.lang.RT.load(RT.java:436)
at clojure.lang.RT.load(RT.java:412)
at clojure.core$load$fn__5448.invoke(core.clj:5866)
at clojure.core$load.doInvoke(core.clj:5865)
at clojure.lang.RestFn.invoke(RestFn.java:408)
at clojure.core$load_one.invoke(core.clj:5671)
at clojure.core$load_lib$fn__5397.invoke(core.clj:5711)
at clojure.core$load_lib.doInvoke(core.clj:5710)
at clojure.lang.RestFn.applyTo(RestFn.java:142)
at clojure.core$apply.invoke(core.clj:632)
at clojure.core$load_libs.doInvoke(core.clj:5753)
at clojure.lang.RestFn.applyTo(RestFn.java:137)
at clojure.core$apply.invoke(core.clj:634)
at clojure.core$use.doInvoke(core.clj:5843)
at clojure.lang.RestFn.invoke(RestFn.java:408)
at org.apache.storm.command.config_value$loading__5340__auto____12317.invoke(config_value.clj:16)
at org.apache.storm.command.config_value__init.load(Unknown Source)
at org.apache.storm.command.config_value__init.<clinit>(Unknown Source)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at clojure.lang.RT.classForName(RT.java:2154)
at clojure.lang.RT.classForName(RT.java:2163)
at clojure.lang.RT.loadClassForName(RT.java:2182)
at clojure.lang.RT.load(RT.java:436)
at clojure.lang.RT.load(RT.java:412)
at clojure.core$load$fn__5448.invoke(core.clj:5866)
at clojure.core$load.doInvoke(core.clj:5865)
at clojure.lang.RestFn.invoke(RestFn.java:408)
at clojure.lang.Var.invoke(Var.java:379)
at org.apache.storm.command.config_value.<clinit>(Unknown Source)
Caused by: java.lang.IllegalArgumentException: Field NIMBUS_SEEDS must be an Iterable but was a class java.lang.String
at org.apache.storm.validation.ConfigValidationUtils$2.validateField(ConfigValidationUtils.java:125)
at org.apache.storm.validation.ConfigValidationUtils$NestableFieldValidator.validateField(ConfigValidationUtils.java:48)
at org.apache.storm.validation.ConfigValidation$ListEntryTypeValidator.validateField(ConfigValidation.java:304)
at org.apache.storm.validation.ConfigValidation$ListEntryTypeValidator.validateField(ConfigValidation.java:299)
at org.apache.storm.validation.ConfigValidation.validateField(ConfigValidation.java:707)
at org.apache.storm.validation.ConfigValidation.validateFields(ConfigValidation.java:742)
at org.apache.storm.validation.ConfigValidation.validateFields(ConfigValidation.java:721)
at org.apache.storm.config$validate_configs_with_schemas.invoke(config.clj:74)
at org.apache.storm.config$read_storm_config.invoke(config.clj:79)
at org.apache.storm.config$fn__975.invoke(config.clj:100)
at org.apache.storm.config__init.load(Unknown Source)
at org.apache.storm.config__init.<clinit>(Unknown Source)
... 35 more
Exception in thread "main" java.lang.ExceptionInInitializerError
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at clojure.lang.RT.classForName(RT.java:2154)
at clojure.lang.RT.classForName(RT.java:2163)
at clojure.lang.RT.loadClassForName(RT.java:2182)
at clojure.lang.RT.load(RT.java:436)
at clojure.lang.RT.load(RT.java:412)
at clojure.core$load$fn__5448.invoke(core.clj:5866)
at clojure.core$load.doInvoke(core.clj:5865)
at clojure.lang.RestFn.invoke(RestFn.java:408)
at clojure.core$load_one.invoke(core.clj:5671)
at clojure.core$load_lib$fn__5397.invoke(core.clj:5711)
at clojure.core$load_lib.doInvoke(core.clj:5710)
at clojure.lang.RestFn.applyTo(RestFn.java:142)
at clojure.core$apply.invoke(core.clj:632)
at clojure.core$load_libs.doInvoke(core.clj:5753)
at clojure.lang.RestFn.applyTo(RestFn.java:137)
at clojure.core$apply.invoke(core.clj:634)
at clojure.core$use.doInvoke(core.clj:5843)
at clojure.lang.RestFn.invoke(RestFn.java:408)
at org.apache.storm.command.config_value$loading__5340__auto____12317.invoke(config_value.clj:16)
at org.apache.storm.command.config_value__init.load(Unknown Source)
at org.apache.storm.command.config_value__init.<clinit>(Unknown Source)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:348)
at clojure.lang.RT.classForName(RT.java:2154)
at clojure.lang.RT.classForName(RT.java:2163)
at clojure.lang.RT.loadClassForName(RT.java:2182)
at clojure.lang.RT.load(RT.java:436)
at clojure.lang.RT.load(RT.java:412)
at clojure.core$load$fn__5448.invoke(core.clj:5866)
at clojure.core$load.doInvoke(core.clj:5865)
at clojure.lang.RestFn.invoke(RestFn.java:408)
at clojure.lang.Var.invoke(Var.java:379)
at org.apache.storm.command.config_value.<clinit>(Unknown Source)
Caused by: java.lang.IllegalArgumentException: Field NIMBUS_SEEDS must be an Iterable but was a class java.lang.String
at org.apache.storm.validation.ConfigValidationUtils$2.validateField(ConfigValidationUtils.java:125)
at org.apache.storm.validation.ConfigValidationUtils$NestableFieldValidator.validateField(ConfigValidationUtils.java:48)
at org.apache.storm.validation.ConfigValidation$ListEntryTypeValidator.validateField(ConfigValidation.java:304)
at org.apache.storm.validation.ConfigValidation$ListEntryTypeValidator.validateField(ConfigValidation.java:299)
at org.apache.storm.validation.ConfigValidation.validateField(ConfigValidation.java:707)
at org.apache.storm.validation.ConfigValidation.validateFields(ConfigValidation.java:742)
at org.apache.storm.validation.ConfigValidation.validateFields(ConfigValidation.java:721)
at org.apache.storm.config$validate_configs_with_schemas.invoke(config.clj:74)
at org.apache.storm.config$read_storm_config.invoke(config.clj:79)
at org.apache.storm.config$fn__975.invoke(config.clj:100)
at org.apache.storm.config__init.load(Unknown Source)
at org.apache.storm.config__init.<clinit>(Unknown Source)
... 35 more
Running: /usr/lib/jvm/java-8-oracle/bin/java -client -Ddaemon.name= -Dstorm.options=nimbus.seeds%3D%5Blocalhost%5D -Dstorm.home=/usr/share/storm-1.2.3 -Dstorm.log.dir= -Djava.library.path= -Dstorm.conf.file= -cp /usr/share/storm-1.2.3/*:/usr/share/storm-1.2.3/lib/*:/usr/share/storm-1.2.3/extlib/*:/usr/share/storm-1.2.3/extlib-daemon/*:/usr/share/storm-1.2.3/conf:/usr/share/storm-1.2.3/bin org.apache.storm.command.list
Exception in thread "main" java.lang.IllegalArgumentException: Field NIMBUS_SEEDS must be an Iterable but was a class java.lang.String
at org.apache.storm.validation.ConfigValidationUtils$2.validateField(ConfigValidationUtils.java:125)
at org.apache.storm.validation.ConfigValidationUtils$NestableFieldValidator.validateField(ConfigValidationUtils.java:48)
at org.apache.storm.validation.ConfigValidation$ListEntryTypeValidator.validateField(ConfigValidation.java:304)
at org.apache.storm.validation.ConfigValidation$ListEntryTypeValidator.validateField(ConfigValidation.java:299)
at org.apache.storm.validation.ConfigValidation.validateField(ConfigValidation.java:707)
at org.apache.storm.validation.ConfigValidation.validateFields(ConfigValidation.java:742)
at org.apache.storm.validation.ConfigValidation.validateFields(ConfigValidation.java:721)
at org.apache.storm.config$validate_configs_with_schemas.invoke(config.clj:74)
at org.apache.storm.config$read_storm_config.invoke(config.clj:79)
at org.apache.storm.command.list$_main.invoke(list.clj:22)
at clojure.lang.AFn.applyToHelper(AFn.java:152)
at clojure.lang.AFn.applyTo(AFn.java:144)
at org.apache.storm.command.list.main(Unknown Source)
Can someone help me with this error? Thank you.
Attached:
Config file (version 1.0.0):
storm.local.hostname: "%local.ip%"
storm.zookeeper.servers:
- "%zookeeper%"
nimbus.host: "%nimbus%"
# netty transport
storm.messaging.transport: "org.apache.storm.messaging.netty.Context"
storm.messaging.netty.buffer_size: 16384
storm.messaging.netty.max_retries: 10
storm.messaging.netty.min_wait_ms: 1000
storm.messaging.netty.max_wait_ms: 5000
drpc.servers:
- "%nimbus%"
drpc.port: 3772
drpc.invocations.port: 3773
### supervisor.* configs are for node supervisors
# Define the amount of workers that can be run on this machine. Each worker is assigned a port to use for communication
supervisor.slots.ports:
- 6701
- 6702
- 6703
- 6704
- 6705
- 6706
- 6707
- 6708
- 6709
- 6710
- 6711
- 6712
- 6713
- 6714
- 6715
- 6716
- 6717
supervisor.childopts: "-Xmx256m"
#how long supervisor will wait to ensure that a worker process is started
supervisor.worker.start.timeout.secs: 120
#how long between heartbeats until supervisor considers that worker dead and tries to restart it
supervisor.worker.timeout.secs: 30
#how frequently the supervisor checks on the status of the processes it's monitoring and restarts if necessary
supervisor.monitor.frequency.secs: 3
#how frequently the supervisor heartbeats to the cluster state (for nimbus)
supervisor.heartbeat.frequency.secs: 5
supervisor.enable: true
supervisor.cpu.capacity: 200.0
supervisor.memory.capacity.mb: 4096.0
### worker.* configs are for task workers
worker.heap.memory.mb: 1024
worker.childopts: "-Xmx%HEAP-MEM%m -XX:+PrintGCDetails -Xloggc:artifacts/gc.log -XX:+PrintGCDateStamps -XX:+PrintGCTimeStamps -XX:+UseGCLogFileRotation -XX:NumberOfGCLogFiles=10 -XX:GCLogFileSize=1M -XX:+HeapDumpOnOutOfMemoryError -XX:HeapDumpPath=artifacts/heapdump"
worker.gc.childopts: ""
### check https://github.com/apache/storm/blob/master/conf/defaults.yaml for complete defaults
Error in logs/nimbus.log:
2019-11-20 17:30:07.158 o.a.s.t.ProcessFunction pool-14-thread-8 [ERROR] Internal error processing getLeader
java.lang.RuntimeException: No nimbus leader participant host found, have you started your nimbus hosts?
at org.apache.storm.zookeeper$to_NimbusInfo.invoke(zookeeper.clj:241) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.zookeeper$zk_leader_elector$reify__2212.getLeader(zookeeper.clj:294) ~[storm-core-1.2.3.jar:1.2.3]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_201]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_201]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_201]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_201]
at clojure.lang.Reflector.invokeMatchingMethod(Reflector.java:93) ~[clojure-1.7.0.jar:?]
at clojure.lang.Reflector.invokeNoArgInstanceMember(Reflector.java:313) ~[clojure-1.7.0.jar:?]
at org.apache.storm.daemon.nimbus$mk_reified_nimbus$reify__9284.getLeader(nimbus.clj:2428) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.generated.Nimbus$Processor$getLeader.getResult(Nimbus.java:4092) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.generated.Nimbus$Processor$getLeader.getResult(Nimbus.java:4071) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.thrift.ProcessFunction.process(ProcessFunction.java:38) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.thrift.TBaseProcessor.process(TBaseProcessor.java:39) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.security.auth.SimpleTransportPlugin$SimpleWrapProcessor.process(SimpleTransportPlugin.java:162) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.thrift.server.AbstractNonblockingServer$FrameBuffer.invoke(AbstractNonblockingServer.java:518) ~[storm-core-1.2.3.jar:1.2.3]
at org.apache.storm.thrift.server.Invocation.run(Invocation.java:18) ~[storm-core-1.2.3.jar:1.2.3]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) [?:1.8.0_201]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) [?:1.8.0_201]
at java.lang.Thread.run(Thread.java:748) [?:1.8.0_201]
You are writing the list command slightly wrong. It should be storm list -c nimbus.seeds='[\"localhost\"]'. The value needs to be the same as it would be in storm.yaml once the shell is done interpreting the string, so nimbus.seeds='[\"localhost\"]' ends up passing a string like nimbus.seeds=["localhost"] to Storm.
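For reference, a minimal sketch of the corrected call, assuming a bash-style shell (in bash, single quotes already preserve the inner double quotes, so the backslash escapes can be dropped):

# Storm must receive the literal argument nimbus.seeds=["localhost"],
# which it then parses as a one-element list:
storm list -c 'nimbus.seeds=["localhost"]'

# The equivalent permanent setting in storm.yaml:
nimbus.seeds: ["localhost"]

Either form gives the NIMBUS_SEEDS validator the Iterable it expects, which is exactly what the IllegalArgumentException above complains about.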
In my Spring Boot application I use spring-boot-starter-data-mongodb:2.1.3 to get a connection to MongoDB. I have a properties file to configure MongoDB:
spring.data.mongodb.host=localhost
spring.data.mongodb.port=27017
spring.data.mongodb.database=database
If I set an incorrect host name (spring.data.mongodb.host=incorrect host) or port, my application still starts successfully. But I want the application to fail, just as it does when I set a host name with a wrong format (spring.data.mongodb.host=hxxt://wrongFormat):
Caused by: com.mongodb.MongoException: host and port should be specified in host:port format
How can I do that?
Example:
application.properties:
#Wrong host
spring.data.mongodb.host=www.google.com
spring.data.mongodb.port=27017
spring.data.mongodb.database=database
DemoApplication.java:
@SpringBootApplication
public class DemoApplication {
    public static void main(String[] args) {
        SpringApplication.run(DemoApplication.class, args);
    }
}
Config.java:
@Configuration
public class Config {
    @Bean
    public CommandLineRunner commandLineRunner(JobRepository jobRepository) {
        return args -> jobRepository.findById("1");
    }
}
JobRepository.java:
public interface JobRepository extends MongoRepository<Job, String> {}
Job.java:
@Getter
@Setter
@EqualsAndHashCode
@NoArgsConstructor
@Document(collection = "Jobs")
public class Job {
    @Id
    private String id = null;
    private String field;
}
Your issue is that the connection check on Mongo happens asynchronously. So as long as the input is well-formed, the beans are all created by Spring Boot and the connection only fails at a later point. You can force it to explode by defining a bean that checks the connection at startup. For example, I use this:
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import com.mongodb.MongoClient;

/**
 * This is a testing bean. It is only here to explode when the database is not available.
 * This is to counteract the default Spring Boot behaviour, where the Mongo configuration
 * will connect to a localhost DB regardless of whether it exists or not.
 */
public class MongoDatabaseVerifier {
    private static final Logger log = LoggerFactory.getLogger(MongoDatabaseVerifier.class);

    public MongoDatabaseVerifier(MongoClient mongoClient) {
        checkConnectionOrThrow(mongoClient);
    }

    /**
     * Calls {@link MongoClient#getAddress()} and throws the result away if successful.
     * @param client the client whose connection is checked
     */
    void checkConnectionOrThrow(MongoClient client) {
        client.getAddress(); // this will either succeed super fast or explode
        log.info("Database fully started ...");
    }
}
Since this bean does its check on creation, Spring will only continue the startup if it succeeds. Otherwise a bean creation exception is thrown and the startup is aborted.
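For completeness, a minimal sketch of how such a verifier could be registered so it is created eagerly at startup (the configuration class name here is illustrative, not part of the original answer):

import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import com.mongodb.MongoClient;

@Configuration
public class MongoVerifierConfig {
    // Constructing the verifier eagerly forces the connection check at startup;
    // if Mongo is unreachable, bean creation fails and the context is aborted.
    // Spring Boot's Mongo auto-configuration supplies the MongoClient bean.
    @Bean
    public MongoDatabaseVerifier mongoDatabaseVerifier(MongoClient mongoClient) {
        return new MongoDatabaseVerifier(mongoClient);
    }
}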
I hope that helps.
I tried to reproduce the issue. Although the log says the application has started, it is then aborted after the 30000 ms timeout for connecting to the cluster expires.
As pointed out by others, you can write some custom logic to check the connectivity, or decrease the connection timeout so the application fails fast.
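As a sketch of the second option: switch to the URI style of configuration and pass the driver timeouts as connection-string options. serverSelectionTimeoutMS and connectTimeoutMS are standard MongoDB connection-string options; the 2000 ms values below are arbitrary.

# application.properties - replaces the separate host/port/database keys
spring.data.mongodb.uri=mongodb://localhost:27017/database?serverSelectionTimeoutMS=2000&connectTimeoutMS=2000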
Sample project code:
2019-05-20 10:28:07.381 INFO 86945 --- [ restartedMain] com.demo.Application : Started Application in 5.882 seconds (JVM running for 6.893)
2019-05-20 10:28:07.435 INFO 86945 --- [ restartedMain] org.mongodb.driver.cluster : Cluster description not yet available. Waiting for 30000 ms before timing out
2019-05-20 10:28:26.253 INFO 86945 --- [oogle.com:27017] org.mongodb.driver.cluster : Exception in monitor thread while connecting to server www.google.com:27017
com.mongodb.MongoSocketOpenException: Exception opening socket
at com.mongodb.internal.connection.SocketStream.open(SocketStream.java:67) ~[mongodb-driver-core-3.8.2.jar:na]
at com.mongodb.internal.connection.InternalStreamConnection.open(InternalStreamConnection.java:126) ~[mongodb-driver-core-3.8.2.jar:na]
at com.mongodb.internal.connection.DefaultServerMonitor$ServerMonitorRunnable.run(DefaultServerMonitor.java:117) ~[mongodb-driver-core-3.8.2.jar:na]
at java.lang.Thread.run(Thread.java:745) [na:1.8.0_101]
Caused by: java.net.SocketTimeoutException: connect timed out
at java.net.PlainSocketImpl.socketConnect(Native Method) ~[na:1.8.0_101]
at java.net.AbstractPlainSocketImpl.doConnect(AbstractPlainSocketImpl.java:350) ~[na:1.8.0_101]
at java.net.AbstractPlainSocketImpl.connectToAddress(AbstractPlainSocketImpl.java:206) ~[na:1.8.0_101]
at java.net.AbstractPlainSocketImpl.connect(AbstractPlainSocketImpl.java:188) ~[na:1.8.0_101]
at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392) ~[na:1.8.0_101]
at java.net.Socket.connect(Socket.java:589) ~[na:1.8.0_101]
at com.mongodb.internal.connection.SocketStreamHelper.initialize(SocketStreamHelper.java:64) ~[mongodb-driver-core-3.8.2.jar:na]
at com.mongodb.internal.connection.SocketStream.open(SocketStream.java:62) ~[mongodb-driver-core-3.8.2.jar:na]
... 3 common frames omitted
2019-05-20 10:28:37.444 INFO 86945 --- [ restartedMain] ConditionEvaluationReportLoggingListener :
Error starting ApplicationContext. To display the conditions report re-run your application with 'debug' enabled.
2019-05-20 10:28:37.452 ERROR 86945 --- [ restartedMain] o.s.boot.SpringApplication : Application run failed
java.lang.IllegalStateException: Failed to execute CommandLineRunner
at org.springframework.boot.SpringApplication.callRunner(SpringApplication.java:816) [spring-boot-2.1.3.RELEASE.jar:2.1.3.RELEASE]
at org.springframework.boot.SpringApplication.callRunners(SpringApplication.java:797) [spring-boot-2.1.3.RELEASE.jar:2.1.3.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:324) [spring-boot-2.1.3.RELEASE.jar:2.1.3.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1260) [spring-boot-2.1.3.RELEASE.jar:2.1.3.RELEASE]
at org.springframework.boot.SpringApplication.run(SpringApplication.java:1248) [spring-boot-2.1.3.RELEASE.jar:2.1.3.RELEASE]
at com.demo.Application.main(Application.java:12) [classes/:na]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_101]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_101]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_101]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_101]
at org.springframework.boot.devtools.restart.RestartLauncher.run(RestartLauncher.java:49) [spring-boot-devtools-2.1.3.RELEASE.jar:2.1.3.RELEASE]
Caused by: org.springframework.dao.DataAccessResourceFailureException: Timed out after 30000 ms while waiting to connect. Client view of cluster state is {type=UNKNOWN, servers=[{address=www.google.com:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]; nested exception is com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting to connect. Client view of cluster state is {type=UNKNOWN, servers=[{address=www.google.com:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
at org.springframework.data.mongodb.core.MongoExceptionTranslator.translateExceptionIfPossible(MongoExceptionTranslator.java:90) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.mongodb.core.MongoTemplate.potentiallyConvertRuntimeException(MongoTemplate.java:2774) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.mongodb.core.MongoTemplate.executeFindOneInternal(MongoTemplate.java:2629) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.mongodb.core.MongoTemplate.doFindOne(MongoTemplate.java:2353) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.mongodb.core.MongoTemplate.findById(MongoTemplate.java:843) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.mongodb.repository.support.SimpleMongoRepository.findById(SimpleMongoRepository.java:118) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[na:1.8.0_101]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[na:1.8.0_101]
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[na:1.8.0_101]
at java.lang.reflect.Method.invoke(Method.java:498) ~[na:1.8.0_101]
at org.springframework.data.repository.core.support.RepositoryComposition$RepositoryFragments.invoke(RepositoryComposition.java:359) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.repository.core.support.RepositoryComposition.invoke(RepositoryComposition.java:200) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$ImplementationMethodExecutionInterceptor.invoke(RepositoryFactorySupport.java:644) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$QueryExecutorMethodInterceptor.doInvoke(RepositoryFactorySupport.java:608) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$QueryExecutorMethodInterceptor.lambda$invoke$3(RepositoryFactorySupport.java:595) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.repository.core.support.RepositoryFactorySupport$QueryExecutorMethodInterceptor.invoke(RepositoryFactorySupport.java:595) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.data.projection.DefaultMethodInvokingMethodInterceptor.invoke(DefaultMethodInvokingMethodInterceptor.java:59) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.aop.interceptor.ExposeInvocationInterceptor.invoke(ExposeInvocationInterceptor.java:93) ~[spring-aop-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.data.repository.core.support.SurroundingTransactionDetectorMethodInterceptor.invoke(SurroundingTransactionDetectorMethodInterceptor.java:61) ~[spring-data-commons-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.aop.framework.ReflectiveMethodInvocation.proceed(ReflectiveMethodInvocation.java:186) ~[spring-aop-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at org.springframework.aop.framework.JdkDynamicAopProxy.invoke(JdkDynamicAopProxy.java:212) ~[spring-aop-5.1.5.RELEASE.jar:5.1.5.RELEASE]
at com.sun.proxy.$Proxy61.findById(Unknown Source) ~[na:na]
at com.demo.TestConfig.lambda$commandLineRunner$0(TestConfig.java:15) ~[classes/:na]
at org.springframework.boot.SpringApplication.callRunner(SpringApplication.java:813) [spring-boot-2.1.3.RELEASE.jar:2.1.3.RELEASE]
... 10 common frames omitted
Caused by: com.mongodb.MongoTimeoutException: Timed out after 30000 ms while waiting to connect. Client view of cluster state is {type=UNKNOWN, servers=[{address=www.google.com:27017, type=UNKNOWN, state=CONNECTING, exception={com.mongodb.MongoSocketOpenException: Exception opening socket}, caused by {java.net.SocketTimeoutException: connect timed out}}]
at com.mongodb.internal.connection.BaseCluster.getDescription(BaseCluster.java:179) ~[mongodb-driver-core-3.8.2.jar:na]
at com.mongodb.internal.connection.MultiServerCluster.getDescription(MultiServerCluster.java:54) ~[mongodb-driver-core-3.8.2.jar:na]
at com.mongodb.client.internal.MongoClientDelegate.getConnectedClusterDescription(MongoClientDelegate.java:136) ~[mongodb-driver-3.8.2.jar:na]
at com.mongodb.client.internal.MongoClientDelegate.createClientSession(MongoClientDelegate.java:94) ~[mongodb-driver-3.8.2.jar:na]
at com.mongodb.client.internal.MongoClientDelegate$DelegateOperationExecutor.getClientSession(MongoClientDelegate.java:249) ~[mongodb-driver-3.8.2.jar:na]
at com.mongodb.client.internal.MongoClientDelegate$DelegateOperationExecutor.execute(MongoClientDelegate.java:172) ~[mongodb-driver-3.8.2.jar:na]
at com.mongodb.client.internal.FindIterableImpl.first(FindIterableImpl.java:198) ~[mongodb-driver-3.8.2.jar:na]
at org.springframework.data.mongodb.core.MongoTemplate$FindOneCallback.doInCollection(MongoTemplate.java:2813) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.mongodb.core.MongoTemplate$FindOneCallback.doInCollection(MongoTemplate.java:2788) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
at org.springframework.data.mongodb.core.MongoTemplate.executeFindOneInternal(MongoTemplate.java:2626) ~[spring-data-mongodb-2.1.5.RELEASE.jar:2.1.5.RELEASE]
... 35 common frames omitted
2019-05-20 10:28:37.457 INFO 86945 --- [ restartedMain] o.s.s.concurrent.ThreadPoolTaskExecutor : Shutting down ExecutorService 'applicationTaskExecutor'
Process finished with exit code 0
I have updated the Spark version from 1.6 to 2.1.1, and I have also updated the Java and Scala versions. But now, when I start spark-shell, I get the following error. Please help me with this; I am new to Spark. Please post some steps to correct this error.
18/10/16 11:59:37 WARN hdfs.DFSClient: DataStreamer Exception
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File
/user/cloudera/.sparkStaging/application_1539716150434_0001/spark_conf.zip could only be replicated to 0 nodes instead of minReplication (=1). There are 0 datanode(s) running and no node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1720)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3440)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:686)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.addBlock(AuthorizationProviderProxyClientProtocol.java:217)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:506)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1469)
at org.apache.hadoop.ipc.Client.call(Client.java:1400)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at com.sun.proxy.$Proxy13.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:399)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy14.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1532)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1349)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:588)
18/10/16 11:59:37 ERROR spark.SparkContext: Error initializing SparkContext.
org.apache.hadoop.ipc.RemoteException(java.io.IOException): File /user/cloudera/.sparkStaging/application_1539716150434_0001/spark_conf.zip could only be replicated to 0 nodes instead of minReplication (=1). There are 0 datanode(s) running and no node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1720)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3440)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:686)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.addBlock(AuthorizationProviderProxyClientProtocol.java:217)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:506)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1469)
at org.apache.hadoop.ipc.Client.call(Client.java:1400)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at com.sun.proxy.$Proxy13.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:399)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy14.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1532)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1349)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:588)
18/10/16 11:59:37 WARN cluster.YarnSchedulerBackend$YarnSchedulerEndpoint: Attempted to request executors before the AM has registered!
18/10/16 11:59:37 ERROR util.Utils: Uncaught exception in thread main
java.lang.NullPointerException
at org.apache.spark.network.shuffle.ExternalShuffleClient.close(ExternalShuffleClient.java:152)
at org.apache.spark.storage.BlockManager.stop(BlockManager.scala:1407)
at org.apache.spark.SparkEnv.stop(SparkEnv.scala:89)
at org.apache.spark.SparkContext$$anonfun$stop$11.apply$mcV$sp(SparkContext.scala:1849)
at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1283)
at org.apache.spark.SparkContext.stop(SparkContext.scala:1848)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:587)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2320)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:868)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$6.apply(SparkSession.scala:860)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:860)
at org.apache.spark.repl.Main$.createSparkSession(Main.scala:96)
at $line3.$read$$iw$$iw.<init>(<console>:15)
at $line3.$read$$iw.<init>(<console>:42)
at $line3.$read.<init>(<console>:44)
at $line3.$read$.<init>(<console>:48)
at $line3.$read$.<clinit>(<console>)
at $line3.$eval$.$print$lzycompute(<console>:7)
at $line3.$eval$.$print(<console>:6)
at $line3.$eval.$print(<console>)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)
at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)
at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)
at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)
at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)
at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)
at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)
at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)
at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)
at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply$mcV$sp(SparkILoop.scala:38)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
at org.apache.spark.repl.SparkILoop$$anonfun$initializeSpark$1.apply(SparkILoop.scala:37)
at scala.tools.nsc.interpreter.IMain.beQuietDuring(IMain.scala:214)
at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:37)
at org.apache.spark.repl.SparkILoop.loadFiles(SparkILoop.scala:105)
at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:920)
at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)
at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)
at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)
at org.apache.spark.repl.Main$.doMain(Main.scala:69)
at org.apache.spark.repl.Main$.main(Main.scala:52)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:743)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:187)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:212)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:126)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
org.apache.hadoop.ipc.RemoteException: File /user/cloudera/.sparkStaging/application_1539716150434_0001/spark_conf.zip could only be replicated to 0 nodes instead of minReplication (=1). There are 0 datanode(s) running and no node(s) are excluded in this operation.
at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager.chooseTarget4NewBlock(BlockManager.java:1720)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getAdditionalBlock(FSNamesystem.java:3440)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.addBlock(NameNodeRpcServer.java:686)
at org.apache.hadoop.hdfs.server.namenode.AuthorizationProviderProxyClientProtocol.addBlock(AuthorizationProviderProxyClientProtocol.java:217)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.addBlock(ClientNamenodeProtocolServerSideTranslatorPB.java:506)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:617)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:1073)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2226)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2222)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:415)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2220)
at org.apache.hadoop.ipc.Client.call(Client.java:1469)
at org.apache.hadoop.ipc.Client.call(Client.java:1400)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:232)
at com.sun.proxy.$Proxy13.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.addBlock(ClientNamenodeProtocolTranslatorPB.java:399)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:187)
at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
at com.sun.proxy.$Proxy14.addBlock(Unknown Source)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.locateFollowingBlock(DFSOutputStream.java:1532)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.nextBlockOutputStream(DFSOutputStream.java:1349)
at org.apache.hadoop.hdfs.DFSOutputStream$DataStreamer.run(DFSOutputStream.java:588)
<console>:14: error: not found: value spark
       import spark.implicits._
              ^
<console>:14: error: not found: value spark
       import spark.sql
              ^
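For what it's worth, the root cause HDFS reports in the trace above is that no DataNodes are running ("There are 0 datanode(s) running"), so spark-shell cannot upload its staging files. A quick way to confirm that before touching Spark itself (a sketch; the service command assumes a Cloudera quickstart-style setup, as the /user/cloudera paths suggest):

# Show live/dead DataNodes as seen by the NameNode
hdfs dfsadmin -report

# On a CDH-style install, check (and if needed restart) the DataNode service
sudo service hadoop-hdfs-datanode status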
I have brew-installed apache-spark 1.6.1 on OS X El Capitan. When I run spark-shell, I receive:
Exception in thread "main" java.lang.NoSuchMethodError: org.slf4j.spi.LocationAwareLogger.log(Lorg/slf4j/Marker;Ljava/lang/String;ILjava/lang/String;[Ljava/lang/Object;Ljava/lang/Throwable;)V
at org.apache.commons.logging.impl.SLF4JLocationAwareLog.debug(SLF4JLocationAwareLog.java:133)
at org.apache.hadoop.metrics2.lib.MutableMetricsFactory.newForField(MutableMetricsFactory.java:42)
at org.apache.hadoop.metrics2.lib.MetricsSourceBuilder.add(MetricsSourceBuilder.java:133)
at org.apache.hadoop.metrics2.lib.MetricsSourceBuilder.<init>(MetricsSourceBuilder.java:59)
at org.apache.hadoop.metrics2.lib.MetricsAnnotations.newSourceBuilder(MetricsAnnotations.java:42)
at org.apache.hadoop.metrics2.impl.MetricsSystemImpl.register(MetricsSystemImpl.java:223)
at org.apache.hadoop.metrics2.MetricsSystem.register(MetricsSystem.java:60)
at org.apache.hadoop.security.UserGroupInformation$UgiMetrics.create(UserGroupInformation.java:108)
at org.apache.hadoop.security.UserGroupInformation.<clinit>(UserGroupInformation.java:224)
at org.apache.spark.util.Utils$$anonfun$getCurrentUserName$1.apply(Utils.scala:2160)
at org.apache.spark.util.Utils$$anonfun$getCurrentUserName$1.apply(Utils.scala:2160)
at scala.Option.getOrElse(Option.scala:120)
at org.apache.spark.util.Utils$.getCurrentUserName(Utils.scala:2160)
at org.apache.spark.SecurityManager.<init>(SecurityManager.scala:214)
at org.apache.spark.repl.SparkIMain.<init>(SparkIMain.scala:118)
at org.apache.spark.repl.SparkILoop$SparkILoopInterpreter.<init>(SparkILoop.scala:187)
at org.apache.spark.repl.SparkILoop.createInterpreter(SparkILoop.scala:217)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:949)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
at org.apache.spark.repl.Main$.main(Main.scala:31)
at org.apache.spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
14:17:50.398 [Thread-0] INFO o.a.spark.util.ShutdownHookManager - Shutdown hook called
14:17:50.401 [Thread-0] INFO o.a.spark.util.ShutdownHookManager - Deleting directory /private/var/folders/pc/wx3xk3sx7_35n6v1qj9knv9c0000gq/T/spark-0a979df4-f61f-4805-a942-8e62350331db
I installed Java SE already, and I suspect this same problem is breaking my pyspark as well (it fails with a similar error).