I'm using Windows 10, Scala 2.10.2, Spark 1.6.1, and Java 1.8.
Below is the code I'm trying to run.
import org.apache.spark.SparkContext
import org.apache.spark.SparkConf
object WordsCount {
  def main(args: Array[String]): Unit = {
    val sparkConf = new SparkConf().setAppName("SparkWordCount").setMaster("local[2]")
    val sc = new SparkContext(sparkConf)
    val text = sc.textFile("shortTwitter.txt")
    val counts = text.flatMap(line => line.split(" ")).map(word => (word, 1)).reduceByKey(_ + _)
    counts.foreach(println)
  }
}
and I got the following error.
"C:\Program Files\Java\jdk1.8.0_101\bin\java" -Didea.launcher.port=7533 "-Didea.launcher.bin.path=C:\Program Files (x86)\JetBrains\IntelliJ IDEA Community Edition 2016.2.3\bin" -Dfile.encoding=UTF-8 -classpath "C:\Program Files\Java\jdk1.8.0_101\jre\lib\charsets.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\deploy.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\access-bridge-64.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\cldrdata.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\dnsns.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\jaccess.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\jfxrt.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\localedata.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\nashorn.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\sunec.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\sunjce_provider.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\sunmscapi.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\sunpkcs11.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\ext\zipfs.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\javaws.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\jce.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\jfr.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\jfxswt.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\jsse.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\management-agent.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\plugin.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\resources.jar;C:\Program Files\Java\jdk1.8.0_101\jre\lib\rt.jar;C:\Users\Administrator\IdeaProjects\choi_jieun_assignment1\out\production\choi_jieun_assignment1;C:\scala\lib\jline.jar;C:\scala\lib\scalap.jar;C:\scala\lib\diffutils.jar;C:\scala\lib\akka-actors.jar;C:\scala\lib\scala-swing.jar;C:\scala\lib\scala-actors.jar;C:\scala\lib\scala-library.jar;C:\scala\lib\scala-partest.jar;C:\scala\lib\scala-reflect.jar;C:\scala\lib\scala-compiler.jar;C:\scala\lib\typesafe-config.jar;C:\scala\lib\scala-actors-migration.jar;C:\Users\Administrator\.ivy2\cache\org.scala-lang\scala-library\jars\scala-library-2.10.2.jar;C:\Users\Administrator\.ivy2\cache\org.scala-lang\scala-reflect\jars\scala-reflect-2.10.2.jar;C:\spark\lib\datanucleus-core-3.2.10.jar;C:\spark\lib\datanucleus-rdbms-3.2.9.jar;C:\spark\lib\spark-1.6.1-yarn-shuffle.jar;C:\spark\lib\datanucleus-api-jdo-3.2.6.jar;C:\spark\lib\spark-assembly-1.6.1-hadoop2.4.0.jar;C:\spark\lib\spark-examples-1.6.1-hadoop2.4.0.jar;C:\Program Files (x86)\JetBrains\IntelliJ IDEA Community Edition 2016.2.3\lib\idea_rt.jar" com.intellij.rt.execution.application.AppMain WordsCount
Using Spark's default log4j profile: org/apache/spark/log4j-defaults.properties
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/C:/spark/lib/spark-assembly-1.6.1-hadoop2.4.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/C:/spark/lib/spark-examples-1.6.1-hadoop2.4.0.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.Log4jLoggerFactory]
16/09/14 16:03:21 INFO SparkContext: Running Spark version 1.6.1
16/09/14 16:03:22 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
16/09/14 16:03:22 INFO SecurityManager: Changing view acls to: Administrator
16/09/14 16:03:22 INFO SecurityManager: Changing modify acls to: Administrator
16/09/14 16:03:22 INFO SecurityManager: SecurityManager: authentication disabled; ui acls disabled; users with view permissions: Set(Administrator); users with modify permissions: Set(Administrator)
16/09/14 16:03:23 INFO Utils: Successfully started service 'sparkDriver' on port 50762.
16/09/14 16:03:23 ERROR SparkContext: Error initializing SparkContext.
java.lang.NoSuchMethodException: akka.remote.RemoteActorRefProvider.<init>(java.lang.String, akka.actor.ActorSystem$Settings, akka.event.EventStream, akka.actor.Scheduler, akka.actor.DynamicAccess)
at java.lang.Class.getConstructor0(Class.java:3082)
at java.lang.Class.getDeclaredConstructor(Class.java:2178)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:77)
at scala.util.Try$.apply(Try.scala:161)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:74)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:85)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:85)
at scala.util.Success.flatMap(Try.scala:200)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:85)
at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:546)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1988)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1979)
at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:266)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:288)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:457)
at WordsCount$.main(WordsCount.scala:7)
at WordsCount.main(WordsCount.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
16/09/14 16:03:23 INFO SparkContext: Successfully stopped SparkContext
Exception in thread "main" java.lang.NoSuchMethodException: akka.remote.RemoteActorRefProvider.<init>(java.lang.String, akka.actor.ActorSystem$Settings, akka.event.EventStream, akka.actor.Scheduler, akka.actor.DynamicAccess)
at java.lang.Class.getConstructor0(Class.java:3082)
at java.lang.Class.getDeclaredConstructor(Class.java:2178)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$2.apply(DynamicAccess.scala:77)
at scala.util.Try$.apply(Try.scala:161)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:74)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:85)
at akka.actor.ReflectiveDynamicAccess$$anonfun$createInstanceFor$3.apply(DynamicAccess.scala:85)
at scala.util.Success.flatMap(Try.scala:200)
at akka.actor.ReflectiveDynamicAccess.createInstanceFor(DynamicAccess.scala:85)
at akka.actor.ActorSystemImpl.<init>(ActorSystem.scala:546)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:111)
at akka.actor.ActorSystem$.apply(ActorSystem.scala:104)
at org.apache.spark.util.AkkaUtils$.org$apache$spark$util$AkkaUtils$$doCreateActorSystem(AkkaUtils.scala:121)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:53)
at org.apache.spark.util.AkkaUtils$$anonfun$1.apply(AkkaUtils.scala:52)
at org.apache.spark.util.Utils$$anonfun$startServiceOnPort$1.apply$mcVI$sp(Utils.scala:1988)
at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)
at org.apache.spark.util.Utils$.startServiceOnPort(Utils.scala:1979)
at org.apache.spark.util.AkkaUtils$.createActorSystem(AkkaUtils.scala:55)
at org.apache.spark.SparkEnv$.create(SparkEnv.scala:266)
at org.apache.spark.SparkEnv$.createDriverEnv(SparkEnv.scala:193)
at org.apache.spark.SparkContext.createSparkEnv(SparkContext.scala:288)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:457)
at WordsCount$.main(WordsCount.scala:7)
at WordsCount.main(WordsCount.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at com.intellij.rt.execution.application.AppMain.main(AppMain.java:147)
Process finished with exit code 1
I have been struggling for a couple of hours to solve this problem but couldn't figure it out. Can someone help me out? Thanks!
Try installing the Java 7 JDK and you may have better luck. The release notes for Scala 2.10.2 seem to be missing, but those for 2.10.4 show that it doesn't support Java 8:
New ByteCode emitter based on ASM
Can target JDK 1.5, 1.6 and 1.7
Emits 1.6 bytecode by default
Old 1.5 backend is deprecated
...and even 2.11.x only has experimental Java 8 support:
The Scala 2.11.x series targets Java 6, with (evolving) experimental support for Java 8. In 2.11.1, Java 8 support is mostly limited to reading Java 8 bytecode and parsing Java 8 source. Stay tuned for more complete (experimental) Java 8 support. The next major release, 2.12, will most likely target Java 8 by default.
Also see: Can I use java 8 in an mixed scala 2.10 / java project built by sbt?
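If you go that route, you can also pin the toolchain in the build itself so sbt and the IDE don't silently fall back to the Java 8 JDK. A minimal sketch, assuming an sbt build (the JDK path is illustrative, not taken from your setup):

// build.sbt: pin the build to a Java 7 toolchain and a 1.7 bytecode target
javaHome := Some(file("C:/Program Files/Java/jdk1.7.0_80"))  // adjust to your JDK 7 install
scalaVersion := "2.10.4"
scalacOptions += "-target:jvm-1.7"  // Scala 2.10 can target at most 1.7 bytecode
javacOptions ++= Seq("-source", "1.7", "-target", "1.7")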
Related
Using azure-storage-blob-client, I see these:
10:46:38 [main] WARN JacksonVersion - Could not find version of 'jackson-annotations'.
10:46:38 [main] ERROR JacksonVersion - Version 'unknown' of package 'jackson-annotations' is not supported (older than earliest supported version - `2.10.0`), please upgrade.
10:46:38 [main] WARN JacksonVersion - Could not find version of 'jackson-core'.
10:46:38 [main] ERROR JacksonVersion - Version 'unknown' of package 'jackson-core' is not supported (older than earliest supported version - `2.10.0`), please upgrade.
10:46:38 [main] WARN JacksonVersion - Could not find version of 'jackson-databind'.
10:46:38 [main] ERROR JacksonVersion - Version 'unknown' of package 'jackson-databind' is not supported (older than earliest supported version - `2.10.0`), please upgrade.
10:46:38 [main] INFO JacksonVersion - Package versions: jackson-annotations=unknown, jackson-core=unknown, jackson-databind=unknown, jackson-dataformat-xml=2.12.5, jackson-datatype-jsr310=2.12.5, azure-core=1.21.0
10:46:38 [main] DEBUG MemberNameConverterImpl - Failed to retrieve MethodHandles used to get naming strategy. Falling back to BeanUtils.
java.lang.ClassNotFoundException: com.fasterxml.jackson.databind.introspect.AccessorNamingStrategy.Provider
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:418)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352)
at java.lang.ClassLoader.loadClass(ClassLoader.java:351)
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:264)
at com.azure.core.implementation.jackson.MemberNameConverterImpl.<clinit>(MemberNameConverterImpl.java:55)
at com.azure.core.implementation.jackson.ObjectMapperShim.<init>(ObjectMapperShim.java:139)
at com.azure.core.implementation.jackson.ObjectMapperShim.createHeaderMapper(ObjectMapperShim.java:127)
at com.azure.core.util.serializer.JacksonAdapter.<init>(JacksonAdapter.java:75)
at com.azure.core.util.serializer.JacksonAdapter.<init>(JacksonAdapter.java:53)
at com.azure.core.util.serializer.JacksonAdapter.createDefaultSerializerAdapter(JacksonAdapter.java:111)
at com.azure.storage.blob.implementation.util.ModelHelper.<clinit>(ModelHelper.java:53)
at com.azure.storage.blob.BlobUrlParts.parse(BlobUrlParts.java:371)
at com.azure.storage.blob.BlobServiceClientBuilder.endpoint(BlobServiceClientBuilder.java:147)
at com.azure.storage.blob.BlobServiceClientBuilder.connectionString(BlobServiceClientBuilder.java:286)
Why do I get these warning messages?
It turns out one of my dependencies was a "shaded jar" that bundled the Jackson components. The shading process apparently stripped the metadata that records which Jackson version is included, which is why the check reports 'unknown'. The warnings can safely be ignored.
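To see which Jackson is really on the classpath at runtime (as opposed to what the packaging metadata says), you can print the version that jackson-core compiles into itself. A minimal sketch in Scala; the object name is mine, and it assumes jackson-core is available:

import com.fasterxml.jackson.core.json.PackageVersion

object JacksonVersionCheck extends App {
  // PackageVersion is generated into jackson-core at build time, so it
  // usually survives shading even when the jar/package metadata that
  // version checks like Azure's read has been stripped
  println(PackageVersion.VERSION)
}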
I'm trying to use the ideaIC inspect CLI to check local uncommitted changes on Ubuntu.
According to the website docs, a whole-project code inspection runs successfully with the command bin/inspect.sh $(pwd) $(pwd)/.idea/inspectionProfiles/Project_Default.xml temp -v2.
But when I add the -changes option, as in bin/inspect.sh $(pwd) $(pwd)/.idea/inspectionProfiles/Project_Default.xml temp -changes -v2, an unexpected error occurs. I found the error message in the code, but I can't find any solution for it.
OpenJDK 64-Bit Server VM warning: Option UseConcMarkSweepGC was deprecated in version 9.0 and will likely be removed in a future release.
Starting up IntelliJ IDEA 2020.2.3 (build IC-202.7660.26) ...done.
Opening project...2021-02-26 16:41:48,634 [ 1629] WARN - Container.ComponentManagerImpl - Do not use constructor injection (requestorClass=com.alibaba.p3c.idea.component.AliProjectComponent)
2021-02-26 16:41:49,018 [ 2013] WARN - Container.ComponentManagerImpl - Do not use constructor injection (requestorClass=org.jetbrains.android.compose.AndroidComposeAutoDocumentation)
Feb 26, 2021 4:41:49 PM net.sourceforge.pmd.RuleSetFactory parseRuleSetNode
WARNING: RuleSet description is missing. Future versions of PMD will require it.
Feb 26, 2021 4:41:50 PM net.sourceforge.pmd.RuleSetFactory parseRuleSetNode
WARNING: RuleSet description is missing. Future versions of PMD will require it.
2021-02-26 16:41:50,181 [ 3176] WARN - tartup.impl.StartupManagerImpl - Activities registered via registerPostStartupActivity must be dumb-aware: org.jetbrains.kotlin.idea.configuration.ui.KotlinConfigurationCheckerComponent$projectOpened$1#48ae78
done.
Initializing project...Loaded profile 'Project Default' from file '/home/user/workspace/lab/mai/bigdata/home/.idea/inspectionProfiles/Project_Default.xml'
modified file/home/user/workspace/lab/mai/bigdata/home/src/main/java/com/maiscrm/bigdata/spark/app/etl/CommonIncrementalCollectionLoader.java
done.
Inspecting with profile 'Project Default'
Running first analysis stage...
Shelving changes...
2021-02-26 16:41:55,220 [ 8215] WARN - ion.impl.NotificationCollector - Notification group 'Heap Dump Analysis' is already registered in whitelist
2021-02-26 16:41:55,220 [ 8215] WARN - ion.impl.NotificationCollector - Notification group 'Low Memory' is already registered in whitelist
Running second analysis stage...
2021-02-26 16:41:55,370 [ 8365] ERROR - spection.InspectionApplication - java.lang.IllegalArgumentException: The code below uses the same GUI thread to complete operations.Running from EDT would deadlock
java.lang.RuntimeException: java.lang.IllegalArgumentException: The code below uses the same GUI thread to complete operations.Running from EDT would deadlock
at com.intellij.openapi.application.impl.LaterInvocator.invokeAndWait(LaterInvocator.java:149)
at com.intellij.openapi.application.impl.ApplicationImpl.invokeAndWait(ApplicationImpl.java:475)
at com.intellij.openapi.application.ex.ApplicationUtil.invokeAndWaitSomewhere(ApplicationUtil.java:160)
at com.intellij.openapi.progress.impl.CoreProgressManager.runProcessWithProgressInCurrentThread(CoreProgressManager.java:535)
at com.intellij.openapi.progress.impl.CoreProgressManager.run(CoreProgressManager.java:335)
at com.intellij.openapi.vcs.changes.CallbackData.lambda$createInteractive$3(CallbackData.java:85)
at com.intellij.openapi.vcs.changes.UpdateRequestsQueue.invokeAfterUpdate(UpdateRequestsQueue.java:177)
at com.intellij.openapi.vcs.changes.ChangeListManagerImpl.invokeAfterUpdate(ChangeListManagerImpl.java:356)
at com.intellij.openapi.vcs.changes.ChangeListManagerImpl.invokeAfterUpdate(ChangeListManagerImpl.java:344)
at com.intellij.codeInspection.InspectionApplication.run(InspectionApplication.java:235)
at com.intellij.codeInspection.InspectionApplication.execute(InspectionApplication.java:140)
at com.intellij.codeInspection.InspectionApplication.startup(InspectionApplication.java:107)
at com.intellij.codeInspection.InspectionMain.main(InspectionMain.java:99)
at com.intellij.openapi.application.ApplicationStarter.main(ApplicationStarter.java:62)
at com.intellij.idea.ApplicationLoader$startApp$8.run(ApplicationLoader.kt:231)
at java.base/java.util.concurrent.CompletableFuture$UniRun.tryFire(CompletableFuture.java:783)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
at java.base/java.util.concurrent.CompletableFuture.postFire(CompletableFuture.java:610)
at java.base/java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:1085)
at java.base/java.util.concurrent.CompletableFuture$Completion.run(CompletableFuture.java:478)
at com.intellij.idea.ApplicationLoader$startApp$nonEdtExecutor$1.execute(ApplicationLoader.kt:131)
at java.base/java.util.concurrent.CompletableFuture$UniCompletion.claim(CompletableFuture.java:568)
at java.base/java.util.concurrent.CompletableFuture$UniCompose.tryFire(CompletableFuture.java:1069)
at java.base/java.util.concurrent.CompletableFuture.postComplete(CompletableFuture.java:506)
at java.base/java.util.concurrent.CompletableFuture$AsyncRun.run(CompletableFuture.java:1742)
at com.intellij.util.concurrency.BoundedTaskExecutor.doRun(BoundedTaskExecutor.java:215)
at com.intellij.util.concurrency.BoundedTaskExecutor.access$200(BoundedTaskExecutor.java:26)
at com.intellij.util.concurrency.BoundedTaskExecutor$1.execute(BoundedTaskExecutor.java:194)
at com.intellij.util.concurrency.BoundedTaskExecutor$1.run(BoundedTaskExecutor.java:186)
at java.base/java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1128)
at java.base/java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:628)
at java.base/java.util.concurrent.Executors$PrivilegedThreadFactory$1$1.run(Executors.java:668)
at java.base/java.util.concurrent.Executors$PrivilegedThreadFactory$1$1.run(Executors.java:665)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.base/java.util.concurrent.Executors$PrivilegedThreadFactory$1.run(Executors.java:665)
at java.base/java.lang.Thread.run(Thread.java:834)
Caused by: java.lang.IllegalArgumentException: The code below uses the same GUI thread to complete operations.Running from EDT would deadlock
at com.intellij.openapi.projectRoots.impl.UnknownSdkInspectionCommandLineConfigurator$configureProject$1.invokeSuspend(UnknownSdkInspectionCommandLineConfigurator.kt:38)
at kotlin.coroutines.jvm.internal.BaseContinuationImpl.resumeWith(ContinuationImpl.kt:33)
at kotlinx.coroutines.DispatchedTask.run(DispatchedTask.kt:56)
at kotlinx.coroutines.EventLoopImplBase.processNextEvent(EventLoop.common.kt:272)
at kotlinx.coroutines.BlockingCoroutine.joinBlocking(Builders.kt:79)
at kotlinx.coroutines.BuildersKt__BuildersKt.runBlocking(Builders.kt:54)
at kotlinx.coroutines.BuildersKt.runBlocking(Unknown Source)
at kotlinx.coroutines.BuildersKt__BuildersKt.runBlocking$default(Builders.kt:36)
at kotlinx.coroutines.BuildersKt.runBlocking$default(Unknown Source)
at com.intellij.openapi.projectRoots.impl.UnknownSdkInspectionCommandLineConfigurator.configureProject(UnknownSdkInspectionCommandLineConfigurator.kt:37)
at com.intellij.codeInspection.InspectionApplication.configureProject(InspectionApplication.java:350)
at com.intellij.codeInspection.InspectionApplication.lambda$runUnderProgress$21(InspectionApplication.java:574)
at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$runProcess$2(CoreProgressManager.java:170)
at com.intellij.openapi.progress.impl.CoreProgressManager.registerIndicatorAndRun(CoreProgressManager.java:629)
at com.intellij.openapi.progress.impl.CoreProgressManager.executeProcessUnderProgress(CoreProgressManager.java:581)
at com.intellij.openapi.progress.impl.ProgressManagerImpl.executeProcessUnderProgress(ProgressManagerImpl.java:60)
at com.intellij.openapi.progress.impl.CoreProgressManager.runProcess(CoreProgressManager.java:157)
at com.intellij.codeInspection.InspectionApplication.runUnderProgress(InspectionApplication.java:573)
at com.intellij.codeInspection.InspectionApplication.runAnalysis(InspectionApplication.java:373)
at com.intellij.codeInspection.InspectionApplication.runAnalysisOnScope(InspectionApplication.java:343)
at com.intellij.codeInspection.InspectionApplication.lambda$run$4(InspectionApplication.java:241)
at com.intellij.openapi.vcs.changes.Waiter.onSuccess(Waiter.java:51)
at com.intellij.openapi.progress.impl.CoreProgressManager.finishTask(CoreProgressManager.java:549)
at com.intellij.openapi.progress.impl.CoreProgressManager.lambda$runProcessWithProgressInCurrentThread$9(CoreProgressManager.java:535)
at com.intellij.openapi.application.TransactionGuardImpl$2.run(TransactionGuardImpl.java:201)
at com.intellij.openapi.application.impl.ApplicationImpl.runIntendedWriteActionOnCurrentThread(ApplicationImpl.java:802)
at com.intellij.openapi.application.impl.ApplicationImpl.lambda$invokeAndWait$8(ApplicationImpl.java:475)
at com.intellij.openapi.application.impl.LaterInvocator$1.run(LaterInvocator.java:126)
at com.intellij.openapi.application.impl.FlushQueue.doRun(FlushQueue.java:84)
at com.intellij.openapi.application.impl.FlushQueue.runNextEvent(FlushQueue.java:132)
at com.intellij.openapi.application.impl.FlushQueue.flushNow(FlushQueue.java:47)
at com.intellij.openapi.application.impl.FlushQueue$FlushNow.run(FlushQueue.java:188)
at java.desktop/java.awt.event.InvocationEvent.dispatch(InvocationEvent.java:313)
at java.desktop/java.awt.EventQueue.dispatchEventImpl(EventQueue.java:776)
at java.desktop/java.awt.EventQueue$4.run(EventQueue.java:727)
at java.desktop/java.awt.EventQueue$4.run(EventQueue.java:721)
at java.base/java.security.AccessController.doPrivileged(Native Method)
at java.base/java.security.ProtectionDomain$JavaSecurityAccessImpl.doIntersectionPrivilege(ProtectionDomain.java:85)
at java.desktop/java.awt.EventQueue.dispatchEvent(EventQueue.java:746)
at com.intellij.ide.IdeEventQueue.defaultDispatchEvent(IdeEventQueue.java:971)
at com.intellij.ide.IdeEventQueue._dispatchEvent(IdeEventQueue.java:841)
at com.intellij.ide.IdeEventQueue.lambda$dispatchEvent$8(IdeEventQueue.java:452)
at com.intellij.openapi.progress.impl.CoreProgressManager.computePrioritized(CoreProgressManager.java:744)
at com.intellij.ide.IdeEventQueue.lambda$dispatchEvent$9(IdeEventQueue.java:451)
at com.intellij.openapi.application.impl.ApplicationImpl.runIntendedWriteActionOnCurrentThread(ApplicationImpl.java:802)
at com.intellij.ide.IdeEventQueue.dispatchEvent(IdeEventQueue.java:499)
at java.desktop/java.awt.EventDispatchThread.pumpOneEventForFilters(EventDispatchThread.java:203)
at java.desktop/java.awt.EventDispatchThread.pumpEventsForFilter(EventDispatchThread.java:124)
at java.desktop/java.awt.EventDispatchThread.pumpEventsForHierarchy(EventDispatchThread.java:113)
at java.desktop/java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:109)
at java.desktop/java.awt.EventDispatchThread.pumpEvents(EventDispatchThread.java:101)
at java.desktop/java.awt.EventDispatchThread.run(EventDispatchThread.java:90)
2021-02-26 16:41:55,372 [ 8367] ERROR - spection.InspectionApplication - IntelliJ IDEA 2020.2.3 Build #IC-202.7660.26
2021-02-26 16:41:55,372 [ 8367] ERROR - spection.InspectionApplication - JDK: 11.0.8; VM: OpenJDK 64-Bit Server VM; Vendor: JetBrains s.r.o.
2021-02-26 16:41:55,372 [ 8367] ERROR - spection.InspectionApplication - OS: Linux
2021-02-26 16:41:55,372 [ 8367] ERROR - spection.InspectionApplication - Last Action:
java.lang.IllegalArgumentException: The code below uses the same GUI thread to complete operations.Running from EDT would deadlock
The issue is fixed in IntelliJ IDEA 2020.3.2. Related issue in the JetBrains bug tracker: https://youtrack.jetbrains.com/issue/IDEA-263195
I'm trying to set up the Hive integration with Flink as shown here. I have everything configured as described, and all services (Hive, MySQL, Kafka) are running properly. However, when I start a Flink SQL Client on a standalone local Flink cluster with this command:
./bin/sql-client.sh embedded
I get a ClassNotFoundException: org.apache.hadoop.conf.Configuration...
This is the detailed log file with the exception trace:
2020-04-01 11:27:31,458 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.rpc.address, localhost
2020-04-01 11:27:31,459 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.rpc.port, 6123
2020-04-01 11:27:31,459 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.heap.size, 1024m
2020-04-01 11:27:31,460 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: taskmanager.memory.process.size, 1568m
2020-04-01 11:27:31,460 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: taskmanager.numberOfTaskSlots, 1
2020-04-01 11:27:31,460 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: parallelism.default, 1
2020-04-01 11:27:31,460 INFO org.apache.flink.configuration.GlobalConfiguration - Loading configuration property: jobmanager.execution.failover-strategy, region
2020-04-01 11:27:31,507 INFO org.apache.flink.core.fs.FileSystem - Hadoop is not in the classpath/dependencies. The extended set of supported File Systems via Hadoop is not available.
2020-04-01 11:27:31,527 WARN org.apache.flink.client.cli.CliFrontend - Could not load CLI class org.apache.flink.yarn.cli.FlinkYarnSessionCli.
java.lang.NoClassDefFoundError: org/apache/hadoop/yarn/exceptions/YarnException
at java.lang.Class.forName0(Native Method)
at java.lang.Class.forName(Class.java:264)
at org.apache.flink.client.cli.CliFrontend.loadCustomCommandLine(CliFrontend.java:1076)
at org.apache.flink.client.cli.CliFrontend.loadCustomCommandLines(CliFrontend.java:1030)
at org.apache.flink.table.client.gateway.local.LocalExecutor.<init>(LocalExecutor.java:135)
at org.apache.flink.table.client.SqlClient.start(SqlClient.java:85)
at org.apache.flink.table.client.SqlClient.main(SqlClient.java:178)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.yarn.exceptions.YarnException
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:419)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352)
at java.lang.ClassLoader.loadClass(ClassLoader.java:352)
... 7 more
2020-04-01 11:27:31,543 INFO org.apache.flink.table.client.gateway.local.LocalExecutor - Using default environment file: file:/home/rudip7/flink/conf/sql-client-defaults.yaml
2020-04-01 11:27:31,861 INFO org.apache.flink.table.client.config.entries.ExecutionEntry - Property 'execution.restart-strategy.type' not specified. Using default value: fallback
2020-04-01 11:27:32,776 ERROR org.apache.flink.table.client.SqlClient - SQL Client must stop. Unexpected exception. This is a bug. Please consider filing an issue.
org.apache.flink.table.client.gateway.SqlExecutionException: Could not create execution context.
at org.apache.flink.table.client.gateway.local.ExecutionContext$Builder.build(ExecutionContext.java:753)
at org.apache.flink.table.client.gateway.local.LocalExecutor.openSession(LocalExecutor.java:228)
at org.apache.flink.table.client.SqlClient.start(SqlClient.java:98)
at org.apache.flink.table.client.SqlClient.main(SqlClient.java:178)
Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/conf/Configuration
at org.apache.flink.table.catalog.hive.factories.HiveCatalogFactory.createCatalog(HiveCatalogFactory.java:84)
at org.apache.flink.table.client.gateway.local.ExecutionContext.createCatalog(ExecutionContext.java:371)
at org.apache.flink.table.client.gateway.local.ExecutionContext.lambda$null$4(ExecutionContext.java:547)
at java.util.HashMap.forEach(HashMap.java:1289)
at org.apache.flink.table.client.gateway.local.ExecutionContext.lambda$initializeCatalogs$5(ExecutionContext.java:546)
at org.apache.flink.table.client.gateway.local.ExecutionContext.wrapClassLoader(ExecutionContext.java:240)
at org.apache.flink.table.client.gateway.local.ExecutionContext.initializeCatalogs(ExecutionContext.java:545)
at org.apache.flink.table.client.gateway.local.ExecutionContext.initializeTableEnvironment(ExecutionContext.java:494)
at org.apache.flink.table.client.gateway.local.ExecutionContext.<init>(ExecutionContext.java:159)
at org.apache.flink.table.client.gateway.local.ExecutionContext.<init>(ExecutionContext.java:118)
at org.apache.flink.table.client.gateway.local.ExecutionContext$Builder.build(ExecutionContext.java:742)
... 3 more
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.conf.Configuration
at java.net.URLClassLoader.findClass(URLClassLoader.java:382)
at java.lang.ClassLoader.loadClass(ClassLoader.java:419)
at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:352)
at java.lang.ClassLoader.loadClass(ClassLoader.java:352)
... 14 more
Do you have any hint about what I'm missing? I have all my Hadoop libraries on my classpath, and I don't understand why this class is missing...
I'm using Flink 1.10 and Java 8.
Also, this line in the log file makes me wonder whether it is really my mistake:
2020-04-01 11:27:32,776 ERROR org.apache.flink.table.client.SqlClient - SQL Client must stop. Unexpected exception. This is a bug. Please consider filing an issue.
Thank you in advance!
If you haven't set HADOOP_HOME in your environment, you can export HADOOP_CLASSPATH before running ./bin/sql-client.sh embedded:
export HADOOP_CLASSPATH=`hadoop classpath`
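For context: hadoop classpath prints the jar list of the local Hadoop installation, so this export is what puts org.apache.hadoop.conf.Configuration (and the YARN classes the CLI warned about) on the SQL client's classpath; the Flink 1.10 binary distribution does not bundle Hadoop itself.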
I tried running this sample project, which uses MapR.
I executed the class ml.Flight in the sandbox, and at the line below,
val spark: SparkSession = SparkSession.builder().appName("churn").getOrCreate()
I got this error.
[user01@maprdemo ~]$ spark-submit --class ml.Flight --master local[2] spark-ml-flightdelay-1.0.jar
Warning: Unable to determine $DRILL_HOME
18/12/19 05:39:09 WARN Utils: Your hostname, maprdemo.local resolves to a loopback address: 127.0.0.1; using 10.0.3.1 instead (on interface enp0s3)
18/12/19 05:39:09 WARN Utils: Set SPARK_LOCAL_IP if you need to bind to another address
18/12/19 05:39:20 WARN NativeCodeLoader: Unable to load native-hadoop library for your platform... using builtin-java classes where applicable
18/12/19 05:39:28 ERROR SparkContext: Error initializing SparkContext.
java.io.IOException: Could not create FileClient
at com.mapr.fs.MapRFileSystem.lookupClient(MapRFileSystem.java:656)
at com.mapr.fs.MapRFileSystem.lookupClient(MapRFileSystem.java:709)
at com.mapr.fs.MapRFileSystem.getMapRFileStatus(MapRFileSystem.java:1419)
at com.mapr.fs.MapRFileSystem.getFileStatus(MapRFileSystem.java:1093)
at org.apache.spark.scheduler.EventLoggingListener.start(EventLoggingListener.scala:100)
at org.apache.spark.SparkContext.<init>(SparkContext.scala:522)
at org.apache.spark.SparkContext$.getOrCreate(SparkContext.scala:2493)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:933)
at org.apache.spark.sql.SparkSession$Builder$$anonfun$7.apply(SparkSession.scala:924)
at scala.Option.getOrElse(Option.scala:121)
at org.apache.spark.sql.SparkSession$Builder.getOrCreate(SparkSession.scala:924)
at ml.Flight$.main(Flight.scala:37)
at ml.Flight.main(Flight.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:498)
at org.apache.spark.deploy.JavaMainApplication.start(SparkApplication.scala:52)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:899)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:198)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:228)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:137)
at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
Caused by: java.io.IOException: Could not create FileClient
at com.mapr.fs.MapRClientImpl.<init>(MapRClientImpl.java:137)
at com.mapr.fs.MapRFileSystem.lookupClient(MapRFileSystem.java:650)
... 22 more
I'm new to Scala/Spark and any help is welcome. Thanks in advance.
I think you are using or exporting a different spark-submit than the MapR one (for example, one installed with a Python/pip Spark distribution).
For example:
/opt/mapr/spark/spark-2.3.1/bin/spark-submit
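Running which spark-submit will show which binary is actually first on your PATH; if it is not the MapR one under /opt/mapr, the MapR file-system client may not be configured, which would fit the "Could not create FileClient" failure.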
I'm trying to use CoreNLP on Jython 2.7.1, run from IntelliJ IDEA, with the following test code:
from edu.stanford.nlp.simple import *
s = Sentence("This is a test.")
ss = s.nerTags()
print(s)
print(ss)
Log:
Connected to pydev debugger (build 172.4155.5)
[MainThread] INFO edu.stanford.nlp.tagger.maxent.MaxentTagger - Loading POS tagger from edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger ... done [0.4 sec].
[MainThread] INFO edu.stanford.nlp.ie.AbstractSequenceClassifier - Loading classifier from edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz ... done [0.8 sec].
[MainThread] INFO edu.stanford.nlp.ie.AbstractSequenceClassifier - Loading classifier from edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz ... done [0.4 sec].
[MainThread] INFO edu.stanford.nlp.ie.AbstractSequenceClassifier - Loading classifier from edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz ... done [0.4 sec].
[MainThread] INFO edu.stanford.nlp.time.JollyDayHolidays - Initializing JollyDayHoliday for SUTime from classpath edu/stanford/nlp/models/sutime/jollyday/Holidays_sutime.xml as sutime.binder.1.
Traceback (most recent call last):
File "C:\Users\user\.IntelliJIdea2017.2\config\plugins\python\helpers\pydev\pydevd.py", line 1599, in <module>
globals = debugger.run(setup['file'], None, None, is_module)
File "C:\Users\user\.IntelliJIdea2017.2\config\plugins\python\helpers\pydev\pydevd.py", line 1026, in run
pydev_imports.execfile(file, globals, locals) # execute the script
File "C:/Users/user/IdeaProjects/nlptest/src/test.py", line 3, in <module>
ss = s.nerTags()
at edu.stanford.nlp.util.ReflectionLoading.loadByReflection(ReflectionLoading.java:40)
at edu.stanford.nlp.time.TimeExpressionExtractorFactory.create(TimeExpressionExtractorFactory.java:57)
at edu.stanford.nlp.time.TimeExpressionExtractorFactory.createExtractor(TimeExpressionExtractorFactory.java:38)
at edu.stanford.nlp.ie.regexp.NumberSequenceClassifier.<init>(NumberSequenceClassifier.java:86)
at edu.stanford.nlp.ie.NERClassifierCombiner.<init>(NERClassifierCombiner.java:136)
at edu.stanford.nlp.pipeline.NERCombinerAnnotator.<init>(NERCombinerAnnotator.java:91)
at edu.stanford.nlp.pipeline.AnnotatorImplementations.ner(AnnotatorImplementations.java:70)
at edu.stanford.nlp.simple.Document$2.get(Document.java:115)
at edu.stanford.nlp.simple.Document$2.get(Document.java:109)
at edu.stanford.nlp.simple.Document.runNER(Document.java:886)
at edu.stanford.nlp.simple.Sentence.nerTags(Sentence.java:528)
at edu.stanford.nlp.simple.Sentence.nerTags(Sentence.java:536)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
at java.lang.reflect.Method.invoke(Unknown Source)
edu.stanford.nlp.util.ReflectionLoading$ReflectionLoadingException: edu.stanford.nlp.util.ReflectionLoading$ReflectionLoadingException: Error creating edu.stanford.nlp.time.TimeExpressionExtractorImpl
Using my IDE, I added the CoreNLP directory to my CLASSPATH, and it seems that Jython can find it. There are several similar questions on SO about this error, but none of their solutions (if they had any) actually worked for me. Has anyone else found a solution to this problem?
(For the record, CoreNLP works fine when run from the command line.)
Edit (my attempt using Bash on Windows):
user@user:~/uh$ export JYTHONPATH="/home/user/uh/stanford-corenlp-full-2017-06-09/*:"
user@user:~/uh$ jython
Jython 2.7.0 (default:9987c746f838, Apr 29 2015, 02:25:11)
[OpenJDK 64-Bit Server VM (Oracle Corporation)] on java1.8.0_131
Type "help", "copyright", "credits" or "license" for more information.
>>> from edu.stanford.nlp.simple import *
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
ImportError: No module named edu
>>>
My further testing shows that this is purely a Jython problem, and a cross-platform one at that. I tested similar setups on Windows, Ubuntu, and Mac, and got the same error each time. Even running pure Java code designed to return the initial pipeline to Jython produces the same error. However, the error can be avoided by setting "ner.useSUTime" to false with the following:
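# disabling SUTime stops NERClassifierCombiner from constructing TimeExpressionExtractorImpl, whose reflective creation fails in the trace above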
props.setProperty("ner.useSUTime", "0")
I'm not sure what's going on. What version of CoreNLP are you using? Working at the command line, I couldn't reproduce this problem; see below. So, my first two questions are: Are you using the latest CoreNLP? (If not, try upgrading.) Does this work for you at the command line? (In which case, it's only the use in IntelliJ that is problematic. I'm not sure what would be going on there, but perhaps something to do with how reflection and classloaders work?)
==> Pouring jython-2.7.1.sierra.bottle.tar.gz
🍺 /usr/local/Cellar/jython/2.7.1: 3,797 files, 147.4MB
$ export JYTHONPATH="/Users/manning/Software/stanford-corenlp-full-2017-06-09/*:"
$ jython
Jython 2.7.1 (default:0df7adb1b397, Jun 30 2017, 19:02:43)
[Java HotSpot(TM) 64-Bit Server VM (Oracle Corporation)] on java1.8.0_65
Type "help", "copyright", "credits" or "license" for more information.
>>> from edu.stanford.nlp.simple import *
>>> s = Sentence("This is a test.")
>>> ss = s.nerTags()
SLF4J: Class path contains multiple SLF4J bindings.
SLF4J: Found binding in [jar:file:/Users/manning/git/javanlp/projects/core/lib/slf4j-simple.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: Found binding in [jar:file:/Users/manning/Software/stanford-corenlp-full-2017-06-09/slf4j-simple.jar!/org/slf4j/impl/StaticLoggerBinder.class]
SLF4J: See http://www.slf4j.org/codes.html#multiple_bindings for an explanation.
SLF4J: Actual binding is of type [org.slf4j.impl.SimpleLoggerFactory]
[main] INFO edu.stanford.nlp.tagger.maxent.MaxentTagger - Loading POS tagger from edu/stanford/nlp/models/pos-tagger/english-left3words/english-left3words-distsim.tagger ... done [0.7 sec].
[main] INFO edu.stanford.nlp.ie.AbstractSequenceClassifier - Loading classifier from edu/stanford/nlp/models/ner/english.all.3class.distsim.crf.ser.gz ... done [0.9 sec].
[main] INFO edu.stanford.nlp.ie.AbstractSequenceClassifier - Loading classifier from edu/stanford/nlp/models/ner/english.muc.7class.distsim.crf.ser.gz ... done [0.6 sec].
[main] INFO edu.stanford.nlp.ie.AbstractSequenceClassifier - Loading classifier from edu/stanford/nlp/models/ner/english.conll.4class.distsim.crf.ser.gz ... done [2.4 sec].
[main] INFO edu.stanford.nlp.time.JollyDayHolidays - Initializing JollyDayHoliday for SUTime from classpath edu/stanford/nlp/models/sutime/jollyday/Holidays_sutime.xml as sutime.binder.1.
>>> print(s)
This is a test.
>>> print(ss)
[O, O, O, O, O]
>>> s2 = Sentence("Chris joined Stanford in 1999");
>>> ss2 = s2.nerTags()
>>> print(ss2)
[PERSON, O, ORGANIZATION, O, DATE]
>>>