Searched Google with the first line of a Java stack trace?

We can recommend more relevant solutions and speed up debugging when you paste your entire stack trace, including the exception message. Try a sample exception.

Recommended solutions based on your search

Solutions on the web

via Jenkins JIRA by Brent Duffy, 1 year ago
via Jenkins JIRA by Brent Duffy, 1 year ago
via Coderanch by Giannis nasdades, 1 year ago
This exception has no message.
via Jenkins JIRA by Alexander Barthel, 1 year ago
This exception has no message.
via Jenkins JIRA by Alexander Barthel, 1 year ago
This exception has no message.
via Jenkins JIRA by Trevor Baker, 1 year ago
This exception has no message.
java.lang.LinkageError: loader (instance of  org/apache/spark/sql/hive/client/IsolatedClientLoader$$anon$1): attempted  duplicate class definition for name: "com/mapr/fs/shim/LibraryLoader"	at java.lang.ClassLoader.defineClass1(Native Method)	at java.lang.ClassLoader.defineClass(ClassLoader.java:763)	at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)	at java.lang.reflect.Method.invoke(Method.java:498)	at com.mapr.fs.ShimLoader.injectNativeLoader(ShimLoader.java:284)	at com.mapr.fs.ShimLoader.load(ShimLoader.java:223)	at com.mapr.fs.MapRFileSystem.(MapRFileSystem.java:107)	at java.lang.Class.forName0(Native Method)	at java.lang.Class.forName(Class.java:348)	at org.apache.hadoop.conf.Configuration.getClassByNameOrNull(Configuration.java:2147)	at org.apache.hadoop.conf.Configuration.getClassByName(Configuration.java:2112)	at org.apache.hadoop.conf.Configuration.getClass(Configuration.java:2206)	at org.apache.hadoop.fs.FileSystem.getFileSystemClass(FileSystem.java:2674)	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2687)	at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:94)	at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2723)	at org.apache.hadoop.fs.FileSystem$Cache.getUnique(FileSystem.java:2711)	at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:454)	at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:462)	at org.apache.hadoop.fs.FileSystem.newInstance(FileSystem.java:444)	at org.apache.hadoop.hive.shims.Hadoop23Shims.getNonCachedFileSystem(Hadoop23Shims.java:944)	at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3687)	at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:600)	at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554)	at 
org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508)	at org.apache.spark.sql.hive.client.HiveClientImpl.(HiveClientImpl.scala:189)	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.(HiveSessionState.scala:63)	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)	at org.apache.spark.sql.Dataset.(Dataset.scala:161)	at org.apache.spark.sql.Dataset.(Dataset.scala:167)	at org.apache.spark.sql.Dataset$.apply(Dataset.scala:59)	at org.apache.spark.sql.SparkSession.createDataset(SparkSession.scala:441)	at org.apache.spark.sql.SQLContext.createDataset(SQLContext.scala:395)	at 
org.apache.spark.sql.SQLImplicits.rddToDatasetHolder(SQLImplicits.scala:163)	at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.(:35)	at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.(:40)	at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw$$iw.(:42)	at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw$$iw.(:44)	at $line30.$read$$iw$$iw$$iw$$iw$$iw$$iw.(:46)	at $line30.$read$$iw$$iw$$iw$$iw$$iw.(:48)	at $line30.$read$$iw$$iw$$iw$$iw.(:50)	at $line30.$read$$iw$$iw$$iw.(:52)	at $line30.$read$$iw$$iw.(:54)	at $line30.$read$$iw.(:56)	at $line30.$read.(:58)	at $line30.$read$.(:62)	at $line30.$read$.()	at $line30.$eval$.$print$lzycompute(:7)	at $line30.$eval$.$print(:6)	at $line30.$eval.$print()	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)	at java.lang.reflect.Method.invoke(Method.java:498)	at scala.tools.nsc.interpreter.IMain$ReadEvalPrint.call(IMain.scala:786)	at scala.tools.nsc.interpreter.IMain$Request.loadAndRun(IMain.scala:1047)	at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:638)	at scala.tools.nsc.interpreter.IMain$WrappedRequest$$anonfun$loadAndRunReq$1.apply(IMain.scala:637)	at scala.reflect.internal.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:31)	at scala.reflect.internal.util.AbstractFileClassLoader.asContext(AbstractFileClassLoader.scala:19)	at scala.tools.nsc.interpreter.IMain$WrappedRequest.loadAndRunReq(IMain.scala:637)	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:569)	at scala.tools.nsc.interpreter.IMain.interpret(IMain.scala:565)	at scala.tools.nsc.interpreter.ILoop.interpretStartingWith(ILoop.scala:807)	at scala.tools.nsc.interpreter.ILoop.command(ILoop.scala:681)	at scala.tools.nsc.interpreter.ILoop.processLine(ILoop.scala:395)	at scala.tools.nsc.interpreter.ILoop.loop(ILoop.scala:415)	at 
scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply$mcZ$sp(ILoop.scala:923)	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)	at scala.tools.nsc.interpreter.ILoop$$anonfun$process$1.apply(ILoop.scala:909)	at scala.reflect.internal.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:97)	at scala.tools.nsc.interpreter.ILoop.process(ILoop.scala:909)	at org.apache.spark.repl.Main$.doMain(Main.scala:68)	at org.apache.spark.repl.Main$.main(Main.scala:51)	at org.apache.spark.repl.Main.main(Main.scala)	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)	at java.lang.reflect.Method.invoke(Method.java:498)	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:736)	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:185)	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:210)	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:124)	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)