java.lang.NoClassDefFoundError: Could not initialize class com.datastax.driver.core.Cluster

DataStax JIRA | Dmitry Ochnev | 8 months ago
tip
Click on the checkmark to mark the solution that helps you; Samebug will learn from it.
As a community member, you’ll be rewarded for your help.
  1. 0

    We are trying to use Spark 2.0 with Cassandra 2.2.6 (our project depends on 2.2.6). We have encountered a problem with Datastax Spark Cassandra Connector and one of its dependencies (Guava 16.0) while trying to query data from Cassandra. The error stack trace is the following (Guava 16.0 is mentioned in the bottom message): ---------------- {noformat} 2016-09-01 09:24:13 WARN TaskSetManager:66 - Lost task 1.0 in stage 0.0 (TID 1, a86ec32eb0ad): java.lang.NoClassDefFoundError: Could not initialize class com.datastax.driver.core.Cluster at com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder(CassandraConnectionFactory.scala:35) at com.datastax.spark.connector.cql.DefaultConnectionFactory$.createCluster(CassandraConnectionFactory.scala:92) at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154) at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149) at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149) at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31) at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56) at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:82) at com.datastax.spark.connector.rdd.AbstractCassandraJoin$class.compute(AbstractCassandraJoin.scala:147) at com.datastax.spark.connector.rdd.CassandraJoinRDD.compute(CassandraJoinRDD.scala:22) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at 
org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47) at org.apache.spark.scheduler.Task.run(Task.scala:85) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) 2016-09-01 09:24:13 WARN TaskSetManager:66 - Lost task 0.0 in stage 0.0 (TID 0, a86ec32eb0ad): java.lang.ExceptionInInitializerError at com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder(CassandraConnectionFactory.scala:35) at com.datastax.spark.connector.cql.DefaultConnectionFactory$.createCluster(CassandraConnectionFactory.scala:92) at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154) at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149) at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149) at 
com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31) at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56) at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:82) at com.datastax.spark.connector.rdd.AbstractCassandraJoin$class.compute(AbstractCassandraJoin.scala:147) at com.datastax.spark.connector.rdd.CassandraJoinRDD.compute(CassandraJoinRDD.scala:22) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319) at org.apache.spark.rdd.RDD.iterator(RDD.scala:283) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47) at org.apache.spark.scheduler.Task.run(Task.scala:85) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) at java.lang.Thread.run(Thread.java:745) Caused by: java.lang.IllegalStateException: Detected Guava issue #1635 which indicates that a version of Guava less than 16.01 is in use. This introduces codec resolution issues and potentially other incompatibility issues in the driver. Please upgrade to Guava 16.01 or later. at com.datastax.driver.core.SanityChecks.checkGuava(SanityChecks.java:62) at com.datastax.driver.core.SanityChecks.check(SanityChecks.java:36) at com.datastax.driver.core.Cluster.<clinit>(Cluster.java:67) ... 34 more {noformat} ---------------- It looks like the connection itself was successful: ---------------- 2016-09-01 09:23:57 INFO NettyUtil:79 - Found Netty's native epoll transport in the classpath, using it 2016-09-01 09:23:58 INFO Cluster:1485 - New Cassandra host cassandra-2.2.6/172.17.0.2:9042 added 2016-09-01 09:23:58 INFO CassandraConnector:35 - Connected to Cassandra cluster: Test Cluster ---------------- The error appears in the following configuration: *) Cassandra 2.2.6 in a Docker container *) A simple Mesos cluster (1 master, 1 slave) with Spark 2.0 in a Docker container. The Dockerfile is as follows: -------- FROM mesosphere/mesos:0.28.1 # install Java 8 RUN apt-get update && \ apt-get install -y --no-install-recommends software-properties-common && \ apt-add-repository -y ppa:webupd8team/java && \ apt-get update && \ /bin/echo debconf shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections && \ /bin/echo debconf shared/accepted-oracle-license-v1-1 seen true | /usr/bin/debconf-set-selections && \ apt-get install -y --no-install-recommends oracle-java8-installer oracle-java8-set-default ADD spark /spark -------- (the "spark" directory contains Apache Spark 2.0 built for Scala 2.11) *) Our application, in a Docker container: the application uses Spark 2.0 to processes time series data stored in Cassandra 2.2.6. 
The error appears when trying to query data from Cassandra. The error was reproduced with 2.0.0-M1, M2 and M3 from http://jcenter.bintray.com/com/datastax/spark/spark-cassandra-connector_2.11

    DataStax JIRA | 8 months ago | Dmitry Ochnev
    java.lang.NoClassDefFoundError: Could not initialize class com.datastax.driver.core.Cluster
  2. 0

    Problem with Tomcat - unable to start webapp [Archive] - Java-Monitor Forum

    java-monitor.com | 1 year ago
    java.lang.NoClassDefFoundError: javax/faces/context/ExternalContext at org.apache.myfaces.trinidadinternal.webapp.Trinida dListenerImpl.contextDestroyed(TrinidadListenerImp l.java:39) at org.apache.catalina.core.StandardContext.listenerS top(StandardContext.java:3882) at org.apache.catalina.core.StandardContext.stop(Stan dardContext.java:4523) at org.apache.catalina.core.StandardContext.start(Sta ndardContext.java:4387) at org.apache.catalina.manager.ManagerServlet.start(M anagerServlet.java:1247) at org.apache.catalina.manager.HTMLManagerServlet.sta rt(HTMLManagerServlet.java:604) at org.apache.catalina.manager.HTMLManagerServlet.doG et(HTMLManagerServlet.java:129) at javax.servlet.http.HttpServlet.service(HttpServlet .java:617) at javax.servlet.http.HttpServlet.service(HttpServlet .java:717) at org.apache.catalina.core.ApplicationFilterChain.in ternalDoFilter(ApplicationFilterChain.java:290) at org.apache.catalina.core.ApplicationFilterChain.do Filter(ApplicationFilterChain.java:206) at org.apache.catalina.core.StandardWrapperValve.invo ke(StandardWrapperValve.java:233) at org.apache.catalina.core.StandardContextValve.invo ke(StandardContextValve.java:191) at org.apache.catalina.authenticator.AuthenticatorBas e.invoke(AuthenticatorBase.java:525) at org.apache.catalina.core.StandardHostValve.invoke( StandardHostValve.java:128) at org.apache.catalina.valves.ErrorReportValve.invoke (ErrorReportValve.java:102) at org.apache.catalina.core.StandardEngineValve.invok e(StandardEngineValve.java:109) at org.apache.catalina.connector.CoyoteAdapter.servic e(CoyoteAdapter.java:286) at org.apache.coyote.http11.Http11Processor.process(H ttp11Processor.java:845) at org.apache.coyote.http11.Http11Protocol$Http11Conn ectionHandler.process(Http11Protocol.java:583) at org.apache.tomcat.util.net.JIoEndpoint$Worker.run( JIoEndpoint.java:447)
  3. 0

    issues in spring petclinic application

    Stack Overflow | 4 years ago | user2216702
    java.lang.NoClassDefFoundError: org/springframework/asm/ClassVisitor at org.springframework.beans.factory.support.Abstract AutowireCapableBean Factory.&lt;init&gt;(AbstractAutowireCapableBeanFactory. java:121) ~[spring-beans-3.1.4 .RELEASE.jar:3.1.4.RELEASE] at org.springframework.beans.factory.support.Abstract AutowireCapableBean Factory.&lt;init&gt;(AbstractAutowireCapableBeanFactory. java:168) ~[spring-beans-3.1.4 .RELEASE.jar:3.1.4.RELEASE] at org.springframework.beans.factory.support.DefaultL istableBeanFactory. &lt;init&gt;(DefaultListableBeanFactory.java:163) ~[spring-beans-3.1.4.RELEASE.jar:3.1 .4.RELEASE] at org.springframework.context.support.AbstractRefres hableApplicationCon text.createBeanFactory(AbstractRefreshableApplicat ionContext.java:194) ~[spring- context-3.2.2.RELEASE.jar:3.2.2.RELEASE] at org.springframework.context.support.AbstractRefres hableApplicationCon text.refreshBeanFactory(AbstractRefreshableApplica tionContext.java:127) ~[spring -context-3.2.2.RELEASE.jar:3.2.2.RELEASE] at org.springframework.context.support.AbstractApplic ationContext.obtain FreshBeanFactory(AbstractApplicationContext.java:5 37) ~[spring-context-3.2.2.REL EASE.jar:3.2.2.RELEASE] at org.springframework.context.support.AbstractApplic ationContext.refres h(AbstractApplicationContext.java:451) ~[spring-context-3.2.2.RELEASE.jar:3.2.2. 
RELEASE] at org.springframework.web.context.ContextLoader.conf igureAndRefreshWebA pplicationContext(ContextLoader.java:389) ~[spring-web-3.2.2.RELEASE.jar:3.2.2.R ELEASE] at org.springframework.web.context.ContextLoader.init WebApplicationConte xt(ContextLoader.java:294) ~[spring-web-3.2.2.RELEASE.jar:3.2.2.RELEASE] at org.springframework.web.context.ContextLoaderListe ner.contextInitiali zed(ContextLoaderListener.java:112) [spring-web-3.2.2.RELEASE.jar:3.2.2.RELEASE] at org.apache.catalina.core.StandardContext.listenerS tart(StandardContex t.java:4791) [tomcat-embed-core-7.0.30.jar:7.0.30] at org.apache.catalina.core.StandardContext.startInte rnal(StandardContex t.java:5285) [tomcat-embed-core-7.0.30.jar:7.0.30] at org.apache.catalina.util.LifecycleBase.start(Lifec ycleBase.java:150) [tomcat-embed-core-7.0.30.jar:7.0.30] at org.apache.catalina.core.ContainerBase$StartChild. call(ContainerBase. java:1559) [tomcat-embed-core-7.0.30.jar:7.0.30] at org.apache.catalina.core.ContainerBase$StartChild. call(ContainerBase. java:1549) [tomcat-embed-core-7.0.30.jar:7.0.30] at java.util.concurrent.FutureTask$Sync.innerRun(Futu reTask.java:303) [n a:1.6.0_20] at java.util.concurrent.FutureTask.run(FutureTask.jav a:138) [na:1.6.0_20 ] at java.util.concurrent.ThreadPoolExecutor$Worker.run Task(ThreadPoolExec utor.java:886) [na:1.6.0_20] at java.util.concurrent.ThreadPoolExecutor$Worker.run (ThreadPoolExecutor .java:908) [na:1.6.0_20]
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    Grails : production server only : spring.ReloadAwareAutowireCapableBeanFactory

    Stack Overflow | 3 years ago | Snite
    java.lang.NoClassDefFoundError: org/springframework/orm/jpa/EntityManagerFactoryUtils
  6. 0

    Could not initialize class javax.imageio.ImageIO

    worldwindcentral.com | 1 year ago
    java.lang.NoClassDefFoundError: Could not initialize class javax.imageio.ImageIO at gov.nasa.worldwind.formats.dds.DDSCompre ssor.compressImageURL(DDSCompressor.java :295) at gov.nasa.worldwind.layers.BasicTiledImag eLayer.readTexture(BasicTiledImageLayer. java:384) at gov.nasa.worldwind.layers.BasicTiledImag eLayer.loadTexture(BasicTiledImageLayer. java:342) at gov.nasa.worldwind.layers.BasicTiledImag eLayer$RequestTask.run(BasicTiledImageLa yer.java:263) at java.util.concurrent.ThreadPoolExecutor. runWorker(Unknown Source) at java.util.concurrent.ThreadPoolExecutor$ Worker.run(Unknown Source)

    Not finding the right solution?
    Take a tour to get the most out of Samebug.

    Tired of useless tips?

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. java.lang.NoClassDefFoundError

      Could not initialize class com.datastax.driver.core.Cluster

      at com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder()
    2. spark-cassandra-connector
      CassandraJoinRDD.compute
      1. com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder(CassandraConnectionFactory.scala:35)
      2. com.datastax.spark.connector.cql.DefaultConnectionFactory$.createCluster(CassandraConnectionFactory.scala:92)
      3. com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154)
      4. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
      5. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
      6. com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31)
      7. com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56)
      8. com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:82)
      9. com.datastax.spark.connector.rdd.AbstractCassandraJoin$class.compute(AbstractCassandraJoin.scala:147)
      10. com.datastax.spark.connector.rdd.CassandraJoinRDD.compute(CassandraJoinRDD.scala:22)
      10 frames
    3. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      2. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      3. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      4. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      5. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      6. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      7. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      8. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      9. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      10. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      11. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      12. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      13. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      14. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      15. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      16. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      17. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      18. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
      19. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
      20. org.apache.spark.scheduler.Task.run(Task.scala:85)
      21. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
      21 frames
    4. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
      3. java.lang.Thread.run(Thread.java:745)
      3 frames