java.lang.NoClassDefFoundError: Could not initialize class com.datastax.driver.core.Cluster

DataStax JIRA | Dmitry Ochnev | 3 months ago

We are trying to use Spark 2.0 with Cassandra 2.2.6 (our project depends on 2.2.6). We have encountered a problem with the DataStax Spark Cassandra Connector and one of its dependencies (Guava 16.0) while trying to query data from Cassandra. The error stack trace is the following (Guava 16.0 is mentioned in the bottom message):
----------------
{noformat}
2016-09-01 09:24:13 WARN TaskSetManager:66 - Lost task 1.0 in stage 0.0 (TID 1, a86ec32eb0ad): java.lang.NoClassDefFoundError: Could not initialize class com.datastax.driver.core.Cluster
    at com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder(CassandraConnectionFactory.scala:35)
    at com.datastax.spark.connector.cql.DefaultConnectionFactory$.createCluster(CassandraConnectionFactory.scala:92)
    at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
    at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31)
    at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56)
    at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:82)
    at com.datastax.spark.connector.rdd.AbstractCassandraJoin$class.compute(AbstractCassandraJoin.scala:147)
    at com.datastax.spark.connector.rdd.CassandraJoinRDD.compute(CassandraJoinRDD.scala:22)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
    at org.apache.spark.scheduler.Task.run(Task.scala:85)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
2016-09-01 09:24:13 WARN TaskSetManager:66 - Lost task 0.0 in stage 0.0 (TID 0, a86ec32eb0ad): java.lang.ExceptionInInitializerError
    at com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder(CassandraConnectionFactory.scala:35)
    at com.datastax.spark.connector.cql.DefaultConnectionFactory$.createCluster(CassandraConnectionFactory.scala:92)
    at com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
    at com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31)
    at com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56)
    at com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:82)
    at com.datastax.spark.connector.rdd.AbstractCassandraJoin$class.compute(AbstractCassandraJoin.scala:147)
    at com.datastax.spark.connector.rdd.CassandraJoinRDD.compute(CassandraJoinRDD.scala:22)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
    at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
    at org.apache.spark.scheduler.Task.run(Task.scala:85)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.IllegalStateException: Detected Guava issue #1635 which indicates that a version of Guava less than 16.01 is in use. This introduces codec resolution issues and potentially other incompatibility issues in the driver. Please upgrade to Guava 16.01 or later.
    at com.datastax.driver.core.SanityChecks.checkGuava(SanityChecks.java:62)
    at com.datastax.driver.core.SanityChecks.check(SanityChecks.java:36)
    at com.datastax.driver.core.Cluster.<clinit>(Cluster.java:67)
    ... 34 more
{noformat}
----------------

It looks like the connection itself was successful:
----------------
2016-09-01 09:23:57 INFO NettyUtil:79 - Found Netty's native epoll transport in the classpath, using it
2016-09-01 09:23:58 INFO Cluster:1485 - New Cassandra host cassandra-2.2.6/172.17.0.2:9042 added
2016-09-01 09:23:58 INFO CassandraConnector:35 - Connected to Cassandra cluster: Test Cluster
----------------

The error appears in the following configuration:

*) Cassandra 2.2.6 in a Docker container.

*) A simple Mesos cluster (1 master, 1 slave) with Spark 2.0 in a Docker container. The Dockerfile is as follows:
--------
FROM mesosphere/mesos:0.28.1

# install Java 8
RUN apt-get update && \
    apt-get install -y --no-install-recommends software-properties-common && \
    apt-add-repository -y ppa:webupd8team/java && \
    apt-get update && \
    /bin/echo debconf shared/accepted-oracle-license-v1-1 select true | /usr/bin/debconf-set-selections && \
    /bin/echo debconf shared/accepted-oracle-license-v1-1 seen true | /usr/bin/debconf-set-selections && \
    apt-get install -y --no-install-recommends oracle-java8-installer oracle-java8-set-default

ADD spark /spark
--------
(the "spark" directory contains Apache Spark 2.0 built for Scala 2.11)

*) Our application, in a Docker container: the application uses Spark 2.0 to process time series data stored in Cassandra 2.2.6.

The error appears when trying to query data from Cassandra. It was reproduced with connector versions 2.0.0-M1, M2 and M3 from http://jcenter.bintray.com/com/datastax/spark/spark-cassandra-connector_2.11
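The "Detected Guava issue #1635" message at the bottom of the trace comes from the Java driver's startup sanity check: it fires when an older Guava (for example the 14.x copy that Spark and Hadoop distributions commonly carry) wins on the executor classpath over the Guava 16.0.1+ the driver requires, even though the application declares Guava 16.0. Note that the successful-connection messages above come from the driver JVM, while the failures are task failures on executors, which suggests the executor classpath is the one at fault. One way to confirm which jar the executors actually resolve Guava from is to probe the class's code source inside a task. A minimal sketch, not part of the original report (the GuavaProbe object and app name are illustrative):
{noformat}
import org.apache.spark.sql.SparkSession

// Reports which jar each executor resolved Guava from. Submit with
// spark-submit against the same cluster that shows the error.
object GuavaProbe {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder().appName("guava-probe").getOrCreate()
    val sc = spark.sparkContext
    val locations = sc.parallelize(1 to sc.defaultParallelism)
      .map { _ =>
        // Resolve a Guava class through the executor's classloader and
        // report where it was loaded from.
        val cls = Class.forName("com.google.common.base.Optional")
        Option(cls.getProtectionDomain.getCodeSource)
          .map(_.getLocation.toString)
          .getOrElse("<no code source>")
      }
      .distinct()
      .collect()
    locations.foreach(loc => println(s"Guava loaded from: $loc"))
    spark.stop()
  }
}
{noformat}
If the printed location points into the Spark or Mesos installation rather than at the application's own Guava 16.x jar, the older copy is shadowing the driver's dependency, which is exactly what the sanity check complains about.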

Related reports:

  * maven repo and 5.0-X version
    Google Groups | 2 years ago | Zsolt Kúti
    java.lang.NoClassDefFoundError: Could not initialize class

  * NoClassDefFoundError while creating mock object in java
    Stack Overflow | 7 months ago | Manas Kantha
    java.lang.NoClassDefFoundError: Could not initialize class

  * External Properties file and classpath
    Oracle Community | 6 years ago | 393585
    java.lang.NoClassDefFoundError: Could not initialize class


    Root Cause Analysis

    1. java.lang.NoClassDefFoundError

      Could not initialize class com.datastax.driver.core.Cluster

      at com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder()
    2. spark-cassandra-connector
      CassandraJoinRDD.compute
      1. com.datastax.spark.connector.cql.DefaultConnectionFactory$.clusterBuilder(CassandraConnectionFactory.scala:35)
      2. com.datastax.spark.connector.cql.DefaultConnectionFactory$.createCluster(CassandraConnectionFactory.scala:92)
      3. com.datastax.spark.connector.cql.CassandraConnector$.com$datastax$spark$connector$cql$CassandraConnector$$createSession(CassandraConnector.scala:154)
      4. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
      5. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$3.apply(CassandraConnector.scala:149)
      6. com.datastax.spark.connector.cql.RefCountedCache.createNewValueAndKeys(RefCountedCache.scala:31)
      7. com.datastax.spark.connector.cql.RefCountedCache.acquire(RefCountedCache.scala:56)
      8. com.datastax.spark.connector.cql.CassandraConnector.openSession(CassandraConnector.scala:82)
      9. com.datastax.spark.connector.rdd.AbstractCassandraJoin$class.compute(AbstractCassandraJoin.scala:147)
      10. com.datastax.spark.connector.rdd.CassandraJoinRDD.compute(CassandraJoinRDD.scala:22)
      10 frames
    3. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      2. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      3. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      4. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      5. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      6. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      7. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      8. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      9. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      10. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      11. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      12. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      13. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      14. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      15. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
      16. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
      17. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
      18. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:79)
      19. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:47)
      20. org.apache.spark.scheduler.Task.run(Task.scala:85)
      21. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
      21 frames
    4. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
      3. java.lang.Thread.run(Thread.java:745)
      3 frames
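
A common workaround for this class of conflict is to relocate (shade) Guava inside the application's fat jar, so the Cassandra driver always sees its own Guava no matter what Spark, Mesos, or Hadoop put on the classpath first. A minimal sketch, assuming the application is built with sbt-assembly; the shaded package prefix is arbitrary:
{noformat}
// build.sbt -- relocate Guava classes inside the assembled fat jar so the
// driver's Guava 16.x cannot collide with an older copy on the cluster.
// Assumes the sbt-assembly plugin is enabled in project/plugins.sbt.
assemblyShadeRules in assembly := Seq(
  ShadeRule.rename("com.google.common.**" -> "shaded.com.google.common.@1").inAll
)
{noformat}
Spark's experimental spark.executor.userClassPathFirst=true setting is sometimes suggested as an alternative, but it can break other shared dependencies; shading is generally the safer route.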