java.lang.ClassNotFoundException: com.datastax.spark.connector.rdd.partitioner.CassandraPartition

DataStax JIRA | Vijay Pawnarkar | 2 years ago
  1.

    I am getting the following ClassNotFoundException when using Spark 1.2.1 with spark-cassandra-connector_2.10-1.2.0-alpha2. When the job is submitted to Spark, it successfully adds the required connector JAR files to the worker's classpath; the corresponding log entries are attached below.

    ----------------------------
    [task-result-getter-0] WARN org.apache.spark.scheduler.TaskSetManager - Lost task 0.0 in stage 0.0 (TID 0, 127.0.0.1): java.lang.ClassNotFoundException: com.datastax.spark.connector.rdd.partitioner.CassandraPartition
        at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
        at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
        at java.security.AccessController.doPrivileged(Native Method)
        at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
        at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
        at java.lang.Class.forName0(Native Method)
        at java.lang.Class.forName(Class.java:274)
        at org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:59)
        at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1612)
        at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
        at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
        at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
        at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
        at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
        at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
        at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:182)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
    ------------------------------

    Log indicating the JAR files were added to the worker classpath:

    15/02/17 16:56:48 INFO Executor: Fetching http://127.0.0.1:64265/jars/spark-cassandra-connector-java_2.10-1.2.0-alpha2.jar with timestamp 1424210185005
    15/02/17 16:56:48 INFO Utils: Fetching http://127.0.0.1:64265/jars/spark-cassandra-connector-java_2.10-1.2.0-alpha2.jar to C:\Users\sparkus\AppData\Local\Temp\spark-10f5e149-5460-4899-9c8f-b19b19bdaf55\spark-fba24b2b-5847-4b04-848c-90677d12ff99\spark-35f5ed4b-041d-40d8-8854-b243787de188\fetchFileTemp4665176275367448514.tmp
    15/02/17 16:56:48 DEBUG Utils: fetchFile not using security
    15/02/17 16:56:48 INFO Utils: Copying C:\Users\sparkus\AppData\Local\Temp\spark-10f5e149-5460-4899-9c8f-b19b19bdaf55\spark-fba24b2b-5847-4b04-848c-90677d12ff99\spark-35f5ed4b-041d-40d8-8854-b243787de188\16215993091424210185005_cache to C:\localapps\spark-1.2.1-bin-hadoop2.4\work\app-20150217165625-0006\0\.\spark-cassandra-connector-java_2.10-1.2.0-alpha2.jar
    15/02/17 16:56:48 INFO Executor: Adding file:/C:/localapps/spark-1.2.1-bin-hadoop2.4/work/app-20150217165625-0006/0/./spark-cassandra-connector-java_2.10-1.2.0-alpha2.jar to class loader
    15/02/17 16:56:50 INFO Executor: Fetching http://127.0.0.1:64265/jars/spark-cassandra-connector_2.10-1.2.0-alpha2.jar with timestamp 1424210185012
    15/02/17 16:56:50 INFO Utils: Fetching http://127.0.0.1:64265/jars/spark-cassandra-connector_2.10-1.2.0-alpha2.jar to C:\Users\sparkus\AppData\Local\Temp\spark-10f5e149-5460-4899-9c8f-b19b19bdaf55\spark-fba24b2b-5847-4b04-848c-90677d12ff99\spark-78373f0b-053b-4c43-bd7c-da733e58ab0d\fetchFileTemp3822867177146190341.tmp
    15/02/17 16:56:50 DEBUG Utils: fetchFile not using security
    15/02/17 16:56:50 INFO Utils: Copying C:\Users\sparkus\AppData\Local\Temp\spark-10f5e149-5460-4899-9c8f-b19b19bdaf55\spark-fba24b2b-5847-4b04-848c-90677d12ff99\spark-78373f0b-053b-4c43-bd7c-da733e58ab0d\16318572381424210185012_cache to C:\localapps\spark-1.2.1-bin-hadoop2.4\work\app-20150217165625-0006\0\.\spark-cassandra-connector_2.10-1.2.0-alpha2.jar
    15/02/17 16:56:50 INFO Executor: Adding file:/C:/localapps/spark-1.2.1-bin-hadoop2.4/work/app-20150217165625-0006/0/./spark-cassandra-connector_2.10-1.2.0-alpha2.jar to class loader

    (See the classpath-configuration sketch after this list of related reports.)

    DataStax JIRA | 2 years ago | Vijay Pawnarkar
    java.lang.ClassNotFoundException: com.datastax.spark.connector.rdd.partitioner.CassandraPartition
  2.

    How to resolve java.lang.ClassNotFoundException error for Apache Spark related web apps?

    Stack Overflow | 2 years ago | Nima
    java.lang.ClassNotFoundException: main.java.server.SimpleApp$1
  3.

    How to run spark-master with Eclipse, what am I doing wrong?

    Stack Overflow | 2 years ago
    java.lang.ClassNotFoundException: mavenj.testing123$1
  4.

    Apache Spark only in Windows standalone mode: java.lang.ClassNotFoundException

    Stack Overflow | 2 years ago | Ben
    java.lang.ClassNotFoundException: yyyyyyy.xxxxxxx.SomeClass$1
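
The reports above share the same pattern: a JAR appears to be fetched by the worker, yet the executor still cannot resolve one of its classes when deserializing a task. One way to make sure the JARs actually reach every executor's class loader is to list them explicitly on the SparkConf used to build the context. The following is a minimal, hypothetical Java sketch under that assumption; the master URL and JAR paths are placeholders, not values taken from the reports above.

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;

    public class ConnectorClasspathExample {
        public static void main(String[] args) {
            SparkConf conf = new SparkConf()
                    .setAppName("cassandra-connector-test")
                    .setMaster("spark://127.0.0.1:7077")               // placeholder master URL
                    .set("spark.cassandra.connection.host", "127.0.0.1")
                    // Ship both connector JARs (and the application JAR) to every executor
                    // so that classes such as
                    // com.datastax.spark.connector.rdd.partitioner.CassandraPartition
                    // can be resolved when tasks are deserialized. Paths are placeholders.
                    .setJars(new String[] {
                            "C:/jars/spark-cassandra-connector_2.10-1.2.0-alpha2.jar",
                            "C:/jars/spark-cassandra-connector-java_2.10-1.2.0-alpha2.jar",
                            "C:/jars/my-spark-app.jar"
                    });

            JavaSparkContext sc = new JavaSparkContext(conf);
            // ... build and run Cassandra-backed RDDs here ...
            sc.stop();
        }
    }

The same effect can be achieved with spark-submit's --jars option; the point of the sketch is only that every JAR whose classes end up inside serialized tasks has to be distributed to the executors, not merely present on the driver.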

Root Cause Analysis

  1. java.lang.ClassNotFoundException

    com.datastax.spark.connector.rdd.partitioner.CassandraPartition

    at java.net.URLClassLoader$1.run()
  2. Java RT
    Class.forName
    1. java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    2. java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    3. java.security.AccessController.doPrivileged(Native Method)
    4. java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    5. java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    6. java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    7. java.lang.Class.forName0(Native Method)
    8. java.lang.Class.forName(Class.java:274)
    8 frames
  3. Spark
    JavaDeserializationStream$$anon$1.resolveClass
    1. org.apache.spark.serializer.JavaDeserializationStream$$anon$1.resolveClass(JavaSerializer.scala:59)
    1 frame
  4. Java RT
    ObjectInputStream.readObject
    1. java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1612)
    2. java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517)
    3. java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
    4. java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
    5. java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
    6. java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
    7. java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
    8. java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
    9. java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
    9 frames
  5. Spark
    Executor$TaskRunner.run
    1. org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:62)
    2. org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:87)
    3. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:182)
    3 frames
  6. Java RT
    Thread.run
    1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    3. java.lang.Thread.run(Thread.java:745)
    3 frames
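
The breakdown shows the ClassNotFoundException being thrown while the executor deserializes the incoming task (JavaSerializerInstance.deserialize inside Executor$TaskRunner.run), i.e. before any user code runs, so the class must already be visible to the executor's class loader at that point. A small, hypothetical probe job (sketch only; nothing here is taken from the reports above) can confirm whether a given class is loadable on the executors. Note that the probe's own anonymous function class must itself be shipped (via spark-submit, setJars, or addJar), otherwise it fails with the same kind of "$1" ClassNotFoundException seen in the Stack Overflow reports above.

    import java.util.Arrays;
    import java.util.List;

    import org.apache.spark.SparkConf;
    import org.apache.spark.api.java.JavaSparkContext;
    import org.apache.spark.api.java.function.Function;

    public class ClasspathProbe {
        public static void main(String[] args) {
            // Master URL and JARs are expected to be supplied by spark-submit.
            SparkConf conf = new SparkConf().setAppName("classpath-probe");
            JavaSparkContext sc = new JavaSparkContext(conf);

            // Ask each executor whether it can load the class that failed to resolve
            // during task deserialization in the trace above.
            List<String> results = sc.parallelize(Arrays.asList(1, 2, 3, 4), 4)
                    .map(new Function<Integer, String>() {
                        @Override
                        public String call(Integer i) {
                            try {
                                Class.forName(
                                    "com.datastax.spark.connector.rdd.partitioner.CassandraPartition");
                                return "loaded";
                            } catch (ClassNotFoundException e) {
                                return "missing: " + e.getMessage();
                            }
                        }
                    })
                    .collect();

            System.out.println(results);
            sc.stop();
        }
    }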