java.lang.ExceptionInInitializerError

Stack trace

java.lang.ExceptionInInitializerError: null
    at $line41.$read$$iwC.<init>(<console>:7)[na:na]
    at $line41.$read.<init>(<console>:64)[na:na]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)[na:1.8.0_60]
    at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)[na:1.8.0_60]
    at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)[na:1.8.0_60]
    at java.lang.reflect.Constructor.newInstance(Constructor.java:422)[na:1.8.0_60]
    at java.lang.Class.newInstance(Class.java:442)[na:1.8.0_60]
    at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.resolveDirectOuterInstance(AnyObjectFactory.scala:92)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer$lzycompute(AnyObjectFactory.scala:81)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer(AnyObjectFactory.scala:78)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.reader.AnyObjectFactory$$anonfun$newInstance$1.apply$mcVI$sp(AnyObjectFactory.scala:113)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)[scala-library-2.10.5.jar:na]
    at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.newInstance(AnyObjectFactory.scala:112)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter$$anonfun$convertPF$1.applyOrElse(GettableDataToMappedTypeConverter.scala:264)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:44)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter.convert(GettableDataToMappedTypeConverter.scala:22)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.reader.ClassBasedRowReader.read(ClassBasedRowReader.scala:38)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)[scala-library-2.10.5.jar:na]
    at scala.collection.Iterator$$anon$13.next(Iterator.scala:372)[scala-library-2.10.5.jar:na]
    at com.datastax.spark.connector.util.CountingIterator.next(CountingIterator.scala:16)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    at scala.collection.Iterator$$anon$10.next(Iterator.scala:312)[scala-library-2.10.5.jar:na]
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)[scala-library-2.10.5.jar:na]
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
    at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)[scala-library-2.10.5.jar:na]
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)[scala-library-2.10.5.jar:na]
    at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)[scala-library-2.10.5.jar:na]
    at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)[scala-library-2.10.5.jar:na]
    at scala.collection.AbstractIterator.to(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
    at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)[scala-library-2.10.5.jar:na]
    at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
    at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)[scala-library-2.10.5.jar:na]
    at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
    at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)[spark-core_2.10-1.6.1.2.jar:5.0.0-SNAPSHOT]
    at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)[spark-core_2.10-1.6.1.2.jar:5.0.0-SNAPSHOT]
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    at org.apache.spark.scheduler.Task.run(Task.scala:89)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)[na:1.8.0_60]
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)[na:1.8.0_60]
    at java.lang.Thread.run(Thread.java:745)[na:1.8.0_60]
Caused by: java.lang.NullPointerException: null
    at $line4.$read$$iwC$$iwC.<init>(<console>:15)[na:na]
    at $line4.$read$$iwC.<init>(<console>:24)[na:na]
    at $line4.$read.<init>(<console>:26)[na:na]
    at $line4.$read$.<init>(<console>:30)[na:na]
    at $line4.$read$.<clinit>(<console>)[na:na]
    ... 44 more
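
Tip

A hedged reading of this trace, not a confirmed fix: the ExceptionInInitializerError is thrown while the connector's AnyObjectFactory tries to instantiate the REPL wrapper object ($line41.$read / $line4.$read$) on an executor, and that wrapper's static initializer dies with a NullPointerException. This pattern typically appears when the case class used as the row-mapping target was defined directly in the spark-shell REPL, so it is compiled as an inner class of the REPL wrapper and cannot be reconstructed on executors. A minimal sketch of the triggering pattern, assuming a hypothetical keyspace "ks", table "users", and case class User:

    // Entered directly in spark-shell: the case class becomes an inner
    // class of the REPL's $read/$iwC wrapper objects.
    case class User(id: Int, name: String)

    import com.datastax.spark.connector._

    // To build a User on an executor, the connector must first instantiate
    // the outer REPL wrapper; its <clinit> can NPE there, surfacing as the
    // ExceptionInInitializerError above.
    val users = sc.cassandraTable[User]("ks", "users")
    users.take(10)

A common workaround is to compile the case class into a separate jar so it is a top-level class rather than a REPL inner class, and add it to the shell (e.g. spark-shell --jars user-model.jar, where user-model.jar is a hypothetical name for your compiled model classes).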
