Did you search Google with only the first line of a Java stack trace?

If you paste your entire stack trace, including the exception message, we can recommend more relevant solutions and speed up your debugging. Try a sample exception.

Recommended solutions based on your search

Solutions on the web

java.lang.NullPointerException: null	at $line4.$read$$iwC$$iwC.(:15)	at $line4.$read$$iwC.(:24)	at $line4.$read.(:26)	at $line4.$read$.(:30)	at $line4.$read$.()	at $line41.$read$$iwC.(:7)	at $line41.$read.(:64)	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)	at java.lang.reflect.Constructor.newInstance(Constructor.java:422)	at java.lang.Class.newInstance(Class.java:442)	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.resolveDirectOuterInstance(AnyObjectFactory.scala:92)	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer$lzycompute(AnyObjectFactory.scala:81)	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer(AnyObjectFactory.scala:78)	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory$$anonfun$newInstance$1.apply$mcVI$sp(AnyObjectFactory.scala:113)	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.newInstance(AnyObjectFactory.scala:112)	at com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter$$anonfun$convertPF$1.applyOrElse(GettableDataToMappedTypeConverter.scala:264)	at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:44)	at com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter.convert(GettableDataToMappedTypeConverter.scala:22)	at com.datastax.spark.connector.rdd.reader.ClassBasedRowReader.read(ClassBasedRowReader.scala:38)	at com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309)	at 
com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309)	at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)	at scala.collection.Iterator$$anon$13.next(Iterator.scala:372)	at com.datastax.spark.connector.util.CountingIterator.next(CountingIterator.scala:16)	at scala.collection.Iterator$$anon$10.next(Iterator.scala:312)	at scala.collection.Iterator$class.foreach(Iterator.scala:727)	at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)	at scala.collection.AbstractIterator.to(Iterator.scala:1157)	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)	at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)	at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)	at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328)	at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328)	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)	at org.apache.spark.scheduler.Task.run(Task.scala:89)	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)	at java.lang.Thread.run(Thread.java:745)