java.lang.ExceptionInInitializerError: null

DataStax JIRA | Russell Spitzer | 7 months ago
  1.

    The reflection code breaks with user-defined objects, but this only seems to occur when the master is not running in local mode. In DSE we don't see this problem unless `sqlContext.implicits._` is also imported in the spark-shell; in OSS that import is always made during REPL pre-initialization.

    {code}
    object SampleObject extends Serializable {
      case class ClassInObject(key: Int, value: String)
    }

    sc.cassandraTable[SampleObject.ClassInObject]("read_test", "simple_kv").count
    {code}

    {code}
    ERROR 2016-05-12 13:35:01,764 Logging.scala:95 - org.apache.spark.executor.Executor: Exception in task 0.0 in stage 3.0 (TID 3)
    java.lang.ExceptionInInitializerError: null
    	at $line41.$read$$iwC.<init>(<console>:7) ~[na:na]
    	at $line41.$read.<init>(<console>:64) ~[na:na]
    	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method) ~[na:1.8.0_60]
    	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) ~[na:1.8.0_60]
    	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) ~[na:1.8.0_60]
    	at java.lang.reflect.Constructor.newInstance(Constructor.java:422) ~[na:1.8.0_60]
    	at java.lang.Class.newInstance(Class.java:442) ~[na:1.8.0_60]
    	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.resolveDirectOuterInstance(AnyObjectFactory.scala:92) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer$lzycompute(AnyObjectFactory.scala:81) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer(AnyObjectFactory.scala:78) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory$$anonfun$newInstance$1.apply$mcVI$sp(AnyObjectFactory.scala:113) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141) ~[scala-library-2.10.5.jar:na]
    	at com.datastax.spark.connector.rdd.reader.AnyObjectFactory.newInstance(AnyObjectFactory.scala:112) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter$$anonfun$convertPF$1.applyOrElse(GettableDataToMappedTypeConverter.scala:264) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:44) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter.convert(GettableDataToMappedTypeConverter.scala:22) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.reader.ClassBasedRowReader.read(ClassBasedRowReader.scala:38) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at scala.collection.Iterator$$anon$11.next(Iterator.scala:328) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.Iterator$$anon$13.next(Iterator.scala:372) ~[scala-library-2.10.5.jar:na]
    	at com.datastax.spark.connector.util.CountingIterator.next(CountingIterator.scala:16) ~[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
    	at scala.collection.Iterator$$anon$10.next(Iterator.scala:312) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.Iterator$class.foreach(Iterator.scala:727) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.AbstractIterator.foreach(Iterator.scala:1157) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.AbstractIterator.to(Iterator.scala:1157) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252) ~[scala-library-2.10.5.jar:na]
    	at scala.collection.AbstractIterator.toArray(Iterator.scala:1157) ~[scala-library-2.10.5.jar:na]
    	at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328) ~[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    	at org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328) ~[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858) ~[spark-core_2.10-1.6.1.2.jar:5.0.0-SNAPSHOT]
    	at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858) ~[spark-core_2.10-1.6.1.2.jar:5.0.0-SNAPSHOT]
    	at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66) ~[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    	at org.apache.spark.scheduler.Task.run(Task.scala:89) ~[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    	at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214) ~[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
    	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) [na:1.8.0_60]
    	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) [na:1.8.0_60]
    	at java.lang.Thread.run(Thread.java:745) [na:1.8.0_60]
    Caused by: java.lang.NullPointerException: null
    	at $line4.$read$$iwC$$iwC.<init>(<console>:15) ~[na:na]
    	at $line4.$read$$iwC.<init>(<console>:24) ~[na:na]
    	at $line4.$read.<init>(<console>:26) ~[na:na]
    	at $line4.$read$.<init>(<console>:30) ~[na:na]
    	at $line4.$read$.<clinit>(<console>) ~[na:na]
    	... 44 common frames omitted
    {code}
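
    The trace shows the executor failing inside AnyObjectFactory.resolveDirectOuterInstance, which calls Class.newInstance on the REPL's $read wrapper to obtain an outer instance for the nested case class; the wrapper's static initializer depends on driver-side REPL state and throws the NPE. A minimal sketch of a workaround, assuming the same read_test.simple_kv table: define the case class at the top level (for example in a small jar shipped with --jars) so no wrapper instance has to be constructed on the executors. The file and class names here are hypothetical.

    {code}
    // SimpleKV.scala -- hypothetical helper source, compiled into a jar and
    // added with --jars. A top-level case class has no enclosing instance,
    // so the connector's reflective outer-instance lookup is never needed.
    case class SimpleKV(key: Int, value: String)
    {code}

    Then, in the spark-shell:

    {code}
    import com.datastax.spark.connector._  // provides sc.cassandraTable

    sc.cassandraTable[SimpleKV]("read_test", "simple_kv").count
    {code}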

    DataStax JIRA | 7 months ago | Russell Spitzer
    java.lang.ExceptionInInitializerError: null
  2.

    HQL Exception (org.hibernate.dialect.Dialect$3)

    Stack Overflow | 4 years ago | Hari
    java.lang.NullPointerException: null

    Root Cause Analysis

    1. java.lang.NullPointerException: null

      at $line4.$read$$iwC$$iwC.<init>()
    2. $line4
      $read$.<clinit>
      1. $line4.$read$$iwC$$iwC.<init>(<console>:15)[na:na]
      2. $line4.$read$$iwC.<init>(<console>:24)[na:na]
      3. $line4.$read.<init>(<console>:26)[na:na]
      4. $line4.$read$.<init>(<console>:30)[na:na]
      5. $line4.$read$.<clinit>(<console>)[na:na]
      5 frames
    3. $line41
      $read.<init>
      1. $line41.$read$$iwC.<init>(<console>:7)[na:na]
      2. $line41.$read.<init>(<console>:64)[na:na]
      2 frames
    4. Java RT
      Class.newInstance
      1. sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)[na:1.8.0_60]
      2. sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)[na:1.8.0_60]
      3. sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)[na:1.8.0_60]
      4. java.lang.reflect.Constructor.newInstance(Constructor.java:422)[na:1.8.0_60]
      5. java.lang.Class.newInstance(Class.java:442)[na:1.8.0_60]
      5 frames
    5. spark-cassandra-connector
      AnyObjectFactory$$anonfun$newInstance$1.apply$mcVI$sp
      1. com.datastax.spark.connector.rdd.reader.AnyObjectFactory.resolveDirectOuterInstance(AnyObjectFactory.scala:92)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      2. com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer$lzycompute(AnyObjectFactory.scala:81)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      3. com.datastax.spark.connector.rdd.reader.AnyObjectFactory.com$datastax$spark$connector$rdd$reader$AnyObjectFactory$$argBuffer(AnyObjectFactory.scala:78)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      4. com.datastax.spark.connector.rdd.reader.AnyObjectFactory$$anonfun$newInstance$1.apply$mcVI$sp(AnyObjectFactory.scala:113)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      4 frames
    6. Scala
      Range.foreach$mVc$sp
      1. scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)[scala-library-2.10.5.jar:na]
      1 frame
    7. spark-cassandra-connector
      CassandraTableScanRDD$$anonfun$17.apply
      1. com.datastax.spark.connector.rdd.reader.AnyObjectFactory.newInstance(AnyObjectFactory.scala:112)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      2. com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter$$anonfun$convertPF$1.applyOrElse(GettableDataToMappedTypeConverter.scala:264)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      3. com.datastax.spark.connector.types.TypeConverter$class.convert(TypeConverter.scala:44)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      4. com.datastax.spark.connector.rdd.reader.GettableDataToMappedTypeConverter.convert(GettableDataToMappedTypeConverter.scala:22)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      5. com.datastax.spark.connector.rdd.reader.ClassBasedRowReader.read(ClassBasedRowReader.scala:38)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      6. com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      7. com.datastax.spark.connector.rdd.CassandraTableScanRDD$$anonfun$17.apply(CassandraTableScanRDD.scala:309)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      7 frames
    8. Scala
      Iterator$$anon$13.next
      1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)[scala-library-2.10.5.jar:na]
      2. scala.collection.Iterator$$anon$13.next(Iterator.scala:372)[scala-library-2.10.5.jar:na]
      2 frames
    9. spark-cassandra-connector
      CountingIterator.next
      1. com.datastax.spark.connector.util.CountingIterator.next(CountingIterator.scala:16)[spark-cassandra-connector_2.10-1.6.0-M2-26-g66c3575.jar:1.6.0-M2-26-g66c3575]
      1 frame
    10. Scala
      AbstractIterator.toArray
      1. scala.collection.Iterator$$anon$10.next(Iterator.scala:312)[scala-library-2.10.5.jar:na]
      2. scala.collection.Iterator$class.foreach(Iterator.scala:727)[scala-library-2.10.5.jar:na]
      3. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
      4. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)[scala-library-2.10.5.jar:na]
      5. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)[scala-library-2.10.5.jar:na]
      6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)[scala-library-2.10.5.jar:na]
      7. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)[scala-library-2.10.5.jar:na]
      8. scala.collection.AbstractIterator.to(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
      9. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)[scala-library-2.10.5.jar:na]
      10. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
      11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)[scala-library-2.10.5.jar:na]
      12. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)[scala-library-2.10.5.jar:na]
      12 frames
    11. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
      2. org.apache.spark.rdd.RDD$$anonfun$take$1$$anonfun$28.apply(RDD.scala:1328)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
      3. org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)[spark-core_2.10-1.6.1.2.jar:5.0.0-SNAPSHOT]
      4. org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1858)[spark-core_2.10-1.6.1.2.jar:5.0.0-SNAPSHOT]
      5. org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
      6. org.apache.spark.scheduler.Task.run(Task.scala:89)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
      7. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)[spark-core_2.10-1.6.1.2.jar:1.6.1.2]
      7 frames
    12. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)[na:1.8.0_60]
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)[na:1.8.0_60]
      3. java.lang.Thread.run(Thread.java:745)[na:1.8.0_60]
      3 frames
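
    For illustration only (the names below are hypothetical, and this is not connector source): the frame groups above reduce to instantiating a class nested inside another class via reflection. An inner class's constructor takes the enclosing instance as its first argument, so the enclosing class must be constructed first; in the REPL that construction runs $line4.$read$.<clinit>, which needs driver-side state and throws the NullPointerException reported as the root cause.

    {code}
    // Demo.scala -- illustrative sketch of the reflective pattern, not connector code.
    class Outer {                                 // stands in for the REPL's $read/$iwC wrapper
      case class Inner(key: Int, value: String)   // stands in for SampleObject.ClassInObject
    }

    object Demo extends App {
      // The connector performs this step reflectively (Class.newInstance).
      // On an executor, the real wrapper's initializer references REPL state
      // that exists only on the driver, so it throws, surfacing as
      // java.lang.ExceptionInInitializerError.
      val outer = new Outer

      // An inner class's constructor takes the enclosing instance first:
      val innerCls = Class.forName("Outer$Inner")
      val ctor     = innerCls.getConstructors.head        // (Outer, int, String)
      val row      = ctor.newInstance(outer, Int.box(1), "v")
      println(row)                                        // prints Inner(1,v)
    }
    {code}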