java.lang.ClassCastException: org.apache.spark.sql.catalyst.expressions.MutableAny cannot be cast to org.apache.spark.sql.catalyst.expressions.MutableInt

Apache's JIRA Issue Tracker | Michael Armbrust | 2 years ago
tip
Your exception is missing from the Samebug knowledge base.
Here are the best solutions we found on the Internet.
Click on the checkmark to mark the helpful solution and get rewards for your help.
  1. 0

    From the user list. It looks like dates are not implemented correctly in in-memory caching. We should also check the JDBC datasource support for date. {code} Stack trace of an exception being reported since upgrade to 1.3.0: java.lang.ClassCastException: java.sql.Date cannot be cast to java.lang.Integer at scala.runtime.BoxesRunTime.unboxToInt(BoxesRunTime.java:105) ~[scala-library-2.11.6.jar:na] at org.apache.spark.sql.catalyst.expressions.GenericRow.getInt(rows.scala:83) ~[spark-catalyst_2.11-1.3.0.jar:1.3.0] at org.apache.spark.sql.columnar.IntColumnStats.gatherStats(ColumnStats.scala:191) ~[spark-sql_2.11-1.3.0.jar:1.3.0] at org.apache.spark.sql.columnar.NullableColumnBuilder$class.appendFrom(NullableColumnBuilder.scala:56) ~[spark-sql_2.11-1.3.0.jar:1.3.0] at org.apache.spark.sql.columnar.NativeColumnBuilder.org$apache$spark$sql$columnar$compression$CompressibleColumnBuilder$$super$appendFrom(ColumnBuilder.scala:87) ~[spark-sql_2.11-1.3.0.jar:1.3.0] at org.apache.spark.sql.columnar.compression.CompressibleColumnBuilder$class.appendFrom(CompressibleColumnBuilder.scala:78) ~[spark-sql_2.11-1.3.0.jar:1.3.0] at org.apache.spark.sql.columnar.NativeColumnBuilder.appendFrom(ColumnBuilder.scala:87) ~[spark-sql_2.11-1.3.0.jar:1.3.0] at org.apache.spark.sql.columnar.InMemoryRelation$$anonfun$3$$anon$1.next(InMemoryColumnarTableScan.scala:135) ~[spark-sql_2.11-1.3.0.jar:1.3.0] at {code}

    Apache's JIRA Issue Tracker | 2 years ago | Michael Armbrust
    java.lang.ClassCastException: org.apache.spark.sql.catalyst.expressions.MutableAny cannot be cast to org.apache.spark.sql.catalyst.expressions.MutableInt

    Root Cause Analysis

    1. java.lang.ClassCastException

      org.apache.spark.sql.catalyst.expressions.MutableAny cannot be cast to org.apache.spark.sql.catalyst.expressions.MutableInt

      at org.apache.spark.sql.catalyst.expressions.SpecificMutableRow.getInt()
    2. Spark Project Catalyst
      SpecificMutableRow.getInt
      1. org.apache.spark.sql.catalyst.expressions.SpecificMutableRow.getInt(SpecificMutableRow.scala:248)[spark-catalyst_2.11-1.3.0.jar:1.3.0]
      1 frame
    3. Spark Project SQL
      InMemoryRelation$$anonfun$3$$anon$1.next
      1. org.apache.spark.sql.columnar.IntColumnStats.gatherStats(ColumnStats.scala:191)[spark-sql_2.11-1.3.0.jar:1.3.0]
      2. org.apache.spark.sql.columnar.NullableColumnBuilder$class.appendFrom(NullableColumnBuilder.scala:56)[spark-sql_2.11-1.3.0.jar:1.3.0]
      3. org.apache.spark.sql.columnar.NativeColumnBuilder.org$apache$spark$sql$columnar$compression$CompressibleColumnBuilder$$super$appendFrom(ColumnBuilder.scala:87)[spark-sql_2.11-1.3.0.jar:1.3.0]
      4. org.apache.spark.sql.columnar.compression.CompressibleColumnBuilder$class.appendFrom(CompressibleColumnBuilder.scala:78)[spark-sql_2.11-1.3.0.jar:1.3.0]
      5. org.apache.spark.sql.columnar.NativeColumnBuilder.appendFrom(ColumnBuilder.scala:87)[spark-sql_2.11-1.3.0.jar:1.3.0]
      6. org.apache.spark.sql.columnar.InMemoryRelation$$anonfun$3$$anon$1.next(InMemoryColumnarTableScan.scala:135)[spark-sql_2.11-1.3.0.jar:1.3.0]
      7. org.apache.spark.sql.columnar.InMemoryRelation$$anonfun$3$$anon$1.next(InMemoryColumnarTableScan.scala:111)[spark-sql_2.11-1.3.0.jar:1.3.0]
      7 frames
    4. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.storage.MemoryStore.unrollSafely(MemoryStore.scala:249)[spark-core_2.11-1.3.0.jar:1.3.0]
      2. org.apache.spark.CacheManager.putInBlockManager(CacheManager.scala:172)[spark-core_2.11-1.3.0.jar:1.3.0]
      3. org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:79)[spark-core_2.11-1.3.0.jar:1.3.0]
      4. org.apache.spark.rdd.RDD.iterator(RDD.scala:242)[spark-core_2.11-1.3.0.jar:1.3.0]
      5. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)[spark-core_2.11-1.3.0.jar:1.3.0]
      6. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)[spark-core_2.11-1.3.0.jar:1.3.0]
      7. org.apache.spark.rdd.RDD.iterator(RDD.scala:244)[spark-core_2.11-1.3.0.jar:1.3.0]
      8. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)[spark-core_2.11-1.3.0.jar:1.3.0]
      9. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)[spark-core_2.11-1.3.0.jar:1.3.0]
      10. org.apache.spark.rdd.RDD.iterator(RDD.scala:244)[spark-core_2.11-1.3.0.jar:1.3.0]
      11. org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:61)[spark-core_2.11-1.3.0.jar:1.3.0]
      12. org.apache.spark.scheduler.Task.run(Task.scala:64)[spark-core_2.11-1.3.0.jar:1.3.0]
      13. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)[spark-core_2.11-1.3.0.jar:1.3.0]
      13 frames
    5. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)[na:1.8.0_11]
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)[na:1.8.0_11]
      3. java.lang.Thread.run(Thread.java:745)[na:1.8.0_11]
      3 frames