org.apache.spark.SparkException: Task failed while writing rows.

DataStax JIRA | Dmytro Popovych | 1 year ago
  1. 0

    It would be nice to add [the same fix|https://github.com/datastax/spark-cassandra-connector/commit/6601ce67f6ea3aff5f6a8132c89bdba2bf1d1d20#diff-3692b29c789a7d0d0e9238c662c693d6R49] for values in complex types (set/list/map). Sorry, no PR is attached; I fixed the problem locally, but I feel my Scala skills aren't good enough to contribute to the project :)
    {code}
    5/09/19 20:33:38 WARN TaskSetManager: Lost task 2.0 in stage 0.0 (TID 13, 10.0.2.218): org.apache.spark.SparkException: Task failed while writing rows.
        at org.apache.spark.sql.execution.datasources.DefaultWriterContainer.writeRows(WriterContainer.scala:251)
        at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1$$anonfun$apply$mcV$sp$3.apply(InsertIntoHadoopFsRelation.scala:150)
        at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1$$anonfun$apply$mcV$sp$3.apply(InsertIntoHadoopFsRelation.scala:150)
        at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
        at org.apache.spark.scheduler.Task.run(Task.scala:88)
        at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
    Caused by: java.lang.ClassCastException: java.util.Date cannot be cast to java.sql.Timestamp
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$TimestampConverter$.toCatalystImpl(CatalystTypeConverters.scala:308)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$CatalystTypeConverter.toCatalyst(CatalystTypeConverters.scala:102)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter$$anonfun$toCatalystImpl$4.apply(CatalystTypeConverters.scala:205)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter$$anonfun$toCatalystImpl$4.apply(CatalystTypeConverters.scala:203)
        at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
        at scala.collection.immutable.Map$Map2.foreach(Map.scala:130)
        at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter.toCatalystImpl(CatalystTypeConverters.scala:203)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter.toCatalystImpl(CatalystTypeConverters.scala:188)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$CatalystTypeConverter.toCatalyst(CatalystTypeConverters.scala:102)
        at org.apache.spark.sql.catalyst.CatalystTypeConverters$$anonfun$createToCatalystConverter$2.apply(CatalystTypeConverters.scala:396)
        at org.apache.spark.sql.execution.RDDConversions$$anonfun$rowToRowRdd$1$$anonfun$apply$2.apply(ExistingRDD.scala:63)
        at org.apache.spark.sql.execution.RDDConversions$$anonfun$rowToRowRdd$1$$anonfun$apply$2.apply(ExistingRDD.scala:60)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
        at org.apache.spark.sql.execution.datasources.DefaultWriterContainer.writeRows(WriterContainer.scala:242)
        ... 8 more
    {code}

    DataStax JIRA | 1 year ago | Dmytro Popovych
    org.apache.spark.SparkException: Task failed while writing rows.
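
    A hedged workaround sketch for the situation described above (the row and field names are hypothetical): when the connector hands back java.util.Date values nested in a map while the DataFrame schema expects TimestampType, converting the nested values to java.sql.Timestamp before the data reaches Spark SQL avoids the failing cast, mirroring what the linked commit does for top-level columns.

    {code}
    import java.sql.Timestamp
    import java.util.Date

    // Hypothetical row shape: the map values arrive as java.util.Date, but Spark SQL's
    // TimestampConverter expects java.sql.Timestamp, so the cast fails inside MapConverter.
    case class Event(id: String, attributes: Map[String, Date])

    // Normalize nested Date values to Timestamp before calling toDF()/createDataFrame().
    def normalize(e: Event): (String, Map[String, Timestamp]) =
      (e.id, e.attributes.map { case (k, d) => k -> new Timestamp(d.getTime) })
    {code}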
  2. 0

    Entity with java.util.Date column gives ClassCastException in BIRT

    objectdb.com | 1 year ago
    java.lang.ClassCastException: java.util.Date cannot be cast to java.sql.Timestamp
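
    This report and the Spark one above share the same root mistake: java.sql.Timestamp is the subclass of java.util.Date, not the other way around, so a plain Date instance can never be downcast to Timestamp and must be converted instead. A minimal Scala illustration:

    {code}
    import java.sql.Timestamp
    import java.util.Date

    val d: Date = new Date()
    // d.asInstanceOf[Timestamp]       // throws ClassCastException: java.util.Date
    //                                 // cannot be cast to java.sql.Timestamp
    val t = new Timestamp(d.getTime)   // correct: build a Timestamp from the epoch millis
    {code}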
  3. 0

    The Calculator step can convert Timestamp fields to Date (see calculator1.ktr), and Date fields can be converted to the Timestamp type. From a user's perspective this raises the expectation that we can, in principle, use Timestamp fields wherever it is appropriate for Date fields to appear. From a developer's perspective it also holds, since the ValueMeta interface for timestamps is a subclass of the one for dates, and the internal data types are java.sql.Timestamp and java.util.Date, of which Timestamp is a proper subclass. However, this does not work as expected. When we create a new field (Calculator2.ktr) of the Date type with a "Date A + B days" calculation that adds an integer (B) to a timestamp field (A), the error is:

    2013/11/15 14:54:18 - Calculator.0 - ERROR (version TRUNK-SNAPSHOT, build 1 from 2013-11-13 09.29.02 by rbouman) : !Calculator.ErrorInStepRunning :
    2013/11/15 14:54:18 - Calculator.0 - resultType: 9; targetMeta: 3
    2013/11/15 14:54:18 - Calculator.0 - java.util.Date cannot be cast to java.sql.Timestamp
    2013/11/15 14:54:18 - Calculator.0 - !
    2013/11/15 14:54:18 - Calculator.0 - ERROR (version TRUNK-SNAPSHOT, build 1 from 2013-11-13 09.29.02 by rbouman) : Unexpected error
    2013/11/15 14:54:18 - Calculator.0 - ERROR (version TRUNK-SNAPSHOT, build 1 from 2013-11-13 09.29.02 by rbouman) : org.pentaho.di.core.exception.KettleStepException:
    2013/11/15 14:54:18 - Calculator.0 - Error while running the step
    2013/11/15 14:54:18 - Calculator.0 - resultType: 9; targetMeta: 3
    2013/11/15 14:54:18 - Calculator.0 - java.util.Date cannot be cast to java.sql.Timestamp
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.processRow(Calculator.java:198)
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.trans.step.RunThread.run(RunThread.java:60)
    2013/11/15 14:54:18 - Calculator.0 - at java.lang.Thread.run(Thread.java:722)
    2013/11/15 14:54:18 - Calculator.0 - Caused by: org.pentaho.di.core.exception.KettleValueException:
    2013/11/15 14:54:18 - Calculator.0 - resultType: 9; targetMeta: 3
    2013/11/15 14:54:18 - Calculator.0 - java.util.Date cannot be cast to java.sql.Timestamp
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.calcFields(Calculator.java:793)
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.processRow(Calculator.java:179)
    2013/11/15 14:54:18 - Calculator.0 - ... 2 more
    2013/11/15 14:54:18 - Calculator.0 - Caused by: java.lang.ClassCastException: java.util.Date cannot be cast to java.sql.Timestamp
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.core.row.value.ValueMetaTimestamp.getTimestamp(ValueMetaTimestamp.java:124)
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.core.row.value.ValueMetaTimestamp.getDate(ValueMetaTimestamp.java:66)
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.core.row.value.ValueMetaBase.convertData(ValueMetaBase.java:3495)
    2013/11/15 14:54:18 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.calcFields(Calculator.java:791)
    2013/11/15 14:54:18 - Calculator.0 - ... 3 more

    When we use the same calculation but set the result type to Timestamp, a different error occurs:

    2013/11/15 14:56:54 - Calculator.0 - ERROR (version TRUNK-SNAPSHOT, build 1 from 2013-11-13 09.29.02 by rbouman) : !Calculator.ErrorInStepRunning :
    2013/11/15 14:56:54 - Calculator.0 - resultType: 9; targetMeta: 9
    2013/11/15 14:56:54 - Calculator.0 - Timestamp : can't be converted to a timestamp
    2013/11/15 14:56:54 - Calculator.0 - !
    2013/11/15 14:56:54 - Calculator.0 - ERROR (version TRUNK-SNAPSHOT, build 1 from 2013-11-13 09.29.02 by rbouman) : Unexpected error
    2013/11/15 14:56:54 - Calculator.0 - ERROR (version TRUNK-SNAPSHOT, build 1 from 2013-11-13 09.29.02 by rbouman) : org.pentaho.di.core.exception.KettleStepException:
    2013/11/15 14:56:54 - Calculator.0 - Error while running the step
    2013/11/15 14:56:54 - Calculator.0 - resultType: 9; targetMeta: 9
    2013/11/15 14:56:54 - Calculator.0 - Timestamp : can't be converted to a timestamp
    2013/11/15 14:56:54 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.processRow(Calculator.java:198)
    2013/11/15 14:56:54 - Calculator.0 - at org.pentaho.di.trans.step.RunThread.run(RunThread.java:60)
    2013/11/15 14:56:54 - Calculator.0 - at java.lang.Thread.run(Thread.java:722)
    2013/11/15 14:56:54 - Calculator.0 - Caused by: org.pentaho.di.core.exception.KettleValueException:
    2013/11/15 14:56:54 - Calculator.0 - resultType: 9; targetMeta: 9
    2013/11/15 14:56:54 - Calculator.0 - Timestamp : can't be converted to a timestamp
    2013/11/15 14:56:54 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.calcFields(Calculator.java:793)
    2013/11/15 14:56:54 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.processRow(Calculator.java:179)
    2013/11/15 14:56:54 - Calculator.0 - ... 2 more
    2013/11/15 14:56:54 - Calculator.0 - Caused by: org.pentaho.di.core.exception.KettleValueException:
    2013/11/15 14:56:54 - Calculator.0 - Timestamp : can't be converted to a timestamp
    2013/11/15 14:56:54 - Calculator.0 - at org.pentaho.di.core.row.value.ValueMetaTimestamp.convertData(ValueMetaTimestamp.java:401)
    2013/11/15 14:56:54 - Calculator.0 - at org.pentaho.di.trans.steps.calculator.Calculator.calcFields(Calculator.java:791)
    2013/11/15 14:56:54 - Calculator.0 - ... 3 more

    Pentaho BI Platform Tracking | 3 years ago | Roland Bouman
    org.pentaho.di.core.exception.KettleStepException: Error while running the step resultType: 9; targetMeta: 3 java.util.Date cannot be cast to java.sql.Timestamp
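
    The trace points at ValueMetaTimestamp.getTimestamp performing the failing cast. A defensive conversion along the following lines (a sketch only, not the actual Kettle code; the helper name is made up) would accept both types:

    {code}
    import java.sql.Timestamp
    import java.util.Date

    // Sketch of a defensive getTimestamp: fall back to constructing a Timestamp
    // when the stored object is a plain java.util.Date, instead of casting blindly.
    // The Timestamp case must come first, since a Timestamp is also a Date.
    def toTimestamp(obj: AnyRef): Timestamp = obj match {
      case t: Timestamp => t
      case d: Date      => new Timestamp(d.getTime)
      case other        => throw new IllegalArgumentException(s"Not a date value: $other")
    }
    {code}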

    Root Cause Analysis

    1. java.lang.ClassCastException

      java.util.Date cannot be cast to java.sql.Timestamp

      at org.apache.spark.sql.catalyst.CatalystTypeConverters$TimestampConverter$.toCatalystImpl()
    2. Spark Project Catalyst
      CatalystTypeConverters$MapConverter$$anonfun$toCatalystImpl$4.apply
      1. org.apache.spark.sql.catalyst.CatalystTypeConverters$TimestampConverter$.toCatalystImpl(CatalystTypeConverters.scala:308)
      2. org.apache.spark.sql.catalyst.CatalystTypeConverters$CatalystTypeConverter.toCatalyst(CatalystTypeConverters.scala:102)
      3. org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter$$anonfun$toCatalystImpl$4.apply(CatalystTypeConverters.scala:205)
      4. org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter$$anonfun$toCatalystImpl$4.apply(CatalystTypeConverters.scala:203)
      4 frames
    3. Scala
      TraversableLike$WithFilter.foreach
      1. scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:772)
      2. scala.collection.immutable.Map$Map2.foreach(Map.scala:130)
      3. scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:771)
      3 frames
    4. Spark Project Catalyst
      CatalystTypeConverters$$anonfun$createToCatalystConverter$2.apply
      1. org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter.toCatalystImpl(CatalystTypeConverters.scala:203)
      2. org.apache.spark.sql.catalyst.CatalystTypeConverters$MapConverter.toCatalystImpl(CatalystTypeConverters.scala:188)
      3. org.apache.spark.sql.catalyst.CatalystTypeConverters$CatalystTypeConverter.toCatalyst(CatalystTypeConverters.scala:102)
      4. org.apache.spark.sql.catalyst.CatalystTypeConverters$$anonfun$createToCatalystConverter$2.apply(CatalystTypeConverters.scala:396)
      4 frames
    5. Spark Project SQL
      RDDConversions$$anonfun$rowToRowRdd$1$$anonfun$apply$2.apply
      1. org.apache.spark.sql.execution.RDDConversions$$anonfun$rowToRowRdd$1$$anonfun$apply$2.apply(ExistingRDD.scala:63)
      2. org.apache.spark.sql.execution.RDDConversions$$anonfun$rowToRowRdd$1$$anonfun$apply$2.apply(ExistingRDD.scala:60)
      2 frames
    6. Scala
      Iterator$$anon$11.next
      1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      1 frame
    7. org.apache.spark
      InsertIntoHadoopFsRelation$$anonfun$run$1$$anonfun$apply$mcV$sp$3.apply
      1. org.apache.spark.sql.execution.datasources.DefaultWriterContainer.writeRows(WriterContainer.scala:242)
      2. org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1$$anonfun$apply$mcV$sp$3.apply(InsertIntoHadoopFsRelation.scala:150)
      3. org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1$$anonfun$apply$mcV$sp$3.apply(InsertIntoHadoopFsRelation.scala:150)
      3 frames
    8. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:66)
      2. org.apache.spark.scheduler.Task.run(Task.scala:88)
      3. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:214)
      3 frames
    9. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
      3. java.lang.Thread.run(Thread.java:745)
      3 frames
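
    Putting the frames together: the rows reaching the writer carry java.util.Date values inside a map column whose declared type is TimestampType, so the per-value converter throws. A minimal repro sketch against Spark 1.5-era APIs (the output path, field names, and master setting are made up):

    {code}
    import java.util.Date
    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql.{Row, SQLContext}
    import org.apache.spark.sql.types._

    object Repro {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("repro").setMaster("local[2]"))
        val sqlContext = new SQLContext(sc)

        // The schema promises TimestampType for the map values...
        val schema = StructType(Seq(
          StructField("id", StringType),
          StructField("attributes", MapType(StringType, TimestampType))))

        // ...but the rows carry java.util.Date, as the connector produced for map columns.
        val rows = sc.parallelize(Seq(Row("a", Map("seen" -> new Date()))))

        val df = sqlContext.createDataFrame(rows, schema)
        df.write.parquet("/tmp/ccl-repro")  // fails: java.util.Date cannot be cast to java.sql.Timestamp
        sc.stop()
      }
    }
    {code}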