scala.MatchError: TimestampType (of class org.apache.spark.sql.catalyst.types.TimestampType$)

nabble.com | 4 months ago
Similar reports:

  1. Apache Spark User List - spark sql: timestamp in json - fails
     nabble.com | 4 months ago
     scala.MatchError: TimestampType (of class org.apache.spark.sql.catalyst.types.TimestampType$)
  2. eco-release-metadata/RELEASENOTES.1.2.0.md at master · aw-was-here/eco-release-metadata · GitHub
     github.com | 3 months ago
     scala.MatchError: TimestampType (of class org.apache.spark.sql.catalyst.types.TimestampType$)
  3. GitHub comment 2#132493746
     GitHub | 1 year ago | d6y
     scala.MatchError: 0 (of class java.lang.Integer)
  4. Facing "scala.MatchError: 1201 (of class java.lang.Integer)" while creating Data frame
     Stack Overflow | 6 months ago | Priyaranjan Swain
     scala.MatchError: 1201 (of class java.lang.Integer)

    Root Cause Analysis

    1. scala.MatchError

      TimestampType (of class org.apache.spark.sql.catalyst.types.TimestampType$)

      at org.apache.spark.sql.json.JsonRDD$.enforceCorrectType()
    2. Spark Project SQL
      JsonRDD$$anonfun$org$apache$spark$sql$json$JsonRDD$$asRow$1$$anonfun$apply$12.apply
      1. org.apache.spark.sql.json.JsonRDD$.enforceCorrectType(JsonRDD.scala:348)
      2. org.apache.spark.sql.json.JsonRDD$$anonfun$org$apache$spark$sql$json$JsonRDD$$asRow$1$$anonfun$apply$12.apply(JsonRDD.scala:381)
      2 frames
    3. Scala
      Option.map
      1. scala.Option.map(Option.scala:145)
      1 frame
    4. Spark Project SQL
      JsonRDD$$anonfun$org$apache$spark$sql$json$JsonRDD$$asRow$1.apply
      1. org.apache.spark.sql.json.JsonRDD$$anonfun$org$apache$spark$sql$json$JsonRDD$$asRow$1.apply(JsonRDD.scala:380)
      2. org.apache.spark.sql.json.JsonRDD$$anonfun$org$apache$spark$sql$json$JsonRDD$$asRow$1.apply(JsonRDD.scala:365)
      2 frames
    5. Scala
      ArraySeq.foreach
      1. scala.collection.mutable.ArraySeq.foreach(ArraySeq.scala:73)
      1 frame
    6. Spark Project SQL
      JsonRDD$$anonfun$jsonStringToRow$1.apply
      1. org.apache.spark.sql.json.JsonRDD$.org$apache$spark$sql$json$JsonRDD$$asRow(JsonRDD.scala:365)
      2. org.apache.spark.sql.json.JsonRDD$$anonfun$jsonStringToRow$1.apply(JsonRDD.scala:38)
      3. org.apache.spark.sql.json.JsonRDD$$anonfun$jsonStringToRow$1.apply(JsonRDD.scala:38)
      3 frames
    7. Scala
      AbstractIterator.toArray
      1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      2. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      3. scala.collection.Iterator$class.foreach(Iterator.scala:727)
      4. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
      5. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
      6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
      7. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
      8. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
      9. scala.collection.AbstractIterator.to(Iterator.scala:1157)
      10. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
      11. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
      12. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
      13. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
      13 frames
    8. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.rdd.RDD$$anonfun$16.apply(RDD.scala:774)
      2. org.apache.spark.rdd.RDD$$anonfun$16.apply(RDD.scala:774)
      3. org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
      4. org.apache.spark.SparkContext$$anonfun$runJob$4.apply(SparkContext.scala:1121)
      5. org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62)
      6. org.apache.spark.scheduler.Task.run(Task.scala:54)
      7. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:180)
      7 frames
    9. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
      3. java.lang.Thread.run(Thread.java:745)
      3 frames
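    For reference, below is a minimal sketch of the kind of job that hits this error. It assumes Spark 1.1.x, where JsonRDD.enforceCorrectType (JsonRDD.scala:348 in the trace above) had no case for TimestampType; the 1.2.0 release-notes entry listed above suggests the gap was closed in Spark 1.2.0. The app name, field names, and sample JSON are made up for illustration, and the imports reflect the pre-1.3 API where the SQL data types were exposed through the org.apache.spark.sql package object.

    ```scala
    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.sql._ // Spark 1.1.x: StructType, StructField, StringType, TimestampType

    object TimestampJsonRepro {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(new SparkConf().setAppName("timestamp-json-repro").setMaster("local[*]"))
        val sqlContext = new SQLContext(sc)

        // One JSON record whose "ts" field holds a timestamp-formatted string (illustrative data).
        val json = sc.parallelize(Seq("""{"event": "login", "ts": "2014-10-07 12:34:56"}"""))

        // Declaring "ts" as TimestampType is what trips the failure: when the row is
        // materialized, enforceCorrectType pattern-matches on the target data type,
        // finds no case for TimestampType, and throws scala.MatchError in the executor task.
        val schema = StructType(Seq(
          StructField("event", StringType, nullable = true),
          StructField("ts", TimestampType, nullable = true)))

        val rows = sqlContext.jsonRDD(json, schema)
        rows.collect().foreach(println) // fails: scala.MatchError: TimestampType (of class ...TimestampType$)

        sc.stop()
      }
    }
    ```

    The workarounds reported at the time were to declare such fields as StringType and cast them in SQL after loading, or to move to a Spark release whose JSON reader handles TimestampType.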