org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.parse.ASTNode cannot be cast to org.antlr.runtime.tree.CommonTree;

  1. 0

    Spark-hive exception while using hive context to run query: org.apache.spark.sql.AnalysisException

    Stack Overflow | 3 months ago | Mayur Maheshwari
    org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.parse.ASTNode cannot be cast to org.antlr.runtime.tree.CommonTree;
  2. 0

    how to add archive files using HiveContext.sql?

    Stack Overflow | 8 months ago | user6122449
    org.apache.spark.sql.AnalysisException: cannot recognize input near 'add' 'archive' 'hdfs'; line 1 pos 0
  3. 0

    QA - SPARK Parallel unloader - INTERSECT and EXCEPT not working

    GitHub | 6 months ago | Pyrobal
    org.apache.spark.sql.AnalysisException: missing EOF at 'select' near 'EXCEPT'; line 1 pos 79
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    [jira] [Created] (SPARK-13588) Unable to Map Parquet file to Hive Table using HiveContext

    spark-issues | 9 months ago | Akshat Thakar (JIRA)
    org.apache.spark.sql.AnalysisException: missing EOF at 'stored' near 'cdc_new'; line 1 pos 44

    1 unregistered visitor
    Not finding the right solution?
    Take a tour to get the most out of Samebug.

    Tired of useless tips?

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. org.apache.spark.sql.AnalysisException

      org.apache.hadoop.hive.ql.parse.ASTNode cannot be cast to org.antlr.runtime.tree.CommonTree;

      at org.apache.spark.sql.hive.HiveQl$.createPlan()
    2. Spark Project Hive
      ExtendedHiveQlParser$$anonfun$hiveQl$1.apply
      1. org.apache.spark.sql.hive.HiveQl$.createPlan(HiveQl.scala:324)
      2. org.apache.spark.sql.hive.ExtendedHiveQlParser$$anonfun$hiveQl$1.apply(ExtendedHiveQlParser.scala:41)
      3. org.apache.spark.sql.hive.ExtendedHiveQlParser$$anonfun$hiveQl$1.apply(ExtendedHiveQlParser.scala:40)
      3 frames
    3. scala-parser-combinators
      Parsers$$anon$2$$anonfun$apply$14.apply
      1. scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136)
      2. scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135)
      3. scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
      4. scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
      5. scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
      6. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
      7. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
      8. scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202)
      9. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
      10. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
      11. scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
      12. scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
      13. scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
      13 frames
    4. Scala
      DynamicVariable.withValue
      1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
      1 frame
    5. scala-parser-combinators
      PackratParsers$$anon$1.apply
      1. scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890)
      2. scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110)
      2 frames
    6. Spark Project Catalyst
      AbstractSparkSQLParser.parse
      1. org.apache.spark.sql.catalyst.AbstractSparkSQLParser.parse(AbstractSparkSQLParser.scala:34)
      1 frame
    7. Spark Project Hive
      HiveQLDialect$$anonfun$parse$1.apply
      1. org.apache.spark.sql.hive.HiveQl$.parseSql(HiveQl.scala:295)
      2. org.apache.spark.sql.hive.HiveQLDialect$$anonfun$parse$1.apply(HiveContext.scala:66)
      3. org.apache.spark.sql.hive.HiveQLDialect$$anonfun$parse$1.apply(HiveContext.scala:66)
      3 frames
    8. org.apache.spark
      ClientWrapper.withHiveState
      1. org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:290)
      2. org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:237)
      3. org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:236)
      4. org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:279)
      4 frames
    9. Spark Project Hive
      HiveQLDialect.parse
      1. org.apache.spark.sql.hive.HiveQLDialect.parse(HiveContext.scala:65)
      1 frame
    10. Spark Project SQL
      SparkSQLParser$$anonfun$org$apache$spark$sql$execution$SparkSQLParser$$others$1.apply
      1. org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:211)
      2. org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:211)
      3. org.apache.spark.sql.execution.SparkSQLParser$$anonfun$org$apache$spark$sql$execution$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:114)
      4. org.apache.spark.sql.execution.SparkSQLParser$$anonfun$org$apache$spark$sql$execution$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:113)
      4 frames
    11. scala-parser-combinators
      Parsers$$anon$2$$anonfun$apply$14.apply
      1. scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136)
      2. scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135)
      3. scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
      4. scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)
      5. scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
      6. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
      7. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)
      8. scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202)
      9. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
      10. scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)
      11. scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)
      12. scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
      13. scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)
      13 frames
    12. Scala
      DynamicVariable.withValue
      1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
      1 frame
    13. scala-parser-combinators
      PackratParsers$$anon$1.apply
      1. scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890)
      2. scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110)
      2 frames
    14. Spark Project Catalyst
      AbstractSparkSQLParser.parse
      1. org.apache.spark.sql.catalyst.AbstractSparkSQLParser.parse(AbstractSparkSQLParser.scala:34)
      1 frame
    15. Spark Project SQL
      SQLContext$$anonfun$1.apply
      1. org.apache.spark.sql.SQLContext$$anonfun$1.apply(SQLContext.scala:208)
      2. org.apache.spark.sql.SQLContext$$anonfun$1.apply(SQLContext.scala:208)
      2 frames
    16. org.apache.spark
      DDLParser.parse
      1. org.apache.spark.sql.execution.datasources.DDLParser.parse(DDLParser.scala:43)
      1 frame
    17. Spark Project SQL
      SQLContext.parseSql
      1. org.apache.spark.sql.SQLContext.parseSql(SQLContext.scala:231)
      1 frame
    18. Spark Project Hive
      HiveContext.parseSql
      1. org.apache.spark.sql.hive.HiveContext.parseSql(HiveContext.scala:331)
      1 frame
    19. Spark Project SQL
      SQLContext.sql
      1. org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)
      1 frame