Did you search Google with only the first line of a Java stack trace?

We can recommend more relevant solutions and speed up debugging if you paste your entire stack trace, including the exception message. Try a sample exception.

Recommended solutions based on your search

Solutions on the web

via Stack Overflow by Mayur Maheshwari
, 1 year ago
org.apache.hadoop.hive.ql.parse.ASTNode cannot be cast to org.antlr.runtime.tree.CommonTree;
via Stack Overflow by Hokam
, 7 months ago
cannot recognize input near 'MERGE' 'INTO' 'emp_with_orc'; line 1 pos 0
via Stack Overflow by user6122449
, 2 years ago
cannot recognize input near 'add' 'archive' 'hdfs'; line 1 pos 0
org.apache.spark.sql.AnalysisException: org.apache.hadoop.hive.ql.parse.ASTNode cannot be cast to org.antlr.runtime.tree.CommonTree;	at org.apache.spark.sql.hive.HiveQl$.createPlan(HiveQl.scala:324)	at org.apache.spark.sql.hive.ExtendedHiveQlParser$$anonfun$hiveQl$1.apply(ExtendedHiveQlParser.scala:41)	at org.apache.spark.sql.hive.ExtendedHiveQlParser$$anonfun$hiveQl$1.apply(ExtendedHiveQlParser.scala:40)	at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136)	at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)	at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)	at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)	at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)	at scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890)	at scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110)	at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.parse(AbstractSparkSQLParser.scala:34)	at org.apache.spark.sql.hive.HiveQl$.parseSql(HiveQl.scala:295)	at 
org.apache.spark.sql.hive.HiveQLDialect$$anonfun$parse$1.apply(HiveContext.scala:66)	at org.apache.spark.sql.hive.HiveQLDialect$$anonfun$parse$1.apply(HiveContext.scala:66)	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:290)	at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:237)	at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:236)	at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:279)	at org.apache.spark.sql.hive.HiveQLDialect.parse(HiveContext.scala:65)	at org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:211)	at org.apache.spark.sql.SQLContext$$anonfun$2.apply(SQLContext.scala:211)	at org.apache.spark.sql.execution.SparkSQLParser$$anonfun$org$apache$spark$sql$execution$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:114)	at org.apache.spark.sql.execution.SparkSQLParser$$anonfun$org$apache$spark$sql$execution$SparkSQLParser$$others$1.apply(SparkSQLParser.scala:113)	at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:136)	at scala.util.parsing.combinator.Parsers$Success.map(Parsers.scala:135)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$map$1.apply(Parsers.scala:242)	at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1$$anonfun$apply$2.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$Failure.append(Parsers.scala:202)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$Parser$$anonfun$append$1.apply(Parsers.scala:254)	at scala.util.parsing.combinator.Parsers$$anon$3.apply(Parsers.scala:222)	at 
scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)	at scala.util.parsing.combinator.Parsers$$anon$2$$anonfun$apply$14.apply(Parsers.scala:891)	at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)	at scala.util.parsing.combinator.Parsers$$anon$2.apply(Parsers.scala:890)	at scala.util.parsing.combinator.PackratParsers$$anon$1.apply(PackratParsers.scala:110)	at org.apache.spark.sql.catalyst.AbstractSparkSQLParser.parse(AbstractSparkSQLParser.scala:34)	at org.apache.spark.sql.SQLContext$$anonfun$1.apply(SQLContext.scala:208)	at org.apache.spark.sql.SQLContext$$anonfun$1.apply(SQLContext.scala:208)	at org.apache.spark.sql.execution.datasources.DDLParser.parse(DDLParser.scala:43)	at org.apache.spark.sql.SQLContext.parseSql(SQLContext.scala:231)	at org.apache.spark.sql.hive.HiveContext.parseSql(HiveContext.scala:331)	at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)