Solutions on the web

via GitHub by edanshalom, 1 year ago
This exception has no message.
java.lang.AbstractMethodError
	at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:104)
	at org.apache.spark.sql.catalyst.expressions.Expression$$anonfun$genCode$2.apply(Expression.scala:101)
	at scala.Option.getOrElse(Option.scala:121)
	at org.apache.spark.sql.catalyst.expressions.Expression.genCode(Expression.scala:101)
	at org.apache.spark.sql.execution.ProjectExec$$anonfun$5.apply(basicPhysicalOperators.scala:57)
	at org.apache.spark.sql.execution.ProjectExec$$anonfun$5.apply(basicPhysicalOperators.scala:57)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.AbstractTraversable.map(Traversable.scala:104)
	at org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:57)
	at org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
	at org.apache.spark.sql.execution.InputAdapter.consume(WholeStageCodegenExec.scala:218)
	at org.apache.spark.sql.execution.InputAdapter.doProduce(WholeStageCodegenExec.scala:244)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.InputAdapter.produce(WholeStageCodegenExec.scala:218)
	at org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:40)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
	at org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
	at org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
	at org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:30)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doCodeGen(WholeStageCodegenExec.scala:309)
	at org.apache.spark.sql.execution.WholeStageCodegenExec.doExecute(WholeStageCodegenExec.scala:347)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$1.apply(SparkPlan.scala:115)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
	at org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:114)
	at org.apache.spark.sql.execution.SparkPlan.getByteArrayRdd(SparkPlan.scala:240)
	at org.apache.spark.sql.execution.SparkPlan.executeTake(SparkPlan.scala:323)
	at org.apache.spark.sql.execution.CollectLimitExec.executeCollect(limit.scala:39)
	at org.apache.spark.sql.Dataset$$anonfun$org$apache$spark$sql$Dataset$$execute$1$1.apply(Dataset.scala:2183)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:57)
	at org.apache.spark.sql.Dataset.withNewExecutionId(Dataset.scala:2532)
	at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$execute$1(Dataset.scala:2182)
	at org.apache.spark.sql.Dataset.org$apache$spark$sql$Dataset$$collect(Dataset.scala:2189)
	at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1925)
	at org.apache.spark.sql.Dataset$$anonfun$head$1.apply(Dataset.scala:1924)
	at org.apache.spark.sql.Dataset.withTypedCallback(Dataset.scala:2562)
	at org.apache.spark.sql.Dataset.head(Dataset.scala:1924)
	at org.apache.spark.sql.Dataset.take(Dataset.scala:2139)
	at org.apache.spark.sql.Dataset.showString(Dataset.scala:239)
	at org.apache.spark.sql.Dataset.show(Dataset.scala:526)
	at org.apache.spark.sql.Dataset.show(Dataset.scala:486)
	at org.apache.spark.sql.Dataset.show(Dataset.scala:495)
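
Reading the trace bottom-up: Dataset.show() calls take/head, which executes the physical plan, and WholeStageCodegenExec.doCodeGen asks each expression in the ProjectExec for generated Java code via Expression.genCode. A java.lang.AbstractMethodError at that point generally means some Expression class on the classpath was compiled against a different version of the Expression API than the Spark runtime that is executing it (for example, a custom or third-party expression built for an older Spark release that lacks the method the runtime invokes). The sketch below only illustrates the driver-side call pattern that reaches this code path; the names (spark, df) are illustrative and not from the original report, and the error itself would surface only if one of the plan's expressions is binary-incompatible with the running Spark version.

    import org.apache.spark.sql.SparkSession

    // Illustrative setup only; assumes a local Spark 2.x runtime.
    val spark = SparkSession.builder()
      .appName("genCode-AbstractMethodError")
      .master("local[*]")
      .getOrCreate()

    // Any projection works; the failing expression in the original report is unknown.
    val df = spark.range(10).selectExpr("id", "id * 2 AS doubled")

    // show() -> take() -> executeCollect() -> WholeStageCodegenExec.doCodeGen, which
    // calls Expression.genCode on every expression in the ProjectExec (the frames above).
    // If one of those expressions was compiled against an incompatible Expression API,
    // the JVM throws java.lang.AbstractMethodError here instead of rendering the rows.
    df.show()

A practical first check, if this applies to your case, is whether every library that contributes Catalyst expressions was built for the same Spark minor version as the cluster running the job.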