org.apache.hadoop.hive.serde2.SerDeException: java.lang.NullPointerException
    at org.apache.hadoop.hive.serde2.thrift.ThriftDeserializer.initialize(ThriftDeserializer.java:68)
    at org.apache.hadoop.hive.ql.plan.TableDesc.getDeserializer(TableDesc.java:80)
    at org.apache.spark.sql.hive.execution.HiveTableScan.addColumnMetadataToConf(HiveTableScan.scala:86)
    at org.apache.spark.sql.hive.execution.HiveTableScan.<init>(HiveTableScan.scala:100)
    at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$14.apply(HiveStrategies.scala:188)
    at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$14.apply(HiveStrategies.scala:188)
    at org.apache.spark.sql.SQLContext$SparkPlanner.pruneFilterProject(SQLContext.scala:364)
    at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$.apply(HiveStrategies.scala:184)
    at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
    at org.apache.spark.sql.catalyst.planning.QueryPlanner.apply(QueryPlanner.scala:59)
    at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
    at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:280)
    at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
    at org.apache.spark.sql.catalyst.planning.QueryPlanner.apply(QueryPlanner.scala:59)
    at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:402)
    at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:400)
    at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:406)
    at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:406)
    at org.apache.spark.sql.hive.HiveContext$QueryExecution.stringResult(HiveContext.scala:406)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLDriver.run(SparkSQLDriver.scala:59)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.processCmd(SparkSQLCLIDriver.scala:291)
    at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:413)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver$.main(SparkSQLCLIDriver.scala:226)
    at org.apache.spark.sql.hive.thriftserver.SparkSQLCLIDriver.main(SparkSQLCLIDriver.scala)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source)
    at java.lang.reflect.Method.invoke(Unknown Source)
    at org.apache.spark.deploy.SparkSubmit$.launch(SparkSubmit.scala:328)
    at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:75)
    at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
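Reading the frames bottom-up: the query was submitted through the spark-sql CLI (SparkSQLCLIDriver), and the failure happens during query planning, before any data is read. HiveTableScan asks TableDesc for the table's deserializer, and ThriftDeserializer.initialize throws a NullPointerException, which Hive wraps in a SerDeException. ThriftDeserializer.initialize reads the table properties serialization.class and serialization.format, so one plausible cause is that those properties are missing from the table's metastore entry. The sketch below shows that first check; it assumes a Spark 1.x HiveContext (matching the SQLContext/HiveContext frames above) and uses hypothetical names throughout: the table my_thrift_table and the Thrift-generated record class com.example.thrift.MyRecord are placeholders, not names taken from the trace.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.sql.hive.HiveContext

// Minimal sketch: ensure the Thrift SerDe properties that
// ThriftDeserializer.initialize reads are present on the table.
// Table and class names here are hypothetical.
object ThriftSerDeCheck {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("ThriftSerDeCheck"))
    val hive = new HiveContext(sc)

    // serialization.class and serialization.format are the two table
    // properties the Thrift deserializer consults during initialization;
    // if they are absent, initialization can fail before the scan starts.
    hive.sql(
      """ALTER TABLE my_thrift_table SET SERDEPROPERTIES (
        |  'serialization.class'  = 'com.example.thrift.MyRecord',
        |  'serialization.format' = 'org.apache.thrift.protocol.TBinaryProtocol'
        |)""".stripMargin)

    // Re-run the scan that previously failed during planning.
    hive.sql("SELECT * FROM my_thrift_table LIMIT 10").collect().foreach(println)

    sc.stop()
  }
}

Issuing the ALTER TABLE through HiveContext writes the properties to the metastore, so the fix persists: the next query from the spark-sql CLI session in the trace picks up the fully configured SerDe without any code change. The Thrift record class also has to be on the driver and executor classpath, since the deserializer loads it by name.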