
Recommended solutions based on your search

Solutions on the web

via Stack Overflow by user6608138, 1 year ago
java.lang.NoClassDefFoundError: org/apache/hadoop/hbase/util/Bytes
	at org.apache.hadoop.hive.hbase.HBaseSerDe.parseColumnsMapping(HBaseSerDe.java:184)
	at org.apache.hadoop.hive.hbase.HBaseSerDeParameters.<init>(HBaseSerDeParameters.java:73)
	at org.apache.hadoop.hive.hbase.HBaseSerDe.initialize(HBaseSerDe.java:117)
	at org.apache.hadoop.hive.serde2.AbstractSerDe.initialize(AbstractSerDe.java:53)
	at org.apache.hadoop.hive.serde2.SerDeUtils.initializeSerDe(SerDeUtils.java:521)
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.getDeserializer(MetaStoreUtils.java:391)
	at org.apache.hadoop.hive.ql.metadata.Table.getDeserializerFromMetaStore(Table.java:276)
	at org.apache.hadoop.hive.ql.metadata.Table.getDeserializer(Table.java:258)
	at org.apache.hadoop.hive.ql.metadata.Table.getCols(Table.java:605)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$getTableOption$1$$anonfun$3.apply(ClientWrapper.scala:331)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$getTableOption$1$$anonfun$3.apply(ClientWrapper.scala:326)
	at scala.Option.map(Option.scala:145)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$getTableOption$1.apply(ClientWrapper.scala:326)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$getTableOption$1.apply(ClientWrapper.scala:321)
	at org.apache.spark.sql.hive.client.ClientWrapper$$anonfun$withHiveState$1.apply(ClientWrapper.scala:279)
	at org.apache.spark.sql.hive.client.ClientWrapper.liftedTree1$1(ClientWrapper.scala:226)
	at org.apache.spark.sql.hive.client.ClientWrapper.retryLocked(ClientWrapper.scala:225)
	at org.apache.spark.sql.hive.client.ClientWrapper.withHiveState(ClientWrapper.scala:268)
	at org.apache.spark.sql.hive.client.ClientWrapper.getTableOption(ClientWrapper.scala:321)
	at org.apache.spark.sql.hive.client.ClientInterface$class.getTable(ClientInterface.scala:122)
	at org.apache.spark.sql.hive.client.ClientWrapper.getTable(ClientWrapper.scala:60)
	at org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:384)
	at org.apache.spark.sql.hive.HiveContext$$anon$2.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$super$lookupRelation(HiveContext.scala:457)
	at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:161)
	at org.apache.spark.sql.hive.HiveContext$$anon$2.lookupRelation(HiveContext.scala:457)
	at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:303)
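
The trace shows Spark SQL's Hive client (ClientWrapper.getTableOption) resolving an HBase-backed Hive table, which makes HBaseSerDe.parseColumnsMapping call into org.apache.hadoop.hbase.util.Bytes; the NoClassDefFoundError means that class (shipped in the HBase client jars such as hbase-common) is not on the runtime classpath. Below is a minimal sketch, not taken from the original post, of the code path that triggers this, assuming Spark 1.x with HiveContext as in the trace and a hypothetical table name my_hbase_table; the common remedy of supplying the HBase and hive-hbase-handler jars at launch (for example via spark-submit --jars) is mentioned only in the comments as an assumption, not as a confirmed fix from this page.

	// Sketch only: reproduces the call path in the trace above, assuming
	// Spark 1.x with HiveContext and a hypothetical HBase-backed Hive table
	// named "my_hbase_table".
	import org.apache.spark.{SparkConf, SparkContext}
	import org.apache.spark.sql.hive.HiveContext

	object HBaseBackedTableQuery {
	  def main(args: Array[String]): Unit = {
	    val sc = new SparkContext(new SparkConf().setAppName("hbase-backed-table-query"))
	    val hiveContext = new HiveContext(sc)

	    // Resolving the table makes the Hive client load HBaseSerDe, whose
	    // parseColumnsMapping uses org.apache.hadoop.hbase.util.Bytes -- the
	    // class the trace reports as missing. If the HBase client jars and
	    // hive-hbase-handler are not on the driver/executor classpath (e.g.
	    // passed with spark-submit --jars), this line throws the
	    // NoClassDefFoundError shown above.
	    hiveContext.sql("SELECT * FROM my_hbase_table LIMIT 10").show()

	    sc.stop()
	  }
	}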