java.lang.AbstractMethodError: org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.conf()Lorg/apache/spark/sql/catalyst/CatalystConf;

DataStax JIRA | Yana Kadiyska | 1 year ago
  1. 0

    {code} CREATE TABLE test1( customer_id int , uri text , browser text, epoch bigint , PRIMARY KEY (customer_id , epoch,uri) ) {code} Start spark shell: {code} ~/Github/spark-1.4.0-bin-cdh4$ SPARK_CLASSPATH=spark-cassandra-connector_2.10-1.4.0-M1.jar bin/spark-shell --conf spark.cassandra.connection.host=127.0.0.1 {code} {code} scala>import org.apache.spark.sql.cassandra._ scala> val cass=new CassandraSQLContext(sc) cass: org.apache.spark.sql.cassandra.CassandraSQLContext = org.apache.spark.sql.cassandra.CassandraSQLContext@3a665b22 scala> cass.sql("select * from yana_test.test1") java.lang.AbstractMethodError: org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.conf()Lorg/apache/spark/sql/catalyst/CatalystConf; at org.apache.spark.sql.catalyst.analysis.Catalog$class.processTableIdentifier(Catalog.scala:62) at org.apache.spark.sql.cassandra.CassandraCatalog.processTableIdentifier(CassandraCatalog.scala:11) at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:156) at org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.lookupRelation(CassandraSQLContext.scala:218) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:222) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:233) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:229) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222) at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51) at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:221) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:242) at scala.collection.Iterator$$anon$11.next(Iterator.scala:328) at 
scala.collection.Iterator$class.foreach(Iterator.scala:727) at scala.collection.AbstractIterator.foreach(Iterator.scala:1157) at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48) at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103) at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47) at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273) at scala.collection.AbstractIterator.to(Iterator.scala:1157) at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265) at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157) at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252) at scala.collection.AbstractIterator.toArray(Iterator.scala:1157) at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:272) at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:227) at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:212) {code}

    DataStax JIRA | 1 year ago | Yana Kadiyska
    java.lang.AbstractMethodError: org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.conf()Lorg/apache/spark/sql/catalyst/CatalystConf;
  2. 0

    {code} CREATE TABLE test1( customer_id int , uri text , browser text, epoch bigint , PRIMARY KEY (customer_id , epoch,uri) ) {code} Start spark shell: {code} ~/Github/spark-1.4.0-bin-cdh4$ SPARK_CLASSPATH=spark-cassandra-connector_2.10-1.4.0-M1.jar bin/spark-shell --conf spark.cassandra.connection.host=127.0.0.1 {code} {code} scala>import org.apache.spark.sql.cassandra._ scala> val cass=new CassandraSQLContext(sc) cass: org.apache.spark.sql.cassandra.CassandraSQLContext = org.apache.spark.sql.cassandra.CassandraSQLContext@3a665b22 scala> cass.sql("select * from yana_test.test1") java.lang.AbstractMethodError: org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.conf()Lorg/apache/spark/sql/catalyst/CatalystConf; at org.apache.spark.sql.catalyst.analysis.Catalog$class.processTableIdentifier(Catalog.scala:62) at org.apache.spark.sql.cassandra.CassandraCatalog.processTableIdentifier(CassandraCatalog.scala:11) at org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:156) at org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.lookupRelation(CassandraSQLContext.scala:218) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:222) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:233) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:229) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222) at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51) at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:221) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:242) at scala.collection.Iterator$$anon$11.next(Iterator.scala:328) at 
scala.collection.Iterator$class.foreach(Iterator.scala:727) at scala.collection.AbstractIterator.foreach(Iterator.scala:1157) at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48) at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103) at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47) at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273) at scala.collection.AbstractIterator.to(Iterator.scala:1157) at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265) at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157) at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252) at scala.collection.AbstractIterator.toArray(Iterator.scala:1157) at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:272) at org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:227) at org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:212) {code}

    DataStax JIRA | 1 year ago | Yana Kadiyska
    java.lang.AbstractMethodError: org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.conf()Lorg/apache/spark/sql/catalyst/CatalystConf;
  3. 0

    spark-shell example not working

    GitHub | 10 months ago | INRIX-Trang-Nguyen
    java.lang.AbstractMethodError: org.apache.spark.sql.catalyst.expressions.Expression.genCode(Lorg/apache/spark/sql/catalyst/expressions/codegen/CodeGenContext;Lorg/apache/spark/sql/catalyst/expressions/codegen/GeneratedExpressionCode;)Ljava/lang/String;
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    Spark breaks down when running points.show()

    GitHub | 4 months ago | momir81
    java.lang.AbstractMethodError: org.apache.spark.sql.catalyst.expressions.Expression.genCode(Lorg/apache/spark/sql/catalyst/expressions/codegen/CodeGenContext;Lorg/apache/spark/sql/catalyst/expressions/codegen/GeneratedExpressionCode;)Ljava/lang/String;
  6. 0

    cc.sql reports the following error when executing a CREATE TABLE statement

    GitHub | 5 months ago | shijiyu
    java.lang.AbstractMethodError: org.apache.spark.sql.CarbonContext$$anon$1.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$_setter_$org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$overrides_$eq(Ljava/util/concurrent/ConcurrentHashMap;)V

    Not finding the right solution?
    Take a tour to get the most out of Samebug.

    Tired of useless tips?

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. java.lang.AbstractMethodError

      org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.conf()Lorg/apache/spark/sql/catalyst/CatalystConf;

      at org.apache.spark.sql.catalyst.analysis.Catalog$class.processTableIdentifier()
    2. Spark Project Catalyst
      Catalog$class.processTableIdentifier
      1. org.apache.spark.sql.catalyst.analysis.Catalog$class.processTableIdentifier(Catalog.scala:62)
      1 frame
    3. spark-cassandra-connector
      CassandraCatalog.processTableIdentifier
      1. org.apache.spark.sql.cassandra.CassandraCatalog.processTableIdentifier(CassandraCatalog.scala:11)
      1 frame
    4. Spark Project Catalyst
      OverrideCatalog$class.lookupRelation
      1. org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:156)
      1 frame
    5. spark-cassandra-connector
      CassandraSQLContext$$anon$2.lookupRelation
      1. org.apache.spark.sql.cassandra.CassandraSQLContext$$anon$2.lookupRelation(CassandraSQLContext.scala:218)
      1 frame
    6. Spark Project Catalyst
      TreeNode$$anonfun$4.apply
      1. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:222)
      2. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:233)
      3. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:229)
      4. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222)
      5. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222)
      6. org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51)
      7. org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:221)
      8. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:242)
      8 frames
    7. Scala
      AbstractIterator.toArray
      1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      2. scala.collection.Iterator$class.foreach(Iterator.scala:727)
      3. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
      4. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
      5. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
      6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
      7. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
      8. scala.collection.AbstractIterator.to(Iterator.scala:1157)
      9. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
      10. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
      11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
      12. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
      12 frames
    8. Spark Project Catalyst
      TreeNode.transform
      1. org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:272)
      2. org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:227)
      3. org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:212)
      3 frames