java.util.NoSuchElementException

  • {code:sql}
    create table export_table(
        objectid int,
        utcstamp timestamp,
        service_location_id int,
        service_location_name text,
        meterid int,
        primary key(meterid, utcstamp)
    );
    {code}
    {code:java}
    val cc = new CassandraSQLContext(sc)
    val rdd1: SchemaRDD = cc.cassandraSql(
      "select objectid, meterid, utcstamp from export_table where meterid = 4317 and utcstamp > '2013-07-26 20:30:00-0700'")
    {code}
    {noformat}
    Exception in thread "main" java.util.NoSuchElementException: head of empty list
        at scala.collection.immutable.Nil$.head(List.scala:337)
        at scala.collection.immutable.Nil$.head(List.scala:334)
        at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:76)
        at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:75)
        at scala.collection.immutable.List.foreach(List.scala:318)
        at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.partitionColumnPushDown(CassandraStrategies.scala:75)
        at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.apply(CassandraStrategies.scala:203)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.apply(QueryPlanner.scala:59)
        at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:407)
        at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:405)
        at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:411)
        at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:411)
        at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:438)
        at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:103)
        at org.apache.spark.rdd.RDD.toArray(RDD.scala:794)
        at com.esri.scala.TestCassandraSQL$.main(TestCassandraSQL.scala:18)
        at com.esri.scala.TestCassandraSQL.main(TestCassandraSQL.scala)
    {noformat}
    via Piotr Kołaczkowski
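    Judging from the trace, the planner appears to fail inside CassandraTableScans.partitionColumnPushDown while handling the predicates of this query. One possible workaround (a sketch, not part of the original report) is to skip CassandraSQLContext and run the same query through the connector's RDD API, which pushes the predicates down via its where() clause. The snippet below assumes spark-cassandra-connector 1.x, the sc from the example above, and a placeholder keyspace name "ks".
    {code:java}
    // Workaround sketch (assumptions: connector 1.x, keyspace "ks" is a placeholder).
    import com.datastax.spark.connector._

    // Read export_table directly and push the same filters down through the
    // connector instead of the Catalyst planner that throws "head of empty list".
    val rows = sc.cassandraTable("ks", "export_table")
      .select("objectid", "meterid", "utcstamp")
      .where("meterid = ? and utcstamp > ?", 4317, "2013-07-26 20:30:00-0700")

    rows.collect().foreach(println)
    {code}
    Whether the string timestamp binds correctly may depend on the connector version; converting it to a java.util.Date before binding is a safer bet.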

    Users with the same issue

      • Handemelindo — 1 time
      • bandoca — 3 times
      • rp — 2 times
      • harshg — 1 time
      • jokester — 2 times
      • 2 more bugmates