java.util.NoSuchElementException: head of empty list

DataStax JIRA | Piotr Kołaczkowski | 2 years ago
  1. 0

    {code:sql} create table export_table( objectid int, utcstamp timestamp, service_location_id int, service_location_name text, meterid int, primary key(meterid, utcstamp)); {code} {code:java} val cc = new CassandraSQLContext(sc) val rdd1: SchemaRDD = cc.cassandraSql("select objectid, meterid, utcstamp from export_table where meterid = 4317 and utcstamp > '2013-07-26 20:30:00-0700'"); {code} {noformat} Exception in thread "main" java.util.NoSuchElementException: head of empty list at scala.collection.immutable.Nil$.head(List.scala:337) at scala.collection.immutable.Nil$.head(List.scala:334) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:76) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:75) at scala.collection.immutable.List.foreach(List.scala:318) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.partitionColumnPushDown(CassandraStrategies.scala:75) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.apply(CassandraStrategies.scala:203) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.apply(QueryPlanner.scala:59) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:407) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:405) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:411) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:411) at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:438) at 
org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:103) at org.apache.spark.rdd.RDD.toArray(RDD.scala:794) at com.esri.scala.TestCassandraSQL$.main(TestCassandraSQL.scala:18) at com.esri.scala.TestCassandraSQL.main(TestCassandraSQL.scala) {noformat}

    DataStax JIRA | 2 years ago | Piotr Kołaczkowski
    java.util.NoSuchElementException: head of empty list
  2. 0

    {code:sql} create table export_table( objectid int, utcstamp timestamp, service_location_id int, service_location_name text, meterid int, primary key(meterid, utcstamp)); {code} {code:java} val cc = new CassandraSQLContext(sc) val rdd1: SchemaRDD = cc.cassandraSql("select objectid, meterid, utcstamp from export_table where meterid = 4317 and utcstamp > '2013-07-26 20:30:00-0700'"); {code} {noformat} Exception in thread "main" java.util.NoSuchElementException: head of empty list at scala.collection.immutable.Nil$.head(List.scala:337) at scala.collection.immutable.Nil$.head(List.scala:334) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:76) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:75) at scala.collection.immutable.List.foreach(List.scala:318) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.partitionColumnPushDown(CassandraStrategies.scala:75) at org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.apply(CassandraStrategies.scala:203) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.apply(QueryPlanner.scala:59) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:407) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:405) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:411) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:411) at org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:438) at 
org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:103) at org.apache.spark.rdd.RDD.toArray(RDD.scala:794) at com.esri.scala.TestCassandraSQL$.main(TestCassandraSQL.scala:18) at com.esri.scala.TestCassandraSQL.main(TestCassandraSQL.scala) {noformat}

    DataStax JIRA | 2 years ago | Piotr Kołaczkowski
    java.util.NoSuchElementException: head of empty list
  3. 0

    Play-scala-mongodb throws an exception and quits when accessing /db

    GitHub | 2 years ago | aschneider-techempower
    java.util.NoSuchElementException: head of empty list
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    Doesn't work with -Yno-generic-signatures

    GitHub | 3 years ago | japgolly
    org.json4s.package$MappingException: unknown error
  6. 0

    Can't add custom serializer for collection class

    GitHub | 3 years ago | danarmak
    org.json4s.package$MappingException: unknown error

  1. harshg 1 time, last 8 months ago
2 unregistered visitors
Not finding the right solution?
Take a tour to get the most out of Samebug.

Tired of useless tips?

Automated exception search integrated into your IDE

Root Cause Analysis

  1. java.util.NoSuchElementException

    head of empty list

    at scala.collection.immutable.Nil$.head()
  2. Scala
    Nil$.head
    1. scala.collection.immutable.Nil$.head(List.scala:337)
    2. scala.collection.immutable.Nil$.head(List.scala:334)
    2 frames
  3. spark-cassandra-connector
    CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply
    1. org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:76)
    2. org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$$anonfun$partitionColumnPushDown$1.apply(CassandraStrategies.scala:75)
    2 frames
  4. Scala
    List.foreach
    1. scala.collection.immutable.List.foreach(List.scala:318)
    1 frame
  5. spark-cassandra-connector
    CassandraStrategies$CassandraTableScans$.apply
    1. org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.partitionColumnPushDown(CassandraStrategies.scala:75)
    2. org.apache.spark.sql.cassandra.CassandraStrategies$CassandraTableScans$.apply(CassandraStrategies.scala:203)
    2 frames
  6. Spark Project Catalyst
    QueryPlanner$$anonfun$1.apply
    1. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    2. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    2 frames
  7. Scala
    Iterator$$anon$13.hasNext
    1. scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
    1 frame
  8. Spark Project Catalyst
    QueryPlanner.apply
    1. org.apache.spark.sql.catalyst.planning.QueryPlanner.apply(QueryPlanner.scala:59)
    1 frame
  9. Spark Project SQL
    SchemaRDD.collect
    1. org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:407)
    2. org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:405)
    3. org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:411)
    4. org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:411)
    5. org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:438)
    6. org.apache.spark.sql.SchemaRDD.collect(SchemaRDD.scala:103)
    6 frames
  10. Spark
    RDD.toArray
    1. org.apache.spark.rdd.RDD.toArray(RDD.scala:794)
    1 frame
  11. com.esri.scala
    TestCassandraSQL.main
    1. com.esri.scala.TestCassandraSQL$.main(TestCassandraSQL.scala:18)
    2. com.esri.scala.TestCassandraSQL.main(TestCassandraSQL.scala)
    2 frames