
Recommended solutions based on your search

Samebug tips

  1. Check whether the field you are trying to read actually exists in the database. If the field is optional, use com.mongodb.casbah.commons.MongoDBObject#getAs instead, which returns an Option rather than throwing (see the sketch below).
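    A minimal sketch of that tip, assuming a Casbah MongoDBObject; the "text"
    field name and the document contents are illustrative:

        import com.mongodb.casbah.commons.MongoDBObject

        object GetAsExample extends App {
          // Document that is missing the optional "text" field.
          val doc = MongoDBObject("_id" -> 1)

          // doc.as[String]("text") would throw NoSuchElementException here;
          // getAs returns an Option, so an absent field is handled safely.
          val text: Option[String] = doc.getAs[String]("text")

          println(text.getOrElse("<no text field>"))
        }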

Solutions on the web

via DataStax JIRA by Prashant Kumar, 1 year ago
via DataStax JIRA by Prashant Kumar, 2 years ago
java.util.NoSuchElementException: key not found: 'text'
    at scala.collection.MapLike$class.default(MapLike.scala:228)
    at scala.collection.AbstractMap.default(Map.scala:58)
    at scala.collection.MapLike$class.apply(MapLike.scala:141)
    at scala.collection.AbstractMap.apply(Map.scala:58)
    at com.datastax.spark.connector.types.ColumnType$.fromDriverType(ColumnType.scala:81)
    at com.datastax.spark.connector.cql.ColumnDef$.apply(Schema.scala:117)
    at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchPartitionKey$1.apply(Schema.scala:199)
    at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchPartitionKey$1.apply(Schema.scala:198)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.Iterator$class.foreach(Iterator.scala:727)
    at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    at scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
    at scala.collection.AbstractIterable.foreach(Iterable.scala:54)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
    at scala.collection.AbstractTraversable.map(Traversable.scala:105)
    at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchPartitionKey(Schema.scala:198)
    at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:239)
    at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:238)
    at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
    at scala.collection.immutable.Set$Set3.foreach(Set.scala:115)
    at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
    at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchTables$1(Schema.scala:238)
    at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:247)
    at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:246)
    at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
    at scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
    at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
    at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
    at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1(Schema.scala:246)
    at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:252)
    at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:249)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:121)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:120)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
    at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
    at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
    at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
    at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120)
    at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:249)
    at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51)
    at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59)
    at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59)
    at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:146)
    at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59)
    at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
    at scala.Option.getOrElse(Option.scala:120)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
    at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
    at scala.Option.getOrElse(Option.scala:120)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
    at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
    at scala.Option.getOrElse(Option.scala:120)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
    at org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:65)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$combineByKey$3.apply(PairRDDFunctions.scala:573)
    at org.apache.spark.rdd.PairRDDFunctions$$anonfun$combineByKey$3.apply(PairRDDFunctions.scala:573)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    at org.apache.spark.rdd.RDD.withScope(RDD.scala:310)
    at org.apache.spark.rdd.PairRDDFunctions.combineByKey(PairRDDFunctions.scala:572)
    at com.yuanmai.logistics.analyze.route.Main$.main(Main.scala:230)
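For background, the failure above originates in Scala's Map#apply: for an
absent key, MapLike.default throws NoSuchElementException ("key not found:
..."), which is what happens when the connector's ColumnType.fromDriverType
look-up has no entry for the column type 'text' (often a sign of a version
mismatch between the connector and the Cassandra driver, though the linked
reports would confirm the exact cause). A minimal sketch of the mechanism,
with an illustrative map standing in for the connector's internal table:

    import java.util.NoSuchElementException

    object KeyNotFoundDemo extends App {
      // Illustrative stand-in for the connector's internal
      // driver-type-name -> ColumnType mapping.
      val columnTypes: Map[String, String] =
        Map("int" -> "IntType", "varchar" -> "VarCharType")

      // Map#apply delegates to MapLike.default for an absent key and
      // throws NoSuchElementException -- the failure in the trace above.
      try {
        println(columnTypes("text"))
      } catch {
        case e: NoSuchElementException => println(s"apply failed: ${e.getMessage}")
      }

      // Non-throwing alternatives: Option-returning get, or a fallback value.
      println(columnTypes.get("text"))                  // None
      println(columnTypes.getOrElse("text", "unknown")) // unknown
    }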