Searched on Google with the first line of a Java stack trace?

We can recommend more relevant solutions and speed up debugging when you paste your entire stack trace with the exception message. Try a sample exception.

Recommended solutions based on your search

Samebug tips

  1. Check if the field you are trying to read actually exists in the database. If it is optional, use com.mongodb.casbah.commons.MongoDBObject#getAs, which returns an Option instead of throwing.
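
    The tip amounts to preferring the Option-returning accessor over the throwing one. A minimal Scala sketch, assuming Casbah is on the classpath; the tip names only getAs, and the document and field names here are illustrative:

        import com.mongodb.casbah.commons.MongoDBObject

        object GetAsExample extends App {
          val doc = MongoDBObject("name" -> "ada")

          // Reading a missing field with the throwing accessor fails:
          // doc.as[String]("email")  // java.util.NoSuchElementException

          // getAs returns an Option, so an optional field degrades gracefully.
          val email: Option[String] = doc.getAs[String]("email")
          println(email.getOrElse("no email on record"))
        }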

Solutions on the web

via DataStax JIRA by Maciek Bryński, 1 year ago
via GitHub by mantognini, 1 year ago
key not found: def foo$1(y$6 : Int, m$5 : MutableInteger$0): (Unit, MutableInteger$0) = {
  var m$8 = m$5
  ({
    toto$0 x$25 = 1
    val res$203 = bar$1(m$8)
    m$8 = res$203._2
    res$203._1
    m$8 = MutableInteger$0(y$6)
  }, m$8)
}
via GitHub by VladUreche, 2 years ago
key not found: type T
via GitHub by betehess, 2 years ago
java.util.NoSuchElementException: key not found: 'org.apache.cassandra.db.marshal.DateType'
	at scala.collection.MapLike$class.default(MapLike.scala:228)
	at scala.collection.AbstractMap.default(Map.scala:58)
	at scala.collection.MapLike$class.apply(MapLike.scala:141)
	at scala.collection.AbstractMap.apply(Map.scala:58)
	at com.datastax.spark.connector.types.ColumnType$.fromDriverType(ColumnType.scala:79)
	at com.datastax.spark.connector.cql.ColumnDef$.apply(Schema.scala:117)
	at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchRegularColumns$1.apply(Schema.scala:208)
	at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchRegularColumns$1.apply(Schema.scala:204)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
	at scala.collection.AbstractTraversable.map(Traversable.scala:105)
	at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchRegularColumns(Schema.scala:204)
	at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:233)
	at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:230)
	at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
	at scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
	at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
	at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
	at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
	at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchTables$1(Schema.scala:230)
	at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:239)
	at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:238)
	at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
	at scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
	at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
	at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
	at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1(Schema.scala:238)
	at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:244)
	at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:241)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:121)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:120)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
	at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
	at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
	at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
	at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120)
	at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:241)
	at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51)
	at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59)
	at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59)
	at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:146)
	at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59)
	at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
	at scala.Option.getOrElse(Option.scala:120)
	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
	at org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1277)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
	at org.apache.spark.rdd.RDD.take(RDD.scala:1272)
	at com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:121)
	at com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:122)
	at org.apache.spark.rdd.RDD$$anonfun$first$1.apply(RDD.scala:1312)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
	at org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
	at org.apache.spark.rdd.RDD.first(RDD.scala:1311)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:40)
	at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:42)
	at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:44)
	at $iwC$$iwC$$iwC$$iwC.<init>(<console>:46)
	at $iwC$$iwC$$iwC.<init>(<console>:48)
	at $iwC$$iwC.<init>(<console>:50)
	at $iwC.<init>(<console>:52)
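
The trace above bottoms out in scala.collection.MapLike#apply: ColumnType$.fromDriverType looks the Cassandra driver type (here org.apache.cassandra.db.marshal.DateType) up in a Scala Map, and apply on a Map throws NoSuchElementException for any key it does not contain. A minimal sketch of that failure mode; the map contents below are illustrative, not the connector's actual type table:

    object KeyNotFound extends App {
      // Illustrative stand-in for a driver-type -> connector-type mapping.
      val columnTypes: Map[String, String] = Map(
        "org.apache.cassandra.db.marshal.Int32Type" -> "IntType"
      )

      // Map#apply delegates to default(key), which throws
      // "java.util.NoSuchElementException: key not found: ..." for unknown keys:
      // columnTypes("org.apache.cassandra.db.marshal.DateType")

      // get returns an Option and never throws:
      val safe = columnTypes.get("org.apache.cassandra.db.marshal.DateType")
      println(safe.getOrElse("unsupported driver type"))
    }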