java.util.NoSuchElementException: key not found: 'org.apache.cassandra.db.marshal.DateType'

DataStax JIRA | Maciek Bryński | 1 year ago
  1. 0

    When using spark-cassandra-connector version 1.5.0-M2 it is not possible to connect to my Cassandra cluster; on 1.4.0 everything works. Spark version: 1.5.1.
    {code}
    bin/spark-shell --packages com.datastax.spark:spark-cassandra-connector_2.10:1.5.0-M2
    scala> import com.datastax.spark.connector._
    scala> val rdd = sc.cassandraTable("keyspace", "table")
    scala> rdd.first()
    java.util.NoSuchElementException: key not found: 'org.apache.cassandra.db.marshal.DateType'
      at scala.collection.MapLike$class.default(MapLike.scala:228)
      at scala.collection.AbstractMap.default(Map.scala:58)
      at scala.collection.MapLike$class.apply(MapLike.scala:141)
      at scala.collection.AbstractMap.apply(Map.scala:58)
      at com.datastax.spark.connector.types.ColumnType$.fromDriverType(ColumnType.scala:79)
      at com.datastax.spark.connector.cql.ColumnDef$.apply(Schema.scala:117)
      at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchRegularColumns$1.apply(Schema.scala:208)
      at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchRegularColumns$1.apply(Schema.scala:204)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
      at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
      at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
      at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
      at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
      at scala.collection.AbstractTraversable.map(Traversable.scala:105)
      at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchRegularColumns(Schema.scala:204)
      at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:233)
      at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:230)
      at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
      at scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
      at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
      at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
      at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
      at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchTables$1(Schema.scala:230)
      at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:239)
      at com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:238)
      at scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
      at scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
      at scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
      at scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
      at com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1(Schema.scala:238)
      at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:244)
      at com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:241)
      at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:121)
      at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:120)
      at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
      at com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
      at com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
      at com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
      at com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120)
      at com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:241)
      at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51)
      at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59)
      at com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59)
      at com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:146)
      at com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59)
      at com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143)
      at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
      at scala.Option.getOrElse(Option.scala:120)
      at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
      at org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1277)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
      at org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
      at org.apache.spark.rdd.RDD.take(RDD.scala:1272)
      at com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:121)
      at com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:122)
      at org.apache.spark.rdd.RDD$$anonfun$first$1.apply(RDD.scala:1312)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
      at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
      at org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
      at org.apache.spark.rdd.RDD.first(RDD.scala:1311)
      at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
      at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
      at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
      at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:40)
      at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:42)
      at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:44)
      at $iwC$$iwC$$iwC$$iwC.<init>(<console>:46)
      at $iwC$$iwC$$iwC.<init>(<console>:48)
      at $iwC$$iwC.<init>(<console>:50)
      at $iwC.<init>(<console>:52)
      at <init>(<console>:54)
      at .<init>(<console>:58)
      at .<clinit>(<console>)
      at .<init>(<console>:7)
      at .<clinit>(<console>)
      at $print(<console>)
      at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
      at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
      at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      at java.lang.reflect.Method.invoke(Method.java:606)
      at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
      at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340)
      at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
      at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
      at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
      at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
      at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
      at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
      at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
      at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
      at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
      at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
      at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
      at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
      at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
      at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
      at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
      at org.apache.spark.repl.Main$.main(Main.scala:31)
      at org.apache.spark.repl.Main.main(Main.scala)
      at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
      at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
      at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      at java.lang.reflect.Method.invoke(Method.java:606)
      at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:672)
      at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180)
      at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205)
      at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120)
      at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
    {code}
    The same code on 1.4.0 works:
    {code}
    bin/spark-shell --packages com.datastax.spark:spark-cassandra-connector_2.10:1.4.0
    scala> import com.datastax.spark.connector._
    scala> val rdd = sc.cassandraTable("keyspace", "table")
    scala> rdd.first()
    res0: com.datastax.spark.connector.CassandraRow = CassandraRow{key: fdhaisfoiafh78qfrqew, column1: fkljdhasfds89afudas98hfdasif, value: 2015-10-21 15:32:47+0200}
    {code}
    (A minimal sketch of the failing type lookup follows this list of reports.)

    DataStax JIRA | 1 year ago | Maciek Bryński
    java.util.NoSuchElementException: key not found: 'org.apache.cassandra.db.marshal.DateType'
  2. 0

    a bug in bagOfWords

    GitHub | 2 years ago | breezer2020
    java.util.NoSuchElementException: key not found: of
  3. 0

    can: HostConnector dies when going from Idle => Unconnected

    GitHub | 3 years ago | jrudolph
    java.util.NoSuchElementException: key not found: Actor[akka://default/user/IO-HTTP/host-connector-0/1#1484964495]
  4. 0

    java.util.NoSuchElementException during ClusterSharding recovery

    GitHub | 2 years ago | cowboy129
    java.util.NoSuchElementException: key not found: Actor[akka.tcp://ClusterSystem@xxx/user/sharding/ConnectionActives#554358472]
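Every report above reduces to the same Scala idiom: calling apply on a Map for a key that was never registered. In the connector's case the driver reports the column as the legacy type org.apache.cassandra.db.marshal.DateType, and ColumnType$.fromDriverType looks that name up in a type-mapping table that has no entry for it in 1.5.0-M2, so the lookup escalates into NoSuchElementException. Below is a minimal sketch of that lookup pattern; the map contents and names are hypothetical stand-ins (the real table lives in ColumnType.scala), and the getOrElse variant only illustrates how the same miss could fail with a clearer message.
{code}
// Sketch only: hypothetical driver-type -> connector-type table.
object ColumnTypeLookupSketch {

  private val typeMap: Map[String, String] = Map(
    "org.apache.cassandra.db.marshal.UTF8Type"      -> "text",
    "org.apache.cassandra.db.marshal.TimestampType" -> "timestamp"
    // no entry for the legacy org.apache.cassandra.db.marshal.DateType
  )

  // Map.apply throws java.util.NoSuchElementException("key not found: ...")
  // when the key is absent -- the failure mode seen in fromDriverType.
  def fromDriverType(marshalClass: String): String =
    typeMap(marshalClass)

  // Defensive variant: same miss, clearer error.
  def fromDriverTypeSafe(marshalClass: String): String =
    typeMap.getOrElse(
      marshalClass,
      throw new IllegalArgumentException(s"Unsupported Cassandra column type: $marshalClass"))

  def main(args: Array[String]): Unit = {
    println(fromDriverType("org.apache.cassandra.db.marshal.UTF8Type")) // prints: text
    // fromDriverType("org.apache.cassandra.db.marshal.DateType")
    //   throws java.util.NoSuchElementException: key not found: org.apache.cassandra.db.marshal.DateType
    // fromDriverTypeSafe("org.apache.cassandra.db.marshal.DateType")
    //   throws IllegalArgumentException: Unsupported Cassandra column type: ...
  }
}
{code}
The sketch is not a fix for the connector; it only shows why an unmapped column type surfaces as a bare "key not found" rather than an "unsupported type" error, and why the same table reads fine on 1.4.0, which evidently still maps this type.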

Root Cause Analysis

  1. java.util.NoSuchElementException

    key not found: 'org.apache.cassandra.db.marshal.DateType'

    at scala.collection.MapLike$class.default()
  2. Scala
    AbstractMap.apply
    1. scala.collection.MapLike$class.default(MapLike.scala:228)
    2. scala.collection.AbstractMap.default(Map.scala:58)
    3. scala.collection.MapLike$class.apply(MapLike.scala:141)
    4. scala.collection.AbstractMap.apply(Map.scala:58)
    4 frames
  3. spark-cassandra-connector
    Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchRegularColumns$1.apply
    1. com.datastax.spark.connector.types.ColumnType$.fromDriverType(ColumnType.scala:79)
    2. com.datastax.spark.connector.cql.ColumnDef$.apply(Schema.scala:117)
    3. com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchRegularColumns$1.apply(Schema.scala:208)
    4. com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchRegularColumns$1.apply(Schema.scala:204)
    4 frames
  4. Scala
    AbstractTraversable.map
    1. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    2. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    3. scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    4. scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    5. scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
    6. scala.collection.AbstractTraversable.map(Traversable.scala:105)
    6 frames
  5. spark-cassandra-connector
    Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply
    1. com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchRegularColumns(Schema.scala:204)
    2. com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:233)
    3. com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchTables$1$2.apply(Schema.scala:230)
    3 frames
  6. Scala
    TraversableLike$WithFilter.map
    1. scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
    2. scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
    3. scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
    4. scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
    5. scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
    5 frames
  7. spark-cassandra-connector
    Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply
    1. com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchTables$1(Schema.scala:230)
    2. com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:239)
    3. com.datastax.spark.connector.cql.Schema$$anonfun$com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1$2.apply(Schema.scala:238)
    3 frames
  8. Scala
    TraversableLike$WithFilter.map
    1. scala.collection.TraversableLike$WithFilter$$anonfun$map$2.apply(TraversableLike.scala:722)
    2. scala.collection.immutable.HashSet$HashSet1.foreach(HashSet.scala:153)
    3. scala.collection.immutable.HashSet$HashTrieSet.foreach(HashSet.scala:306)
    4. scala.collection.TraversableLike$WithFilter.map(TraversableLike.scala:721)
    4 frames
  9. spark-cassandra-connector
    CassandraTableScanRDD.getPartitions
    1. com.datastax.spark.connector.cql.Schema$.com$datastax$spark$connector$cql$Schema$$fetchKeyspaces$1(Schema.scala:238)
    2. com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:244)
    3. com.datastax.spark.connector.cql.Schema$$anonfun$fromCassandra$1.apply(Schema.scala:241)
    4. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:121)
    5. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withClusterDo$1.apply(CassandraConnector.scala:120)
    6. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:110)
    7. com.datastax.spark.connector.cql.CassandraConnector$$anonfun$withSessionDo$1.apply(CassandraConnector.scala:109)
    8. com.datastax.spark.connector.cql.CassandraConnector.closeResourceAfterUse(CassandraConnector.scala:139)
    9. com.datastax.spark.connector.cql.CassandraConnector.withSessionDo(CassandraConnector.scala:109)
    10. com.datastax.spark.connector.cql.CassandraConnector.withClusterDo(CassandraConnector.scala:120)
    11. com.datastax.spark.connector.cql.Schema$.fromCassandra(Schema.scala:241)
    12. com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.tableDef(CassandraTableRowReaderProvider.scala:51)
    13. com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef$lzycompute(CassandraTableScanRDD.scala:59)
    14. com.datastax.spark.connector.rdd.CassandraTableScanRDD.tableDef(CassandraTableScanRDD.scala:59)
    15. com.datastax.spark.connector.rdd.CassandraTableRowReaderProvider$class.verify(CassandraTableRowReaderProvider.scala:146)
    16. com.datastax.spark.connector.rdd.CassandraTableScanRDD.verify(CassandraTableScanRDD.scala:59)
    17. com.datastax.spark.connector.rdd.CassandraTableScanRDD.getPartitions(CassandraTableScanRDD.scala:143)
    17 frames
  10. Spark
    RDD$$anonfun$partitions$2.apply
    1. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
    2. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
    2 frames
  11. Scala
    Option.getOrElse
    1. scala.Option.getOrElse(Option.scala:120)
    1 frame
  12. Spark
    RDD.take
    1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
    2. org.apache.spark.rdd.RDD$$anonfun$take$1.apply(RDD.scala:1277)
    3. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    4. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    5. org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
    6. org.apache.spark.rdd.RDD.take(RDD.scala:1272)
    6 frames
  13. spark-cassandra-connector
    CassandraRDD.take
    1. com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:121)
    2. com.datastax.spark.connector.rdd.CassandraRDD.take(CassandraRDD.scala:122)
    2 frames
  14. Spark
    RDD.first
    1. org.apache.spark.rdd.RDD$$anonfun$first$1.apply(RDD.scala:1312)
    2. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    3. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    4. org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
    5. org.apache.spark.rdd.RDD.first(RDD.scala:1311)
    5 frames
  15. Unknown
    $iwC.<init>
    1. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
    2. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:36)
    3. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:38)
    4. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:40)
    5. $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:42)
    6. $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:44)
    7. $iwC$$iwC$$iwC$$iwC.<init>(<console>:46)
    8. $iwC$$iwC$$iwC.<init>(<console>:48)
    9. $iwC$$iwC.<init>(<console>:50)
    10. $iwC.<init>(<console>:52)
    10 frames
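The first two groups of frames in this analysis are just the Scala collections library reporting the miss: Map.apply delegates to default(key) when the key is absent, and the stock default implementation throws NoSuchElementException("key not found: " + key). A minimal reproduction of exactly that frame chain, with a made-up map:
{code}
object MapDefaultSketch extends App {
  // Map.apply -> MapLike.default -> NoSuchElementException, as in frames 1-4 above.
  val knownTypes = Map("org.apache.cassandra.db.marshal.UTF8Type" -> "text")
  try knownTypes("org.apache.cassandra.db.marshal.DateType")
  catch {
    case e: NoSuchElementException =>
      println(e) // java.util.NoSuchElementException: key not found: org.apache.cassandra.db.marshal.DateType
  }
}
{code}
From frame 5 onward the trace is simply the connector's schema fetch (fetchRegularColumns, fetchTables, fetchKeyspaces) and the Spark RDD machinery that triggered it via rdd.first().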