com.datastax.driver.core.exceptions.InvalidQueryException: Invalid null value for partition key part key

DataStax JIRA | Russell Spitzer | 1 year ago
  1. 0

    {code} 19:30:49 com.datastax.driver.core.exceptions.InvalidQueryException: Invalid null value for partition key part key 19:30:49 at com.datastax.driver.core.exceptions.InvalidQueryException.copy(InvalidQueryException.java:35) 19:30:49 at com.datastax.driver.core.DefaultResultSetFuture.extractCauseFromExecutionException(DefaultResultSetFuture.java:289) 19:30:49 at com.datastax.driver.core.DefaultResultSetFuture.getUninterruptibly(DefaultResultSetFuture.java:205) 19:30:49 at com.datastax.driver.core.AbstractSession.execute(AbstractSession.java:52) 19:30:49 at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source) 19:30:49 at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 19:30:49 at java.lang.reflect.Method.invoke(Method.java:606) 19:30:49 at com.datastax.spark.connector.cql.SessionProxy.invoke(SessionProxy.scala:33) 19:30:49 at com.sun.proxy.$Proxy12.execute(Unknown Source) 19:30:49 at com.datastax.spark.connector.rdd.CassandraJoinRDD$$anonfun$fetchIterator$1.apply(CassandraJoinRDD.scala:233) 19:30:49 at com.datastax.spark.connector.rdd.CassandraJoinRDD$$anonfun$fetchIterator$1.apply(CassandraJoinRDD.scala:231) 19:30:49 at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:396) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.hasNext(CountingIterator.scala:12) 19:30:49 at scala.collection.Iterator$class.foreach(Iterator.scala:750) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.foreach(CountingIterator.scala:4) 19:30:49 at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) 19:30:49 at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) 19:30:49 at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) 19:30:49 at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:295) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.to(CountingIterator.scala:4) 19:30:49 at 
scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:287) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.toBuffer(CountingIterator.scala:4) 19:30:49 at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:274) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.toArray(CountingIterator.scala:4) 19:30:49 at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:885) 19:30:49 at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:885) 19:30:49 at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1765) 19:30:49 at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1765) 19:30:49 at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63) 19:30:49 at org.apache.spark.scheduler.Task.run(Task.scala:70) 19:30:49 at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213) 19:30:49 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) 19:30:49 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) 19:30:49 at java.lang.Thread.run(Thread.java:744) {code} This came up on jenkins for a doc only change PR. We've also seen jenkins intermittently fail the actor streaming job which uses the same api.

    DataStax JIRA | 1 year ago | Russell Spitzer
    com.datastax.driver.core.exceptions.InvalidQueryException: Invalid null value for partition key part key
  2. 0

    {code} 19:30:49 com.datastax.driver.core.exceptions.InvalidQueryException: Invalid null value for partition key part key 19:30:49 at com.datastax.driver.core.exceptions.InvalidQueryException.copy(InvalidQueryException.java:35) 19:30:49 at com.datastax.driver.core.DefaultResultSetFuture.extractCauseFromExecutionException(DefaultResultSetFuture.java:289) 19:30:49 at com.datastax.driver.core.DefaultResultSetFuture.getUninterruptibly(DefaultResultSetFuture.java:205) 19:30:49 at com.datastax.driver.core.AbstractSession.execute(AbstractSession.java:52) 19:30:49 at sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source) 19:30:49 at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 19:30:49 at java.lang.reflect.Method.invoke(Method.java:606) 19:30:49 at com.datastax.spark.connector.cql.SessionProxy.invoke(SessionProxy.scala:33) 19:30:49 at com.sun.proxy.$Proxy12.execute(Unknown Source) 19:30:49 at com.datastax.spark.connector.rdd.CassandraJoinRDD$$anonfun$fetchIterator$1.apply(CassandraJoinRDD.scala:233) 19:30:49 at com.datastax.spark.connector.rdd.CassandraJoinRDD$$anonfun$fetchIterator$1.apply(CassandraJoinRDD.scala:231) 19:30:49 at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:396) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.hasNext(CountingIterator.scala:12) 19:30:49 at scala.collection.Iterator$class.foreach(Iterator.scala:750) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.foreach(CountingIterator.scala:4) 19:30:49 at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59) 19:30:49 at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104) 19:30:49 at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48) 19:30:49 at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:295) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.to(CountingIterator.scala:4) 19:30:49 at 
scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:287) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.toBuffer(CountingIterator.scala:4) 19:30:49 at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:274) 19:30:49 at com.datastax.spark.connector.util.CountingIterator.toArray(CountingIterator.scala:4) 19:30:49 at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:885) 19:30:49 at org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:885) 19:30:49 at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1765) 19:30:49 at org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1765) 19:30:49 at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63) 19:30:49 at org.apache.spark.scheduler.Task.run(Task.scala:70) 19:30:49 at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213) 19:30:49 at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) 19:30:49 at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) 19:30:49 at java.lang.Thread.run(Thread.java:744) {code} This came up on jenkins for a doc only change PR. We've also seen jenkins intermittently fail the actor streaming job which uses the same api.

    DataStax JIRA | 1 year ago | Russell Spitzer
    com.datastax.driver.core.exceptions.InvalidQueryException: Invalid null value for partition key part key
  3. 0

    The sum of all clustering columns is too long (65927 > 65535)

    Stack Overflow | 2 years ago | Anil
    com.datastax.driver.core.exceptions.InvalidQueryException: The sum of all clustering columns is too long (65927 > 65535)
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    cassandra-driver-core: pass list values to parameterized SimpleStatement

    Stack Overflow | 2 years ago | DoNuT
    com.datastax.driver.core.exceptions.InvalidQueryException: Expected 4 or 0 byte int (8)
  6. 0

    Unable to find compaction strategy class 'org.apache.cassandra.db.compaction.DateTieredCompactionStrategy'

    GitHub | 1 year ago | liyichao
    com.datastax.driver.core.exceptions.InvalidQueryException: Unable to find compaction strategy class 'org.apache.cassandra.db.compaction.DateTieredCompactionStrategy'

    4 unregistered visitors
    Not finding the right solution?
    Take a tour to get the most out of Samebug.

    Tired of useless tips?

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. com.datastax.driver.core.exceptions.InvalidQueryException

      Invalid null value for partition key part key

      at com.datastax.driver.core.exceptions.InvalidQueryException.copy()
    2. DataStax Java Driver for Apache Cassandra - Core
      AbstractSession.execute
      1. com.datastax.driver.core.exceptions.InvalidQueryException.copy(InvalidQueryException.java:35)
      2. com.datastax.driver.core.DefaultResultSetFuture.extractCauseFromExecutionException(DefaultResultSetFuture.java:289)
      3. com.datastax.driver.core.DefaultResultSetFuture.getUninterruptibly(DefaultResultSetFuture.java:205)
      4. com.datastax.driver.core.AbstractSession.execute(AbstractSession.java:52)
      4 frames
    3. Java RT
      Method.invoke
      1. sun.reflect.GeneratedMethodAccessor2.invoke(Unknown Source)
      2. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      3. java.lang.reflect.Method.invoke(Method.java:606)
      3 frames
    4. spark-cassandra-connector
      SessionProxy.invoke
      1. com.datastax.spark.connector.cql.SessionProxy.invoke(SessionProxy.scala:33)
      1 frame
    5. com.sun.proxy
      $Proxy12.execute
      1. com.sun.proxy.$Proxy12.execute(Unknown Source)
      1 frame
    6. spark-cassandra-connector
      CassandraJoinRDD$$anonfun$fetchIterator$1.apply
      1. com.datastax.spark.connector.rdd.CassandraJoinRDD$$anonfun$fetchIterator$1.apply(CassandraJoinRDD.scala:233)
      2. com.datastax.spark.connector.rdd.CassandraJoinRDD$$anonfun$fetchIterator$1.apply(CassandraJoinRDD.scala:231)
      2 frames
    7. Scala
      Iterator$$anon$12.hasNext
      1. scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:396)
      1 frame
    8. spark-cassandra-connector
      CountingIterator.hasNext
      1. com.datastax.spark.connector.util.CountingIterator.hasNext(CountingIterator.scala:12)
      1 frame
    9. Scala
      Iterator$class.foreach
      1. scala.collection.Iterator$class.foreach(Iterator.scala:750)
      1 frame
    10. spark-cassandra-connector
      CountingIterator.foreach
      1. com.datastax.spark.connector.util.CountingIterator.foreach(CountingIterator.scala:4)
      1 frame
    11. Scala
      TraversableOnce$class.to
      1. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:59)
      2. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:104)
      3. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:48)
      4. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:295)
      4 frames
    12. spark-cassandra-connector
      CountingIterator.to
      1. com.datastax.spark.connector.util.CountingIterator.to(CountingIterator.scala:4)
      1 frame
    13. Scala
      TraversableOnce$class.toBuffer
      1. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:287)
      1 frame
    14. spark-cassandra-connector
      CountingIterator.toBuffer
      1. com.datastax.spark.connector.util.CountingIterator.toBuffer(CountingIterator.scala:4)
      1 frame
    15. Scala
      TraversableOnce$class.toArray
      1. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:274)
      1 frame
    16. spark-cassandra-connector
      CountingIterator.toArray
      1. com.datastax.spark.connector.util.CountingIterator.toArray(CountingIterator.scala:4)
      1 frame
    17. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:885)
      2. org.apache.spark.rdd.RDD$$anonfun$collect$1$$anonfun$12.apply(RDD.scala:885)
      3. org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1765)
      4. org.apache.spark.SparkContext$$anonfun$runJob$5.apply(SparkContext.scala:1765)
      5. org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63)
      6. org.apache.spark.scheduler.Task.run(Task.scala:70)
      7. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213)
      7 frames
    18. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
      3. java.lang.Thread.run(Thread.java:744)
      3 frames