Solutions on the web

via Stack Overflow by Vico_Wu, 1 year ago
via Stack Overflow by Raúl García, 7 months ago
via Stack Overflow by colossal, 1 year ago

java.lang.IllegalStateException: No current assignment for partition ABTest-0
    at org.apache.kafka.clients.consumer.internals.SubscriptionState.assignedState(SubscriptionState.java:231)
    at org.apache.kafka.clients.consumer.internals.SubscriptionState.needOffsetReset(SubscriptionState.java:295)
    at org.apache.kafka.clients.consumer.KafkaConsumer.seekToEnd(KafkaConsumer.java:1169)
    at org.apache.spark.streaming.kafka010.DirectKafkaInputDStream.latestOffsets(DirectKafkaInputDStream.scala:179)
    at org.apache.spark.streaming.kafka010.DirectKafkaInputDStream.compute(DirectKafkaInputDStream.scala:196)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:415)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:335)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:333)
    at scala.Option.orElse(Option.scala:289)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:330)
    at org.apache.spark.streaming.dstream.ForEachDStream.generateJob(ForEachDStream.scala:48)
    at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:117)
    at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:116)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
    at scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
    at org.apache.spark.streaming.DStreamGraph.generateJobs(DStreamGraph.scala:116)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:248)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:246)
    at scala.util.Try$.apply(Try.scala:192)
    at org.apache.spark.streaming.scheduler.JobGenerator.generateJobs(JobGenerator.scala:246)
    at org.apache.spark.streaming.scheduler.JobGenerator.org$apache$spark$streaming$scheduler$JobGenerator$$processEvent(JobGenerator.scala:182)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:88)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:87)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
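
Reading the trace bottom-up: Spark's JobGenerator asks DirectKafkaInputDStream for the latest offsets, which calls KafkaConsumer.seekToEnd on partition ABTest-0, and SubscriptionState.assignedState throws because that partition is not currently assigned to the consumer. The minimal Java sketch below reproduces the same exception with the plain Kafka consumer API by seeking on a subscribed topic before the first poll(), i.e. before any assignment exists. The broker address, group id, and class name are hypothetical; only the topic name ABTest comes from the trace above.

import java.util.Collections;
import java.util.Properties;

import org.apache.kafka.clients.consumer.KafkaConsumer;
import org.apache.kafka.common.TopicPartition;

public class SeekWithoutAssignment {

    public static void main(String[] args) {
        // Hypothetical connection settings for illustration only.
        Properties props = new Properties();
        props.put("bootstrap.servers", "localhost:9092");
        props.put("group.id", "demo-group");
        props.put("key.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");
        props.put("value.deserializer", "org.apache.kafka.common.serialization.StringDeserializer");

        try (KafkaConsumer<String, String> consumer = new KafkaConsumer<>(props)) {
            // subscribe() only registers interest in the topic; partitions are
            // handed to this consumer inside poll(), once the group rebalance
            // completes.
            consumer.subscribe(Collections.singletonList("ABTest"));

            // Seeking before the first poll() means ABTest-0 is not yet in the
            // consumer's assignment, so the client throws:
            //   java.lang.IllegalStateException: No current assignment for partition ABTest-0
            consumer.seekToEnd(Collections.singletonList(new TopicPartition("ABTest", 0)));
        }
    }
}

With subscribe(), the usual remedy is to poll() once (or seek from a rebalance callback) before calling seekToEnd, or to use assign() and take the partition explicitly. In the Spark Streaming case the consumer is owned by DirectKafkaInputDStream, so commonly reported causes are a group rebalance or a mismatch between the partitions Spark is tracking and the consumer's actual assignment, for example when the same group.id is shared by another consumer.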