java.lang.ArrayIndexOutOfBoundsException: 1  at kafka.client.ClientUtils$$anonfun$parseBrokerList$1.apply(ClientUtils.scala:102)  at kafka.client.ClientUtils$$anonfun$parseBrokerList$1.apply(ClientUtils.scala:97)  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)  at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)  at scala.collection.AbstractTraversable.map(Traversable.scala:105)  at kafka.client.ClientUtils$.parseBrokerList(ClientUtils.scala:97)  at kafka.producer.BrokerPartitionInfo.<init>(BrokerPartitionInfo.scala:32)  at kafka.producer.async.DefaultEventHandler.<init>(DefaultEventHandler.scala:41)  at kafka.producer.Producer.<init>(Producer.scala:59)  at kafka.perf.ProducerPerformance$ProducerThread.<init>(ProducerPerformance.scala:196)  at kafka.perf.ProducerPerformance$$anonfun$main$1.apply$mcVI$sp(ProducerPerformance.scala:57)  at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)  at kafka.perf.ProducerPerformance$.main(ProducerPerformance.scala:56) 

hortonworks.com | 2 months ago
  1. 0

    Kafka broker produces an array index out of bounds error - Hortonworks

    hortonworks.com | 2 months ago
    java.lang.ArrayIndexOutOfBoundsException: 1  at kafka.client.ClientUtils$$anonfun$parseBrokerList$1.apply(ClientUtils.scala:102)  at kafka.client.ClientUtils$$anonfun$parseBrokerList$1.apply(ClientUtils.scala:97)  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)  at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)  at scala.collection.AbstractTraversable.map(Traversable.scala:105)  at kafka.client.ClientUtils$.parseBrokerList(ClientUtils.scala:97)  at kafka.producer.BrokerPartitionInfo.<init>(BrokerPartitionInfo.scala:32)  at kafka.producer.async.DefaultEventHandler.<init>(DefaultEventHandler.scala:41)  at kafka.producer.Producer.<init>(Producer.scala:59)  at kafka.perf.ProducerPerformance$ProducerThread.<init>(ProducerPerformance.scala:196)  at kafka.perf.ProducerPerformance$$anonfun$main$1.apply$mcVI$sp(ProducerPerformance.scala:57)  at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)  at kafka.perf.ProducerPerformance$.main(ProducerPerformance.scala:56) 
  2. 0

    pio deploy in IntelliJ Idea fails

    Google Groups | 1 year ago | VicP
    java.lang.ArrayIndexOutOfBoundsException: 1 at io.prediction.data.storage.jdbc.JDBCUtils$$anonfun$stringToMap$1.apply(JDBCUtils.scala:78) at io.prediction.data.storage.jdbc.JDBCUtils$$anonfun$stringToMap$1.apply(JDBCUtils.scala:76) at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
  3. 0

    Apache Spark User List - Spark MLLIB Decision Tree - ArrayIndexOutOfBounds Exception

    nabble.com | 11 months ago
    java.lang.ArrayIndexOutOfBoundsException: 6301 org.apache.spark.mllib.tree.DecisionTree$.updateBinForOrderedFeature$1(DecisionTree.scala:648) org.apache.spark.mllib.tree.DecisionTree$.binaryOrNotCategoricalBinSeqOp$1(DecisionTree.scala:706) org.apache.spark.mllib.tree.DecisionTree$.org$apache$spark$mllib$tree$DecisionTree$$binSeqOp$1(DecisionTree.scala:798) org.apache.spark.mllib.tree.DecisionTree$$anonfun$3.apply(DecisionTree.scala:830) org.apache.spark.mllib.tree.DecisionTree$$anonfun$3.apply(DecisionTree.scala:830) scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:144) scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:144) scala.collection.Iterator$class.foreach(Iterator.scala:727) org.apache.spark.InterruptibleIterator.foreach(InterruptibleIterator.scala:28) scala.collection.TraversableOnce$class.foldLeft(TraversableOnce.scala:144) org.apache.spark.InterruptibleIterator.foldLeft(InterruptibleIterator.scala:28) scala.collection.TraversableOnce$class.aggregate(TraversableOnce.scala:201) org.apache.spark.InterruptibleIterator.aggregate(InterruptibleIterator.scala:28) org.apache.spark.mllib.rdd.RDDFunctions$$anonfun$4.apply(RDDFunctions.scala:99) org.apache.spark.mllib.rdd.RDDFunctions$$anonfun$4.apply(RDDFunctions.scala:99) org.apache.spark.mllib.rdd.RDDFunctions$$anonfun$5.apply(RDDFunctions.scala:100) org.apache.spark.mllib.rdd.RDDFunctions$$anonfun$5.apply(RDDFunctions.scala:100) org.apache.spark.rdd.RDD$$anonfun$13.apply(RDD.scala:596) org.apache.spark.rdd.RDD$$anonfun$13.apply(RDD.scala:596) org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:262) org.apache.spark.rdd.RDD.iterator(RDD.scala:229) org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:62) org.apache.spark.scheduler.Task.run(Task.scala:54) org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:177) 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) java.lang.Thread.run(Thread.java:745) Driver stacktrace: at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$failJobAndIndependentStages(DAGScheduler.scala:1185) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1174) at org.apache.spark.scheduler.DAGScheduler$$anonfun$abortStage$1.apply(DAGScheduler.scala:1173) at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59) at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47) at org.apache.spark.scheduler.DAGScheduler.abortStage(DAGScheduler.scala:1173) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688) at org.apache.spark.scheduler.DAGScheduler$$anonfun$handleTaskSetFailed$1.apply(DAGScheduler.scala:688)
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    Apache Spark User List - Spark MLLIB Decision Tree - ArrayIndexOutOfBounds Exception

    nabble.com | 11 months ago
    java.lang.ArrayIndexOutOfBoundsException: 6301 at org.apache.spark.mllib.tree.DecisionTree$.updateBinForOrderedFeature$1(DecisionTree.scala:648) at org.apache.spark.mllib.tree.DecisionTree$.binaryOrNotCategoricalBinSeqOp$1(DecisionTree.scala:706) at org.apache.spark.mllib.tree.DecisionTree$.org$apache$spark$mllib$tree$DecisionTree$$binSeqOp$1(DecisionTree.scala:798) at org.apache.spark.mllib.tree.DecisionTree$$anonfun$3.apply(DecisionTree.scala:830) at org.apache.spark.mllib.tree.DecisionTree$$anonfun$3.apply(DecisionTree.scala:830) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:144) at scala.collection.TraversableOnce$$anonfun$foldLeft$1.apply(TraversableOnce.scala:144)
  6. 0

    RE: java.lang.ArrayIndexOutOfBoundsException: 0 on Yarn Client

    apache.org | 11 months ago
    java.lang.ArrayIndexOutOfBoundsException: 0 at org.apache.spark.sql.catalyst.CatalystTypeConverters$.convertRowWithConverters(CatalystTypeConverters.scala:348) at org.apache.spark.sql.catalyst.CatalystTypeConverters$$anonfun$createToCatalystConverter$4.apply(CatalystTypeConverters.scala:180) at org.apache.spark.sql.SQLContext$$anonfun$9.apply(SQLContext.scala:488) at org.apache.spark.sql.SQLContext$$anonfun$9.apply(SQLContext.scala:488)

    Not finding the right solution?
    Take a tour to get the most out of Samebug.

    Tired of useless tips?

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. java.lang.ArrayIndexOutOfBoundsException

      1  at kafka.client.ClientUtils$$anonfun$parseBrokerList$1.apply(ClientUtils.scala:102)  at kafka.client.ClientUtils$$anonfun$parseBrokerList$1.apply(ClientUtils.scala:97)  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)  at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)  at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)  at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)  at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)  at scala.collection.AbstractTraversable.map(Traversable.scala:105)  at kafka.client.ClientUtils$.parseBrokerList(ClientUtils.scala:97)  at kafka.producer.BrokerPartitionInfo.<init>(BrokerPartitionInfo.scala:32)  at kafka.producer.async.DefaultEventHandler.<init>(DefaultEventHandler.scala:41)  at kafka.producer.Producer.<init>(Producer.scala:59)  at kafka.perf.ProducerPerformance$ProducerThread.<init>(ProducerPerformance.scala:196)  at kafka.perf.ProducerPerformance$$anonfun$main$1.apply$mcVI$sp(ProducerPerformance.scala:57)  at scala.collection.immutable.Range.foreach$mVc$sp(Range.scala:141)  at kafka.perf.ProducerPerformance$.main(ProducerPerformance.scala:56) 

      at kafka.perf.ProducerPerformance.main()
    2. kafka.perf
      ProducerPerformance.main
      1. kafka.perf.ProducerPerformance.main(ProducerPerformance.scala)
      1 frame