java.lang.NullPointerException

Stack Overflow | MaxNevermind | 2 months ago
Related report:
  1. Unknown Error
     GitHub | Gitweazle | 3 years ago
     java.lang.NullPointerException

Users who have hit this exception:
  1. ajinkya_w: 1 time, last seen 2 months ago
  2. emmanuelstroem: 1 time, last seen 4 months ago
  3. adawolfs: 31 times, last seen 1 month ago
  4. Diogo Jaym: 6 times, last seen 5 months ago
  5. Bardh: 7 times, last seen 6 months ago
  ...plus 6 more registered users and 20 unregistered visitors

Root Cause Analysis

  1. java.lang.NullPointerException

    No message provided

    at java.io.File.<init>()
    (java.io.File constructors throw a message-less NullPointerException when handed a null path argument, which is why no message is shown; see the sketches after the frames below)
  2. Java RT
    File.<init>
    1. java.io.File.<init>(File.java:363)
    1 frame
  3. Spark
    NettyBlockRpcServer$$anonfun$2.apply
    1. org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:77)
    2. org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:80)
    3. org.apache.spark.shuffle.IndexShuffleBlockResolver.getDataFile(IndexShuffleBlockResolver.scala:54)
    4. org.apache.spark.shuffle.IndexShuffleBlockResolver.getBlockData(IndexShuffleBlockResolver.scala:199)
    5. org.apache.spark.storage.BlockManager.getBlockData(BlockManager.scala:278)
    6. org.apache.spark.network.netty.NettyBlockRpcServer$$anonfun$2.apply(NettyBlockRpcServer.scala:60)
    7. org.apache.spark.network.netty.NettyBlockRpcServer$$anonfun$2.apply(NettyBlockRpcServer.scala:60)
    7 frames
  4. Scala
    ArrayOps$ofRef.map
    1. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    2. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    3. scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    4. scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
    5. scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    6. scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
    6 frames
  5. Spark
    NettyBlockRpcServer.receive
    1. org.apache.spark.network.netty.NettyBlockRpcServer.receive(NettyBlockRpcServer.scala:60)
    1 frame
  6. Spark
    TransportChannelHandler.channelRead0
    1. org.apache.spark.network.server.TransportRequestHandler.processRpcRequest(TransportRequestHandler.java:158)
    2. org.apache.spark.network.server.TransportRequestHandler.handle(TransportRequestHandler.java:106)
    3. org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:119)
    4. org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:51)
    4 frames
  7. Netty
    AbstractChannelHandlerContext.fireChannelRead
    1. io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    2. io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    3. io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    4. io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:266)
    5. io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    6. io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    7. io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
    8. io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    9. io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    9 frames
  8. Spark
    TransportFrameDecoder.channelRead
    1. org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85)
    1 frame
  9. Netty
    SingleThreadEventExecutor$2.run
    1. io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    2. io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    3. io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
    4. io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
    5. io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
    6. io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
    7. io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
    8. io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
    9. io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
    9 frames
  10. Java RT
    Thread.run
    1. java.lang.Thread.run(Thread.java:745)
    1 frame
  11. Spark
    ShuffleBlockFetcherIterator.next
    1. org.apache.spark.storage.ShuffleBlockFetcherIterator.throwFetchFailedException(ShuffleBlockFetcherIterator.scala:357)
    2. org.apache.spark.storage.ShuffleBlockFetcherIterator.next(ShuffleBlockFetcherIterator.scala:332)
    3. org.apache.spark.storage.ShuffleBlockFetcherIterator.next(ShuffleBlockFetcherIterator.scala:54)
    3 frames
  12. Scala
    Iterator$$anon$11.hasNext
    1. scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    2. scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:434)
    3. scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440)
    4. scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    4 frames
  13. Spark
    InterruptibleIterator.hasNext
    1. org.apache.spark.util.CompletionIterator.hasNext(CompletionIterator.scala:32)
    2. org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:39)
    2 frames
  14. Scala
    Iterator$$anon$11.hasNext
    1. scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    1 frame
  15. Spark Project Catalyst
    GeneratedClass$GeneratedIterator.processNext
    1. org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.sort_addToSorter$(Unknown Source)
    2. org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source)
    2 frames
  16. Spark Project SQL
    SortMergeJoinExec$$anonfun$doExecute$1.apply
    1. org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    2. org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
    3. org.apache.spark.sql.execution.RowIteratorFromScala.advanceNext(RowIterator.scala:83)
    4. org.apache.spark.sql.execution.joins.SortMergeJoinScanner.advancedBufferedToRowWithNullFreeJoinKey(SortMergeJoinExec.scala:730)
    5. org.apache.spark.sql.execution.joins.SortMergeJoinScanner.<init>(SortMergeJoinExec.scala:605)
    6. org.apache.spark.sql.execution.joins.SortMergeJoinExec$$anonfun$doExecute$1.apply(SortMergeJoinExec.scala:162)
    7. org.apache.spark.sql.execution.joins.SortMergeJoinExec$$anonfun$doExecute$1.apply(SortMergeJoinExec.scala:100)
    7 frames
  17. Spark
    Executor$TaskRunner.run
    1. org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    2. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    3. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    4. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    5. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    6. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    7. org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    8. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    9. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    10. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    11. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    12. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    13. org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    14. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    15. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    16. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    17. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    18. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    19. org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    20. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    21. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    22. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    23. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    24. org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    25. org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70)
    26. org.apache.spark.scheduler.Task.run(Task.scala:85)
    27. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
    27 frames
  18. Java RT
    Thread.run
    1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    3. java.lang.Thread.run(Thread.java:745)
    3 frames
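
What the root-cause frame means: java.io.File's constructors null-check their path arguments and throw a NullPointerException with no message, which matches the "No message provided" line in frame 1. A minimal sketch of that JDK contract follows; it is illustrative only and not taken from this crash (the file name is made up):

    import java.io.File

    object FileNullDemo {
      def main(args: Array[String]): Unit = {
        val parent = new File("/tmp/spark-local")

        // A null *parent* is tolerated: the child path stands on its own.
        val ok = new File(null: String, "shuffle_0_0_0.data")
        println(s"null parent is fine: $ok")

        // A null *child* is not: the constructor null-checks it and throws
        // a message-less NullPointerException before touching the filesystem.
        try {
          new File(parent, null: String)
        } catch {
          case e: NullPointerException => println(s"thrown by File.<init>: $e")
        }
      }
    }

So if a shuffle block's file name ever arrives as null, File.<init> is where it finally surfaces.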
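Where the null plausibly enters: the Spark group in frames 3-6 shows the NPE escaping DiskBlockManager.getFile while an executor serves a shuffle data file over Netty RPC (the DiskBlockManager.scala:80 frame is presumably the BlockId overload delegating to the filename overload). Below is a simplified paraphrase of that lookup based on the Spark 2.0.x source, not the exact code; the final new File(subDir, filename) is the call that reaches File.<init>, so a null or otherwise broken file name upstream is the thing to hunt for. Frames 11-18 then show the same failure rematerializing on the fetching executor as a fetch failure inside a sort-merge join.

    import java.io.File
    import java.io.IOException

    // Simplified sketch of DiskBlockManager.getFile, paraphrased from
    // Spark 2.0.x (not the exact source). Block files are spread across
    // the configured local dirs plus hashed subdirectories.
    class DiskBlockManagerSketch(localDirs: Array[File], subDirsPerLocalDir: Int = 64) {
      private val subDirs =
        Array.fill(localDirs.length)(new Array[File](subDirsPerLocalDir))

      def getFile(filename: String): File = {
        // Spark's Utils.nonNegativeHash maps null to 0, so a null filename
        // sails through the hashing...
        val hash = if (filename == null) 0 else filename.hashCode & Int.MaxValue
        val dirId = hash % localDirs.length
        val subDirId = (hash / localDirs.length) % subDirsPerLocalDir

        // Create the subdirectory lazily on first use.
        val subDir = subDirs(dirId).synchronized {
          val old = subDirs(dirId)(subDirId)
          if (old != null) old
          else {
            val newDir = new File(localDirs(dirId), "%02x".format(subDirId))
            if (!newDir.exists() && !newDir.mkdirs()) {
              throw new IOException(s"Failed to create local dir in $newDir")
            }
            subDirs(dirId)(subDirId) = newDir
            newDir
          }
        }

        // ...and only blows up here: File(File, String) null-checks the
        // child, matching the File.<init> frame at the top of the trace.
        new File(subDir, filename)
      }
    }

Nothing in the trace identifies which caller handed getFile a bad name, so treat this as a pointer for where to add logging, not a diagnosis.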