Searched on Google with the first line of a Java stack trace?

We can recommend more relevant solutions and speed up debugging when you paste your entire stack trace together with the exception message.

Recommended solutions based on your search

Solutions on the web

via Sakai JIRA by Terry Brady, 1 year ago
via Oracle Community by 3004, 1 year ago
This exception has no message.
via GitHub by yavladys, 1 year ago
java.lang.NullPointerException
    at java.io.File.&lt;init&gt;(File.java:363)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:77)
    at org.apache.spark.storage.DiskBlockManager.getFile(DiskBlockManager.scala:80)
    at org.apache.spark.shuffle.IndexShuffleBlockResolver.getDataFile(IndexShuffleBlockResolver.scala:54)
    at org.apache.spark.shuffle.IndexShuffleBlockResolver.getBlockData(IndexShuffleBlockResolver.scala:199)
    at org.apache.spark.storage.BlockManager.getBlockData(BlockManager.scala:278)
    at org.apache.spark.network.netty.NettyBlockRpcServer$$anonfun$2.apply(NettyBlockRpcServer.scala:60)
    at org.apache.spark.network.netty.NettyBlockRpcServer$$anonfun$2.apply(NettyBlockRpcServer.scala:60)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
    at org.apache.spark.network.netty.NettyBlockRpcServer.receive(NettyBlockRpcServer.scala:60)
    at org.apache.spark.network.server.TransportRequestHandler.processRpcRequest(TransportRequestHandler.java:158)
    at org.apache.spark.network.server.TransportRequestHandler.handle(TransportRequestHandler.java:106)
    at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:119)
    at org.apache.spark.network.server.TransportChannelHandler.channelRead0(TransportChannelHandler.java:51)
    at io.netty.channel.SimpleChannelInboundHandler.channelRead(SimpleChannelInboundHandler.java:105)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    at io.netty.handler.timeout.IdleStateHandler.channelRead(IdleStateHandler.java:266)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    at io.netty.handler.codec.MessageToMessageDecoder.channelRead(MessageToMessageDecoder.java:103)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    at org.apache.spark.network.util.TransportFrameDecoder.channelRead(TransportFrameDecoder.java:85)
    at io.netty.channel.AbstractChannelHandlerContext.invokeChannelRead(AbstractChannelHandlerContext.java:308)
    at io.netty.channel.AbstractChannelHandlerContext.fireChannelRead(AbstractChannelHandlerContext.java:294)
    at io.netty.channel.DefaultChannelPipeline.fireChannelRead(DefaultChannelPipeline.java:846)
    at io.netty.channel.nio.AbstractNioByteChannel$NioByteUnsafe.read(AbstractNioByteChannel.java:131)
    at io.netty.channel.nio.NioEventLoop.processSelectedKey(NioEventLoop.java:511)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeysOptimized(NioEventLoop.java:468)
    at io.netty.channel.nio.NioEventLoop.processSelectedKeys(NioEventLoop.java:382)
    at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:354)
    at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:111)
    at java.lang.Thread.run(Thread.java:745)
    at org.apache.spark.storage.ShuffleBlockFetcherIterator.throwFetchFailedException(ShuffleBlockFetcherIterator.scala:357)
    at org.apache.spark.storage.ShuffleBlockFetcherIterator.next(ShuffleBlockFetcherIterator.scala:332)
    at org.apache.spark.storage.ShuffleBlockFetcherIterator.next(ShuffleBlockFetcherIterator.scala:54)
    at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
    at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:434)
    at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at org.apache.spark.util.CompletionIterator.hasNext(CompletionIterator.scala:32)
    at org.apache.spark.InterruptibleIterator.hasNext(InterruptibleIterator.scala:39)
    at scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:408)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.sort_addToSorter$(Unknown Source)
    at org.apache.spark.sql.catalyst.expressions.GeneratedClass$GeneratedIterator.processNext(Unknown Source)
    at org.apache.spark.sql.execution.BufferedRowIterator.hasNext(BufferedRowIterator.java:43)
    at org.apache.spark.sql.execution.WholeStageCodegenExec$$anonfun$8$$anon$1.hasNext(WholeStageCodegenExec.scala:370)
    at org.apache.spark.sql.execution.RowIteratorFromScala.advanceNext(RowIterator.scala:83)
    at org.apache.spark.sql.execution.joins.SortMergeJoinScanner.advancedBufferedToRowWithNullFreeJoinKey(SortMergeJoinExec.scala:730)
    at org.apache.spark.sql.execution.joins.SortMergeJoinScanner.&lt;init&gt;(SortMergeJoinExec.scala:605)
    at org.apache.spark.sql.execution.joins.SortMergeJoinExec$$anonfun$doExecute$1.apply(SortMergeJoinExec.scala:162)
    at org.apache.spark.sql.execution.joins.SortMergeJoinExec$$anonfun$doExecute$1.apply(SortMergeJoinExec.scala:100)
    at org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.ZippedPartitionsRDD2.compute(ZippedPartitionsRDD.scala:89)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70)
    at org.apache.spark.scheduler.Task.run(Task.scala:85)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
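Reading the flattened trace: the frames down to the first Thread.run(Thread.java:745) appear to come from the executor that was serving the shuffle block over Netty (NettyBlockRpcServer, TransportRequestHandler), while the frames from ShuffleBlockFetcherIterator.throwFetchFailedException onward belong to the task that was fetching it; the aggregator appends both stacks into one list. The top frame itself is easy to reproduce in isolation: per the javadoc, the two-argument java.io.File constructors throw NullPointerException when the child pathname is null. A minimal sketch, where the null child merely stands in for whatever Spark failed to resolve and the class name and path are hypothetical:

    import java.io.File;

    public class FileNpeDemo {
        public static void main(String[] args) {
            File parent = new File("/tmp/spark-local"); // hypothetical local dir
            String child = null;                        // stands in for an unresolved block file name
            // File(File, String) throws NullPointerException when child is null,
            // producing a java.io.File.<init> frame like the one at the top of the trace.
            new File(parent, child);
        }
    }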
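Since the failing call path runs through DiskBlockManager.getFile, a frequently reported culprit for this class of error is a Spark local directory that is missing, full, or unwritable on the node serving the block. A sketch of pinning spark.local.dir to known-good disks, assuming a standalone deployment (the paths and app name are hypothetical, and on YARN the NodeManager's local directories typically override this property):

    import org.apache.spark.SparkConf;

    public class LocalDirConfig {
        public static void main(String[] args) {
            // Hypothetical directories: each must exist and be writable on every
            // node, or DiskBlockManager cannot place and resolve block files there.
            SparkConf conf = new SparkConf()
                    .setAppName("local-dir-check")
                    .set("spark.local.dir", "/mnt/disk1/spark,/mnt/disk2/spark");
            System.out.println(conf.get("spark.local.dir"));
        }
    }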