java.lang.NullPointerException

  1. 0

    Write text file from Spark Streaming

    Stack Overflow | 3 months ago | Somasundaram Sekar
    java.lang.NullPointerException
  2. 0

    mlcp still not working on Windows

    GitHub | 1 year ago | patrickmcelwee
    java.lang.NullPointerException
  3. Speed up your debug routine!

    Automated exception search integrated into your IDE

  4. 0

    Twitter Streaming no output in REPL on Windows

    Stack Overflow | 2 years ago
    java.lang.NullPointerException
  5. 0

    Job fails on loading com.databricks.spark.csv in SparkR shell - Code Help

    codehelpnet.com | 1 year ago
    java.lang.NullPointerException

  1. muffinmannen 3 times, last 8 months ago
18 unregistered visitors
Not finding the right solution?
Take a tour to get the most out of Samebug.

Tired of useless tips?

Automated exception search integrated into your IDE

Root Cause Analysis

  1. java.lang.NullPointerException

    No message provided

    at java.lang.ProcessBuilder.start()
  2. Java RT
    ProcessBuilder.start
    1. java.lang.ProcessBuilder.start(ProcessBuilder.java:1012)
    1 frame
  3. Hadoop
    FileSystem$4.next
    1. org.apache.hadoop.util.Shell.runCommand(Shell.java:483)
    2. org.apache.hadoop.util.Shell.run(Shell.java:456)
    3. org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:722)
    4. org.apache.hadoop.util.Shell.execCommand(Shell.java:815)
    5. org.apache.hadoop.util.Shell.execCommand(Shell.java:798)
    6. org.apache.hadoop.fs.FileUtil.execCommand(FileUtil.java:1097)
    7. org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:657)
    8. org.apache.hadoop.fs.RawLocalFileSystem$DeprecatedRawLocalFileStatus.getPermission(RawLocalFileSystem.java:632)
    9. org.apache.hadoop.fs.LocatedFileStatus.<init>(LocatedFileStatus.java:49)
    10. org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1729)
    11. org.apache.hadoop.fs.FileSystem$4.next(FileSystem.java:1711)
    11 frames
  4. Hadoop
    FileInputFormat.getSplits
    1. org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:305)
    2. org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:265)
    3. org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:387)
    3 frames
  5. Spark
    RDD$$anonfun$partitions$2.apply
    1. org.apache.spark.rdd.NewHadoopRDD.getPartitions(NewHadoopRDD.scala:121)
    2. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:248)
    3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:246)
    3 frames
  6. Scala
    Option.getOrElse
    1. scala.Option.getOrElse(Option.scala:121)
    1 frame
  7. Spark
    RDD.partitions
    1. org.apache.spark.rdd.RDD.partitions(RDD.scala:246)
    1 frame
  8. Spark Project Streaming
    FileInputDStream$$anonfun$5.apply
    1. org.apache.spark.streaming.dstream.FileInputDStream$$anonfun$5.apply(FileInputDStream.scala:285)
    2. org.apache.spark.streaming.dstream.FileInputDStream$$anonfun$5.apply(FileInputDStream.scala:275)
    2 frames
  9. Scala
    AbstractTraversable.map
    1. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    2. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    3. scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    4. scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:35)
    5. scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    6. scala.collection.AbstractTraversable.map(Traversable.scala:104)
    6 frames
  10. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply
    1. org.apache.spark.streaming.dstream.FileInputDStream.org$apache$spark$streaming$dstream$FileInputDStream$$filesToRDD(FileInputDStream.scala:275)
    2. org.apache.spark.streaming.dstream.FileInputDStream.compute(FileInputDStream.scala:155)
    3. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    4. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    4 frames
  11. Scala
    DynamicVariable.withValue
    1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    1 frame
  12. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1.apply
    1. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    2. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    3. org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:415)
    4. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:335)
    5. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:333)
    5 frames
  13. Scala
    Option.orElse
    1. scala.Option.orElse(Option.scala:289)
    1 frame
  14. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply
    1. org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:330)
    2. org.apache.spark.streaming.dstream.MappedDStream.compute(MappedDStream.scala:36)
    3. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    4. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    4 frames
  15. Scala
    DynamicVariable.withValue
    1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    1 frame
  16. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1.apply
    1. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    2. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    3. org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:415)
    4. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:335)
    5. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:333)
    5 frames
  17. Scala
    Option.orElse
    1. scala.Option.orElse(Option.scala:289)
    1 frame
  18. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply
    1. org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:330)
    2. org.apache.spark.streaming.dstream.MappedDStream.compute(MappedDStream.scala:36)
    3. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    4. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    4 frames
  19. Scala
    DynamicVariable.withValue
    1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    1 frame
  20. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1.apply
    1. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    2. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    3. org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:415)
    4. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:335)
    5. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:333)
    5 frames
  21. Scala
    Option.orElse
    1. scala.Option.orElse(Option.scala:289)
    1 frame
  22. Spark Project Streaming
    TransformedDStream$$anonfun$6.apply
    1. org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:330)
    2. org.apache.spark.streaming.dstream.TransformedDStream$$anonfun$6.apply(TransformedDStream.scala:42)
    3. org.apache.spark.streaming.dstream.TransformedDStream$$anonfun$6.apply(TransformedDStream.scala:42)
    3 frames
  23. Scala
    List.map
    1. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    2. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
    3. scala.collection.immutable.List.foreach(List.scala:381)
    4. scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
    5. scala.collection.immutable.List.map(List.scala:285)
    5 frames
  24. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply
    1. org.apache.spark.streaming.dstream.TransformedDStream.compute(TransformedDStream.scala:42)
    2. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    3. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:341)
    3 frames
  25. Scala
    DynamicVariable.withValue
    1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:58)
    1 frame
  26. Spark Project Streaming
    DStream$$anonfun$getOrCompute$1.apply
    1. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    2. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:340)
    3. org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:415)
    4. org.apache.spark.streaming.dstream.TransformedDStream.createRDDWithLocalProperties(TransformedDStream.scala:65)
    5. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:335)
    6. org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:333)
    6 frames
  27. Scala
    Option.orElse
    1. scala.Option.orElse(Option.scala:289)
    1 frame
  28. Spark Project Streaming
    DStreamGraph$$anonfun$1.apply
    1. org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:330)
    2. org.apache.spark.streaming.dstream.ForEachDStream.generateJob(ForEachDStream.scala:48)
    3. org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:117)
    4. org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:116)
    4 frames
  29. Scala
    AbstractTraversable.flatMap
    1. scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
    2. scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:241)
    3. scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    4. scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:48)
    5. scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:241)
    6. scala.collection.AbstractTraversable.flatMap(Traversable.scala:104)
    6 frames
  30. Spark Project Streaming
    JobGenerator$$anonfun$3.apply
    1. org.apache.spark.streaming.DStreamGraph.generateJobs(DStreamGraph.scala:116)
    2. org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:248)
    3. org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:246)
    3 frames
  31. Scala
    Try$.apply
    1. scala.util.Try$.apply(Try.scala:192)
    1 frame
  32. Spark Project Streaming
    JobGenerator$$anon$1.onReceive
    1. org.apache.spark.streaming.scheduler.JobGenerator.generateJobs(JobGenerator.scala:246)
    2. org.apache.spark.streaming.scheduler.JobGenerator.org$apache$spark$streaming$scheduler$JobGenerator$$processEvent(JobGenerator.scala:182)
    3. org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:88)
    4. org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:87)
    4 frames
  33. Spark
    EventLoop$$anon$1.run
    1. org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
    1 frame