Searched on Google with the first line of a Java stack trace?

We can recommend more relevant solutions and speed up debugging when you paste your entire stack trace with the exception message.

Recommended solutions based on your search

Solutions on the web

via Stack Overflow by cricket_007, 1 year ago
Input path does not exist: hdfs://sandbox.hortonworks.com:8020/output1/_SUCCESS
via Stack Overflow by J. Acelot, 1 year ago
Input path does not exist: hdfs://route_to_hdfs/test/data_2.txt._COPYING_
via Google Groups by Unknown author, 1 year ago
Input path does not exist: hdfs://10.0.1.227:8020/home/gobblinoutput/working/GobblinKafkaQuickStart/input/job_GobblinKafkaQuickStart_1458708541198.wulist
via Stack Overflow by salem, 10 months ago
Input path does not exist: hdfs://localhost:9000/user/utente/input/hadoop
via joeyoung.io by Unknown author, 1 year ago
Input path does not exist: hdfs://localhost:9000/user/joeyoung/grep-temp-1908205908
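
Several of the reports above share the same root cause: the job lists HDFS marker or temporary files, such as the _SUCCESS flag a finished Hadoop job leaves behind (the /output1/_SUCCESS in this trace) or an in-flight ._COPYING_ file, and FileInputFormat then fails when it tries to read them. If the input comes from Spark Streaming's file source, one way to avoid this is to pass a path filter to StreamingContext.fileStream. Below is a minimal Scala sketch under that assumption (Spark Streaming 1.x); the directory, batch interval, and application name are placeholders, not values from the original report:

    import org.apache.hadoop.fs.Path
    import org.apache.hadoop.io.{LongWritable, Text}
    import org.apache.hadoop.mapreduce.lib.input.TextInputFormat
    import org.apache.spark.SparkConf
    import org.apache.spark.streaming.{Seconds, StreamingContext}

    object FilteredFileStream {
      def main(args: Array[String]): Unit = {
        val conf = new SparkConf().setAppName("FilteredFileStream")
        val ssc  = new StreamingContext(conf, Seconds(10))

        // Skip _SUCCESS markers, in-flight ._COPYING_ files, and other
        // hidden files that Hadoop jobs leave next to the real data.
        def isDataFile(path: Path): Boolean = {
          val name = path.getName
          !name.startsWith("_") && !name.startsWith(".") &&
            !name.endsWith("._COPYING_")
        }

        // Hypothetical input directory; substitute your own HDFS path.
        val lines = ssc
          .fileStream[LongWritable, Text, TextInputFormat](
            "hdfs://sandbox.hortonworks.com:8020/input",
            isDataFile _,
            newFilesOnly = true)
          .map { case (_, text) => text.toString }

        lines.print()
        ssc.start()
        ssc.awaitTermination()
      }
    }

Separately, pointing a streaming job at another job's output directory (as /output1 suggests) is fragile even with a filter; it is usually safer to have the producer write into a staging directory and atomically rename completed files into the directory the stream watches.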
org.apache.hadoop.mapreduce.lib.input.InvalidInputException: Input path does not exist: hdfs://sandbox.hortonworks.com:8020/output1/_SUCCESS
    at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.singleThreadedListStatus(FileInputFormat.java:323)
    at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.listStatus(FileInputFormat.java:265)
    at org.apache.hadoop.mapreduce.lib.input.FileInputFormat.getSplits(FileInputFormat.java:387)
    at org.apache.spark.rdd.NewHadoopRDD.getPartitions(NewHadoopRDD.scala:120)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:242)
    at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:240)
    at scala.Option.getOrElse(Option.scala:120)
    at org.apache.spark.rdd.RDD.partitions(RDD.scala:240)
    at org.apache.spark.streaming.dstream.FileInputDStream$$anonfun$4.apply(FileInputDStream.scala:276)
    at org.apache.spark.streaming.dstream.FileInputDStream$$anonfun$4.apply(FileInputDStream.scala:266)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
    at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:34)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
    at scala.collection.AbstractTraversable.map(Traversable.scala:105)
    at org.apache.spark.streaming.dstream.FileInputDStream.org$apache$spark$streaming$dstream$FileInputDStream$$filesToRDD(FileInputDStream.scala:266)
    at org.apache.spark.streaming.dstream.FileInputDStream.compute(FileInputDStream.scala:153)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.TransformedDStream$$anonfun$6.apply(TransformedDStream.scala:42)
    at org.apache.spark.streaming.dstream.TransformedDStream$$anonfun$6.apply(TransformedDStream.scala:42)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
    at scala.collection.AbstractTraversable.map(Traversable.scala:105)
    at org.apache.spark.streaming.dstream.TransformedDStream.compute(TransformedDStream.scala:42)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.TransformedDStream.createRDDWithLocalProperties(TransformedDStream.scala:65)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.TransformedDStream$$anonfun$6.apply(TransformedDStream.scala:42)
    at org.apache.spark.streaming.dstream.TransformedDStream$$anonfun$6.apply(TransformedDStream.scala:42)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
    at scala.collection.AbstractTraversable.map(Traversable.scala:105)
    at org.apache.spark.streaming.dstream.TransformedDStream.compute(TransformedDStream.scala:42)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.TransformedDStream.createRDDWithLocalProperties(TransformedDStream.scala:65)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.MappedDStream.compute(MappedDStream.scala:35)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.MappedDStream.compute(MappedDStream.scala:35)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1$$anonfun$apply$7.apply(DStream.scala:352)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1$$anonfun$1.apply(DStream.scala:351)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:346)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$getOrCompute$1.apply(DStream.scala:344)
    at scala.Option.orElse(Option.scala:257)
    at org.apache.spark.streaming.dstream.DStream.getOrCompute(DStream.scala:341)
    at org.apache.spark.streaming.dstream.ForEachDStream.generateJob(ForEachDStream.scala:47)
    at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:115)
    at org.apache.spark.streaming.DStreamGraph$$anonfun$1.apply(DStreamGraph.scala:114)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:251)
    at scala.collection.TraversableLike$$anonfun$flatMap$1.apply(TraversableLike.scala:251)
    at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
    at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
    at scala.collection.TraversableLike$class.flatMap(TraversableLike.scala:251)
    at scala.collection.AbstractTraversable.flatMap(Traversable.scala:105)
    at org.apache.spark.streaming.DStreamGraph.generateJobs(DStreamGraph.scala:114)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:253)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anonfun$3.apply(JobGenerator.scala:251)
    at scala.util.Try$.apply(Try.scala:161)
    at org.apache.spark.streaming.scheduler.JobGenerator.generateJobs(JobGenerator.scala:251)
    at org.apache.spark.streaming.scheduler.JobGenerator.org$apache$spark$streaming$scheduler$JobGenerator$$processEvent(JobGenerator.scala:182)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:88)
    at org.apache.spark.streaming.scheduler.JobGenerator$$anon$1.onReceive(JobGenerator.scala:87)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
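
Independent of any filtering, a quick sanity check is to confirm up front that the input path exists on the NameNode the job resolves and to list exactly what FileInputFormat will see. A small sketch against the Hadoop FileSystem API; the URI and path below are taken from the trace above and serve only as examples:

    import org.apache.hadoop.conf.Configuration
    import org.apache.hadoop.fs.{FileSystem, Path}

    object CheckInputPath {
      def main(args: Array[String]): Unit = {
        val conf = new Configuration()
        // NameNode URI from the trace above; replace with your cluster's.
        conf.set("fs.defaultFS", "hdfs://sandbox.hortonworks.com:8020")
        val fs = FileSystem.get(conf)

        val input = new Path("/output1")  // example directory from the trace
        if (!fs.exists(input)) {
          System.err.println(s"Input path does not exist: $input")
          sys.exit(1)
        }
        // List the directory to spot marker files such as _SUCCESS that
        // FileInputFormat may pick up and then fail on.
        fs.listStatus(input).foreach(status => println(status.getPath))
      }
    }

The command-line equivalent is hdfs dfs -ls hdfs://sandbox.hortonworks.com:8020/output1, which shows the same listing without compiling anything.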