java.io.IOException: Filesystem closed
java.io.IOException: Filesystem closed
at org.apache.hadoop.hdfs.DFSClient.checkOpen(DFSClient.java:398)
at org.apache.hadoop.hdfs.DFSOutputStream.hflush(DFSOutputStream.java:1465)
at org.apache.hadoop.hdfs.DFSOutputStream.sync(DFSOutputStream.java:1450)
at org.apache.hadoop.fs.FSDataOutputStream.sync(FSDataOutputStream.java:116)
at org.apache.spark.util.FileLogger$$anonfun$flush$2.apply(FileLogger.scala:137)
at org.apache.spark.util.FileLogger$$anonfun$flush$2.apply(FileLogger.scala:137)
at scala.Option.foreach(Option.scala:236)
at org.apache.spark.util.FileLogger.flush(FileLogger.scala:137)
at org.apache.spark.scheduler.EventLoggingListener.logEvent(EventLoggingListener.scala:69)
at org.apache.spark.scheduler.EventLoggingListener.onApplicationEnd(EventLoggingListener.scala:101)
at org.apache.spark.scheduler.SparkListenerBus$$anonfun$postToAll$13.apply(SparkListenerBus.scala:67)
at org.apache.spark.scheduler.SparkListenerBus$$anonfun$postToAll$13.apply(SparkListenerBus.scala:67)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:59)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:47)
at org.apache.spark.scheduler.SparkListenerBus$class.postToAll(SparkListenerBus.scala:67)
at org.apache.spark.scheduler.LiveListenerBus.postToAll(LiveListenerBus.scala:31)
at org.apache.spark.scheduler.LiveListenerBus.post(LiveListenerBus.scala:78)
at org.apache.spark.SparkContext.postApplicationEnd(SparkContext.scala:1081)
at org.apache.spark.SparkContext.stop(SparkContext.scala:828)
at org.apache.spark.deploy.yarn.ApplicationMaster$$anon$1.run(ApplicationMaster.scala:460)
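What the trace shows: the failure comes from the Hadoop HDFS client, not from Spark's own code. DFSClient.checkOpen throws "Filesystem closed" because the DFSClient behind the FileSystem instance has already been shut down by the time Spark's EventLoggingListener tries to hflush the event log during SparkContext.stop in the YARN ApplicationMaster. A common way to end up in this state is Hadoop's FileSystem cache: FileSystem.get returns one shared instance per URI and user, so any component that calls close on it closes it for everyone else too, including the event logger. The sketch below is a minimal, hedged illustration of that pattern, not something taken from this page's listed solutions; the class name and path are hypothetical, and it assumes the Configuration on the classpath points fs.defaultFS at a reachable HDFS cluster.

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Minimal sketch of how a shared, cached Hadoop FileSystem produces
 * "java.io.IOException: Filesystem closed". Assumes fs.defaultFS on the
 * classpath points at a reachable HDFS cluster; the class name and the
 * /tmp/event.log path are hypothetical.
 */
public class FilesystemClosedSketch {
    public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();

        // FileSystem.get() returns a cached instance, shared by every caller
        // that asks for the same URI and user. "componentA" and "componentB"
        // therefore hold the exact same object.
        FileSystem componentA = FileSystem.get(conf);
        FileSystem componentB = FileSystem.get(conf);

        FSDataOutputStream out = componentB.create(new Path("/tmp/event.log"));

        // Component A shuts down and closes "its" FileSystem, which closes
        // the shared client underneath component B as well.
        componentA.close();

        try {
            // The next flush fails in DFSClient.checkOpen(), exactly like the
            // EventLoggingListener flush in the trace above.
            out.hflush();
        } catch (IOException e) {
            System.out.println(e); // java.io.IOException: Filesystem closed
        }

        // One way to avoid sharing: take a private, non-cached instance whose
        // close() does not affect any other component.
        FileSystem privateFs = FileSystem.newInstance(conf);
        privateFs.close();
    }
}

A frequently suggested workaround (again an assumption, not something stated above) is to stop components from sharing the cached instance: either create a private client with FileSystem.newInstance(conf) as in the sketch, or disable caching for the hdfs scheme with the Hadoop property fs.hdfs.impl.disable.cache, which a Spark application can pass through as spark.hadoop.fs.hdfs.impl.disable.cache=true.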