java.lang.IllegalStateException: SparkContext has been shutdown

Stack Overflow | mahdi62 | 5 months ago
tip
Your exception is missing from the Samebug knowledge base.
Here are the best solutions we found on the Internet.
Click on a solution to mark it as helpful and get rewards for your help.
  1. 0

    java.lang.IllegalStateException: SparkContext has been shutdown

    Stack Overflow | 5 months ago | mahdi62
    java.lang.IllegalStateException: SparkContext has been shutdown

    Root Cause Analysis

    1. java.lang.IllegalStateException

      SparkContext has been shutdown

      at org.apache.spark.SparkContext.runJob()
    2. Spark
      RDD.count
      1. org.apache.spark.SparkContext.runJob(SparkContext.scala:1824)
      2. org.apache.spark.SparkContext.runJob(SparkContext.scala:1845)
      3. org.apache.spark.SparkContext.runJob(SparkContext.scala:1858)
      4. org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)
      5. org.apache.spark.rdd.RDD.count(RDD.scala:1157)
      5 frames
    3. Unknown
      UnionStream$$anonfun$creatingFunc$5.apply
      1. UnionStream$$anonfun$creatingFunc$5.apply(UnionStreaming.scala:453)
      2. UnionStream$$anonfun$creatingFunc$5.apply(UnionStreaming.scala:451)
      2 frames
    4. Spark Project Streaming
      ForEachDStream$$anonfun$1.apply
      1. org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
      2. org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
      3. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:50)
      4. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
      5. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
      6. org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
      7. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:49)
      8. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
      9. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
      9 frames
    5. Scala
      Try$.apply
      1. scala.util.Try$.apply(Try.scala:161)
      1 frame
    6. Spark Project Streaming
      JobScheduler$JobHandler$$anonfun$run$1.apply
      1. org.apache.spark.streaming.scheduler.Job.run(Job.scala:39)
      2. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:224)
      3. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
      4. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
      4 frames
    7. Scala
      DynamicVariable.withValue
      1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
      1 frame
    8. Spark Project Streaming
      JobScheduler$JobHandler.run
      1. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:223)
      1 frame
    9. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
      3. java.lang.Thread.run(Thread.java:745)
      3 frames