org.apache.spark.SparkException: Job cancelled because SparkContext was shut down
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:703)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:702)
	at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
	at org.apache.spark.scheduler.DAGScheduler.cleanUpAfterSchedulerStop(DAGScheduler.scala:702)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onStop(DAGScheduler.scala:1525)
	at org.apache.spark.util.EventLoop.stop(EventLoop.scala:84)
	at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:1449)
	at org.apache.spark.SparkContext$$anonfun$stop$7.apply$mcV$sp(SparkContext.scala:1724)
	at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1184)
	at org.apache.spark.SparkContext.stop(SparkContext.scala:1723)
	at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend$MonitorThread.run(YarnClientSchedulerBackend.scala:146)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:567)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1824)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1837)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1914)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply$mcV$sp(InsertIntoHadoopFsRelation.scala:150)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:56)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation.run(InsertIntoHadoopFsRelation.scala:108)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:140)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:138)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:138)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:933)
	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:933)
	at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:197)
	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:146)
	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:137)
	at org.apache.spark.sql.DataFrame.save(DataFrame.scala:1808)
	at com.acnielsen.madras.utils.ndx_scala_util$.newHiveTableData(ndx_scala_util.scala:1264)
	at com.acnielsen.madras.utils.ndx_scala_util$.UPDATE(ndx_scala_util.scala:238)
	at com.acnielsen.madras.pkgews_panel_extract$$anonfun$p_signed_rank_yago$1.apply(pkgews_panel_extract.scala:658)
	at com.acnielsen.madras.pkgews_panel_extract$$anonfun$p_signed_rank_yago$1.apply(pkgews_panel_extract.scala:652)

cloudera.com | 9 months ago
tip
Did you know we can give you better matches? Get more relevant results with Samebug’s stack trace search.
  1. 0

    ERROR ActorSystemImpl - Running my spark job on ya... - Cloudera Community

    cloudera.com | 9 months ago
    org.apache.spark.SparkException: Job cancelled because SparkContext was shut down
    	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:703)
    	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:702)
    	at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
    	at org.apache.spark.scheduler.DAGScheduler.cleanUpAfterSchedulerStop(DAGScheduler.scala:702)
    	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onStop(DAGScheduler.scala:1525)
    	at org.apache.spark.util.EventLoop.stop(EventLoop.scala:84)
    	at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:1449)
    	at org.apache.spark.SparkContext$$anonfun$stop$7.apply$mcV$sp(SparkContext.scala:1724)
    	at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1184)
    	at org.apache.spark.SparkContext.stop(SparkContext.scala:1723)
    	at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend$MonitorThread.run(YarnClientSchedulerBackend.scala:146)
    	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:567)
    	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1824)
    	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1837)
    	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1914)
    	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply$mcV$sp(InsertIntoHadoopFsRelation.scala:150)
    	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
    	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
    	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:56)
    	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation.run(InsertIntoHadoopFsRelation.scala:108)
    	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
    	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
    	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:140)
    	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:138)
    	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:138)
    	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:933)
    	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:933)
    	at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:197)
    	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:146)
    	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:137)
    	at org.apache.spark.sql.DataFrame.save(DataFrame.scala:1808)
    	at com.acnielsen.madras.utils.ndx_scala_util$.newHiveTableData(ndx_scala_util.scala:1264)
    	at com.acnielsen.madras.utils.ndx_scala_util$.UPDATE(ndx_scala_util.scala:238)
    	at com.acnielsen.madras.pkgews_panel_extract$$anonfun$p_signed_rank_yago$1.apply(pkgews_panel_extract.scala:658)
    	at com.acnielsen.madras.pkgews_panel_extract$$anonfun$p_signed_rank_yago$1.apply(pkgews_panel_extract.scala:652)
  2. 0

    NullPointerException when Training Imagenet

    GitHub | 11 months ago | GalaxyStyle
    org.apache.spark.SparkException: Job aborted due to stage failure: Task 1 in stage 2.0 failed 4 times, most recent failure: Lost task 1.3 in stage 2.0 (TID 10, pc-2): java.lang.NullPointerException
  3. 0

    Spark 1.3.0 on YARN: Application failed 2 times due to AM Container

    queforum.com | 1 year ago
    org.apache.spark.SparkException: Application finished with failed status
    	at org.apache.spark.deploy.yarn.Client.run(Client.scala:622)
    	at org.apache.spark.deploy.yarn.Client$.main(Client.scala:647)
    	at org.apache.spark.deploy.yarn.Client.main(Client.scala)
    	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. org.apache.spark.SparkException

      Job cancelled because SparkContext was shut down
      	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:703)
      	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:702)
      	at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
      	at org.apache.spark.scheduler.DAGScheduler.cleanUpAfterSchedulerStop(DAGScheduler.scala:702)
      	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onStop(DAGScheduler.scala:1525)
      	at org.apache.spark.util.EventLoop.stop(EventLoop.scala:84)
      	at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:1449)
      	at org.apache.spark.SparkContext$$anonfun$stop$7.apply$mcV$sp(SparkContext.scala:1724)
      	at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1184)
      	at org.apache.spark.SparkContext.stop(SparkContext.scala:1723)
      	at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend$MonitorThread.run(YarnClientSchedulerBackend.scala:146)
      	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:567)
      	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1824)
      	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1837)
      	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1914)
      	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply$mcV$sp(InsertIntoHadoopFsRelation.scala:150)
      	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
      	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
      	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:56)
      	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation.run(InsertIntoHadoopFsRelation.scala:108)
      	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:57)
      	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:57)
      	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:69)
      	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:140)
      	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:138)
      	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
      	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:138)
      	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd$lzycompute(SQLContext.scala:933)
      	at org.apache.spark.sql.SQLContext$QueryExecution.toRdd(SQLContext.scala:933)
      	at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:197)
      	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:146)
      	at org.apache.spark.sql.DataFrameWriter.save(DataFrameWriter.scala:137)
      	at org.apache.spark.sql.DataFrame.save(DataFrame.scala:1808)
      	at com.acnielsen.madras.utils.ndx_scala_util$.newHiveTableData(ndx_scala_util.scala:1264)
      	at com.acnielsen.madras.utils.ndx_scala_util$.UPDATE(ndx_scala_util.scala:238)
      	at com.acnielsen.madras.pkgews_panel_extract$$anonfun$p_signed_rank_yago$1.apply(pkgews_panel_extract.scala:658)
      	at com.acnielsen.madras.pkgews_panel_extract$$anonfun$p_signed_rank_yago$1.apply(pkgews_panel_extract.scala:652)

      at scala.collection.IndexedSeqOptimized$class.foreach()
    2. Scala
      ArrayOps$ofRef.foreach
      1. scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
      2. scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
      2 frames
    3. com.acnielsen.madras
      pkgews_panel_extract.main
      1. com.acnielsen.madras.pkgews_panel_extract$.p_signed_rank_yago(pkgews_panel_extract.scala:652)
      2. com.acnielsen.madras.pkgews_panel_extract$.p_main(pkgews_panel_extract.scala:4844)
      3. com.acnielsen.madras.pkgews_panel_extract$.main(pkgews_panel_extract.scala:4655)
      4. com.acnielsen.madras.pkgews_panel_extract.main(pkgews_panel_extract.scala)
      4 frames
    4. Java RT
      Method.invoke
      1. sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
      2. sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
      3. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      4. java.lang.reflect.Method.invoke(Method.java:606)
      4 frames