Solutions on the web

via Stack Overflow by vatsal mevada, 1 year ago
via Unix & Linux by Henry, 1 year ago
Job 0 cancelled because SparkContext was shut down
via GitHub by jmuhlenkamp, 1 year ago
Job 34 cancelled because SparkContext was shut down
via GitHub by dahaian, 6 months ago
Job 995 cancelled because SparkContext was shut down
via Talend Open Integration Solution by lei ju, 1 year ago
via Stack Overflow by Appalachian Math, 1 year ago
Job 2 cancelled because SparkContext was shut down
org.apache.spark.SparkException: Job 2 cancelled because SparkContext was shut down
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:806)
	at org.apache.spark.scheduler.DAGScheduler$$anonfun$cleanUpAfterSchedulerStop$1.apply(DAGScheduler.scala:804)
	at scala.collection.mutable.HashSet.foreach(HashSet.scala:79)
	at org.apache.spark.scheduler.DAGScheduler.cleanUpAfterSchedulerStop(DAGScheduler.scala:804)
	at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onStop(DAGScheduler.scala:1658)
	at org.apache.spark.util.EventLoop.stop(EventLoop.scala:84)
	at org.apache.spark.scheduler.DAGScheduler.stop(DAGScheduler.scala:1581)
	at org.apache.spark.SparkContext$$anonfun$stop$9.apply$mcV$sp(SparkContext.scala:1751)
	at org.apache.spark.util.Utils$.tryLogNonFatalError(Utils.scala:1230)
	at org.apache.spark.SparkContext.stop(SparkContext.scala:1750)
	at org.apache.spark.SparkContext$$anonfun$3.apply$mcV$sp(SparkContext.scala:607)
	at org.apache.spark.util.SparkShutdownHook.run(ShutdownHookManager.scala:267)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1$$anonfun$apply$mcV$sp$1.apply(ShutdownHookManager.scala:239)
	at org.apache.spark.util.Utils$.logUncaughtExceptions(Utils.scala:1766)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply$mcV$sp(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anonfun$runAll$1.apply(ShutdownHookManager.scala:239)
	at scala.util.Try$.apply(Try.scala:161)
	at org.apache.spark.util.SparkShutdownHookManager.runAll(ShutdownHookManager.scala:239)
	at org.apache.spark.util.SparkShutdownHookManager$$anon$2.run(ShutdownHookManager.scala:218)
	at org.apache.hadoop.util.ShutdownHookManager$1.run(ShutdownHookManager.java:54)
	at org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:620)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1843)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1856)
	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1933)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply$mcV$sp(InsertIntoHadoopFsRelation.scala:150)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation$$anonfun$run$1.apply(InsertIntoHadoopFsRelation.scala:108)
	at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:53)
	at org.apache.spark.sql.execution.datasources.InsertIntoHadoopFsRelation.run(InsertIntoHadoopFsRelation.scala:108)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
	at org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:256)
	at org.apache.spark.sql.hive.execution.CreateMetastoreDataSourceAsSelect.run(commands.scala:258)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
	at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
	at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
	at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
	at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
	at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
	at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
	at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
	at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:251)
	at org.apache.spark.sql.DataFrameWriter.saveAsTable(DataFrameWriter.scala:221)
	at DataAccessService.saveToHive(DataAccessService.java:48)
	at Main.main(Main.java:42)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
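For context, here is a minimal sketch of the driver-side call path this trace implies: DataFrameWriter.saveAsTable launches a Spark job, and the DAGScheduler cancels that job because the SparkContext is being stopped (here, from a JVM shutdown hook). The sketch assumes the Spark 1.6-era Java API visible in the trace (HiveContext, DataFrame); the application name, input path, and table name are hypothetical stand-ins for the original DataAccessService.saveToHive.

	import org.apache.spark.SparkConf;
	import org.apache.spark.api.java.JavaSparkContext;
	import org.apache.spark.sql.DataFrame;
	import org.apache.spark.sql.hive.HiveContext;

	public class Main {
	    public static void main(String[] args) {
	        // Driver setup, matching the Spark 1.6-era classes in the trace
	        // (HiveContext, DataFrame, InsertIntoHadoopFsRelation).
	        SparkConf conf = new SparkConf().setAppName("SaveToHiveExample");
	        JavaSparkContext jsc = new JavaSparkContext(conf);
	        HiveContext hiveContext = new HiveContext(jsc.sc());

	        // Hypothetical input; the original job reads its data elsewhere.
	        DataFrame df = hiveContext.read().json("hdfs:///path/to/input.json");

	        // saveAsTable launches a Spark job (the "Job 2" in the trace).
	        // If the JVM begins shutting down, or sc.stop() is called on
	        // another thread, while this job is running, the DAGScheduler
	        // cancels it with "Job ... cancelled because SparkContext was
	        // shut down".
	        df.write().saveAsTable("my_table");

	        // Stop the context only after all jobs have finished.
	        jsc.stop();
	    }
	}

A frequent reason the shutdown hook fires mid-job is the cluster manager killing the driver or application master (for example, YARN terminating a container that exceeded its memory limit), so the executor and application-master logs usually show the underlying failure that actually stopped the context.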