java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext
	at org.apache.spark.SparkContext.org$apache$spark$SparkContext$$assertNotStopped(SparkContext.scala:104)
	at org.apache.spark.SparkContext.defaultParallelism(SparkContext.scala:2063)
	at org.apache.spark.SparkContext.defaultMinPartitions(SparkContext.scala:2076)
	at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:70)
	at org.apache.spark.sql.hive.execution.HiveTableScan.<init>(HiveTableScan.scala:77)
	at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77)
	at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77)
	at org.apache.spark.sql.SQLContext$SparkPlanner.pruneFilterProject(SQLContext.scala:853)
	at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$.apply(HiveStrategies.scala:73)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
	at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.makeBroadcastHashJoin(SparkStrategies.scala:92)
	at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.apply(SparkStrategies.scala:101)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
	at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:346)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
	at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.makeBroadcastHashJoin(SparkStrategies.scala:92)

Searched on Google with the first line of a Java stack trace?

We can recommend more relevant solutions and speed up debugging when you paste your entire stack trace with the exception message. Try a sample exception.

Recommended solutions based on your search

Solutions on the web

via cloudera.com by Unknown author, 1 year ago
Cannot call methods on a stopped SparkContext at org.apache.spark.SparkContext.org$apache$spark$SparkContext$$assertNotStopped(SparkContext.scala:104) at org.apache.spark.SparkContext.defaultParallelism(SparkContext.scala:2063) at
java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext
	at org.apache.spark.SparkContext.org$apache$spark$SparkContext$$assertNotStopped(SparkContext.scala:104)
	at org.apache.spark.SparkContext.defaultParallelism(SparkContext.scala:2063)
	at org.apache.spark.SparkContext.defaultMinPartitions(SparkContext.scala:2076)
	at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:70)
	at org.apache.spark.sql.hive.execution.HiveTableScan.<init>(HiveTableScan.scala:77)
	at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77)
	at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77)
	at org.apache.spark.sql.SQLContext$SparkPlanner.pruneFilterProject(SQLContext.scala:853)
	at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$.apply(HiveStrategies.scala:73)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
	at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.makeBroadcastHashJoin(SparkStrategies.scala:92)
	at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.apply(SparkStrategies.scala:101)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
	at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:346)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
	at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
	at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.makeBroadcastHashJoin(SparkStrategies.scala:92)
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:926)
at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:924)
at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:930)
at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:930)
at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:53)
at org.apache.spark.sql.DataFrame.withNewExecutionId(DataFrame.scala:1904)
at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:1385)
at com.acnielsen.madras.pkgews_panel_extract$.p_signed_rank_yago(pkgews_panel_extract.scala:685)
at com.acnielsen.madras.pkgews_panel_extract$.p_main(pkgews_panel_extract.scala:4844)
at com.acnielsen.madras.pkgews_panel_extract$.main(pkgews_panel_extract.scala:4655)
at com.acnielsen.madras.pkgews_panel_extract.main(pkgews_panel_extract.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)

Users with the same issue

You are the first to have seen this exception. Write a tip to help other users and build your expert profile.

Know the solution? Share your knowledge to help other developers debug faster.