java.lang.IllegalStateException

There are no available Samebug tips for this exception. Do you have an idea of how to solve this issue? A short tip would help users who encountered this issue last week.

    java.lang.IllegalStateException: Cannot call methods on a stopped SparkContext
        at org.apache.spark.SparkContext.org$apache$spark$SparkContext$$assertNotStopped(SparkContext.scala:104)
        at org.apache.spark.SparkContext.defaultParallelism(SparkContext.scala:2063)
        at org.apache.spark.SparkContext.defaultMinPartitions(SparkContext.scala:2076)
        at org.apache.spark.sql.hive.HadoopTableReader.<init>(TableReader.scala:70)
        at org.apache.spark.sql.hive.execution.HiveTableScan.<init>(HiveTableScan.scala:77)
        at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77)
        at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$$anonfun$3.apply(HiveStrategies.scala:77)
        at org.apache.spark.sql.SQLContext$SparkPlanner.pruneFilterProject(SQLContext.scala:853)
        at org.apache.spark.sql.hive.HiveStrategies$HiveTableScans$.apply(HiveStrategies.scala:73)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
        at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.makeBroadcastHashJoin(SparkStrategies.scala:92)
        at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.apply(SparkStrategies.scala:101)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
        at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:346)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
        at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.makeBroadcastHashJoin(SparkStrategies.scala:92)
        at org.apache.spark.sql.execution.SparkStrategies$EquiJoinSelection$.apply(SparkStrategies.scala:101)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
        at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:346)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
        at org.apache.spark.sql.execution.SparkStrategies$Aggregation$.apply(SparkStrategies.scala:235)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
        at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
        at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
        at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:926)
        at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:924)
        at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:930)
        at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:930)
        at org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:53)
        at org.apache.spark.sql.DataFrame.withNewExecutionId(DataFrame.scala:1904)
        at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:1385)
        at com.acnielsen.madras.pkgews_panel_extract$.p_signed_rank_yago(pkgews_panel_extract.scala:685)
        at com.acnielsen.madras.pkgews_panel_extract$.p_main(pkgews_panel_extract.scala:4844)
        at com.acnielsen.madras.pkgews_panel_extract$.main(pkgews_panel_extract.scala:4655)
        at com.acnielsen.madras.pkgews_panel_extract.main(pkgews_panel_extract.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
    No Bugmate found.