org.apache.spark.SparkException: Task not serializable

The trace that actually surfaces is shown below: while trying to pinpoint the non-serializable object in the closure, Spark's SerializationDebugger itself crashed with an ArrayIndexOutOfBoundsException (note the improveException frame), and that crash is what propagates up the stack, masking the details of the underlying serialization failure.

java.lang.ArrayIndexOutOfBoundsException: 0
at java.io.ObjectStreamClass$FieldReflector.getObjFieldValues(ObjectStreamClass.java:2050)
at java.io.ObjectStreamClass.getObjFieldValues(ObjectStreamClass.java:1252)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at org.apache.spark.serializer.SerializationDebugger$ObjectStreamClassMethods$.getObjFieldValues$extension(SerializationDebugger.scala:240)
at org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visitSerializable(SerializationDebugger.scala:150)
at org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visit(SerializationDebugger.scala:99)
at org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visitSerializable(SerializationDebugger.scala:158)
at org.apache.spark.serializer.SerializationDebugger$SerializationDebugger.visit(SerializationDebugger.scala:99)
at org.apache.spark.serializer.SerializationDebugger$.find(SerializationDebugger.scala:58)
at org.apache.spark.serializer.SerializationDebugger$.improveException(SerializationDebugger.scala:39)
at org.apache.spark.serializer.JavaSerializationStream.writeObject(JavaSerializer.scala:47)
at org.apache.spark.serializer.JavaSerializerInstance.serialize(JavaSerializer.scala:80)
at org.apache.spark.util.ClosureCleaner$.ensureSerializable(ClosureCleaner.scala:164)
at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:158)
at org.apache.spark.SparkContext.clean(SparkContext.scala:1622)
at org.apache.spark.rdd.PairRDDFunctions.mapValues(PairRDDFunctions.scala:672)
at io.prediction.controller.P2LAlgorithm.batchPredict(P2LAlgorithm.scala:64)
at io.prediction.controller.P2LAlgorithm.batchPredictBase(P2LAlgorithm.scala:60)
at io.prediction.controller.Engine$$anonfun$27$$anonfun$29.apply(Engine.scala:737)
at io.prediction.controller.Engine$$anonfun$27$$anonfun$29.apply(Engine.scala:734)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.immutable.Range.foreach(Range.scala:141)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at io.prediction.controller.Engine$$anonfun$27.apply(Engine.scala:734)
at io.prediction.controller.Engine$$anonfun$27.apply(Engine.scala:728)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.immutable.Range.foreach(Range.scala:141)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at io.prediction.controller.Engine$.eval(Engine.scala:728)
at io.prediction.controller.Engine.eval(Engine.scala:325)
at io.prediction.core.BaseEngine$$anonfun$batchEval$1.apply(BaseEngine.scala:69)
at io.prediction.core.BaseEngine$$anonfun$batchEval$1.apply(BaseEngine.scala:68)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
at scala.collection.immutable.List.foreach(List.scala:318)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
at scala.collection.AbstractTraversable.map(Traversable.scala:105)
at io.prediction.core.BaseEngine.batchEval(BaseEngine.scala:68)
at io.prediction.workflow.EvaluationWorkflow$.runEvaluation(EvaluationWorkflow.scala:39)
at io.prediction.workflow.CoreWorkflow$.runEvaluation(CoreWorkflow.scala:120)
at io.prediction.workflow.Workflow$.runEvaluation(Workflow.scala:126)
at io.prediction.workflow.Workflow$.runEvaluationTypeless(Workflow.scala:106)
at io.prediction.workflow.Workflow$.runEvaluation(Workflow.scala:86)
at io.prediction.workflow.CreateWorkflow$.main(CreateWorkflow.scala:271)
at io.prediction.workflow.CreateWorkflow.main(CreateWorkflow.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:483)
at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:569)
at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:166)
at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:189)
at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:110)
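
Tip

"Task not serializable" raised from SparkContext.clean means the closure handed to an RDD transformation (here, the mapValues call inside P2LAlgorithm.batchPredict) captured something Java serialization cannot handle; most often the trained model, or the enclosing algorithm instance, is not Serializable or holds a non-serializable field. The sketch below is a minimal, hypothetical illustration of that pattern and the usual fixes, not PredictionIO's actual code: the class and object names (BadModel, GoodModel, BatchPredictSketch) are invented for illustration.

import org.apache.spark.SparkContext
import org.apache.spark.SparkContext._   // pair-RDD implicits on older Spark versions
import org.apache.spark.rdd.RDD

// The classic failure: the model class does not extend Serializable, so any
// closure that captures an instance of it cannot be shipped to the executors.
class BadModel {
  def score(q: String): Double = q.length.toDouble
}

// The usual fix: make the model Serializable and mark members that cannot or
// should not be serialized (connections, native handles, caches) as
// @transient lazy, so they are dropped on serialization and rebuilt on each
// executor on first use.
class GoodModel extends Serializable {
  @transient lazy val cache = new java.util.HashMap[String, Double]()
  def score(q: String): Double = q.length.toDouble
}

object BatchPredictSketch {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext("local[*]", "task-not-serializable-sketch")
    val queries: RDD[(Long, String)] = sc.parallelize(Seq(1L -> "alice", 2L -> "bob"))

    val model = new GoodModel
    // Copying the model into a local val before the transformation is the
    // standard way to avoid dragging the enclosing instance (e.g. an algorithm
    // class, which may itself be non-serializable) into the closure.
    val localModel = model
    val predictions = queries.mapValues(localModel.score)

    predictions.collect().foreach(println)
    sc.stop()
  }
}

The @transient lazy val pattern is the common way to keep per-executor state out of the serialized model; the local-val copy avoids accidentally serializing the outer object that owns the closure.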
