java.lang.ClassNotFoundException: scala.Function0


java.lang.ClassNotFoundException: scala.Function0
    at sbt.classpath.ClasspathFilter.loadClass(ClassLoaders.scala:63)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    at java.lang.Class.forName0(Native Method)
    at java.lang.Class.forName(Class.java:348)
    at com.twitter.chill.KryoBase$$anonfun$1.apply(KryoBase.scala:41)
    at com.twitter.chill.KryoBase$$anonfun$1.apply(KryoBase.scala:41)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:245)
    at scala.collection.immutable.Range.foreach(Range.scala:166)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:245)
    at scala.collection.AbstractTraversable.map(Traversable.scala:104)
    at com.twitter.chill.KryoBase.<init>(KryoBase.scala:41)
    at com.twitter.chill.EmptyScalaKryoInstantiator.newKryo(ScalaKryoInstantiator.scala:57)
    at org.apache.spark.serializer.KryoSerializer.newKryo(KryoSerializer.scala:86)
    at org.apache.spark.serializer.KryoSerializerInstance.borrowKryo(KryoSerializer.scala:274)
    at org.apache.spark.serializer.KryoSerializerInstance.<init>(KryoSerializer.scala:259)
    at org.apache.spark.serializer.KryoSerializer.newInstance(KryoSerializer.scala:175)
    at org.apache.spark.serializer.KryoSerializer.supportsRelocationOfSerializedObjects$lzycompute(KryoSerializer.scala:182)
    at org.apache.spark.serializer.KryoSerializer.supportsRelocationOfSerializedObjects(KryoSerializer.scala:178)
    at org.apache.spark.shuffle.sort.SortShuffleManager$.canUseSerializedShuffle(SortShuffleManager.scala:187)
    at org.apache.spark.shuffle.sort.SortShuffleManager.registerShuffle(SortShuffleManager.scala:99)
    at org.apache.spark.ShuffleDependency.<init>(Dependency.scala:90)
    at org.apache.spark.rdd.ShuffledRDD.getDependencies(ShuffledRDD.scala:91)
    at org.apache.spark.rdd.RDD$$anonfun$dependencies$2.apply(RDD.scala:235)
    at org.apache.spark.rdd.RDD$$anonfun$dependencies$2.apply(RDD.scala:233)
    at scala.Option.getOrElse(Option.scala:121)
    at org.apache.spark.rdd.RDD.dependencies(RDD.scala:233)
    at org.apache.spark.scheduler.DAGScheduler.visit$2(DAGScheduler.scala:418)
    at org.apache.spark.scheduler.DAGScheduler.getAncestorShuffleDependencies(DAGScheduler.scala:433)
    at org.apache.spark.scheduler.DAGScheduler.org$apache$spark$scheduler$DAGScheduler$$getShuffleMapStage(DAGScheduler.scala:288)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$visit$1$1.apply(DAGScheduler.scala:394)
    at org.apache.spark.scheduler.DAGScheduler$$anonfun$visit$1$1.apply(DAGScheduler.scala:391)
    at scala.collection.immutable.List.foreach(List.scala:381)
    at org.apache.spark.scheduler.DAGScheduler.visit$1(DAGScheduler.scala:391)
    at org.apache.spark.scheduler.DAGScheduler.getParentStages(DAGScheduler.scala:403)
    at org.apache.spark.scheduler.DAGScheduler.getParentStagesAndId(DAGScheduler.scala:304)
    at org.apache.spark.scheduler.DAGScheduler.newResultStage(DAGScheduler.scala:339)
    at org.apache.spark.scheduler.DAGScheduler.handleJobSubmitted(DAGScheduler.scala:849)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.doOnReceive(DAGScheduler.scala:1626)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1618)
    at org.apache.spark.scheduler.DAGSchedulerEventProcessLoop.onReceive(DAGScheduler.scala:1607)
    at org.apache.spark.util.EventLoop$$anon$1.run(EventLoop.scala:48)
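Tip: the sbt.classpath.ClasspathFilter and com.twitter.chill.KryoBase frames suggest the Spark job is running inside sbt's own JVM (for example via sbt run). While building a Kryo instance, Chill looks up the scala.FunctionN classes by reflection, and sbt's filtering classloader does not expose the Scala library to that Class.forName call, so scala.Function0 cannot be found. A common workaround is to fork the application into its own JVM. Below is a minimal build.sbt sketch; the Scala and Spark versions are assumptions, not taken from this report, so substitute your own.

    // build.sbt -- minimal sketch, assuming the job is started with `sbt run`
    // and that these versions match your environment (both are assumptions)
    scalaVersion := "2.11.8"

    libraryDependencies += "org.apache.spark" %% "spark-core" % "2.0.2"

    // Run in a forked JVM instead of inside sbt's JVM, so the Scala library is
    // on the ordinary application classpath and Chill's reflective lookup of
    // scala.Function0 can resolve it.
    fork in run := true

If the job is instead submitted with spark-submit, check that the application's Scala version matches the one your Spark distribution was built against (for example 2.10 vs 2.11), since a mismatch can produce the same ClassNotFoundException.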