Searched Google with just the first line of a Java stack trace?

Paste your entire stack trace, including the exception message, and we can recommend more relevant solutions and speed up your debugging.

Recommended solutions based on your search

java.lang.OutOfMemoryError: GC overhead limit exceeded
    at java.util.IdentityHashMap.resize(IdentityHashMap.java:471)
    at java.util.IdentityHashMap.put(IdentityHashMap.java:440)
    at org.apache.spark.util.SizeEstimator$SearchState.enqueue(SizeEstimator.scala:159)
    at org.apache.spark.util.SizeEstimator$$anonfun$visitSingleObject$1.apply(SizeEstimator.scala:203)
    at org.apache.spark.util.SizeEstimator$$anonfun$visitSingleObject$1.apply(SizeEstimator.scala:202)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at org.apache.spark.util.SizeEstimator$.visitSingleObject(SizeEstimator.scala:202)
    at org.apache.spark.util.SizeEstimator$.org$apache$spark$util$SizeEstimator$$estimate(SizeEstimator.scala:186)
    at org.apache.spark.util.SizeEstimator$.estimate(SizeEstimator.scala:54)
    at org.apache.spark.util.collection.SizeTracker$class.takeSample(SizeTracker.scala:78)
    at org.apache.spark.util.collection.SizeTracker$class.afterUpdate(SizeTracker.scala:70)
    at org.apache.spark.util.collection.SizeTrackingVector.$plus$eq(SizeTrackingVector.scala:31)
    at org.apache.spark.storage.MemoryStore.unrollSafely(MemoryStore.scala:278)
    at org.apache.spark.CacheManager.putInBlockManager(CacheManager.scala:171)
    at org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:78)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:262)
    at org.apache.spark.rdd.UnionRDD.compute(UnionRDD.scala:87)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300)
    at org.apache.spark.CacheManager.getOrCompute(CacheManager.scala:69)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:262)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264)
    at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:38)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:300)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:264)
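This OutOfMemoryError means the JVM was spending nearly all of its time in garbage collection while reclaiming almost no heap. The frames show it happened while Spark's SizeEstimator was sampling the size of a block being cached inside MemoryStore.unrollSafely, i.e., a partition being persisted did not fit comfortably in executor memory. Below is a minimal Scala sketch of one common mitigation, assuming a Spark 1.x application (this trace's CacheManager exists only in Spark 1.x): give executors more heap, cap the cache's share of it, and persist in serialized, disk-spillable form so oversized blocks spill instead of driving GC to its limit. The memory sizes, app name, and input path are illustrative assumptions, not values taken from this trace.

import org.apache.spark.{SparkConf, SparkContext}
import org.apache.spark.storage.StorageLevel

object CacheTuningSketch {
  def main(args: Array[String]): Unit = {
    val conf = new SparkConf()
      .setAppName("cache-tuning-sketch")          // hypothetical app name
      .set("spark.executor.memory", "4g")         // assumption: more heap per executor
      .set("spark.storage.memoryFraction", "0.4") // Spark 1.x knob: cap the cache's share of the heap
    val sc = new SparkContext(conf)

    val lines = sc.textFile("hdfs:///data/input") // hypothetical input path

    // MEMORY_AND_DISK_SER stores partitions as serialized bytes, which take
    // far less heap than deserialized object graphs, and spills blocks to
    // disk when they do not fit in memory instead of failing the task.
    val cached = lines.persist(StorageLevel.MEMORY_AND_DISK_SER)
    println(cached.count())

    sc.stop()
  }
}

If individual partitions are simply too large, repartitioning into more, smaller partitions before persisting (for example lines.repartition(n) with a larger n) is another common way to keep each cached block small enough to unroll.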