Searched Google with the first line of a Java stack trace?

Paste your entire stack trace, including the exception message, and we can recommend more relevant solutions and speed up your debugging. Try a sample exception.

Recommended solutions based on your search

Solutions on the web

via Stack Overflow by theMadKing, 1 year ago
java.lang.OutOfMemoryError: GC overhead limit exceeded
    at com.twitter.chill.Tuple4Serializer.read(TupleSerializers.scala:68)
    at com.twitter.chill.Tuple4Serializer.read(TupleSerializers.scala:59)
    at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:708)
    at com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.read(DefaultArraySerializers.java:396)
    at com.esotericsoftware.kryo.serializers.DefaultArraySerializers$ObjectArraySerializer.read(DefaultArraySerializers.java:307)
    at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:708)
    at com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:125)
    at com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:551)
    at com.esotericsoftware.kryo.Kryo.readClassAndObject(Kryo.java:790)
    at org.apache.spark.serializer.KryoDeserializationStream.readObject(KryoSerializer.scala:229)
    at org.apache.spark.serializer.DeserializationStream.readValue(Serializer.scala:159)
    at org.apache.spark.util.collection.ExternalAppendOnlyMap$DiskMapIterator.readNextItem(ExternalAppendOnlyMap.scala:515)
    at org.apache.spark.util.collection.ExternalAppendOnlyMap$DiskMapIterator.hasNext(ExternalAppendOnlyMap.scala:535)
    at scala.collection.Iterator$$anon$1.hasNext(Iterator.scala:1004)
    at org.apache.spark.util.collection.ExternalAppendOnlyMap$ExternalIterator.org$apache$spark$util$collection$ExternalAppendOnlyMap$ExternalIterator$$readNextHashCode(ExternalAppendOnlyMap.scala:332)
    at org.apache.spark.util.collection.ExternalAppendOnlyMap$ExternalIterator$$anonfun$5.apply(ExternalAppendOnlyMap.scala:316)
    at org.apache.spark.util.collection.ExternalAppendOnlyMap$ExternalIterator$$anonfun$5.apply(ExternalAppendOnlyMap.scala:314)
    at scala.collection.immutable.List.foreach(List.scala:381)
    at org.apache.spark.util.collection.ExternalAppendOnlyMap$ExternalIterator.<init>(ExternalAppendOnlyMap.scala:314)
    at org.apache.spark.util.collection.ExternalAppendOnlyMap.iterator(ExternalAppendOnlyMap.scala:288)
    at org.apache.spark.Aggregator.combineValuesByKey(Aggregator.scala:43)
    at org.apache.spark.shuffle.BlockStoreShuffleReader.read(BlockStoreShuffleReader.scala:91)
    at org.apache.spark.rdd.ShuffledRDD.compute(ShuffledRDD.scala:109)
    at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:319)
    at org.apache.spark.rdd.RDD.iterator(RDD.scala:283)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:70)
    at org.apache.spark.scheduler.Task.run(Task.scala:85)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:274)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
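This error means the JVM spent almost all of its time in garbage collection while reclaiming very little memory; in this trace the executor runs out of heap while Kryo deserializes spilled shuffle data during a combineByKey-style aggregation. A common first step is to give each executor more heap and reduce how many tasks compete for it. The sketch below is a minimal, illustrative mitigation, assuming Spark 2.x and that you control the job's configuration; the memory, core, and partition values, the app name, and the input path are all hypothetical and should be tuned to your cluster.

import org.apache.spark.{SparkConf, SparkContext}

object GcOverheadMitigationSketch {
  def main(args: Array[String]): Unit = {
    // Hypothetical settings: raise executor heap, lower per-executor
    // concurrency, and increase shuffle parallelism so each task
    // deserializes a smaller slice of the shuffle.
    val conf = new SparkConf()
      .setAppName("gc-overhead-mitigation-sketch")
      .set("spark.executor.memory", "8g")        // more headroom per executor
      .set("spark.executor.cores", "2")          // fewer concurrent tasks per heap
      .set("spark.default.parallelism", "400")   // smaller per-task shuffle blocks

    val sc = new SparkContext(conf)

    // reduceByKey combines values map-side before the shuffle, so far less
    // data reaches the ExternalAppendOnlyMap seen in the stack trace than
    // with a groupByKey-style aggregation.
    val counts = sc
      .textFile("hdfs:///tmp/input.txt") // hypothetical path
      .flatMap(_.split("\\s+"))
      .map(word => (word, 1L))
      .reduceByKey(_ + _, 400)

    counts.take(10).foreach(println)
    sc.stop()
  }
}

The same settings can instead be passed at submit time (for example, --executor-memory 8g). If the job still fails after that, the usual next step is to repartition the shuffle more finely or rework the aggregation so less state is buffered per key, as the reduceByKey above does.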