java.lang.OutOfMemoryError: GC overhead limit exceeded

GitHub | car2008 | 8 months ago
Seen in: GitHub comment 572#246607309 (car2008, 8 months ago)
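
Before reading the trace, it helps to recall what this error actually means: HotSpot throws "GC overhead limit exceeded" when, by default, more than 98% of recent run time went to garbage collection while each collection recovered less than 2% of the heap. It is a symptom of a nearly full heap being churned, not a separate failure mode from running out of memory. A minimal, self-contained sketch of the condition (class name and allocation sizes are hypothetical):

    import scala.collection.mutable.ArrayBuffer

    // Run with a small heap, e.g. `scala -J-Xmx64m GcOverheadDemo`.
    // Every allocation stays reachable, so each GC cycle recovers almost
    // nothing -- the exact condition HotSpot reports as this error. The
    // check can be disabled with -XX:-UseGCOverheadLimit, but that only
    // defers the failure to a plain "Java heap space" OOM.
    object GcOverheadDemo {
      def main(args: Array[String]): Unit = {
        val retained = ArrayBuffer.empty[Array[Long]]
        while (true)
          retained += new Array[Long](64 * 1024) // ~0.5 MB each, never released
      }
    }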

    Root Cause Analysis (see the sketches after the trace for what the key frames imply)

    1. java.lang.OutOfMemoryError: GC overhead limit exceeded
       at htsjdk.samtools.BinaryTagCodec.readTags()
    2. HTSJDK
      SAMRecord.getReadGroup
      1. htsjdk.samtools.BinaryTagCodec.readTags(BinaryTagCodec.java:282)
      2. htsjdk.samtools.BAMRecord.decodeAttributes(BAMRecord.java:308)
      3. htsjdk.samtools.BAMRecord.getAttribute(BAMRecord.java:288)
      4. htsjdk.samtools.SAMRecord.getReadGroup(SAMRecord.java:691)
      4 frames
    3. org.hammerlab.guacamole
      ReadSets$$anonfun$8.apply
      1. org.hammerlab.guacamole.reads.Read$.apply(Read.scala:77)
      2. org.hammerlab.guacamole.readsets.ReadSets$$anonfun$8.apply(ReadSets.scala:276)
      3. org.hammerlab.guacamole.readsets.ReadSets$$anonfun$8.apply(ReadSets.scala:266)
      3 frames
    4. Scala
      AbstractTraversable.toArray
      1. scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
      2. scala.collection.Iterator$class.toStream(Iterator.scala:1143)
      3. scala.collection.AbstractIterator.toStream(Iterator.scala:1157)
      4. scala.collection.Iterator$$anonfun$toStream$1.apply(Iterator.scala:1143)
      5. scala.collection.Iterator$$anonfun$toStream$1.apply(Iterator.scala:1143)
      6. scala.collection.immutable.Stream$Cons.tail(Stream.scala:1085)
      7. scala.collection.immutable.Stream$Cons.tail(Stream.scala:1077)
      8. scala.collection.immutable.Stream.length(Stream.scala:284)
      9. scala.collection.SeqLike$class.size(SeqLike.scala:106)
      10. scala.collection.AbstractSeq.size(Seq.scala:40)
      11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:248)
      12. scala.collection.AbstractTraversable.toArray(Traversable.scala:105)
      12 frames
    5. Spark
      RDD$$anonfun$partitions$2.apply
      1. org.apache.spark.rdd.ParallelCollectionRDD$.slice(ParallelCollectionRDD.scala:154)
      2. org.apache.spark.rdd.ParallelCollectionRDD.getPartitions(ParallelCollectionRDD.scala:97)
      3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      4. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
      4 frames
    6. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:120)
      1 frame
    7. Spark
      RDD$$anonfun$partitions$2.apply
      1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
      2. org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
      3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      4. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
      4 frames
    8. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:120)
      1 frame
    9. Spark
      RDD$$anonfun$partitions$2.apply
      1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
      2. org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
      3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      3 frames
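
Frames 4.1–4.12 are the telling part of the trace: the decoded reads are pulled through Iterator.toStream, and toArray first asks the Stream for its size (frames 4.8–4.11, ending in Stream.length), which evaluates every element. Because a Scala Stream memoizes what it evaluates, and the caller still holds its head, the entire read set gets pinned on the heap at once. A minimal sketch of that pattern (element count and sizes are hypothetical):

    // Stream is lazy but memoizing: toArray calls size, which for a Stream
    // is Stream.length, forcing every cons cell; the `reads` reference
    // keeps all of them reachable, so the whole data set must fit in memory.
    val reads: Stream[Array[Byte]] =
      Stream.fill(10 * 1000 * 1000)(new Array[Byte](1024)) // ~10 GB if forced

    val all = reads.toArray // walks the length, then copies: OOM on a small heap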
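
The Spark frames (5.1–5.2) show where that forcing is triggered: ParallelCollectionRDD.slice has to know the collection's length to cut it into partitions, so an RDD built with sc.parallelize from a lazy local collection is fully materialized in the driver JVM the first time partitions is computed. In other words, this OOM is a driver-side failure, raised before any task runs. A sketch of the trigger, assuming a local SparkContext (app name and sizes are hypothetical):

    import org.apache.spark.{SparkConf, SparkContext}

    object ParallelizeDemo {
      def main(args: Array[String]): Unit = {
        val sc = new SparkContext(
          new SparkConf().setAppName("parallelize-demo").setMaster("local[*]"))

        // Computing partitions calls ParallelCollectionRDD.slice (frame 5.1),
        // which calls toArray on the Seq (frame 4.12) and so forces the whole
        // Stream on the driver -- no task has been scheduled yet.
        val hugeLocal: Stream[Array[Byte]] =
          Stream.fill(10 * 1000 * 1000)(new Array[Byte](1024))
        val rdd = sc.parallelize(hugeLocal)
        rdd.partitions // OutOfMemoryError surfaces here, in the driver JVM

        sc.stop()
      }
    }

Because the failure is on the driver, the usual mitigations are raising the driver heap (e.g. spark-submit --driver-memory 8g) or, better, avoiding sc.parallelize for large inputs and reading them as a distributed source instead.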