java.lang.OutOfMemoryError

No Samebug tips are available for this exception yet. If you have an idea how to solve this issue, a short tip would help the users who encountered it in the past week.

  • java.lang.OutOfMemoryError: GC overhead limit exceeded at htsjdk.samtools.BinaryTagCodec.readTags(BinaryTagCodec.java:282) at htsjdk.samtools.BAMRecord.decodeAttributes(BAMRecord.java:308) at htsjdk.samtools.BAMRecord.getAttribute(BAMRecord.java:288) at htsjdk.samtools.SAMRecord.getReadGroup(SAMRecord.java:691) at org.hammerlab.guacamole.reads.Read$.apply(Read.scala:77) at org.hammerlab.guacamole.readsets.ReadSets$$anonfun$8.apply(ReadSets.scala:276) at org.hammerlab.guacamole.readsets.ReadSets$$anonfun$8.apply(ReadSets.scala:266) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at scala.collection.Iterator$class.toStream(Iterator.scala:1143) at scala.collection.AbstractIterator.toStream(Iterator.scala:1157) at scala.collection.Iterator$$anonfun$toStream$1.apply(Iterator.scala:1143) at scala.collection.Iterator$$anonfun$toStream$1.apply(Iterator.scala:1143) at scala.collection.immutable.Stream$Cons.tail(Stream.scala:1085) at scala.collection.immutable.Stream$Cons.tail(Stream.scala:1077) at scala.collection.immutable.Stream.length(Stream.scala:284) at scala.collection.SeqLike$class.size(SeqLike.scala:106) at scala.collection.AbstractSeq.size(Seq.scala:40) at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:248) at scala.collection.AbstractTraversable.toArray(Traversable.scala:105) at org.apache.spark.rdd.ParallelCollectionRDD$.slice(ParallelCollectionRDD.scala:154) at org.apache.spark.rdd.ParallelCollectionRDD.getPartitions(ParallelCollectionRDD.scala:97) at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239) at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237) at scala.Option.getOrElse(Option.scala:120) at org.apache.spark.rdd.RDD.partitions(RDD.scala:237) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239) at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237) at 
scala.Option.getOrElse(Option.scala:120) at org.apache.spark.rdd.RDD.partitions(RDD.scala:237) at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
No Bugmate found.