org.bukkit.event.EventException

GitHub | L4BORG | 2 months ago
  1. 0

    GitHub comment 843#252642509

    GitHub | 2 months ago | L4BORG
    org.bukkit.event.EventException
  2. 0

    Graylog2-server 0.90.3 keeps crashing

    GitHub | 2 years ago | neilferreira
    com.google.common.util.concurrent.ExecutionError: java.lang.OutOfMemoryError: Java heap space
  3. Speed up your debugging routine!

    Automated exception search integrated into your IDE

  4. 0

    The following code that works with Spark 1.4.1 and respective connector version does not work with Spark 1.5.2: {code} import com.datastax.spark.connector._ import org.apache.spark.sql.cassandra.CassandraSQLContext val csc = new CassandraSQLContext(sc) csc.setKeyspace("mykeyspace") val result = csc.sql("SELECT COUNT(*) FROM mytable") {code} Exception: {code} com.google.common.util.concurrent.ExecutionError: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(Lorg/apache/spark/sql/sources/BaseRelation;)V at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2199) at com.google.common.cache.LocalCache.get(LocalCache.java:3934) at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3938) at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4821) at org.apache.spark.sql.cassandra.CassandraCatalog.lookupRelation(CassandraCatalog.scala:34) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:257) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:268) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:264) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57) at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:56) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$1.apply(LogicalPlan.scala:54) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$1.apply(LogicalPlan.scala:54) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:249) ... 
Caused by: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(Lorg/apache/spark/sql/sources/BaseRelation;)V at org.apache.spark.sql.cassandra.CassandraCatalog.org$apache$spark$sql$cassandra$CassandraCatalog$$buildRelation(CassandraCatalog.scala:43) at org.apache.spark.sql.cassandra.CassandraCatalog$$anon$1.load(CassandraCatalog.scala:26) at org.apache.spark.sql.cassandra.CassandraCatalog$$anon$1.load(CassandraCatalog.scala:23) at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3524) at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2317) at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2280) at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2195) ... 92 more {code} Used Versions: Scala: 2.10 Spark Version: 1.5.2 Connector Package: 1.5.0-RC1-s_2.10 and 1.5.0-M3-s_2.10

    DataStax JIRA | 11 months ago | Philip Stroh
    com.google.common.util.concurrent.ExecutionError: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(Lorg/apache/spark/sql/sources/BaseRelation;)V
  5. 0

    The following code that works with Spark 1.4.1 and respective connector version does not work with Spark 1.5.2: {code} import com.datastax.spark.connector._ import org.apache.spark.sql.cassandra.CassandraSQLContext val csc = new CassandraSQLContext(sc) csc.setKeyspace("mykeyspace") val result = csc.sql("SELECT COUNT(*) FROM mytable") {code} Exception: {code} com.google.common.util.concurrent.ExecutionError: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(Lorg/apache/spark/sql/sources/BaseRelation;)V at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2199) at com.google.common.cache.LocalCache.get(LocalCache.java:3934) at com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3938) at com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4821) at org.apache.spark.sql.cassandra.CassandraCatalog.lookupRelation(CassandraCatalog.scala:34) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:257) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:268) at org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:264) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolveOperators$1.apply(LogicalPlan.scala:57) at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolveOperators(LogicalPlan.scala:56) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$1.apply(LogicalPlan.scala:54) at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$1.apply(LogicalPlan.scala:54) at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:249) ... 
Caused by: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(Lorg/apache/spark/sql/sources/BaseRelation;)V at org.apache.spark.sql.cassandra.CassandraCatalog.org$apache$spark$sql$cassandra$CassandraCatalog$$buildRelation(CassandraCatalog.scala:43) at org.apache.spark.sql.cassandra.CassandraCatalog$$anon$1.load(CassandraCatalog.scala:26) at org.apache.spark.sql.cassandra.CassandraCatalog$$anon$1.load(CassandraCatalog.scala:23) at com.google.common.cache.LocalCache$LoadingValueReference.loadFuture(LocalCache.java:3524) at com.google.common.cache.LocalCache$Segment.loadSync(LocalCache.java:2317) at com.google.common.cache.LocalCache$Segment.lockedGetOrLoad(LocalCache.java:2280) at com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2195) ... 92 more {code} Used Versions: Scala: 2.10 Spark Version: 1.5.2 Connector Package: 1.5.0-RC1-s_2.10 and 1.5.0-M3-s_2.10

    DataStax JIRA | 11 months ago | Philip Stroh
    com.google.common.util.concurrent.ExecutionError: java.lang.NoSuchMethodError: org.apache.spark.sql.execution.datasources.LogicalRelation.<init>(Lorg/apache/spark/sql/sources/BaseRelation;)V

  1. poroszd 2 times, last 3 months ago
  2. rp 5 times, last 8 months ago
3 unregistered visitors
Not finding the right solution?
Take a tour to learn how to get the most out of Samebug.

Tired of useless tips?

Automated exception search integrated into your IDE

Root Cause Analysis

  1. com.google.common.util.concurrent.ExecutionError

    java.lang.StackOverflowError

    at com.google.common.cache.LocalCache$Segment.get()
  2. Guava
    LocalCache$LocalLoadingCache.get
    1. com.google.common.cache.LocalCache$Segment.get(LocalCache.java:2199)[patched.jar:git-TacoSpigot-"65fd35f"]
    2. com.google.common.cache.LocalCache.get(LocalCache.java:3934)[patched.jar:git-TacoSpigot-"65fd35f"]
    3. com.google.common.cache.LocalCache.getOrLoad(LocalCache.java:3938)[patched.jar:git-TacoSpigot-"65fd35f"]
    4. com.google.common.cache.LocalCache$LocalLoadingCache.get(LocalCache.java:4821)[patched.jar:git-TacoSpigot-"65fd35f"]
    4 frames
  3. com.earth2me.essentials
    Economy.add
    1. com.earth2me.essentials.UserMap.getUser(UserMap.java:111)[?:?]
    2. com.earth2me.essentials.UserMap.getUser(UserMap.java:92)[?:?]
    3. com.earth2me.essentials.Essentials.getOfflineUser(Essentials.java:585)[?:?]
    4. com.earth2me.essentials.Essentials.getUser(Essentials.java:573)[?:?]
    5. com.earth2me.essentials.api.Economy.getUserByName(Economy.java:67)[?:?]
    6. com.earth2me.essentials.api.Economy.getMoneyExact(Economy.java:85)[?:?]
    7. com.earth2me.essentials.api.Economy.add(Economy.java:148)[?:?]
    8. com.earth2me.essentials.api.Economy.add(Economy.java:141)[?:?]
    8 frames
  4. net.milkbowl.vault
    Economy_Essentials.depositPlayer
    1. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:146)[?:?]
    2. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    3. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    4. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    5. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    6. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    7. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    8. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    9. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    10. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    11. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    12. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    13. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    14. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    15. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    16. net.milkbowl.vault.economy.plugins.Economy_Essentials.depositPlayer(Economy_Essentials.java:151)[?:?]
    16 frames