java.lang.ExceptionInInitializerError

GitHub | a4712020502 | 5 months ago
  1. 0
    This happened when I tried to insert an empty list of elements into a collection.
  2. 0

    I flagged this as 1.1.0 and 1.0.1 without checking the latter. Link to build, command, and stacktrace follow. {{MutableHashTable.initTable}} is calling {{initBloomFilter}} when {{this.availableMemory.size()==0}}. https://s3.amazonaws.com/apache-flink/flink_bloomfilter_crash.tar.bz2 ./bin/flink run -class org.apache.flink.graph.examples.TriangleListing ~/flink-gelly-examples_2.10-1.1-SNAPSHOT.jar --clip_and_flip false --output print --output print --scale 14 --count {code} org.apache.flink.client.program.ProgramInvocationException: The program execution failed: Job execution failed. at org.apache.flink.client.program.Client.runBlocking(Client.java:381) at org.apache.flink.client.program.Client.runBlocking(Client.java:355) at org.apache.flink.client.program.Client.runBlocking(Client.java:315) at org.apache.flink.client.program.ContextEnvironment.execute(ContextEnvironment.java:61) at org.apache.flink.api.java.ExecutionEnvironment.execute(ExecutionEnvironment.java:898) at org.apache.flink.api.java.DataSet.collect(DataSet.java:410) at org.apache.flink.api.java.DataSet.print(DataSet.java:1605) at org.apache.flink.graph.examples.TriangleListing.main(TriangleListing.java:106) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:498) at org.apache.flink.client.program.PackagedProgram.callMainMethod(PackagedProgram.java:505) at org.apache.flink.client.program.PackagedProgram.invokeInteractiveModeForExecution(PackagedProgram.java:403) at org.apache.flink.client.program.Client.runBlocking(Client.java:248) at org.apache.flink.client.CliFrontend.executeProgramBlocking(CliFrontend.java:860) at org.apache.flink.client.CliFrontend.run(CliFrontend.java:327) at org.apache.flink.client.CliFrontend.parseParameters(CliFrontend.java:1187) at 
org.apache.flink.client.CliFrontend.main(CliFrontend.java:1238) Caused by: org.apache.flink.runtime.client.JobExecutionException: Job execution failed. at org.apache.flink.runtime.jobmanager.JobManager$$anonfun$handleMessage$1$$anonfun$applyOrElse$7.apply$mcV$sp(JobManager.scala:805) at org.apache.flink.runtime.jobmanager.JobManager$$anonfun$handleMessage$1$$anonfun$applyOrElse$7.apply(JobManager.scala:751) at org.apache.flink.runtime.jobmanager.JobManager$$anonfun$handleMessage$1$$anonfun$applyOrElse$7.apply(JobManager.scala:751) at scala.concurrent.impl.Future$PromiseCompletingRunnable.liftedTree1$1(Future.scala:24) at scala.concurrent.impl.Future$PromiseCompletingRunnable.run(Future.scala:24) at akka.dispatch.TaskInvocation.run(AbstractDispatcher.scala:41) at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:401) at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260) at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.pollAndExecAll(ForkJoinPool.java:1253) at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1346) at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979) at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107) Caused by: java.lang.IllegalArgumentException: expectedEntries should be > 0 at org.apache.flink.shaded.com.google.common.base.Preconditions.checkArgument(Preconditions.java:88) at org.apache.flink.runtime.operators.util.BloomFilter.<init>(BloomFilter.java:53) at org.apache.flink.runtime.operators.hash.MutableHashTable.initBloomFilter(MutableHashTable.java:823) at org.apache.flink.runtime.operators.hash.MutableHashTable.initTable(MutableHashTable.java:1183) at org.apache.flink.runtime.operators.hash.MutableHashTable.buildTableFromSpilledPartition(MutableHashTable.java:887) at org.apache.flink.runtime.operators.hash.MutableHashTable.prepareNextPartition(MutableHashTable.java:631) at 
org.apache.flink.runtime.operators.hash.MutableHashTable.nextRecord(MutableHashTable.java:666) at org.apache.flink.runtime.operators.hash.NonReusingBuildFirstHashJoinIterator.callWithNextKey(NonReusingBuildFirstHashJoinIterator.java:116) at org.apache.flink.runtime.operators.JoinDriver.run(JoinDriver.java:216) at org.apache.flink.runtime.operators.BatchTask.run(BatchTask.java:480) at org.apache.flink.runtime.operators.BatchTask.invoke(BatchTask.java:345) at org.apache.flink.runtime.taskmanager.Task.run(Task.java:579) at java.lang.Thread.run(Thread.java:745) {code}

    Apache's JIRA Issue Tracker | 7 months ago | Greg Hogan
    org.apache.flink.client.program.ProgramInvocationException: The program execution failed: Job execution failed.
  3. Speed up your debug routine!

    Automated exception search integrated into your IDE

  4. 0

    Example stack trace from GH test: {code} java.lang.IllegalArgumentException: defaultTimeout is <0L> should be > 0 at com.google.common.base.Preconditions.checkArgument(Preconditions.java:84) at com.atlassian.pageobjects.elements.query.AbstractPollingQuery.<init>(AbstractPollingQuery.java:21) at com.atlassian.pageobjects.elements.query.AbstractTimedQuery.<init>(AbstractTimedQuery.java:42) at com.atlassian.pageobjects.elements.query.AbstractTimedQuery.<init>(AbstractTimedQuery.java:50) at com.atlassian.pageobjects.elements.WebDriverLocators$WebDriverListLocator$1.<init>(WebDriverLocators.java:227) at com.atlassian.pageobjects.elements.WebDriverLocators$WebDriverListLocator.queryForElement(WebDriverLocators.java:226) at com.atlassian.pageobjects.elements.WebDriverLocators$WebDriverListLocator.waitUntilLocated(WebDriverLocators.java:210) at com.atlassian.pageobjects.elements.query.webdriver.WebDriverLocatableBasedTimedQuery$LocatableBasedSupplier.get(WebDriverLocatableBasedTimedQuery.java:92) at com.atlassian.pageobjects.elements.query.webdriver.GenericWebDriverTimedQuery.currentValue(GenericWebDriverTimedQuery.java:69) at com.atlassian.pageobjects.elements.query.AbstractTimedQuery.now(AbstractTimedQuery.java:95) at com.atlassian.pageobjects.elements.query.Conditions$MatchingCondition.currentValue(Conditions.java:465) at com.atlassian.pageobjects.elements.query.Conditions$MatchingCondition.currentValue(Conditions.java:448) at com.atlassian.pageobjects.elements.query.AbstractTimedQuery.by(AbstractTimedQuery.java:66) at com.atlassian.pageobjects.elements.query.AbstractTimedQuery.byDefaultTimeout(AbstractTimedQuery.java:90) at com.atlassian.pageobjects.elements.query.Poller$2.evaluate(Poller.java:261) at com.atlassian.pageobjects.elements.query.Poller.waitUntil(Poller.java:192) at com.atlassian.pageobjects.elements.query.Poller.waitUntil(Poller.java:112) at com.atlassian.pageobjects.elements.query.Poller.waitUntilTrue(Poller.java:34) at 
com.atlassian.webdriver.greenhopper.component.rapid.board.QuickFilter.toggle(QuickFilter.java:45) at it.com.atlassian.greenhopper.rapid.board.PoolTest.testQuickFilterToggling(PoolTest.java:127) at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25) at java.lang.reflect.Method.invoke(Method.java:597) at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:44) at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15) at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:41) at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20) at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28) at com.atlassian.integrationtesting.runner.CompositeTestRunner$1.evaluate(CompositeTestRunner.java:161) at com.atlassian.integrationtesting.runner.CompositeTestRunner$2.evaluate(CompositeTestRunner.java:177) at org.junit.runners.BlockJUnit4ClassRunner.runNotIgnored(BlockJUnit4ClassRunner.java:79) at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:71) at com.atlassian.integrationtesting.runner.CompositeTestRunner.runChild(CompositeTestRunner.java:143) at com.atlassian.integrationtesting.runner.CompositeTestRunner.runChild(CompositeTestRunner.java:92) at org.junit.runners.ParentRunner$3.run(ParentRunner.java:193) at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:52) at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:191) at org.junit.runners.ParentRunner.access$000(ParentRunner.java:42) at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:184) at com.atlassian.integrationtesting.runner.CompositeTestRunner$3.evaluate(CompositeTestRunner.java:197) at 
com.atlassian.integrationtesting.runner.CompositeTestRunner$4.evaluate(CompositeTestRunner.java:213) at org.junit.runners.ParentRunner.run(ParentRunner.java:236) at com.atlassian.integrationtesting.runner.CompositeTestRunner.run(CompositeTestRunner.java:124) at org.eclipse.jdt.internal.junit4.runner.JUnit4TestReference.run(JUnit4TestReference.java:49) at org.eclipse.jdt.internal.junit.runner.TestExecution.run(TestExecution.java:38) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:467) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.runTests(RemoteTestRunner.java:683) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.run(RemoteTestRunner.java:390) at org.eclipse.jdt.internal.junit.runner.RemoteTestRunner.main(RemoteTestRunner.java:197) {code} This only happens when using page elements retrieved via findAll (i.e. locatable by list locator)

    Ecosystem JIRA | 5 years ago | Dariusz Kordonski
    java.lang.IllegalArgumentException: defaultTimeout is <0L> should be > 0
  5. 0

    Server crashes "Exception in server tick loop" | Bukkit Forums

    bukkit.org | 1 year ago
    java.lang.IllegalArgumentException: ChunkNibbleArrays should be 2048 bytes not: 0

    Not finding the right solution?
    Take a tour to get the most out of Samebug.

    Tired of useless tips?

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. java.lang.IllegalArgumentException

      state should be: w >= 0

      at com.mongodb.assertions.Assertions.isTrueArgument()
    2. MongoDB Java Driver
      WriteConcern.<init>
      1. com.mongodb.assertions.Assertions.isTrueArgument(Assertions.java:99)
      2. com.mongodb.WriteConcern.<init>(WriteConcern.java:316)
      3. com.mongodb.WriteConcern.<init>(WriteConcern.java:227)
      3 frames
    3. casbah-core
      MongoClient.apply
      1. com.mongodb.casbah.WriteConcern$.<init>(WriteConcern.scala:41)
      2. com.mongodb.casbah.WriteConcern$.<clinit>(WriteConcern.scala)
      3. com.mongodb.casbah.BaseImports$class.$init$(Implicits.scala:162)
      4. com.mongodb.casbah.Imports$.<init>(Implicits.scala:142)
      5. com.mongodb.casbah.Imports$.<clinit>(Implicits.scala)
      6. com.mongodb.casbah.MongoClient.apply(MongoClient.scala:219)
      6 frames
    4. com.stratio.datasource
      MongodbRDD.getPartitions
      1. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner.isShardedCollection(MongodbPartitioner.scala:78)
      2. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner$$anonfun$computePartitions$1.apply(MongodbPartitioner.scala:67)
      3. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner$$anonfun$computePartitions$1.apply(MongodbPartitioner.scala:66)
      4. com.stratio.datasource.util.using$.apply(using.scala:38)
      5. com.stratio.datasource.mongodb.partitioner.MongodbPartitioner.computePartitions(MongodbPartitioner.scala:66)
      6. com.stratio.datasource.mongodb.rdd.MongodbRDD.getPartitions(MongodbRDD.scala:42)
      6 frames
    5. Spark
      RDD$$anonfun$partitions$2.apply
      1. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      2. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
      2 frames
    6. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:120)
      1 frame
    7. Spark
      RDD$$anonfun$partitions$2.apply
      1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
      2. org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
      3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      4. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
      4 frames
    8. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:120)
      1 frame
    9. Spark
      PairRDDFunctions.reduceByKey
      1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
      2. org.apache.spark.Partitioner$.defaultPartitioner(Partitioner.scala:65)
      3. org.apache.spark.rdd.PairRDDFunctions$$anonfun$reduceByKey$3.apply(PairRDDFunctions.scala:331)
      4. org.apache.spark.rdd.PairRDDFunctions$$anonfun$reduceByKey$3.apply(PairRDDFunctions.scala:331)
      5. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
      6. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:111)
      7. org.apache.spark.rdd.RDD.withScope(RDD.scala:316)
      8. org.apache.spark.rdd.PairRDDFunctions.reduceByKey(PairRDDFunctions.scala:330)
      8 frames
    10. com.stratio.datasource
      MongodbRelation$$anonfun$1.apply
      1. com.stratio.datasource.mongodb.schema.MongodbSchema.schema(MongodbSchema.scala:47)
      2. com.stratio.datasource.mongodb.MongodbRelation.com$stratio$datasource$mongodb$MongodbRelation$$lazySchema$lzycompute(MongodbRelation.scala:63)
      3. com.stratio.datasource.mongodb.MongodbRelation.com$stratio$datasource$mongodb$MongodbRelation$$lazySchema(MongodbRelation.scala:60)
      4. com.stratio.datasource.mongodb.MongodbRelation$$anonfun$1.apply(MongodbRelation.scala:65)
      5. com.stratio.datasource.mongodb.MongodbRelation$$anonfun$1.apply(MongodbRelation.scala:65)
      5 frames
    11. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:120)
      1 frame
    12. com.stratio.datasource
      DefaultSource.createRelation
      1. com.stratio.datasource.mongodb.MongodbRelation.<init>(MongodbRelation.scala:65)
      2. com.stratio.datasource.mongodb.DefaultSource.createRelation(DefaultSource.scala:36)
      2 frames
    13. org.apache.spark
      ResolvedDataSource$.apply
      1. org.apache.spark.sql.execution.datasources.ResolvedDataSource$.apply(ResolvedDataSource.scala:158)
      1 frame
    14. Spark Project SQL
      DataFrameReader.load
      1. org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:119)
      1 frame
    15. com.askingdata.test
      TestSpark.main
      1. com.askingdata.test.TestSpark.main(TestSpark.java:23)
      1 frame