java.lang.Error: java.lang.InterruptedException

GitHub | elyast | 1 year ago
  1. 0

    InterruptedException in starter-meta

    GitHub | 2 years ago | knizhnik
    java.lang.InterruptedException
  2. 0

    [Essentials-Protect] java.sql.SQLException when using Sqlite in Essentials config.yml

    GitHub | 4 years ago | eagl3s1ght
    java.sql.SQLException: An SQLException was provoked by the following failure: java.lang.InterruptedException
  3. Speed up your debug routine!

    Automated exception search integrated into your IDE

  4. 0

    copy-to-slave never finishes

    Stack Overflow | 3 years ago | attrib
    java.io.InterruptedIOException

  1. treefolk 1 time, last 2 weeks ago
  2. danleyb2Interintel 1 time, last 2 weeks ago
  3. filpgame 1 time, last 2 months ago
  4. Nikolay Rybak 4 times, last 4 months ago
  5. Handemelindo 1 time, last 4 months ago
6 more registered users
18 unregistered visitors
Not finding the right solution?
Take a tour to get the most out of Samebug.

Tired of useless tips?

Automated exception search integrated into your IDE

Root Cause Analysis

  1. java.lang.InterruptedException

    No message provided

    at java.lang.Object.wait()
  2. Java RT
    Object.wait
    1. java.lang.Object.wait(Native Method)
    2. java.lang.Object.wait(Object.java:503)
    2 frames
  3. Spark
    RDD.saveAsTextFile
    1. org.apache.spark.scheduler.JobWaiter.awaitResult(JobWaiter.scala:73)
    2. org.apache.spark.scheduler.DAGScheduler.runJob(DAGScheduler.scala:559)
    3. org.apache.spark.SparkContext.runJob(SparkContext.scala:1822)
    4. org.apache.spark.SparkContext.runJob(SparkContext.scala:1835)
    5. org.apache.spark.SparkContext.runJob(SparkContext.scala:1912)
    6. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply$mcV$sp(PairRDDFunctions.scala:1124)
    7. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1065)
    8. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopDataset$1.apply(PairRDDFunctions.scala:1065)
    9. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    10. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    11. org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
    12. org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopDataset(PairRDDFunctions.scala:1065)
    13. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply$mcV$sp(PairRDDFunctions.scala:989)
    14. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:965)
    15. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$4.apply(PairRDDFunctions.scala:965)
    16. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    17. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    18. org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
    19. org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:965)
    20. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$3.apply$mcV$sp(PairRDDFunctions.scala:951)
    21. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$3.apply(PairRDDFunctions.scala:951)
    22. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$3.apply(PairRDDFunctions.scala:951)
    23. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    24. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    25. org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
    26. org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:950)
    27. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$2.apply$mcV$sp(PairRDDFunctions.scala:909)
    28. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$2.apply(PairRDDFunctions.scala:907)
    29. org.apache.spark.rdd.PairRDDFunctions$$anonfun$saveAsHadoopFile$2.apply(PairRDDFunctions.scala:907)
    30. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    31. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    32. org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
    33. org.apache.spark.rdd.PairRDDFunctions.saveAsHadoopFile(PairRDDFunctions.scala:907)
    34. org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$2.apply$mcV$sp(RDD.scala:1444)
    35. org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$2.apply(RDD.scala:1432)
    36. org.apache.spark.rdd.RDD$$anonfun$saveAsTextFile$2.apply(RDD.scala:1432)
    37. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    38. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    39. org.apache.spark.rdd.RDD.withScope(RDD.scala:306)
    40. org.apache.spark.rdd.RDD.saveAsTextFile(RDD.scala:1432)
    40 frames
  4. com.nokia.ph
    StreamToFileDumper$$anonfun$process$1.apply
    1. com.nokia.ph.kinesis2s3.spark.StreamToFileDumper.save(StreamToFileDumper.scala:45)
    2. com.nokia.ph.kinesis2s3.spark.StreamToFileDumper$$anonfun$process$1.apply(StreamToFileDumper.scala:57)
    3. com.nokia.ph.kinesis2s3.spark.StreamToFileDumper$$anonfun$process$1.apply(StreamToFileDumper.scala:57)
    3 frames
  5. Spark Project Streaming
    ForEachDStream$$anonfun$1.apply
    1. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:42)
    2. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:40)
    3. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:40)
    4. org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:399)
    5. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:40)
    6. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:40)
    7. org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:40)
    7 frames
  6. Scala
    Try$.apply
    1. scala.util.Try$.apply(Try.scala:161)
    1 frame
  7. Spark Project Streaming
    JobScheduler$JobHandler$$anonfun$run$1.apply
    1. org.apache.spark.streaming.scheduler.Job.run(Job.scala:34)
    2. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:218)
    3. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:218)
    4. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:218)
    4 frames
  8. Scala
    DynamicVariable.withValue
    1. scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    1 frame
  9. Spark Project Streaming
    JobScheduler$JobHandler.run
    1. org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:217)
    1 frame
  10. Java RT
    Thread.run
    1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    3. java.lang.Thread.run(Thread.java:745)
    3 frames