java.io.IOException: FileAlreadyExistException(message:Block file is being written! userId(##) blockId(####))

JIRA | Calvin Jia | 2 years ago
tip
Did you know that we can give you better hits? Get more relevant results from Samebug’s stack trace search.
  1. 0

    {code:java} java.io.IOException: FileAlreadyExistException(message:Block file is being written! userId(##) blockId(####)) at tachyon.worker.WorkerClient.requestBlockLocation(WorkerClient.java:378) at tachyon.client.TachyonFS.getLocalBlockTemporaryPath(TachyonFS.java:633) at tachyon.client.BlockOutStream.<init>(BlockOutStream.java:96) at tachyon.client.BlockOutStream.<init>(BlockOutStream.java:65) at tachyon.client.RemoteBlockInStream.<init>(RemoteBlockInStream.java:128) at tachyon.client.BlockInStream.get(BlockInStream.java:62) at tachyon.client.FileInStream.seek(FileInStream.java:157) at tachyon.hadoop.HdfsFileInputStream.seek(HdfsFileInputStream.java:244) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:48) at org.apache.hadoop.mapred.LineRecordReader.<init>(LineRecordReader.java:103) at org.apache.hadoop.mapred.TextInputFormat.getRecordReader(TextInputFormat.java:54) at org.apache.spark.rdd.HadoopRDD$$anon$1.<init>(HadoopRDD.scala:236) at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:212) at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:101) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at 
org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:68) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41) at org.apache.spark.scheduler.Task.run(Task.scala:64) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:745) {code}

    JIRA | 2 years ago | Calvin Jia
    java.io.IOException: FileAlreadyExistException(message:Block file is being written! userId(##) blockId(####))
  2. 0

    {code:java} java.io.IOException: FileAlreadyExistException(message:Block file is being written! userId(##) blockId(####)) at tachyon.worker.WorkerClient.requestBlockLocation(WorkerClient.java:378) at tachyon.client.TachyonFS.getLocalBlockTemporaryPath(TachyonFS.java:633) at tachyon.client.BlockOutStream.<init>(BlockOutStream.java:96) at tachyon.client.BlockOutStream.<init>(BlockOutStream.java:65) at tachyon.client.RemoteBlockInStream.<init>(RemoteBlockInStream.java:128) at tachyon.client.BlockInStream.get(BlockInStream.java:62) at tachyon.client.FileInStream.seek(FileInStream.java:157) at tachyon.hadoop.HdfsFileInputStream.seek(HdfsFileInputStream.java:244) at org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:48) at org.apache.hadoop.mapred.LineRecordReader.<init>(LineRecordReader.java:103) at org.apache.hadoop.mapred.TextInputFormat.getRecordReader(TextInputFormat.java:54) at org.apache.spark.rdd.HadoopRDD$$anon$1.<init>(HadoopRDD.scala:236) at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:212) at org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:101) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35) at org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277) at 
org.apache.spark.rdd.RDD.iterator(RDD.scala:244) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:68) at org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41) at org.apache.spark.scheduler.Task.run(Task.scala:64) at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:745) {code}

    JIRA | 2 years ago | Calvin Jia
    java.io.IOException: FileAlreadyExistException(message:Block file is being written! userId(##) blockId(####))
  3. 0

    ADB Connection Error

    Stack Overflow | 4 years ago | Raveesh Lawrance
    java.io.IOException: An existing connection was forcibly closed by the remote host at sun.nio.ch.SocketDispatcher.read0(Native Method) at sun.nio.ch.SocketDispatcher.read(Unknown Source) at sun.nio.ch.IOUtil.readIntoNativeBuffer(Unknown Source) at sun.nio.ch.IOUtil.read(Unknown Source) at sun.nio.ch.SocketChannelImpl.read(Unknown Source) at com.android.ddmlib.AdbHelper.executeRemoteCommand(AdbHelper.java:395) at com.android.ddmlib.Device.executeShellCommand(Device.java:462) at com.android.ddmuilib.logcat.LogCatReceiver$1.run(LogCatReceiver.java:110)
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    java.lang.IllegalArgumentException: URI scheme is not "file"

    java-forums.org | 9 months ago
    java.io.IOException: Server returned HTTP response code: 403 for URL: at sun.net. at slideshowapplet.SlideshowApplet.doListofImagefiles(SlideshowApplet.java:461) at slideshowapplet.SlideshowApplet.init(SlideshowApplet.java:180) at sun.plugin2.applet.Plugin2Manager$AppletExecutionRunnable.run(Plugin2Manager.java:1658)
  6. 0

    java.lang.IllegalArgumentException: URI scheme is not "file"

    java-forums.org | 9 months ago
    java.io.IOException: Server returned HTTP response code: 403 for URL: at sun.net. at slideshowapplet.SlideshowApplet.doListofImagefiles(SlideshowApplet.java:484) at slideshowapplet.SlideshowApplet.init(SlideshowApplet.java:182) at sun.plugin2.applet.Plugin2Manager$AppletExecutionRunnable.run(Plugin2Manager.java:1658)

    Not finding the right solution?
    Take a tour to get the most out of Samebug.

    Tired of useless tips?

    Automated exception search integrated into your IDE

    Root Cause Analysis

    1. java.io.IOException

      FileAlreadyExistException(message:Block file is being written! userId(##) blockId(####))

      at tachyon.worker.WorkerClient.requestBlockLocation()
    2. Tachyon Project Core
      HdfsFileInputStream.seek
      1. tachyon.worker.WorkerClient.requestBlockLocation(WorkerClient.java:378)
      2. tachyon.client.TachyonFS.getLocalBlockTemporaryPath(TachyonFS.java:633)
      3. tachyon.client.BlockOutStream.<init>(BlockOutStream.java:96)
      4. tachyon.client.BlockOutStream.<init>(BlockOutStream.java:65)
      5. tachyon.client.RemoteBlockInStream.<init>(RemoteBlockInStream.java:128)
      6. tachyon.client.BlockInStream.get(BlockInStream.java:62)
      7. tachyon.client.FileInStream.seek(FileInStream.java:157)
      8. tachyon.hadoop.HdfsFileInputStream.seek(HdfsFileInputStream.java:244)
      8 frames
    3. Hadoop
      FSDataInputStream.seek
      1. org.apache.hadoop.fs.FSDataInputStream.seek(FSDataInputStream.java:48)
      1 frame
    4. Hadoop
      TextInputFormat.getRecordReader
      1. org.apache.hadoop.mapred.LineRecordReader.<init>(LineRecordReader.java:103)
      2. org.apache.hadoop.mapred.TextInputFormat.getRecordReader(TextInputFormat.java:54)
      2 frames
    5. Spark
      Executor$TaskRunner.run
      1. org.apache.spark.rdd.HadoopRDD$$anon$1.<init>(HadoopRDD.scala:236)
      2. org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:212)
      3. org.apache.spark.rdd.HadoopRDD.compute(HadoopRDD.scala:101)
      4. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
      5. org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
      6. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
      7. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
      8. org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
      9. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
      10. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
      11. org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
      12. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
      13. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
      14. org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
      15. org.apache.spark.rdd.MapPartitionsRDD.compute(MapPartitionsRDD.scala:35)
      16. org.apache.spark.rdd.RDD.computeOrReadCheckpoint(RDD.scala:277)
      17. org.apache.spark.rdd.RDD.iterator(RDD.scala:244)
      18. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:68)
      19. org.apache.spark.scheduler.ShuffleMapTask.runTask(ShuffleMapTask.scala:41)
      20. org.apache.spark.scheduler.Task.run(Task.scala:64)
      21. org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:203)
      21 frames
    6. Java RT
      Thread.run
      1. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
      2. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
      3. java.lang.Thread.run(Thread.java:745)
      3 frames