org.apache.spark.SparkException: Exception thrown in awaitResult:

search-hadoop.com | 9 months ago

  1. Spark, mail # user - SPARK SQL and join pipeline issue - 2016-07-25, 12:10
     search-hadoop.com | 9 months ago
     org.apache.spark.SparkException: Exception thrown in awaitResult:
  2. Spark, mail # user - Re: SPARK Exception thrown in awaitResult - 2016-07-28, 10:06
     search-hadoop.com | 8 months ago
     org.apache.spark.SparkException: Exception thrown in awaitResult:
  3. Spark, mail # user - SPARK Exception thrown in awaitResult - 2016-07-28, 09:45
     search-hadoop.com | 8 months ago
     org.apache.spark.SparkException: Exception thrown in awaitResult:
  4. python+pyspark: error on inner join with multiple column comparison in pyspark
     Stack Overflow | 7 months ago | Satya
     org.apache.spark.SparkException: Exception thrown in awaitResult:
  5. Pyspark Column.isin() for a large set
     Stack Overflow | 8 months ago | Leonidas
     org.apache.spark.SparkException: Exception thrown in awaitResult:
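
    The two Stack Overflow reports above hit this exception from PySpark joins whose
    smaller side Spark tries to broadcast. A minimal sketch of such a join follows;
    the data, column names, and sizes are hypothetical and only illustrate the shape
    of query that is planned as a BroadcastHashJoinExec over a BroadcastExchangeExec
    when the small side is under spark.sql.autoBroadcastJoinThreshold.

    # Minimal PySpark sketch (hypothetical data and column names): an inner join
    # on multiple columns where the optimizer broadcasts the smaller DataFrame.
    # If collecting and broadcasting that side fails or exceeds
    # spark.sql.broadcastTimeout, the driver surfaces
    # "org.apache.spark.SparkException: Exception thrown in awaitResult:".
    from pyspark.sql import SparkSession
    from pyspark.sql import functions as F

    spark = SparkSession.builder.appName("broadcast-join-sketch").getOrCreate()

    big = spark.range(0, 10000000).withColumn("key2", F.col("id") % 100)
    small = spark.range(0, 1000).withColumn("key2", F.col("id") % 100)

    # Inner join with multiple column comparison, as in report 4 above; when
    # `small` is below spark.sql.autoBroadcastJoinThreshold the physical plan
    # uses BroadcastHashJoinExec over a BroadcastExchangeExec.
    joined = big.join(small, on=[big.id == small.id, big.key2 == small.key2],
                      how="inner")
    joined.count()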


    Root Cause Analysis

    1. org.apache.spark.SparkException: Exception thrown in awaitResult:
      at org.apache.spark.util.ThreadUtils$.awaitResult()
    2. Spark
      ThreadUtils$.awaitResult
      1. org.apache.spark.util.ThreadUtils$.awaitResult(ThreadUtils.scala:194)
      1 frame
    3. org.apache.spark
      BroadcastExchangeExec.doExecuteBroadcast
      1. org.apache.spark.sql.execution.exchange.BroadcastExchangeExec.doExecuteBroadcast(BroadcastExchangeExec.scala:102)
      1 frame
    4. Spark Project SQL
      SparkPlan$$anonfun$executeQuery$1.apply
      1. org.apache.spark.sql.execution.InputAdapter.doExecuteBroadcast(WholeStageCodegenExec.scala:229)
      2. org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:125)
      3. org.apache.spark.sql.execution.SparkPlan$$anonfun$executeBroadcast$1.apply(SparkPlan.scala:125)
      4. org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
      4 frames
    5. Spark
      RDDOperationScope$.withScope
      1. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      1 frame
    6. Spark Project SQL
      SparkPlan$$anonfun$executeQuery$1.apply
      1. org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
      2. org.apache.spark.sql.execution.SparkPlan.executeBroadcast(SparkPlan.scala:124)
      3. org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.prepareBroadcast(BroadcastHashJoinExec.scala:98)
      4. org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.codegenInner(BroadcastHashJoinExec.scala:197)
      5. org.apache.spark.sql.execution.joins.BroadcastHashJoinExec.doConsume(BroadcastHashJoinExec.scala:82)
      6. org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
      7. org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:30)
      8. org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:62)
      9. org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
      10. org.apache.spark.sql.execution.FilterExec.consume(basicPhysicalOperators.scala:79)
      11. org.apache.spark.sql.execution.FilterExec.doConsume(basicPhysicalOperators.scala:194)
      12. org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
      13. org.apache.spark.sql.execution.ExpandExec.consume(ExpandExec.scala:36)
      14. org.apache.spark.sql.execution.ExpandExec.doConsume(ExpandExec.scala:198)
      15. org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
      16. org.apache.spark.sql.execution.ProjectExec.consume(basicPhysicalOperators.scala:30)
      17. org.apache.spark.sql.execution.ProjectExec.doConsume(basicPhysicalOperators.scala:62)
      18. org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
      19. org.apache.spark.sql.execution.FilterExec.consume(basicPhysicalOperators.scala:79)
      20. org.apache.spark.sql.execution.FilterExec.doConsume(basicPhysicalOperators.scala:194)
      21. org.apache.spark.sql.execution.CodegenSupport$class.consume(WholeStageCodegenExec.scala:153)
      22. org.apache.spark.sql.execution.RowDataSourceScanExec.consume(ExistingRDD.scala:146)
      23. org.apache.spark.sql.execution.RowDataSourceScanExec.doProduce(ExistingRDD.scala:211)
      24. org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
      25. org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
      26. org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
      26 frames
    7. Spark
      RDDOperationScope$.withScope
      1. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      1 frame
    8. Spark Project SQL
      SparkPlan$$anonfun$executeQuery$1.apply
      1. org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
      2. org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
      3. org.apache.spark.sql.execution.RowDataSourceScanExec.produce(ExistingRDD.scala:146)
      4. org.apache.spark.sql.execution.FilterExec.doProduce(basicPhysicalOperators.scala:113)
      5. org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
      6. org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
      7. org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
      7 frames
    9. Spark
      RDDOperationScope$.withScope
      1. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      1 frame
    10. Spark Project SQL
      SparkPlan$$anonfun$executeQuery$1.apply
      1. org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
      2. org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
      3. org.apache.spark.sql.execution.FilterExec.produce(basicPhysicalOperators.scala:79)
      4. org.apache.spark.sql.execution.ProjectExec.doProduce(basicPhysicalOperators.scala:40)
      5. org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:83)
      6. org.apache.spark.sql.execution.CodegenSupport$$anonfun$produce$1.apply(WholeStageCodegenExec.scala:78)
      7. org.apache.spark.sql.execution.SparkPlan$$anonfun$executeQuery$1.apply(SparkPlan.scala:136)
      7 frames
    11. Spark
      RDDOperationScope$.withScope
      1. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:151)
      1 frame
    12. Spark Project SQL
      ExpandExec.doProduce
      1. org.apache.spark.sql.execution.SparkPlan.executeQuery(SparkPlan.scala:133)
      2. org.apache.spark.sql.execution.CodegenSupport$class.produce(WholeStageCodegenExec.scala:78)
      3. org.apache.spark.sql.execution.ProjectExec.produce(basicPhysicalOperators.scala:30)
      4. org.apache.spark.sql.execution.ExpandExec.doProduce(ExpandExec.scala:93)
      4 frames
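
    Reading the trace bottom-up: whole-stage code generation for a
    BroadcastHashJoinExec calls prepareBroadcast, which asks the
    BroadcastExchangeExec child for its broadcast relation, and
    doExecuteBroadcast blocks in ThreadUtils.awaitResult until the build side
    has been collected on the driver and broadcast; the SparkException is thrown
    when that wait fails or times out. A hedged configuration sketch is below:
    spark.sql.broadcastTimeout and spark.sql.autoBroadcastJoinThreshold are
    standard Spark SQL settings, but the values shown are illustrative, not a
    confirmed fix from the threads listed above.

    # Hedged sketch: two settings commonly adjusted when awaitResult fails inside
    # BroadcastExchangeExec. The values are illustrative only.
    from pyspark.sql import SparkSession

    spark = SparkSession.builder.appName("broadcast-config-sketch").getOrCreate()

    # Allow the driver more time to collect and broadcast the build side
    # (spark.sql.broadcastTimeout is in seconds; Spark's default is 300).
    spark.conf.set("spark.sql.broadcastTimeout", "1200")

    # Or disable automatic broadcast joins so the planner falls back to a
    # sort-merge join instead of BroadcastHashJoinExec (-1 disables it).
    spark.conf.set("spark.sql.autoBroadcastJoinThreshold", "-1")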