java.lang.IllegalArgumentException: orcFileOperator: path hdfs://dobbindata/apps/hive/warehouse/tbl does not have valid orc files matching the pattern

hortonworks.com | 2 months ago
Related issues

  1. Spark SQL fails on empty ORC table, HDP 2.4.2 - Hortonworks
     hortonworks.com | 2 months ago
     java.lang.IllegalArgumentException: orcFileOperator: path hdfs://dobbindata/apps/hive/warehouse/tbl does not have valid orc files matching the pattern
  2. Import data from hdfs
     Google Groups | 2 years ago | Thiago Henrique dos Santos Bento
     java.lang.reflect.InvocationTargetException
  3. eco-release-metadata/RELEASENOTES.1.2.0.md at master · aw-was-here/eco-release-metadata · GitHub
     github.com | 3 months ago
     java.lang.IllegalArgumentException: Could not find Parquet metadata at path file:/user/hive/warehouse/test_parquet
  4. Would like a way to add dependencies outside of the capsule root
     GitHub | 2 years ago | yrro
     java.lang.RuntimeException: Could not resolve item /etc/hive/conf
  5. Using Amazon S3 for input, output, and intermediate results in an EMR MapReduce job
     Stack Overflow | 4 years ago | Timnit Gebru
     java.lang.IllegalArgumentException: This file system object (hdfs://10.254.37.109:9000) does not support access to the request path 's3n://energydata/input/centers_200_10k_norm.csv' You possibly called FileSystem.get(conf) when you should have called FileSystem.get(uri, conf) to obtain a file system supporting your path. (The corrected call is sketched below.)
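
    The message in item 5 states the fix directly: resolve the FileSystem from the path's own URI instead of taking the cluster's default filesystem from the configuration. A minimal Scala sketch of the corrected call, assuming a Hadoop client on the classpath; the bucket and file name are copied from the report above purely for illustration:

        import org.apache.hadoop.conf.Configuration
        import org.apache.hadoop.fs.{FileSystem, Path}

        object S3PathAccess {
          def main(args: Array[String]): Unit = {
            val conf = new Configuration()
            val s3Path = new Path("s3n://energydata/input/centers_200_10k_norm.csv")
            // FileSystem.get(conf) returns the *default* filesystem (here
            // hdfs://10.254.37.109:9000), which cannot serve an s3n:// path.
            // Resolving the filesystem from the path's own URI selects the
            // implementation that matches the scheme.
            val fs = FileSystem.get(s3Path.toUri, conf)
            println(s"exists: ${fs.exists(s3Path)}")
          }
        }

    Path.getFileSystem(conf) is an equivalent shorthand for the same lookup.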


    Root Cause Analysis

    java.lang.IllegalArgumentException: orcFileOperator: path hdfs://dobbindata/apps/hive/warehouse/tbl does not have valid orc files matching the pattern
        at org.apache.spark.sql.hive.orc.OrcFileOperator$.listOrcFiles(OrcFileOperator.scala:104)
        at org.apache.spark.sql.hive.orc.OrcFileOperator$.getFileReader(OrcFileOperator.scala:69)
        at org.apache.spark.sql.hive.orc.OrcFileOperator$.readSchema(OrcFileOperator.scala:77)
        at org.apache.spark.sql.hive.orc.OrcRelation$$anonfun$2.apply(OrcRelation.scala:185)
        at org.apache.spark.sql.hive.orc.OrcRelation$$anonfun$2.apply(OrcRelation.scala:185)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.sql.hive.orc.OrcRelation.<init>(OrcRelation.scala:184)
        at org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$20.apply(HiveMetastoreCatalog.scala:580)
        at org.apache.spark.sql.hive.HiveMetastoreCatalog$$anonfun$20.apply(HiveMetastoreCatalog.scala:578)
        at scala.Option.getOrElse(Option.scala:120)
        at org.apache.spark.sql.hive.HiveMetastoreCatalog.org$apache$spark$sql$hive$HiveMetastoreCatalog$$convertToOrcRelation(HiveMetastoreCatalog.scala:578)
        at org.apache.spark.sql.hive.HiveMetastoreCatalog$OrcConversions$$anonfun$apply$2.applyOrElse(HiveMetastoreCatalog.scala:647)
        at org.apache.spark.sql.hive.HiveMetastoreCatalog$OrcConversions$$anonfun$apply$2.applyOrElse(HiveMetastoreCatalog.scala:643)
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:335)
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:335)
        at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:334)
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:332)
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:332)
        at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:281)
        at scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
        at scala.collection.Iterator$class.foreach(Iterator.scala:727)
        at scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
        at scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
        at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
        at scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
        at scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
        at scala.collection.AbstractIterator.to(Iterator.scala:1157)
        at scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
        at scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
        at scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
        at scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:321)
        at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:332)
        at org.apache.spark.sql.hive.HiveMetastoreCatalog$OrcConversions$.apply(HiveMetastoreCatalog.scala:643)
        at org.apache.spark.sql.hive.HiveMetastoreCatalog$OrcConversions$.apply(HiveMetastoreCatalog.scala:637)
        at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:83)
        at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:80)
        at scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:111)
        at scala.collection.immutable.List.foldLeft(List.scala:84)
        at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:80)
        at org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:72)
        at scala.collection.immutable.List.foreach(List.scala:318)
        at org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:72)
        at org.apache.spark.sql.execution.QueryExecution.analyzed$lzycompute(QueryExecution.scala:36)
        at org.apache.spark.sql.execution.QueryExecution.analyzed(QueryExecution.scala:36)
        at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:34)
        at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:133)
        at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
        at org.apache.spark.sql.SQLContext.sql(SQLContext.scala:817)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:26)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:31)
        at $iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:33)
        at $iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)
        at $iwC$$iwC$$iwC$$iwC.<init>(<console>:37)
        at $iwC$$iwC$$iwC.<init>(<console>:39)
        at $iwC$$iwC.<init>(<console>:41)
        at $iwC.<init>(<console>:43)
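
    The convertToOrcRelation frame shows where this fails: while analyzing the query, Spark converts the metastore ORC table into its native OrcRelation, and OrcFileOperator.readSchema tries to infer the schema from the table's ORC files. An empty table directory leaves nothing to read, so listOrcFiles throws the IllegalArgumentException above. A minimal sketch of a commonly suggested workaround, assuming the Spark 1.6 build shipped with HDP 2.4.2, where this conversion is gated by the spark.sql.hive.convertMetastoreOrc setting; the table name tbl comes from the report above:

        import org.apache.spark.{SparkConf, SparkContext}
        import org.apache.spark.sql.hive.HiveContext

        object EmptyOrcTableWorkaround {
          def main(args: Array[String]): Unit = {
            val sc = new SparkContext(new SparkConf().setAppName("empty-orc-workaround"))
            val sqlContext = new HiveContext(sc)
            // Keep Spark from converting the metastore ORC table into an
            // OrcRelation, the code path (convertToOrcRelation ->
            // OrcFileOperator.readSchema) that throws when the table directory
            // holds no ORC files. With the conversion off, the schema comes
            // from the metastore via the Hive SerDe instead of the files.
            sqlContext.setConf("spark.sql.hive.convertMetastoreOrc", "false")
            // "tbl" is the empty ORC table from the report above.
            sqlContext.sql("SELECT * FROM tbl").show()
          }
        }

    The trade-off is that disabling the conversion also gives up the ORC-specific read optimizations for non-empty tables, so it is best scoped to the affected session rather than set cluster-wide.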