org.apache.spark.sql.AnalysisException: Unable to resolve impressionid.1 given [impressionid.1];

GitHub | anshbansal | 4 months ago
  1. 0

    Spark 2.0.0 - Dot in column name causes reading csv to fail (see the reproduction sketch after this list)

    GitHub | 4 months ago | anshbansal
    org.apache.spark.sql.AnalysisException: Unable to resolve impressionid.1 given [impressionid.1];
  2. 0

    GitHub comment 9#223693655

    GitHub | 6 months ago | kevinushey
    org.apache.spark.sql.AnalysisException: Unable to resolve Sepal.Length given [Sepal.Length, Sepal.Width, Petal.Length, Petal.Width, Species];
  3. 0

    How to deal with dots (.) in column names in SparkR

    Stack Overflow | 3 months ago | Mohit Bansal
    org.apache.spark.sql.AnalysisException: Unable to resolve Sepal.Length given [Sepal.Length, Sepal.Width, Petal.Length, Petal.Width, Species];
  4. 0

    [SPARK-5817] [SQL] Fix bug of udtf with column names by chenghao-intel · Pull Request #4602 · apache/spark · GitHub

    github.com | 4 months ago
    org.apache.spark.sql.AnalysisException: cannot resolve 'd' given input columns _c0; line 1 pos 7
  5. 0

    GitHub comment 364#264595498

    GitHub | 2 days ago | samurainate
    org.apache.spark.sql.AnalysisException: cannot resolve '`rectype.x`' given input columns: [..., rectype.x, ..., rectype.y, ...]; line 1 pos 0
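
    All of the reports above hit the same pattern: a data source column whose name contains a literal dot. Below is a minimal sketch of the kind of read that triggers the error on Spark 2.0.0; the file path and the header names (impressionid.1, value) are illustrative assumptions, not taken from the original reports.

      import org.apache.spark.sql.SparkSession

      object DotColumnRepro {
        def main(args: Array[String]): Unit = {
          val spark = SparkSession.builder()
            .appName("dot-column-repro")
            .master("local[*]")
            .getOrCreate()

          // Hypothetical CSV whose header line is e.g. "impressionid.1,value"
          val df = spark.read
            .option("header", "true")
            .csv("/tmp/impressions.csv")

          // On Spark 2.0.0 this reportedly fails while the file scan is planned:
          // the dotted field name is split on '.' during resolution, so Spark
          // throws "Unable to resolve impressionid.1 given [impressionid.1];"
          // even though the column is plainly in the schema.
          df.show()

          spark.stop()
        }
      }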

    Root Cause Analysis

    1. org.apache.spark.sql.AnalysisException

      Unable to resolve impressionid.1 given [impressionid.1];

      at org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolve$1$$anonfun$apply$5.apply()
    2. Spark Project Catalyst
      LogicalPlan$$anonfun$resolve$1$$anonfun$apply$5.apply
      1. org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolve$1$$anonfun$apply$5.apply(LogicalPlan.scala:134)
      2. org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolve$1$$anonfun$apply$5.apply(LogicalPlan.scala:134)
      2 frames
    3. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:121)
      1 frame
    4. Spark Project Catalyst
      LogicalPlan$$anonfun$resolve$1.apply
      1. org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolve$1.apply(LogicalPlan.scala:133)
      2. org.apache.spark.sql.catalyst.plans.logical.LogicalPlan$$anonfun$resolve$1.apply(LogicalPlan.scala:129)
      2 frames
    5. Scala
      IterableLike$class.foreach
      1. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      2. scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
      3. scala.collection.Iterator$class.foreach(Iterator.scala:893)
      4. scala.collection.AbstractIterator.foreach(Iterator.scala:1336)
      5. scala.collection.IterableLike$class.foreach(IterableLike.scala:72)
      5 frames
    6. org.apache.spark
      StructType.foreach
      1. org.apache.spark.sql.types.StructType.foreach(StructType.scala:95)
      1 frame
    7. Scala
      TraversableLike$class.map
      1. scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
      1 frame
    8. org.apache.spark
      StructType.map
      1. org.apache.spark.sql.types.StructType.map(StructType.scala:95)
      1 frame
    9. Spark Project Catalyst
      LogicalPlan.resolve
      1. org.apache.spark.sql.catalyst.plans.logical.LogicalPlan.resolve(LogicalPlan.scala:129)
      1 frame
    10. org.apache.spark
      FileSourceStrategy$.apply
      1. org.apache.spark.sql.execution.datasources.FileSourceStrategy$.apply(FileSourceStrategy.scala:87)
      1 frame
    11. Spark Project Catalyst
      QueryPlanner$$anonfun$1.apply
      1. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:60)
      2. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:60)
      2 frames
    12. Scala
      Iterator$$anon$12.hasNext
      1. scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:434)
      2. scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440)
      2 frames
    13. Spark Project Catalyst
      QueryPlanner.plan
      1. org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:61)
      1 frame
    14. Spark Project SQL
      SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse
      1. org.apache.spark.sql.execution.SparkPlanner.plan(SparkPlanner.scala:47)
      2. org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:51)
      3. org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:48)
      3 frames
    15. Spark Project Catalyst
      TreeNode.transformUp
      1. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:301)
      2. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:301)
      3. org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69)
      4. org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:300)
      5. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:298)
      6. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:298)
      7. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:321)
      8. org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:179)
      9. org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:319)
      10. org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:298)
      10 frames
    16. Spark Project SQL
      SparkPlanner$$anonfun$plan$1.apply
      1. org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
      2. org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
      2 frames