org.apache.spark.sql.catalyst.analysis.UnresolvedException: Invalid call to dataType on unresolved object, tree: unresolvedalias(cast(string_field#219 as string))

spark-dev | Reynold Xin | 1 year ago
Your exception is missing from the Samebug knowledge base.
Here are the best solutions we found on the Internet.
  1. Re: [1.6] Coalesce/binary operator on casted named column

    spark-dev | 1 year ago | Reynold Xin
    org.apache.spark.sql.catalyst.analysis.UnresolvedException: Invalid call to dataType on unresolved object, tree: unresolvedalias(cast(string_field#219 as string))
    (A reproduction sketch for this pattern follows the list below.)
  2. eco-release-metadata/RELEASENOTES.1.2.0.md at master · aw-was-here/eco-release-metadata · GitHub

    github.com | 5 months ago
    org.apache.spark.sql.catalyst.analysis.UnresolvedException: Invalid call to datatype. Can not resolve due to differing types DoubleType, IntegerType on unresolved object, tree: (CAST(key#3, DoubleType) / 2)
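
    The first result is the spark-dev thread this page was captured from. Going only by the thread title and the DataFrame.where frame at the bottom of the trace below, a minimal reproduction might look like the Scala sketch that follows; the sample data, the string_field column name, and the exact coalesce/=== combination are assumptions, not code quoted from the thread.

        import org.apache.spark.{SparkConf, SparkContext}
        import org.apache.spark.sql.SQLContext
        import org.apache.spark.sql.functions.{coalesce, lit}

        object CastedColumnRepro {
          def main(args: Array[String]): Unit = {
            val sc = new SparkContext(new SparkConf().setAppName("repro").setMaster("local[*]"))
            val sqlContext = new SQLContext(sc)

            // Hypothetical two-column DataFrame; the column name mirrors the
            // string_field#219 attribute in the exception message.
            val df = sqlContext.createDataFrame(Seq(("a", 1), ("b", 2), (null, 3)))
              .toDF("string_field", "num")

            // Cast the named column, then use it inside coalesce and a binary
            // comparison: the "coalesce/binary operator on casted named column"
            // pattern from the thread title. On Spark 1.6.0 this kind of predicate
            // reportedly fails analysis with the UnresolvedException shown above.
            val casted = df("string_field").cast("string")
            df.where(coalesce(casted, lit("")) === "").show()
          }
        }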

    Root Cause Analysis

    1. org.apache.spark.sql.catalyst.analysis.UnresolvedException

      Invalid call to dataType on unresolved object, tree: unresolvedalias(cast(string_field#219 as string))

      at org.apache.spark.sql.catalyst.analysis.UnresolvedAlias.dataType()
    2. Spark Project Catalyst
      TreeNode$$anonfun$4.apply
      1. org.apache.spark.sql.catalyst.analysis.UnresolvedAlias.dataType(unresolved.scala:295)
      2. org.apache.spark.sql.catalyst.expressions.BinaryOperator.checkInputDataTypes(Expression.scala:467)
      3. org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:62)
      4. org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1$$anonfun$apply$2.applyOrElse(CheckAnalysis.scala:57)
      5. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:319)
      6. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:319)
      7. org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:53)
      8. org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:318)
      9. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:316)
      10. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:316)
      11. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:265)
      11 frames
    3. Scala
      AbstractIterator.toArray
      1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      2. scala.collection.Iterator$class.foreach(Iterator.scala:727)
      3. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
      4. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
      5. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
      6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
      7. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
      8. scala.collection.AbstractIterator.to(Iterator.scala:1157)
      9. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
      10. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
      11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
      12. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
      12 frames
    4. Spark Project Catalyst
      TreeNode$$anonfun$4.apply
      1. org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:305)
      2. org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:316)
      3. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:316)
      4. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:316)
      5. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:265)
      5 frames
    5. Scala
      AbstractIterator.toArray
      1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      2. scala.collection.Iterator$class.foreach(Iterator.scala:727)
      3. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
      4. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
      5. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
      6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
      7. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
      8. scala.collection.AbstractIterator.to(Iterator.scala:1157)
      9. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
      10. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
      11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
      12. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
      12 frames
    6. Spark Project Catalyst
      QueryPlan$$anonfun$2.apply
      1. org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:305)
      2. org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:316)
      3. org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionUp$1(QueryPlan.scala:107)
      4. org.apache.spark.sql.catalyst.plans.QueryPlan.org$apache$spark$sql$catalyst$plans$QueryPlan$$recursiveTransform$2(QueryPlan.scala:117)
      5. org.apache.spark.sql.catalyst.plans.QueryPlan$$anonfun$2.apply(QueryPlan.scala:125)
      5 frames
    7. Scala
      AbstractIterator.toArray
      1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
      2. scala.collection.Iterator$class.foreach(Iterator.scala:727)
      3. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
      4. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
      5. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
      6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
      7. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
      8. scala.collection.AbstractIterator.to(Iterator.scala:1157)
      9. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
      10. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
      11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
      12. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
      12 frames
    8. Spark Project Catalyst
      Analyzer.checkAnalysis
      1. org.apache.spark.sql.catalyst.plans.QueryPlan.transformExpressionsUp(QueryPlan.scala:125)
      2. org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:57)
      3. org.apache.spark.sql.catalyst.analysis.CheckAnalysis$$anonfun$checkAnalysis$1.apply(CheckAnalysis.scala:50)
      4. org.apache.spark.sql.catalyst.trees.TreeNode.foreachUp(TreeNode.scala:105)
      5. org.apache.spark.sql.catalyst.analysis.CheckAnalysis$class.checkAnalysis(CheckAnalysis.scala:50)
      6. org.apache.spark.sql.catalyst.analysis.Analyzer.checkAnalysis(Analyzer.scala:44)
      6 frames
    9. Spark Project SQL
      DataFrame.where
      1. org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:34)
      2. org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:133)
      3. org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$withPlan(DataFrame.scala:2165)
      4. org.apache.spark.sql.DataFrame.filter(DataFrame.scala:799)
      5. org.apache.spark.sql.DataFrame.where(DataFrame.scala:825)
      5 frames
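
    The trace bottoms out in DataFrame.where via DataFrame.filter, so the exception is thrown while the analyzer type-checks the filter predicate, before any Spark job runs. Continuing the sketch above, one possible way around it (an assumption on our part, not a fix taken from the thread) is to give the casted column a resolved name with select(...).as(...) and filter on that column instead; checking whether a newer 1.6.x or 2.x release still shows the behavior is also worthwhile.

        // Workaround sketch (assumption, not from the thread): alias the cast in a
        // select() first so the analyzer resolves it, then apply coalesce/=== to
        // the resolved column inside where().
        val withCast = df.select(df("string_field").cast("string").as("string_field_str"))
        withCast.where(coalesce(withCast("string_field_str"), lit("")) === "").show()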