java.lang.RuntimeException: Rollups not possible, because Vec was deleted: $04ff09000000ffffffffff7196961d66889eac470028e14b8eaa$

Solutions on the web

via JIRA by Nick Karpov, 10 months ago
Rollups not possible, because Vec was deleted: $04ff09000000ffffffffff7196961d66889eac470028e14b8eaa$
via JIRA by Nidhi Mehta, 11 months ago
Rollups not possible, because Vec was deleted: $04ff93010000ffffffff$nfs://Users/nidhimehta/Desktop/auto_sklearn_csv/car.arff.txt
via Stack Overflow by user90772, 2 months ago
Rollups not possible, because Vec was deleted: $04ff12000000ffffffff29c907ce45483f9f244f54c4d0$%;K
java.lang.RuntimeException: Rollups not possible, because Vec was deleted: $04ff09000000ffffffffff7196961d66889eac470028e14b8eaa$
at water.fvec.RollupStats.get(RollupStats.java:319)
at water.fvec.RollupStats.get(RollupStats.java:346)
at water.fvec.Vec.rollupStats(Vec.java:806)
at water.fvec.Vec.isInt(Vec.java:773)
at org.apache.spark.h2o.utils.ReflectionUtils$.detectSupportedNumericType(ReflectionUtils.scala:158)
at org.apache.spark.h2o.utils.ReflectionUtils$.supportedType(ReflectionUtils.scala:148)
at org.apache.spark.h2o.utils.ReflectionUtils$.dataTypeFor(ReflectionUtils.scala:141)
at org.apache.spark.h2o.converters.H2ODataFrame$$anonfun$1.apply(H2ODataFrame.scala:51)
at org.apache.spark.h2o.converters.H2ODataFrame$$anonfun$1.apply(H2ODataFrame.scala:51)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
at org.apache.spark.h2o.converters.H2ODataFrame.&lt;init&gt;(H2ODataFrame.scala:51)
at org.apache.spark.sql.H2OFrameRelation.buildScan(H2OSQLContextUtils.scala:59)
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$9.apply(DataSourceStrategy.scala:267)
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$9.apply(DataSourceStrategy.scala:267)
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:303)
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:302)
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$.pruneFilterProjectRaw(DataSourceStrategy.scala:379)
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$.pruneFilterProject(DataSourceStrategy.scala:298)
at org.apache.spark.sql.execution.datasources.DataSourceStrategy$.apply(DataSourceStrategy.scala:263)
at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:60)
at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:434)
at org.apache.spark.sql.execution.SparkPlanner.plan(SparkPlanner.scala:47)
at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:51)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:307)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:328)
at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:326)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:328)
at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186)
at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:326)
at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:51)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69)
at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:78)
at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:76)
at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:83)
at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:83)
at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2555)
at org.apache.spark.sql.Dataset.count(Dataset.scala:2226)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:606)
at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
at py4j.Gateway.invoke(Gateway.java:280)
at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
at py4j.commands.CallCommand.execute(CallCommand.java:79)
at py4j.GatewayConnection.run(GatewayConnection.java:214)
at java.lang.Thread.run(Thread.java:745)
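
Reading the frames bottom-up: a PySpark call comes across the py4j gateway, Dataset.count() triggers query planning, the H2OFrameRelation builds its scan over the H2OFrame, and the column type detection in ReflectionUtils asks each column for its rollup statistics via Vec.rollupStats, which throws because the backing Vec has already been removed from H2O's distributed key/value store. A minimal sketch of the pattern that typically produces this, assuming a Sparkling Water 2.x style pysparkling API (H2OContext.getOrCreate, as_spark_frame) and a hypothetical cars.csv input:

import h2o
from pyspark.sql import SparkSession
from pysparkling import H2OContext

spark = SparkSession.builder.appName("rollups-repro").getOrCreate()
hc = H2OContext.getOrCreate(spark)

hf = h2o.import_file("cars.csv")   # H2OFrame backed by Vecs in H2O's key/value store
df = hc.as_spark_frame(hf)         # lazy Spark DataFrame that still reads from those Vecs

h2o.remove(hf)                     # deletes the backing Vecs while df is still unevaluated
df.count()                         # the first action plans the scan, touches Vec.rollupStats,
                                   # and fails with "Rollups not possible, because Vec was deleted"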

Users with the same issue

You are the first to have seen this exception.

Write tip

Know the solution? Share your knowledge to help other developers debug faster.
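
One workaround sketch, not a verified fix: make sure the data has actually been pulled into Spark before the H2OFrame is deleted (whether explicitly via h2o.remove / h2o.remove_all or indirectly, for example by overwriting the frame). Continuing the hypothetical sketch above, eagerly caching the converted DataFrame is one way to do that, with the caveat that evicted partitions would be recomputed against the now-deleted Vecs:

df = hc.as_spark_frame(hf)
df.persist()       # keep computed partitions in Spark's block manager
df.count()         # force evaluation while the backing Vecs still exist
h2o.remove(hf)     # clean up the H2OFrame only once Spark no longer needs to re-read it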