java.lang.RuntimeException: Rollups not possible, because Vec was deleted: $04ff09000000ffffffffff7196961d66889eac470028e14b8eaa$


Solutions on the web (4)

via JIRA by Nick Karpov, 8 months ago:
Rollups not possible, because Vec was deleted: $04ff09000000ffffffffff7196961d66889eac470028e14b8eaa$

via Stack Overflow:
Rollups not possible, because Vec was deleted: $04ff12000000ffffffff29c907ce45483f9f244f54c4d0$%;K

via JIRA by Nidhi Mehta, 10 months ago:
Rollups not possible, because Vec was deleted: $04ff57010000ffffffff$nfs://Users/nidhimehta/Desktop/auto_sklearn_csv/car.arff.txt

via web:
Rollups not possible, because Vec was deleted: $04ffb6520100ffffffff$nfs://ncvprod/apps/toolbox/7/15009/59/dayrhencvp001.enterprisenet.org/temp2StageDump-c0e7ffeb-ee32-468a-adae-34e25cccee4e

Stack trace

java.lang.RuntimeException: Rollups not possible, because Vec was deleted: $04ff09000000ffffffffff7196961d66889eac470028e14b8eaa$
	at water.fvec.RollupStats.get(RollupStats.java:319)
	at water.fvec.RollupStats.get(RollupStats.java:346)
	at water.fvec.Vec.rollupStats(Vec.java:806)
	at water.fvec.Vec.isInt(Vec.java:773)
	at org.apache.spark.h2o.utils.ReflectionUtils$.detectSupportedNumericType(ReflectionUtils.scala:158)
	at org.apache.spark.h2o.utils.ReflectionUtils$.supportedType(ReflectionUtils.scala:148)
	at org.apache.spark.h2o.utils.ReflectionUtils$.dataTypeFor(ReflectionUtils.scala:141)
	at org.apache.spark.h2o.converters.H2ODataFrame$$anonfun$1.apply(H2ODataFrame.scala:51)
	at org.apache.spark.h2o.converters.H2ODataFrame$$anonfun$1.apply(H2ODataFrame.scala:51)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:234)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
	at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:234)
	at scala.collection.mutable.ArrayOps$ofRef.map(ArrayOps.scala:186)
	at org.apache.spark.h2o.converters.H2ODataFrame.<init>(H2ODataFrame.scala:51)
	at org.apache.spark.sql.H2OFrameRelation.buildScan(H2OSQLContextUtils.scala:59)
	at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$9.apply(DataSourceStrategy.scala:267)
	at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$9.apply(DataSourceStrategy.scala:267)
	at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:303)
	at org.apache.spark.sql.execution.datasources.DataSourceStrategy$$anonfun$pruneFilterProject$1.apply(DataSourceStrategy.scala:302)
	at org.apache.spark.sql.execution.datasources.DataSourceStrategy$.pruneFilterProjectRaw(DataSourceStrategy.scala:379)
	at org.apache.spark.sql.execution.datasources.DataSourceStrategy$.pruneFilterProject(DataSourceStrategy.scala:298)
	at org.apache.spark.sql.execution.datasources.DataSourceStrategy$.apply(DataSourceStrategy.scala:263)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:60)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:60)
	at scala.collection.Iterator$$anon$12.nextCur(Iterator.scala:434)
	at scala.collection.Iterator$$anon$12.hasNext(Iterator.scala:440)
	at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:61)
	at org.apache.spark.sql.execution.SparkPlanner.plan(SparkPlanner.scala:47)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:51)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:48)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:307)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:328)
	at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:326)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:305)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:305)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$5.apply(TreeNode.scala:328)
	at org.apache.spark.sql.catalyst.trees.TreeNode.mapProductIterator(TreeNode.scala:186)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformChildren(TreeNode.scala:326)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:305)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
	at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:51)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1$$anonfun$apply$1.applyOrElse(SparkPlanner.scala:48)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
	at org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$transformUp$1.apply(TreeNode.scala:308)
	at org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:69)
	at org.apache.spark.sql.catalyst.trees.TreeNode.transformUp(TreeNode.scala:307)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
	at org.apache.spark.sql.execution.SparkPlanner$$anonfun$plan$1.apply(SparkPlanner.scala:48)
	at scala.collection.Iterator$$anon$11.next(Iterator.scala:409)
	at org.apache.spark.sql.execution.QueryExecution.sparkPlan$lzycompute(QueryExecution.scala:78)
	at org.apache.spark.sql.execution.QueryExecution.sparkPlan(QueryExecution.scala:76)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan$lzycompute(QueryExecution.scala:83)
	at org.apache.spark.sql.execution.QueryExecution.executedPlan(QueryExecution.scala:83)
	at org.apache.spark.sql.Dataset.withCallback(Dataset.scala:2555)
	at org.apache.spark.sql.Dataset.count(Dataset.scala:2226)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:280)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:132)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:214)
	at java.lang.Thread.run(Thread.java:745)
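
Tip

Reading the frames above: the exception fires in water.fvec.RollupStats.get while Sparkling Water converts an H2OFrame into a Spark DataFrame (H2ODataFrame), reached from a PySpark Dataset.count() via py4j. H2O throws this RuntimeException when the Vec backing a column has been removed from its key-value store, so the usual trigger is deleting the H2OFrame (for example with h2o.remove() / h2o.remove_all(), or by deleting the frame in Flow) while a lazily evaluated DataFrame still references it. Below is a minimal sketch of the pattern and a possible workaround, assuming a pysparkling session; the variable names and the input file "cars.csv" are placeholders, and older Sparkling Water releases pass the SparkSession or SparkContext to getOrCreate() explicitly.

import h2o
from pysparkling import H2OContext

hc = H2OContext.getOrCreate()       # assumes a running SparkSession
hf = h2o.import_file("cars.csv")    # placeholder input file
df = hc.as_spark_frame(hf)          # Spark DataFrame lazily backed by hf's Vecs

# Failure pattern: removing the backing frame before the DataFrame is
# evaluated. The next action re-reads the Vecs and RollupStats throws
# "Rollups not possible, because Vec was deleted".
#
#   h2o.remove(hf)
#   df.count()      # -> java.lang.RuntimeException

# Workaround sketch: materialize and cache the DataFrame while the Vecs
# still exist, and only then release the H2O frame.
df.persist()
df.count()          # forces the H2OFrame -> DataFrame conversion
h2o.remove(hf)      # see the caveat below

Caveat: persist() does not make a hard copy. If Spark evicts the cached partitions, recomputing them would touch the deleted Vec and raise the same error, so the safest fix is simply to keep the H2OFrame alive, and to avoid h2o.remove_all() or deleting frames in Flow, for as long as any derived DataFrame is still in use.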
