java.lang.Exception: Failed to generate global dictionary files

GitHub | ustczen | 8 months ago

    Source: GitHub comment 855#234583317


    Root Cause Analysis

    java.lang.Exception: Failed to generate global dictionary files
        at org.carbondata.spark.util.GlobalDictionaryUtil$.org$carbondata$spark$util$GlobalDictionaryUtil$$checkStatus(GlobalDictionaryUtil.scala:441)
        at org.carbondata.spark.util.GlobalDictionaryUtil$.generateGlobalDictionary(GlobalDictionaryUtil.scala:485)
        at org.apache.spark.sql.execution.command.LoadTable.run(carbonTableSchema.scala:1144)
        at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult$lzycompute(commands.scala:58)
        at org.apache.spark.sql.execution.ExecutedCommand.sideEffectResult(commands.scala:56)
        at org.apache.spark.sql.execution.ExecutedCommand.doExecute(commands.scala:70)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
        at org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
        at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
        at org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
        at org.apache.spark.sql.execution.QueryExecution.toRdd$lzycompute(QueryExecution.scala:55)
        at org.apache.spark.sql.execution.QueryExecution.toRdd(QueryExecution.scala:55)
        at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:145)
        at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:130)
        at org.carbondata.spark.rdd.CarbonDataFrameRDD.<init>(CarbonDataFrameRDD.scala:23)
        at org.apache.spark.sql.CarbonContext.sql(CarbonContext.scala:109)
        at $line27.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:35)
        at $line27.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:40)
        at $line27.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:42)
        at $line27.$read$$iwC$$iwC$$iwC$$iwC$$iwC.<init>(<console>:44)
        at $line27.$read$$iwC$$iwC$$iwC$$iwC.<init>(<console>:46)
        at $line27.$read$$iwC$$iwC$$iwC.<init>(<console>:48)
        at $line27.$read$$iwC$$iwC.<init>(<console>:50)
        at $line27.$read$$iwC.<init>(<console>:52)
        at $line27.$read.<init>(<console>:54)
        at $line27.$read$.<init>(<console>:58)
        at $line27.$read$.<clinit>(<console>)
        at $line27.$eval$.<init>(<console>:7)
        at $line27.$eval$.<clinit>(<console>)
        at $line27.$eval.$print(<console>)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
        at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
        at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
        at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
        at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
        at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
        at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
        at org.apache.spark.repl.SparkILoop.processLine$1(SparkILoop.scala:657)
        at org.apache.spark.repl.SparkILoop.innerLoop$1(SparkILoop.scala:665)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$loop(SparkILoop.scala:670)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:997)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
        at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
        at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
        at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
        at org.apache.spark.repl.Main$.main(Main.scala:31)
        at org.apache.spark.repl.Main.main(Main.scala)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
        at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
        at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
        at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
        at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)