org.apache.zeppelin.interpreter.InterpreterException: java.lang.reflect.InvocationTargetException

Apache's JIRA Issue Tracker | Alexander Bezzubov | 1 year ago
  1. 0

    In local spark exception happens on {{%sql select * from ... limit 10}} but {{z.show(df)}} for same dataset shows well. Steps to reproduce: {code} %dep z.load("com.databricks:spark-csv_2.10:1.1.0") %sh wget https://s3.amazonaws.com/apache-zeppelin/tutorial/bank/bank.csv %spark val df = sqlContext.read.format("com.databricks.spark.csv").option("header", "true").load("bank.csv") df.registerTempTable("bank") z.show(df) %sql select * from bank limit 10 {code} Exception: {code} INFO [2015-08-01 18:06:38,986] ({pool-2-thread-3} Logging.scala[logInfo]:59) - Created broadcast 2 from textFile at CsvRelation.scala:66 ERROR [2015-08-01 18:06:39,002] ({pool-2-thread-3} Job.java[run]:183) - Job failed org.apache.zeppelin.interpreter.InterpreterException: java.lang.reflect.InvocationTargetException at org.apache.zeppelin.spark.ZeppelinContext.showRDD(ZeppelinContext.java:301) at org.apache.zeppelin.spark.SparkSqlInterpreter.interpret(SparkSqlInterpreter.java:134) at org.apache.zeppelin.interpreter.ClassloaderInterpreter.interpret(ClassloaderInterpreter.java:57) at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:93) at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:276) at org.apache.zeppelin.scheduler.Job.run(Job.java:170) at org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:118) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) at java.util.concurrent.FutureTask.run(FutureTask.java:262) at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:178) at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:292) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:744) Caused 
by: java.lang.reflect.InvocationTargetException at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.apache.zeppelin.spark.ZeppelinContext.showRDD(ZeppelinContext.java:296) ... 13 more Caused by: java.lang.ClassNotFoundException: com.databricks.spark.csv.CsvRelation$$anonfun$buildScan$1$$anonfun$1 at java.net.URLClassLoader$1.run(URLClassLoader.java:366) at java.net.URLClassLoader$1.run(URLClassLoader.java:355) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:354) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at java.lang.Class.forName0(Native Method) at java.lang.Class.forName(Class.java:270) at org.apache.spark.util.InnerClosureFinder$$anon$4.visitMethodInsn(ClosureCleaner.scala:455) at com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.ClassReader.accept(Unknown Source) at com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.ClassReader.accept(Unknown Source) at org.apache.spark.util.ClosureCleaner$.getInnerClosureClasses(ClosureCleaner.scala:101) at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:197) at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:132) at org.apache.spark.SparkContext.clean(SparkContext.scala:1893) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:683) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:682) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108) at 
org.apache.spark.rdd.RDD.withScope(RDD.scala:286) at org.apache.spark.rdd.RDD.mapPartitions(RDD.scala:682) at com.databricks.spark.csv.CsvRelation.buildScan(CsvRelation.scala:83) at org.apache.spark.sql.sources.DataSourceStrategy$.apply(DataSourceStrategy.scala:101) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59) at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54) at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:314) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:943) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:941) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:947) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:947) at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:1269) at org.apache.spark.sql.DataFrame.head(DataFrame.scala:1203) at org.apache.spark.sql.DataFrame.take(DataFrame.scala:1262) ... 18 more {code}

    Apache's JIRA Issue Tracker | 1 year ago | Alexander Bezzubov
    org.apache.zeppelin.interpreter.InterpreterException: java.lang.reflect.InvocationTargetException
  2. 0

    In local spark exception happens on {{%sql select * from ... limit 10}} but {{z.show(df)}} for same dataset shows well. Steps to reproduce: {code} %dep z.load("com.databricks:spark-csv_2.10:1.1.0") %sh wget https://s3.amazonaws.com/apache-zeppelin/tutorial/bank/bank.csv %spark val df = sqlContext.read.format("com.databricks.spark.csv").option("header", "true").load("bank.csv") df.registerTempTable("bank") z.show(df) %sql select * from bank limit 10 {code} Exception: {code} INFO [2015-08-01 18:06:38,986] ({pool-2-thread-3} Logging.scala[logInfo]:59) - Created broadcast 2 from textFile at CsvRelation.scala:66 ERROR [2015-08-01 18:06:39,002] ({pool-2-thread-3} Job.java[run]:183) - Job failed org.apache.zeppelin.interpreter.InterpreterException: java.lang.reflect.InvocationTargetException at org.apache.zeppelin.spark.ZeppelinContext.showRDD(ZeppelinContext.java:301) at org.apache.zeppelin.spark.SparkSqlInterpreter.interpret(SparkSqlInterpreter.java:134) at org.apache.zeppelin.interpreter.ClassloaderInterpreter.interpret(ClassloaderInterpreter.java:57) at org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:93) at org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:276) at org.apache.zeppelin.scheduler.Job.run(Job.java:170) at org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:118) at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) at java.util.concurrent.FutureTask.run(FutureTask.java:262) at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:178) at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:292) at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) at java.lang.Thread.run(Thread.java:744) Caused 
by: java.lang.reflect.InvocationTargetException at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) at java.lang.reflect.Method.invoke(Method.java:606) at org.apache.zeppelin.spark.ZeppelinContext.showRDD(ZeppelinContext.java:296) ... 13 more Caused by: java.lang.ClassNotFoundException: com.databricks.spark.csv.CsvRelation$$anonfun$buildScan$1$$anonfun$1 at java.net.URLClassLoader$1.run(URLClassLoader.java:366) at java.net.URLClassLoader$1.run(URLClassLoader.java:355) at java.security.AccessController.doPrivileged(Native Method) at java.net.URLClassLoader.findClass(URLClassLoader.java:354) at java.lang.ClassLoader.loadClass(ClassLoader.java:425) at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308) at java.lang.ClassLoader.loadClass(ClassLoader.java:358) at java.lang.Class.forName0(Native Method) at java.lang.Class.forName(Class.java:270) at org.apache.spark.util.InnerClosureFinder$$anon$4.visitMethodInsn(ClosureCleaner.scala:455) at com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.ClassReader.accept(Unknown Source) at com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.ClassReader.accept(Unknown Source) at org.apache.spark.util.ClosureCleaner$.getInnerClosureClasses(ClosureCleaner.scala:101) at org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:197) at org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:132) at org.apache.spark.SparkContext.clean(SparkContext.scala:1893) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:683) at org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:682) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147) at org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108) at 
org.apache.spark.rdd.RDD.withScope(RDD.scala:286) at org.apache.spark.rdd.RDD.mapPartitions(RDD.scala:682) at com.databricks.spark.csv.CsvRelation.buildScan(CsvRelation.scala:83) at org.apache.spark.sql.sources.DataSourceStrategy$.apply(DataSourceStrategy.scala:101) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59) at org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54) at org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:314) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58) at scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371) at org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:943) at org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:941) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:947) at org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:947) at org.apache.spark.sql.DataFrame.collect(DataFrame.scala:1269) at org.apache.spark.sql.DataFrame.head(DataFrame.scala:1203) at org.apache.spark.sql.DataFrame.take(DataFrame.scala:1262) ... 18 more {code}

    Apache's JIRA Issue Tracker | 1 year ago | Alexander Bezzubov
    org.apache.zeppelin.interpreter.InterpreterException: java.lang.reflect.InvocationTargetException
  3. 0

    Apache Spark Developers List - A confusing ClassNotFoundException error

    nabble.com | 1 year ago
    java.lang.ClassNotFoundException: cn.zhaishidan.trans.service.SparkHiveService$$anonfun$mapHandle$1$1$$anonfun$apply$1
  4. Speed up your debug routine!

    Automated exception search integrated into your IDE

  5. 0

    Apache Spark User List - java.lang.ClassNotFoundException: TestT$$anonfun$buildLabeledPoints$3$$anonfun$apply$1

    nabble.com | 1 year ago
    java.lang.ClassNotFoundException: TestT$$anonfun$buildLabeledPoints$3$$anonfun$apply$1
  6. 0

    [SPARK-8368] ClassNotFoundException in closure for map - ASF JIRA

    apache.org | 1 year ago
    java.lang.ClassNotFoundException: com.yhd.ycache.magic.Model$$anonfun$9$$anonfun$10

  1. gehel 1 time, last 6 days ago
  2. davidvanlaatum 3 times, last 6 days ago
  3. danleyb2Interintel 1 time, last 1 week ago
  4. malhobayyeb 3 times, last 2 weeks ago
  5. Kialandei 1 time, last 2 weeks ago
49 more registered users
46 unregistered visitors
Not finding the right solution?
Take a tour to get the most out of Samebug.

Tired of useless tips?

Automated exception search integrated into your IDE

Root Cause Analysis

  1. java.lang.ClassNotFoundException

    com.databricks.spark.csv.CsvRelation$$anonfun$buildScan$1$$anonfun$1

    at java.net.URLClassLoader$1.run()
  2. Java RT
    Class.forName
    1. java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    2. java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    3. java.security.AccessController.doPrivileged(Native Method)
    4. java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    5. java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    6. sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    7. java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    8. java.lang.Class.forName0(Native Method)
    9. java.lang.Class.forName(Class.java:270)
    9 frames
  3. Spark
    InnerClosureFinder$$anon$4.visitMethodInsn
    1. org.apache.spark.util.InnerClosureFinder$$anon$4.visitMethodInsn(ClosureCleaner.scala:455)
    1 frame
  4. Kryo
    ClassReader.accept
    1. com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.ClassReader.accept(Unknown Source)
    2. com.esotericsoftware.reflectasm.shaded.org.objectweb.asm.ClassReader.accept(Unknown Source)
    2 frames
  5. Spark
    RDD.mapPartitions
    1. org.apache.spark.util.ClosureCleaner$.getInnerClosureClasses(ClosureCleaner.scala:101)
    2. org.apache.spark.util.ClosureCleaner$.org$apache$spark$util$ClosureCleaner$$clean(ClosureCleaner.scala:197)
    3. org.apache.spark.util.ClosureCleaner$.clean(ClosureCleaner.scala:132)
    4. org.apache.spark.SparkContext.clean(SparkContext.scala:1893)
    5. org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:683)
    6. org.apache.spark.rdd.RDD$$anonfun$mapPartitions$1.apply(RDD.scala:682)
    7. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:147)
    8. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:108)
    9. org.apache.spark.rdd.RDD.withScope(RDD.scala:286)
    10. org.apache.spark.rdd.RDD.mapPartitions(RDD.scala:682)
    10 frames
  6. com.databricks.spark
    CsvRelation.buildScan
    1. com.databricks.spark.csv.CsvRelation.buildScan(CsvRelation.scala:83)
    1 frame
  7. Spark Project SQL
    DataSourceStrategy$.apply
    1. org.apache.spark.sql.sources.DataSourceStrategy$.apply(DataSourceStrategy.scala:101)
    1 frame
  8. Spark Project Catalyst
    QueryPlanner$$anonfun$1.apply
    1. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    2. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    2 frames
  9. Scala
    Iterator$$anon$13.hasNext
    1. scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
    1 frame
  10. Spark Project Catalyst
    QueryPlanner.planLater
    1. org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
    2. org.apache.spark.sql.catalyst.planning.QueryPlanner.planLater(QueryPlanner.scala:54)
    2 frames
  11. Spark Project SQL
    SparkStrategies$BasicOperators$.apply
    1. org.apache.spark.sql.execution.SparkStrategies$BasicOperators$.apply(SparkStrategies.scala:314)
    1 frame
  12. Spark Project Catalyst
    QueryPlanner$$anonfun$1.apply
    1. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    2. org.apache.spark.sql.catalyst.planning.QueryPlanner$$anonfun$1.apply(QueryPlanner.scala:58)
    2 frames
  13. Scala
    Iterator$$anon$13.hasNext
    1. scala.collection.Iterator$$anon$13.hasNext(Iterator.scala:371)
    1 frame
  14. Spark Project Catalyst
    QueryPlanner.plan
    1. org.apache.spark.sql.catalyst.planning.QueryPlanner.plan(QueryPlanner.scala:59)
    1 frame
  15. Spark Project SQL
    DataFrame.take
    1. org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan$lzycompute(SQLContext.scala:943)
    2. org.apache.spark.sql.SQLContext$QueryExecution.sparkPlan(SQLContext.scala:941)
    3. org.apache.spark.sql.SQLContext$QueryExecution.executedPlan$lzycompute(SQLContext.scala:947)
    4. org.apache.spark.sql.SQLContext$QueryExecution.executedPlan(SQLContext.scala:947)
    5. org.apache.spark.sql.DataFrame.collect(DataFrame.scala:1269)
    6. org.apache.spark.sql.DataFrame.head(DataFrame.scala:1203)
    7. org.apache.spark.sql.DataFrame.take(DataFrame.scala:1262)
    7 frames
  16. Java RT
    Method.invoke
    1. sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    2. sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    3. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    4. java.lang.reflect.Method.invoke(Method.java:606)
    4 frames
  17. org.apache.zeppelin
    FIFOScheduler$1.run
    1. org.apache.zeppelin.spark.ZeppelinContext.showRDD(ZeppelinContext.java:296)
    2. org.apache.zeppelin.spark.SparkSqlInterpreter.interpret(SparkSqlInterpreter.java:134)
    3. org.apache.zeppelin.interpreter.ClassloaderInterpreter.interpret(ClassloaderInterpreter.java:57)
    4. org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:93)
    5. org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:276)
    6. org.apache.zeppelin.scheduler.Job.run(Job.java:170)
    7. org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:118)
    7 frames
  18. Java RT
    Thread.run
    1. java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
    2. java.util.concurrent.FutureTask.run(FutureTask.java:262)
    3. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:178)
    4. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:292)
    5. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    6. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    7. java.lang.Thread.run(Thread.java:744)
    7 frames