java.lang.ClassNotFoundException: ibp.big.hive.serde.CSVSerde

zeppelin-users | COUERON Damien (i-BP - MICROPOLE) | 1 year ago
  1. Custom Serde loading with %spark but not with %sql

    zeppelin-users | 1 year ago | COUERON Damien (i-BP - MICROPOLE)
    java.lang.ClassNotFoundException: ibp.big.hive.serde.CSVSerde
    (A sketch of the usual fix follows this list.)
  2. Samebug tip: this might be an issue with the file location in the spark-submit command. Try it with "spark-submit --master spark://master:7077 hello_world_from_pyspark.py {file location}".
  3. Re: Custom Serde loading with %spark but not with %sql

    zeppelin-users | 1 year ago | Mina Lee
    java.lang.ClassNotFoundException: ibp.big.hive.serde.CSVSerde
  4. ClassLoader and Class.forName()

    Stack Overflow | 4 years ago | Aerospace
    java.lang.ClassNotFoundException: help.Simple
  5. GitHub comment 85#147288996

    GitHub | 1 year ago | akara
    java.lang.ClassNotFoundException: org.squbs.pipeline.TestResolver3
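The pattern in the first and third threads (the SerDe loads with %spark but not with %sql) typically means the jar containing ibp.big.hive.serde.CSVSerde was made visible to the notebook's Scala session but not to the classloader the SQL interpreter uses to resolve the Hive table. A minimal sketch of one common fix, assuming a hypothetical jar path /path/to/csv-serde.jar, is to load the jar in a %dep paragraph before the Spark interpreter starts:

    %dep
    // Hypothetical path; point this at the jar that contains
    // ibp.big.hive.serde.CSVSerde.
    z.reset()
    z.load("/path/to/csv-serde.jar")

An equivalent route is passing the jar via --jars in SPARK_SUBMIT_OPTIONS in conf/zeppelin-env.sh, so every interpreter in the Spark group (including %sql) sees it at startup.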

Root Cause Analysis

  1. java.lang.ClassNotFoundException: ibp.big.hive.serde.CSVSerde

    at java.net.URLClassLoader.findClass()
  2. Java RT
    Class.forName
    1. java.net.URLClassLoader.findClass(URLClassLoader.java:381)
    2. java.lang.ClassLoader.loadClass(ClassLoader.java:424)
    3. sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:331)
    4. java.lang.ClassLoader.loadClass(ClassLoader.java:357)
    5. java.lang.Class.forName0(Native Method)
    6. java.lang.Class.forName(Class.java:348)
    6 frames
  3. Spark Project Hive
    HiveContext$$anon$2.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$super$lookupRelation
    1. org.apache.spark.sql.hive.MetastoreRelation.<init>(HiveMetastoreCatalog.scala:701)
    2. org.apache.spark.sql.hive.HiveMetastoreCatalog.lookupRelation(HiveMetastoreCatalog.scala:248)
    3. org.apache.spark.sql.hive.HiveContext$$anon$2.org$apache$spark$sql$catalyst$analysis$OverrideCatalog$$super$lookupRelation(HiveContext.scala:373)
    3 frames
  4. Spark Project Catalyst
    OverrideCatalog$$anonfun$lookupRelation$3.apply
    1. org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:165)
    2. org.apache.spark.sql.catalyst.analysis.OverrideCatalog$$anonfun$lookupRelation$3.apply(Catalog.scala:165)
    2 frames
  5. Scala
    Option.getOrElse
    1. scala.Option.getOrElse(Option.scala:120)
    1 frame
  6. Spark Project Catalyst
    OverrideCatalog$class.lookupRelation
    1. org.apache.spark.sql.catalyst.analysis.OverrideCatalog$class.lookupRelation(Catalog.scala:165)
    1 frame
  7. Spark Project Hive
    HiveContext$$anon$2.lookupRelation
    1. org.apache.spark.sql.hive.HiveContext$$anon$2.lookupRelation(HiveContext.scala:373)
    1 frame
  8. Spark Project Catalyst
    TreeNode$$anonfun$4.apply
    1. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.getTable(Analyzer.scala:222)
    2. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:233)
    3. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$$anonfun$apply$7.applyOrElse(Analyzer.scala:229)
    4. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222)
    5. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$3.apply(TreeNode.scala:222)
    6. org.apache.spark.sql.catalyst.trees.CurrentOrigin$.withOrigin(TreeNode.scala:51)
    7. org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:221)
    8. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:242)
    8 frames
  9. Scala
    AbstractIterator.toArray
    1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
    2. scala.collection.Iterator$class.foreach(Iterator.scala:727)
    3. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    4. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
    5. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
    6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
    7. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
    8. scala.collection.AbstractIterator.to(Iterator.scala:1157)
    9. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
    10. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
    11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
    12. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
    12 frames
  10. Spark Project Catalyst
    TreeNode$$anonfun$4.apply
    1. org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:272)
    2. org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:227)
    3. org.apache.spark.sql.catalyst.trees.TreeNode$$anonfun$4.apply(TreeNode.scala:242)
    3 frames
  11. Scala
    AbstractIterator.toArray
    1. scala.collection.Iterator$$anon$11.next(Iterator.scala:328)
    2. scala.collection.Iterator$class.foreach(Iterator.scala:727)
    3. scala.collection.AbstractIterator.foreach(Iterator.scala:1157)
    4. scala.collection.generic.Growable$class.$plus$plus$eq(Growable.scala:48)
    5. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:103)
    6. scala.collection.mutable.ArrayBuffer.$plus$plus$eq(ArrayBuffer.scala:47)
    7. scala.collection.TraversableOnce$class.to(TraversableOnce.scala:273)
    8. scala.collection.AbstractIterator.to(Iterator.scala:1157)
    9. scala.collection.TraversableOnce$class.toBuffer(TraversableOnce.scala:265)
    10. scala.collection.AbstractIterator.toBuffer(Iterator.scala:1157)
    11. scala.collection.TraversableOnce$class.toArray(TraversableOnce.scala:252)
    12. scala.collection.AbstractIterator.toArray(Iterator.scala:1157)
    12 frames
  12. Spark Project Catalyst
    RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply
    1. org.apache.spark.sql.catalyst.trees.TreeNode.transformChildrenDown(TreeNode.scala:272)
    2. org.apache.spark.sql.catalyst.trees.TreeNode.transformDown(TreeNode.scala:227)
    3. org.apache.spark.sql.catalyst.trees.TreeNode.transform(TreeNode.scala:212)
    4. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:229)
    5. org.apache.spark.sql.catalyst.analysis.Analyzer$ResolveRelations$.apply(Analyzer.scala:219)
    6. org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:61)
    7. org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1$$anonfun$apply$1.apply(RuleExecutor.scala:59)
    7 frames
  13. Scala
    List.foldLeft
    1. scala.collection.LinearSeqOptimized$class.foldLeft(LinearSeqOptimized.scala:111)
    2. scala.collection.immutable.List.foldLeft(List.scala:84)
    2 frames
  14. Spark Project Catalyst
    RuleExecutor$$anonfun$execute$1.apply
    1. org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:59)
    2. org.apache.spark.sql.catalyst.rules.RuleExecutor$$anonfun$execute$1.apply(RuleExecutor.scala:51)
    2 frames
  15. Scala
    List.foreach
    1. scala.collection.immutable.List.foreach(List.scala:318)
    1 frame
  16. Spark Project Catalyst
    RuleExecutor.execute
    1. org.apache.spark.sql.catalyst.rules.RuleExecutor.execute(RuleExecutor.scala:51)
    1 frame
  17. Spark Project SQL
    SQLContext.sql
    1. org.apache.spark.sql.SQLContext$QueryExecution.analyzed$lzycompute(SQLContext.scala:933)
    2. org.apache.spark.sql.SQLContext$QueryExecution.analyzed(SQLContext.scala:933)
    3. org.apache.spark.sql.SQLContext$QueryExecution.assertAnalyzed(SQLContext.scala:931)
    4. org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:131)
    5. org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:51)
    6. org.apache.spark.sql.SQLContext.sql(SQLContext.scala:755)
    6 frames
  18. Java RT
    Method.invoke
    1. sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    2. sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    3. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    4. java.lang.reflect.Method.invoke(Method.java:497)
    4 frames
  19. org.apache.zeppelin
    FIFOScheduler$1.run
    1. org.apache.zeppelin.spark.SparkSqlInterpreter.interpret(SparkSqlInterpreter.java:136)
    2. org.apache.zeppelin.interpreter.ClassloaderInterpreter.interpret(ClassloaderInterpreter.java:57)
    3. org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:93)
    4. org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:276)
    5. org.apache.zeppelin.scheduler.Job.run(Job.java:170)
    6. org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:118)
    6 frames
  20. Java RT
    Thread.run
    1. java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
    2. java.util.concurrent.FutureTask.run(FutureTask.java:266)
    3. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
    4. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
    5. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    6. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    7. java.lang.Thread.run(Thread.java:745)
    7 frames
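Read bottom-up, the trace shows the Zeppelin SparkSqlInterpreter invoking SQLContext.sql, the Catalyst analyzer resolving the table through HiveMetastoreCatalog, and the MetastoreRelation constructor triggering Class.forName on the SerDe class via the JVM's application classloader (sun.misc.Launcher$AppClassLoader), which cannot find ibp.big.hive.serde.CSVSerde. A hedged diagnostic sketch, run in a %spark paragraph, probes that classloader the same way the failing frame does (the class name is taken straight from the trace):

    // Probes the interpreter classloader via the same Class.forName
    // lookup that fails in the trace above.
    try {
      Class.forName("ibp.big.hive.serde.CSVSerde")
      println("SerDe is visible to the interpreter classloader")
    } catch {
      case e: ClassNotFoundException =>
        println("SerDe missing from the interpreter classloader: " + e.getMessage)
    }

If this probe succeeds under %spark while the %sql paragraph still fails, the jar was likely added only to the REPL session after the interpreter started; in that case, restart the Spark interpreter after registering the dependency.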