java.lang.NullPointerException

Stack Overflow | Idan Fischman | 6 months ago

Root Cause Analysis

The root cause is a java.lang.NullPointerException with no message, thrown from org.apache.spark.sql.hive.client.ClientWrapper.conf(ClientWrapper.scala:205).

Full stack trace:

java.lang.NullPointerException
    at org.apache.spark.sql.hive.client.ClientWrapper.conf(ClientWrapper.scala:205)
    at org.apache.spark.sql.hive.HiveContext.hiveconf$lzycompute(HiveContext.scala:554)
    at org.apache.spark.sql.hive.HiveContext.hiveconf(HiveContext.scala:553)
    at org.apache.spark.sql.hive.HiveContext$$anonfun$configure$1.apply(HiveContext.scala:540)
    at org.apache.spark.sql.hive.HiveContext$$anonfun$configure$1.apply(HiveContext.scala:539)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:244)
    at scala.collection.immutable.List.foreach(List.scala:318)
    at scala.collection.TraversableLike$class.map(TraversableLike.scala:244)
    at scala.collection.AbstractTraversable.map(Traversable.scala:105)
    at org.apache.spark.sql.hive.HiveContext.configure(HiveContext.scala:539)
    at org.apache.spark.sql.hive.HiveContext.metadataHive$lzycompute(HiveContext.scala:252)
    at org.apache.spark.sql.hive.HiveContext.metadataHive(HiveContext.scala:239)
    at org.apache.spark.sql.hive.HiveContext$$anon$2.<init>(HiveContext.scala:459)
    at org.apache.spark.sql.hive.HiveContext.catalog$lzycompute(HiveContext.scala:459)
    at org.apache.spark.sql.hive.HiveContext.catalog(HiveContext.scala:458)
    at org.apache.spark.sql.hive.HiveContext$$anon$3.<init>(HiveContext.scala:475)
    at org.apache.spark.sql.hive.HiveContext.analyzer$lzycompute(HiveContext.scala:475)
    at org.apache.spark.sql.hive.HiveContext.analyzer(HiveContext.scala:474)
    at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:34)
    at org.apache.spark.sql.DataFrame.<init>(DataFrame.scala:133)
    at org.apache.spark.sql.DataFrame$.apply(DataFrame.scala:52)
    at org.apache.spark.sql.SQLContext.createDataFrame(SQLContext.scala:417)
    at org.apache.spark.sql.SQLImplicits.rddToDataFrameHolder(SQLImplicits.scala:155)
    at $line46.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:58)
    at $line46.$read$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$iwC$$anonfun$1.apply(<console>:48)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
    at org.apache.spark.streaming.dstream.DStream$$anonfun$foreachRDD$1$$anonfun$apply$mcV$sp$3.apply(DStream.scala:661)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply$mcV$sp(ForEachDStream.scala:50)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1$$anonfun$apply$mcV$sp$1.apply(ForEachDStream.scala:50)
    at org.apache.spark.streaming.dstream.DStream.createRDDWithLocalProperties(DStream.scala:426)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply$mcV$sp(ForEachDStream.scala:49)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
    at org.apache.spark.streaming.dstream.ForEachDStream$$anonfun$1.apply(ForEachDStream.scala:49)
    at scala.util.Try$.apply(Try.scala:161)
    at org.apache.spark.streaming.scheduler.Job.run(Job.scala:39)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply$mcV$sp(JobScheduler.scala:224)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler$$anonfun$run$1.apply(JobScheduler.scala:224)
    at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
    at org.apache.spark.streaming.scheduler.JobScheduler$JobHandler.run(JobScheduler.scala:223)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
    at java.lang.Thread.run(Thread.java:745)
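
Reading the trace bottom up: a Spark Streaming batch (JobScheduler, ForEachDStream) runs user code that was defined in a spark-shell session (the $line46 / <console> frames), and that code turns an RDD into a DataFrame through the toDF implicit (SQLImplicits.rddToDataFrameHolder). Building the first DataFrame forces lazy initialization of the HiveContext internals (analyzer, catalog, metadataHive, hiveconf), and ClientWrapper.conf(ClientWrapper.scala:205) then dereferences Hive client state that is null. A known way to end up here on Spark 1.5/1.6 is capturing a driver-side HiveContext inside foreachRDD, whose transient Hive state can be missing, for example after the StreamingContext is recovered from a checkpoint. The poster's shell code is not shown in the trace, so the following is only a minimal sketch of that failing pattern; the Record case class, the socket source, host, and port are assumptions:

    // Run in spark-shell (Spark 1.6), which already provides sc.
    import org.apache.spark.streaming.{Seconds, StreamingContext}
    import org.apache.spark.sql.hive.HiveContext

    case class Record(id: Long, value: String)  // hypothetical record type

    val ssc = new StreamingContext(sc, Seconds(10))

    // A HiveContext created once, up front, on the driver...
    val hiveContext = new HiveContext(sc)
    import hiveContext.implicits._

    // Hypothetical input; any DStream behaves the same way here.
    val lines = ssc.socketTextStream("localhost", 9999)

    lines.map(l => Record(l.hashCode.toLong, l)).foreachRDD { rdd =>
      // ...and captured by this closure. If the context's transient Hive
      // client state is missing (for example after checkpoint recovery),
      // toDF() forces HiveContext.hiveconf, which reaches
      // ClientWrapper.conf on null state and throws the NPE above.
      rdd.toDF().show()
    }

    ssc.start()
    ssc.awaitTermination()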
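
A commonly suggested remedy, and the pattern the Spark Streaming programming guide shows for using SQL inside streaming jobs, is to look the context up lazily inside foreachRDD with SQLContext.getOrCreate instead of closing over an externally created HiveContext, so that a valid context exists even after recovery. Whether this resolves the poster's exact case is an assumption; continuing the sketch above:

    import org.apache.spark.sql.SQLContext

    lines.map(l => Record(l.hashCode.toLong, l)).foreachRDD { rdd =>
      // Fetch (or lazily create) a singleton SQLContext from the RDD's own
      // SparkContext rather than capturing the driver-side HiveContext.
      val sqlContext = SQLContext.getOrCreate(rdd.sparkContext)
      import sqlContext.implicits._
      rdd.toDF().show()
    }

If Hive-specific features (Hive tables, HiveQL, SerDes) are really needed inside the closure, the same idea applies with a small helper object that constructs a HiveContext once per JVM and hands it out on demand.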