java.net.UnknownHostException: namenode1.hdfs.mesos

GitHub | martinstuder | 4 months ago
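
The report boils down to the JVM on the Zeppelin/Spark driver host being unable to resolve the Mesos-DNS name namenode1.hdfs.mesos. Before digging into Hadoop configuration, it is worth confirming resolution with a minimal, Hadoop-free check; the hostname below comes from the trace, everything else is an illustrative sketch (see also the configuration sketch after the Root Cause Analysis):

    import java.net.InetAddress;
    import java.net.UnknownHostException;

    public class ResolveCheck {
        public static void main(String[] args) {
            // Hostname taken from the exception above; pass another as an argument.
            String host = args.length > 0 ? args[0] : "namenode1.hdfs.mesos";
            try {
                InetAddress addr = InetAddress.getByName(host);
                System.out.println(host + " -> " + addr.getHostAddress());
            } catch (UnknownHostException e) {
                // Same failure as in the trace: the resolver reachable from this
                // machine (e.g. Mesos-DNS) does not know the name.
                System.err.println("cannot resolve " + host);
            }
        }
    }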
Similar issues:

  1. Hadoop Ingestion Service fails with Kerberos error

     Google Groups | 2 years ago | Deepak Jain
     java.net.UnknownHostException: root
  2. HBase master fails to construct

     Stack Overflow | 2 years ago | chenab
     op.hbase.master.HMaster
  3. hive read/write hbase

     hive-user | 9 months ago | songj songj
     java.net.UnknownHostException: A
  4. Service 'hdfs' check failed: java.net.UnknownHostException - Hortonworks

     Stack Overflow | 3 weeks ago | User Learning
     Service 'hdfs' check failed: java.lang.IllegalArgumentException: java.net.UnknownHostException: sandbox.hortonworks.com
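
A pattern across these reports: the "host" in the message is whatever authority the Hadoop client extracted from its configuration, so implausible names like root or A usually point at a malformed fs.defaultFS (or an unresolvable logical nameservice) rather than a DNS outage. A sketch of how such a typo surfaces; the URI value here is invented for illustration:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class DefaultFsTypo {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Invented example: a bad value makes "root" look like a hostname.
            conf.set("fs.defaultFS", "hdfs://root");
            // With default settings this typically fails during client setup
            // with an error naming "root" as the unknown host, much like the
            // reports above.
            FileSystem fs = FileSystem.get(conf);
            System.out.println(fs.getUri());
        }
    }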


    Root Cause Analysis

    1. java.net.UnknownHostException

      namenode1.hdfs.mesos

      at org.apache.hadoop.security.SecurityUtil.buildTokenService()
    2. Hadoop
      SecurityUtil.buildTokenService
      1. org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:377)
      1 frame
    3. Apache Hadoop HDFS
      ConfiguredFailoverProxyProvider.getProxy
      1. org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:240)
      2. org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider.getProxy(ConfiguredFailoverProxyProvider.java:124)
      2 frames
    4. Hadoop
      RetryProxy.create
      1. org.apache.hadoop.io.retry.RetryInvocationHandler.<init>(RetryInvocationHandler.java:74)
      2. org.apache.hadoop.io.retry.RetryInvocationHandler.<init>(RetryInvocationHandler.java:65)
      3. org.apache.hadoop.io.retry.RetryProxy.create(RetryProxy.java:58)
      3 frames
    5. Apache Hadoop HDFS
      DistributedFileSystem.initialize
      1. org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:152)
      2. org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:579)
      3. org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:524)
      4. org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:146)
      4 frames
    6. Hadoop
      FileSystem.get
      1. org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2397)
      2. org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:89)
      3. org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2431)
      4. org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2413)
      5. org.apache.hadoop.fs.FileSystem.get(FileSystem.java:368)
      6. org.apache.hadoop.fs.FileSystem.get(FileSystem.java:167)
      6 frames
    7. Hadoop
      FileInputFormat.setInputPaths
      1. org.apache.hadoop.mapred.JobConf.getWorkingDirectory(JobConf.java:653)
      2. org.apache.hadoop.mapred.FileInputFormat.setInputPaths(FileInputFormat.java:427)
      3. org.apache.hadoop.mapred.FileInputFormat.setInputPaths(FileInputFormat.java:400)
      3 frames
    8. Spark
      HadoopRDD$$anonfun$getJobConf$6.apply
      1. org.apache.spark.SparkContext$$anonfun$hadoopFile$1$$anonfun$33.apply(SparkContext.scala:1015)
      2. org.apache.spark.SparkContext$$anonfun$hadoopFile$1$$anonfun$33.apply(SparkContext.scala:1015)
      3. org.apache.spark.rdd.HadoopRDD$$anonfun$getJobConf$6.apply(HadoopRDD.scala:176)
      4. org.apache.spark.rdd.HadoopRDD$$anonfun$getJobConf$6.apply(HadoopRDD.scala:176)
      4 frames
    9. Scala
      Option.map
      1. scala.Option.map(Option.scala:145)
      1 frame
    10. Spark
      RDD$$anonfun$partitions$2.apply
      1. org.apache.spark.rdd.HadoopRDD.getJobConf(HadoopRDD.scala:176)
      2. org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:195)
      3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      4. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
      4 frames
    11. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:120)
      1 frame
    12. Spark
      RDD$$anonfun$partitions$2.apply
      1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
      2. org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)
      3. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)
      4. org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)
      4 frames
    13. Scala
      Option.getOrElse
      1. scala.Option.getOrElse(Option.scala:120)
      1 frame
    14. Spark / Scala
      Frames 12-13 (the org.apache.spark.rdd.RDD.partitions via
      MapPartitionsRDD.getPartitions group, then scala.Option.getOrElse)
      repeat eight more times here as RDD.partitions recurses down the
      MapPartitionsRDD lineage (frames 14-29).
      40 frames
    30. Spark
      ShuffleDependency.<init>
      1. org.apache.spark.rdd.RDD.partitions(RDD.scala:237)
      2. org.apache.spark.ShuffleDependency.<init>(Dependency.scala:91)
      2 frames
    31. Spark Project SQL
      Exchange$$anonfun$doExecute$1.apply
      1. org.apache.spark.sql.execution.Exchange.prepareShuffleDependency(Exchange.scala:220)
      2. org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:254)
      3. org.apache.spark.sql.execution.Exchange$$anonfun$doExecute$1.apply(Exchange.scala:248)
      3 frames
    32. Spark Project Catalyst
      package$.attachTree
      1. org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
      1 frame
    33. Spark Project SQL
      SparkPlan$$anonfun$execute$5.apply
      1. org.apache.spark.sql.execution.Exchange.doExecute(Exchange.scala:247)
      2. org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
      3. org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
      3 frames
    34. Spark
      RDDOperationScope$.withScope
      1. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
      1 frame
    35. Spark Project SQL
      SparkPlan.execute
      1. org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
      1 frame
    36. org.apache.spark
      TungstenAggregate$$anonfun$doExecute$1.apply
      1. org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:86)
      2. org.apache.spark.sql.execution.aggregate.TungstenAggregate$$anonfun$doExecute$1.apply(TungstenAggregate.scala:80)
      2 frames
    37. Spark Project Catalyst
      package$.attachTree
      1. org.apache.spark.sql.catalyst.errors.package$.attachTree(package.scala:48)
      1 frame
    38. org.apache.spark
      TungstenAggregate.doExecute
      1. org.apache.spark.sql.execution.aggregate.TungstenAggregate.doExecute(TungstenAggregate.scala:80)
      1 frame
    39. Spark Project SQL
      SparkPlan$$anonfun$execute$5.apply
      1. org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
      2. org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
      2 frames
    40. Spark
      RDDOperationScope$.withScope
      1. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
      1 frame
    41. Spark Project SQL
      SparkPlan$$anonfun$execute$5.apply
      1. org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
      2. org.apache.spark.sql.execution.ConvertToSafe.doExecute(rowFormatConverters.scala:56)
      3. org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:132)
      4. org.apache.spark.sql.execution.SparkPlan$$anonfun$execute$5.apply(SparkPlan.scala:130)
      4 frames
    42. Spark
      RDDOperationScope$.withScope
      1. org.apache.spark.rdd.RDDOperationScope$.withScope(RDDOperationScope.scala:150)
      1 frame
    43. Spark Project SQL
      DataFrame.take
      1. org.apache.spark.sql.execution.SparkPlan.execute(SparkPlan.scala:130)
      2. org.apache.spark.sql.execution.TakeOrderedAndProject.collectData(basicOperators.scala:213)
      3. org.apache.spark.sql.execution.TakeOrderedAndProject.executeCollect(basicOperators.scala:218)
      4. org.apache.spark.sql.execution.SparkPlan.executeCollectPublic(SparkPlan.scala:174)
      5. org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$execute$1$1.apply(DataFrame.scala:1499)
      6. org.apache.spark.sql.DataFrame$$anonfun$org$apache$spark$sql$DataFrame$$execute$1$1.apply(DataFrame.scala:1499)
      7. org.apache.spark.sql.execution.SQLExecution$.withNewExecutionId(SQLExecution.scala:56)
      8. org.apache.spark.sql.DataFrame.withNewExecutionId(DataFrame.scala:2086)
      9. org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$execute$1(DataFrame.scala:1498)
      10. org.apache.spark.sql.DataFrame.org$apache$spark$sql$DataFrame$$collect(DataFrame.scala:1505)
      11. org.apache.spark.sql.DataFrame$$anonfun$head$1.apply(DataFrame.scala:1375)
      12. org.apache.spark.sql.DataFrame$$anonfun$head$1.apply(DataFrame.scala:1374)
      13. org.apache.spark.sql.DataFrame.withCallback(DataFrame.scala:2099)
      14. org.apache.spark.sql.DataFrame.head(DataFrame.scala:1374)
      15. org.apache.spark.sql.DataFrame.take(DataFrame.scala:1456)
      15 frames
    44. Java RT
      Method.invoke
      1. sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
      2. sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
      3. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
      4. java.lang.reflect.Method.invoke(Method.java:498)
      4 frames
    45. org.apache.zeppelin
      FIFOScheduler$1.run
      1. org.apache.zeppelin.spark.ZeppelinContext.showDF(ZeppelinContext.java:199)
      2. org.apache.zeppelin.spark.SparkSqlInterpreter.interpret(SparkSqlInterpreter.java:151)
      3. org.apache.zeppelin.interpreter.ClassloaderInterpreter.interpret(ClassloaderInterpreter.java:57)
      4. org.apache.zeppelin.interpreter.LazyOpenInterpreter.interpret(LazyOpenInterpreter.java:93)
      5. org.apache.zeppelin.interpreter.remote.RemoteInterpreterServer$InterpretJob.jobRun(RemoteInterpreterServer.java:345)
      6. org.apache.zeppelin.scheduler.Job.run(Job.java:176)
      7. org.apache.zeppelin.scheduler.FIFOScheduler$1.run(FIFOScheduler.java:139)
      7 frames
    46. Java RT
      Thread.run
      1. java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
      2. java.util.concurrent.FutureTask.run(FutureTask.java:266)
      3. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
      4. java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
      5. java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
      6. java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
      7. java.lang.Thread.run(Thread.java:745)
      7 frames
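
Frames 2-5 show where this bites: ConfiguredFailoverProxyProvider.getProxy builds a non-HA proxy per configured namenode, and SecurityUtil.buildTokenService must resolve that namenode's address. Frames 8-30 show that this only happens lazily, when the DataFrame action (take, triggered by Zeppelin's showDF) forces partition computation, which is why a resolution problem can hide until query time. Each host configured under the nameservice therefore has to resolve on the driver host. A sketch of the client-side HA settings involved, assuming a logical nameservice named hdfs with two namenodes; the property names are the standard HDFS HA ones, while the hosts and ports here are illustrative:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class HaClientSketch {
        public static void main(String[] args) throws Exception {
            Configuration conf = new Configuration();
            // Logical nameservice: clients say hdfs://hdfs, never a raw host.
            conf.set("fs.defaultFS", "hdfs://hdfs");
            conf.set("dfs.nameservices", "hdfs");
            conf.set("dfs.ha.namenodes.hdfs", "nn1,nn2");
            // Every address below must resolve (here via Mesos-DNS) on this
            // machine; ConfiguredFailoverProxyProvider builds one non-HA proxy
            // per entry, which is exactly where the trace above fails.
            conf.set("dfs.namenode.rpc-address.hdfs.nn1", "namenode1.hdfs.mesos:8020");
            conf.set("dfs.namenode.rpc-address.hdfs.nn2", "namenode2.hdfs.mesos:8020");
            conf.set("dfs.client.failover.proxy.provider.hdfs",
                    "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");
            FileSystem fs = FileSystem.get(URI.create("hdfs://hdfs"), conf);
            System.out.println("connected to " + fs.getUri());
        }
    }

If resolution itself is the problem (this report ran on Mesos), the usual remedies are pointing the driver's resolver at Mesos-DNS or adding the namenode entries to /etc/hosts, rather than changing anything on the Hadoop side.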