Searched on Google with only the first line of a Java stack trace?

We can recommend more relevant solutions and speed up debugging when you paste your entire stack trace, including the exception message. Try it with a sample exception.

Recommended solutions based on your search

Solutions on the web

via Stack Overflow by ktaube
, 1 year ago
via GitHub by martinstuder
, 1 year ago
java.net.UnknownHostException: namenode1.hdfs.mesos
via GitHub by yogeshnath
, 1 year ago
java.net.UnknownHostException: namenode1.hdfs.mesos
via GitHub by radek1st
, 1 year ago
java.net.UnknownHostException: namenode1.hdfs.mesos
via Stack Overflow by Anup Ash
, 1 year ago
via GitHub by anupash147
, 1 year ago
java.net.UnknownHostException: nameservice1
java.net.UnknownHostException: namenode1.hdfs.mesos	at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:377)	at org.apache.hadoop.hdfs.NameNodeProxies.createNonHAProxy(NameNodeProxies.java:240)	at org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider.getProxy(ConfiguredFailoverProxyProvider.java:124)	at org.apache.hadoop.io.retry.RetryInvocationHandler.(RetryInvocationHandler.java:74)	at org.apache.hadoop.io.retry.RetryInvocationHandler.(RetryInvocationHandler.java:65)	at org.apache.hadoop.io.retry.RetryProxy.create(RetryProxy.java:58)	at org.apache.hadoop.hdfs.NameNodeProxies.createProxy(NameNodeProxies.java:152)	at org.apache.hadoop.hdfs.DFSClient.(DFSClient.java:579)	at org.apache.hadoop.hdfs.DFSClient.(DFSClient.java:524)	at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:146)	at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:2397)	at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:89)	at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:2431)	at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:2413)	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:368)	at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:167)	at org.apache.hadoop.mapred.JobConf.getWorkingDirectory(JobConf.java:653)	at org.apache.hadoop.mapred.FileInputFormat.setInputPaths(FileInputFormat.java:427)	at org.apache.hadoop.mapred.FileInputFormat.setInputPaths(FileInputFormat.java:400)	at org.apache.spark.SparkContext$$anonfun$hadoopFile$1$$anonfun$33.apply(SparkContext.scala:1015)	at org.apache.spark.SparkContext$$anonfun$hadoopFile$1$$anonfun$33.apply(SparkContext.scala:1015)	at org.apache.spark.rdd.HadoopRDD$$anonfun$getJobConf$6.apply(HadoopRDD.scala:176)	at org.apache.spark.rdd.HadoopRDD$$anonfun$getJobConf$6.apply(HadoopRDD.scala:176)	at scala.Option.map(Option.scala:145)	at org.apache.spark.rdd.HadoopRDD.getJobConf(HadoopRDD.scala:176)	at 
org.apache.spark.rdd.HadoopRDD.getPartitions(HadoopRDD.scala:195)	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)	at scala.Option.getOrElse(Option.scala:120)	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)	at org.apache.spark.rdd.MapPartitionsRDD.getPartitions(MapPartitionsRDD.scala:35)	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:239)	at org.apache.spark.rdd.RDD$$anonfun$partitions$2.apply(RDD.scala:237)	at scala.Option.getOrElse(Option.scala:120)	at org.apache.spark.rdd.RDD.partitions(RDD.scala:237)	at org.apache.spark.SparkContext.runJob(SparkContext.scala:1929)	at org.apache.spark.rdd.RDD.count(RDD.scala:1157)	at SimpleApp$.main(SimpleApp.scala:15)	at SimpleApp.main(SimpleApp.scala)	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)	at java.lang.reflect.Method.invoke(Method.java:498)	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:786)	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:183)	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:208)	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:123)	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)