Solutions on the web

via Stack Overflow by Michael, 1 year ago
java.net.URISyntaxException: Relative path in absolute URI: file:c:/Spark/bin/spark-warehouse
	at java.net.URI.checkPath(URI.java:1823)
	at java.net.URI.<init>(URI.java:745)
	at org.apache.hadoop.fs.Path.initialize(Path.java:202)
	at org.apache.hadoop.fs.Path.<init>(Path.java:171)
	at org.apache.hadoop.hive.metastore.Warehouse.getWhRoot(Warehouse.java:159)
	at org.apache.hadoop.hive.metastore.Warehouse.getDefaultDatabasePath(Warehouse.java:177)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB_core(HiveMetaStore.java:600)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
	at org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
	at org.apache.spark.sql.hive.client.HiveClientImpl.<init>(HiveClientImpl.scala:171)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:258)
	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:359)
	at org.apache.spark.sql.hive.HiveUtils$.newClientForMetadata(HiveUtils.scala:263)
	at org.apache.spark.sql.hive.HiveSharedState.metadataHive$lzycompute(HiveSharedState.scala:39)
	at org.apache.spark.sql.hive.HiveSharedState.metadataHive(HiveSharedState.scala:38)
	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog$lzycompute(HiveSharedState.scala:46)
	at org.apache.spark.sql.hive.HiveSharedState.externalCatalog(HiveSharedState.scala:45)
	at org.apache.spark.sql.hive.HiveSessionState.catalog$lzycompute(HiveSessionState.scala:50)
	at org.apache.spark.sql.hive.HiveSessionState.catalog(HiveSessionState.scala:48)
	at org.apache.spark.sql.hive.HiveSessionState$$anon$1.<init>(HiveSessionState.scala:63)
	at org.apache.spark.sql.hive.HiveSessionState.analyzer$lzycompute(HiveSessionState.scala:63)
	at org.apache.spark.sql.hive.HiveSessionState.analyzer(HiveSessionState.scala:62)
	at org.apache.spark.sql.execution.QueryExecution.assertAnalyzed(QueryExecution.scala:49)
	at org.apache.spark.sql.Dataset$.ofRows(Dataset.scala:64)
	at org.apache.spark.sql.SparkSession.baseRelationToDataFrame(SparkSession.scala:382)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:143)
	at org.apache.spark.sql.DataFrameReader.load(DataFrameReader.scala:132)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:498)
	at py4j.reflection.MethodInvoker.invoke(MethodInvoker.java:237)
	at py4j.reflection.ReflectionEngine.invoke(ReflectionEngine.java:357)
	at py4j.Gateway.invoke(Gateway.java:280)
	at py4j.commands.AbstractCommand.invokeMethod(AbstractCommand.java:128)
	at py4j.commands.CallCommand.execute(CallCommand.java:79)
	at py4j.GatewayConnection.run(GatewayConnection.java:211)
	at java.lang.Thread.run(Thread.java:745)
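
The failing frames run from DataFrameReader.load down through Hadoop's Path and java.net.URI parsing, which suggests the default Hive warehouse location on Windows (file:c:/Spark/bin/spark-warehouse) is being rejected as a relative path inside an absolute URI. Below is a minimal PySpark sketch of the commonly reported workaround of pointing spark.sql.warehouse.dir at an explicit file:/// URI before the session is created; the application name, warehouse path, and input path are assumptions for illustration, not values from the original question.

    # Minimal sketch of the usual workaround, assuming PySpark 2.x on Windows
    # (the py4j frames in the trace indicate the job was driven from Python).
    from pyspark.sql import SparkSession

    spark = (
        SparkSession.builder
        .appName("warehouse-dir-workaround")  # hypothetical app name
        # Use an absolute file:/// URI so Hadoop's Path/URI handling does not
        # interpret "file:c:/..." as a relative path inside an absolute URI.
        .config("spark.sql.warehouse.dir", "file:///C:/Spark/bin/spark-warehouse")
        .getOrCreate()
    )

    # The trace ends in DataFrameReader.load, so the original failure was most
    # likely triggered by a read like this; the path is only a placeholder.
    df = spark.read.load("C:/path/to/data.parquet")

The key point is that the setting has to be applied when the SparkSession is built, since the metastore and warehouse path are resolved the first time the Hive-backed catalog is initialized.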