Did you search Google with just the first line of a Java stack trace?

We can recommend more relevant solutions and speed up your debugging when you paste the entire stack trace, including the exception message. Try a sample exception.

Recommended solutions based on your search

java.sql.SQLException: No current connection.	at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source)	at org.apache.derby.impl.jdbc.SQLExceptionFactory40.wrapArgsForTransportAcrossDRDA(Unknown Source)	at org.apache.derby.impl.jdbc.SQLExceptionFactory40.getSQLException(Unknown Source)	at org.apache.derby.impl.jdbc.Util.newEmbedSQLException(Unknown Source)	at org.apache.derby.impl.jdbc.Util.newEmbedSQLException(Unknown Source)	at org.apache.derby.impl.jdbc.Util.noCurrentConnection(Unknown Source)	at org.apache.derby.impl.jdbc.EmbedConnection.checkIfClosed(Unknown Source)	at org.apache.derby.impl.jdbc.EmbedConnection.getAutoCommit(Unknown Source)	at org.apache.commons.dbcp.DelegatingConnection.getAutoCommit(DelegatingConnection.java:337)	at org.apache.commons.dbcp.PoolingDataSource$PoolGuardConnectionWrapper.getAutoCommit(PoolingDataSource.java:235)	at org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:131)	at org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:1605)	at org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:954)	at org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:679)	at org.datanucleus.store.rdbms.query.RDBMSQueryUtils.getStatementForCandidates(RDBMSQueryUtils.java:408)	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileQueryFull(JDOQLQuery.java:947)	at org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:370)	at org.datanucleus.store.query.Query.executeQuery(Query.java:1744)	at org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)	at org.datanucleus.store.query.Query.execute(Query.java:1654)	at org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)	at org.apache.hadoop.hive.metastore.MetaStoreDirectSql.(MetaStoreDirectSql.java:137)	at 
org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)	at org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)	at org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)	at org.apache.hadoop.hive.metastore.RawStoreProxy.(RawStoreProxy.java:57)	at org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)	at org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.(RetryingHMSHandler.java:66)	at org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)	at org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)	at org.apache.hadoop.hive.metastore.HiveMetaStoreClient.(HiveMetaStoreClient.java:199)	at org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.(SessionHiveMetaStoreClient.java:74)	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)	at java.lang.reflect.Constructor.newInstance(Constructor.java:525)	at org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.(RetryingMetaStoreClient.java:86)	at org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)	at 
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)	at org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)	at org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)	at org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)	at org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)	at org.apache.hadoop.hive.ql.metadata.Hive.(Hive.java:166)	at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)	at org.apache.spark.sql.hive.client.ClientWrapper.(ClientWrapper.scala:194)	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)	at java.lang.reflect.Constructor.newInstance(Constructor.java:525)	at org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:249)	at org.apache.spark.sql.hive.HiveContext.metadataHive$lzycompute(HiveContext.scala:327)	at org.apache.spark.sql.hive.HiveContext.metadataHive(HiveContext.scala:237)	at org.apache.spark.sql.hive.HiveContext.setConf(HiveContext.scala:441)	at org.apache.spark.sql.hive.HiveContext.defaultOverrides(HiveContext.scala:226)	at org.apache.spark.sql.hive.HiveContext.(HiveContext.scala:229)	at org.apache.spark.sql.hive.HiveContext.(HiveContext.scala:101)	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)	at java.lang.reflect.Constructor.newInstance(Constructor.java:525)	at org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)	at $line4.$read$$iwC$$iwC.(:15)	at $line4.$read$$iwC.(:24)	at $line4.$read.(:26)	at 
$line4.$read$.(:30)	at $line4.$read$.()	at $line4.$eval$.(:7)	at $line4.$eval$.()	at $line4.$eval.$print()	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)	at java.lang.reflect.Method.invoke(Method.java:601)	at org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)	at org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)	at org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)	at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)	at org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)	at org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)	at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)	at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)	at org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)	at org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)	at org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)	at org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)	at org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)	at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)	at org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)	at org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)	
at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)	at org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)	at scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)	at org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)	at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)	at org.apache.spark.repl.Main$.main(Main.scala:31)	at org.apache.spark.repl.Main.main(Main.scala)	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)	at java.lang.reflect.Method.invoke(Method.java:601)	at org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)	at org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)	at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)	at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)	at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)