java.sql.SQLNonTransientConnectionException: No current connection.

Stack Overflow | Brandon Lin | 7 months ago

Root Cause Analysis

  1. java.sql.SQLException
    No current connection.
    at org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException()
  2. Derby
    EmbedConnection.getAutoCommit
    1. org.apache.derby.impl.jdbc.SQLExceptionFactory.getSQLException(Unknown Source)
    2. org.apache.derby.impl.jdbc.SQLExceptionFactory40.wrapArgsForTransportAcrossDRDA(Unknown Source)
    3. org.apache.derby.impl.jdbc.SQLExceptionFactory40.getSQLException(Unknown Source)
    4. org.apache.derby.impl.jdbc.Util.newEmbedSQLException(Unknown Source)
    5. org.apache.derby.impl.jdbc.Util.newEmbedSQLException(Unknown Source)
    6. org.apache.derby.impl.jdbc.Util.noCurrentConnection(Unknown Source)
    7. org.apache.derby.impl.jdbc.EmbedConnection.checkIfClosed(Unknown Source)
    8. org.apache.derby.impl.jdbc.EmbedConnection.getAutoCommit(Unknown Source)
    8 frames
  3. Commons DBCP
    PoolingDataSource$PoolGuardConnectionWrapper.getAutoCommit
    1. org.apache.commons.dbcp.DelegatingConnection.getAutoCommit(DelegatingConnection.java:337)
    2. org.apache.commons.dbcp.PoolingDataSource$PoolGuardConnectionWrapper.getAutoCommit(PoolingDataSource.java:235)
    2 frames
  4. DataNucleus RDBMS plugin
    RDBMSStoreManager.addClasses
    1. org.datanucleus.store.rdbms.AbstractSchemaTransaction.execute(AbstractSchemaTransaction.java:131)
    2. org.datanucleus.store.rdbms.RDBMSStoreManager.addClasses(RDBMSStoreManager.java:1605)
    2 frames
  5. DataNucleus Core
    AbstractStoreManager.addClass
    1. org.datanucleus.store.AbstractStoreManager.addClass(AbstractStoreManager.java:954)
    1 frame
  6. DataNucleus RDBMS plugin
    JDOQLQuery.compileInternal
    1. org.datanucleus.store.rdbms.RDBMSStoreManager.getDatastoreClass(RDBMSStoreManager.java:679)
    2. org.datanucleus.store.rdbms.query.RDBMSQueryUtils.getStatementForCandidates(RDBMSQueryUtils.java:408)
    3. org.datanucleus.store.rdbms.query.JDOQLQuery.compileQueryFull(JDOQLQuery.java:947)
    4. org.datanucleus.store.rdbms.query.JDOQLQuery.compileInternal(JDOQLQuery.java:370)
    4 frames
  7. DataNucleus Core
    Query.execute
    1. org.datanucleus.store.query.Query.executeQuery(Query.java:1744)
    2. org.datanucleus.store.query.Query.executeWithArray(Query.java:1672)
    3. org.datanucleus.store.query.Query.execute(Query.java:1654)
    3 frames
  8. DataNucleus JDO API plugin
    JDOQuery.execute
    1. org.datanucleus.api.jdo.JDOQuery.execute(JDOQuery.java:221)
    1 frame
  9. Hive Metastore
    ObjectStore.setConf
    1. org.apache.hadoop.hive.metastore.MetaStoreDirectSql.ensureDbInit(MetaStoreDirectSql.java:183)
    2. org.apache.hadoop.hive.metastore.MetaStoreDirectSql.<init>(MetaStoreDirectSql.java:137)
    3. org.apache.hadoop.hive.metastore.ObjectStore.initialize(ObjectStore.java:295)
    4. org.apache.hadoop.hive.metastore.ObjectStore.setConf(ObjectStore.java:258)
    4 frames
  10. Hadoop
    ReflectionUtils.newInstance
    1. org.apache.hadoop.util.ReflectionUtils.setConf(ReflectionUtils.java:73)
    2. org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:133)
    2 frames
  11. Hive Metastore
    HiveMetaStoreClient.<init>
    1. org.apache.hadoop.hive.metastore.RawStoreProxy.<init>(RawStoreProxy.java:57)
    2. org.apache.hadoop.hive.metastore.RawStoreProxy.getProxy(RawStoreProxy.java:66)
    3. org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.newRawStore(HiveMetaStore.java:593)
    4. org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.getMS(HiveMetaStore.java:571)
    5. org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.createDefaultDB(HiveMetaStore.java:620)
    6. org.apache.hadoop.hive.metastore.HiveMetaStore$HMSHandler.init(HiveMetaStore.java:461)
    7. org.apache.hadoop.hive.metastore.RetryingHMSHandler.<init>(RetryingHMSHandler.java:66)
    8. org.apache.hadoop.hive.metastore.RetryingHMSHandler.getProxy(RetryingHMSHandler.java:72)
    9. org.apache.hadoop.hive.metastore.HiveMetaStore.newRetryingHMSHandler(HiveMetaStore.java:5762)
    10. org.apache.hadoop.hive.metastore.HiveMetaStoreClient.<init>(HiveMetaStoreClient.java:199)
    10 frames
  12. Hive Query Language
    SessionHiveMetaStoreClient.<init>
    1. org.apache.hadoop.hive.ql.metadata.SessionHiveMetaStoreClient.<init>(SessionHiveMetaStoreClient.java:74)
    1 frame
  13. Java RT
    Constructor.newInstance
    1. sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    2. sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    3. sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    4. java.lang.reflect.Constructor.newInstance(Constructor.java:525)
    4 frames
  14. Hive Metastore
    RetryingMetaStoreClient.getProxy
    1. org.apache.hadoop.hive.metastore.MetaStoreUtils.newInstance(MetaStoreUtils.java:1521)
    2. org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.<init>(RetryingMetaStoreClient.java:86)
    3. org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:132)
    4. org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.getProxy(RetryingMetaStoreClient.java:104)
    4 frames
  15. Hive Query Language
    SessionState.start
    1. org.apache.hadoop.hive.ql.metadata.Hive.createMetaStoreClient(Hive.java:3005)
    2. org.apache.hadoop.hive.ql.metadata.Hive.getMSC(Hive.java:3024)
    3. org.apache.hadoop.hive.ql.metadata.Hive.getAllDatabases(Hive.java:1234)
    4. org.apache.hadoop.hive.ql.metadata.Hive.reloadFunctions(Hive.java:174)
    5. org.apache.hadoop.hive.ql.metadata.Hive.<clinit>(Hive.java:166)
    6. org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:503)
    6 frames
  16. org.apache.spark
    ClientWrapper.<init>
    1. org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:194)
    1 frame
  17. Java RT
    Constructor.newInstance
    1. sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    2. sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    3. sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    4. java.lang.reflect.Constructor.newInstance(Constructor.java:525)
    4 frames
  18. org.apache.spark
    IsolatedClientLoader.createClient
    1. org.apache.spark.sql.hive.client.IsolatedClientLoader.createClient(IsolatedClientLoader.scala:249)
    1 frame
  19. Spark Project Hive
    HiveContext.<init>
    1. org.apache.spark.sql.hive.HiveContext.metadataHive$lzycompute(HiveContext.scala:327)
    2. org.apache.spark.sql.hive.HiveContext.metadataHive(HiveContext.scala:237)
    3. org.apache.spark.sql.hive.HiveContext.setConf(HiveContext.scala:441)
    4. org.apache.spark.sql.hive.HiveContext.defaultOverrides(HiveContext.scala:226)
    5. org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:229)
    6. org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:101)
    6 frames
  20. Java RT
    Constructor.newInstance
    1. sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
    2. sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
    3. sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
    4. java.lang.reflect.Constructor.newInstance(Constructor.java:525)
    4 frames
  21. Spark REPL
    SparkILoop.createSQLContext
    1. org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028)
    1 frame
  22. $line4
    $eval.$print
    1. $line4.$read$$iwC$$iwC.<init>(<console>:15)
    2. $line4.$read$$iwC.<init>(<console>:24)
    3. $line4.$read.<init>(<console>:26)
    4. $line4.$read$.<init>(<console>:30)
    5. $line4.$read$.<clinit>(<console>)
    6. $line4.$eval$.<init>(<console>:7)
    7. $line4.$eval$.<clinit>(<console>)
    8. $line4.$eval.$print(<console>)
    8 frames
  23. Java RT
    Method.invoke
    1. sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    2. sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    3. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    4. java.lang.reflect.Method.invoke(Method.java:601)
    4 frames
  24. Spark REPL
    SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply
    1. org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065)
    2. org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1346)
    3. org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840)
    4. org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871)
    5. org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819)
    6. org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857)
    7. org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902)
    8. org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814)
    9. org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132)
    10. org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124)
    11. org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324)
    12. org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124)
    13. org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64)
    14. org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974)
    15. org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159)
    16. org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64)
    17. org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108)
    18. org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64)
    19. org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991)
    20. org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    21. org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945)
    21 frames
  25. Scala Compiler
    ScalaClassLoader$.savingContextLoader
    1. scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135)
    1 frame
  26. Spark REPL
    Main.main
    1. org.apache.spark.repl.SparkILoop.org$apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945)
    2. org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059)
    3. org.apache.spark.repl.Main$.main(Main.scala:31)
    4. org.apache.spark.repl.Main.main(Main.scala)
    4 frames
  27. Java RT
    Method.invoke
    1. sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    2. sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    3. sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    4. java.lang.reflect.Method.invoke(Method.java:601)
    4 frames
  28. Spark
    SparkSubmit.main
    1. org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:731)
    2. org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:181)
    3. org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:206)
    4. org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:121)
    5. org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala)
    5 frames
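
The bottom of the trace (sections 1–3) shows where the failure actually originates: while the Hive metastore initializes its DataNucleus schema, Commons DBCP hands back a pooled wrapper around an embedded Derby connection that is already closed, so EmbedConnection.checkIfClosed() raises "No current connection." as soon as getAutoCommit() is called. A minimal sketch of that Derby-level behavior, without the connection pool, could look like the following (the class name and the in-memory database name are illustrative, and derby.jar must be on the classpath):

    import java.sql.Connection;
    import java.sql.DriverManager;

    public class NoCurrentConnectionDemo {
        public static void main(String[] args) throws Exception {
            // Illustrative in-memory Derby database; requires derby.jar on the classpath.
            Connection conn = DriverManager.getConnection("jdbc:derby:memory:demo;create=true");
            conn.close();            // the embedded connection is now closed
            conn.getAutoCommit();    // EmbedConnection.checkIfClosed() fails here with
                                     // "java.sql.SQLNonTransientConnectionException: No current connection."
        }
    }

In the trace above, no application code closes the connection; the stale connection surfaces from the metastore's DBCP pool while Spark's HiveContext brings up the Hive metastore, and the error is raised while the Spark shell is still initializing its SQL context (SparkILoop.createSQLContext), before any user query runs.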